Add more comments

ala89 2024-01-13 15:21:47 +01:00
parent 1b749a0091
commit fed70c7987
9 changed files with 25 additions and 10 deletions


@@ -444,7 +444,7 @@ EvalResult eval(Node &ast, Memory &memory, vector<string> history) {
             return value;
         } break;
-        case NodeType::LIncr: {
+        case NodeType::LIncr: { // ++ and -- work on any type but nothing happens with non-arithmetic types
             Token identifier_token = get<Token>(node.children[0]);
             string identifier = get<string>(identifier_token.data);

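As a hedged illustration of the behaviour the new comment describes, a variant-based increment could look like the sketch below. Only the `EvalResult` alternatives come from this diff; the `increment` helper and its exact shape are assumptions, not the project's actual code.

#include <variant>
using namespace std;

// EvalResult as declared in types.h, with Function omitted to keep the sketch standalone
using EvalResult = variant<monostate, int, double>;

// Sketch: ++ adds 1 to the arithmetic alternatives and returns anything else unchanged,
// which is why the operator silently does nothing for non-arithmetic types.
EvalResult increment(EvalResult value) {
    if (holds_alternative<int>(value))    return get<int>(value) + 1;
    if (holds_alternative<double>(value)) return get<double>(value) + 1.0;
    return value; // non-arithmetic: no-op
}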

@@ -58,6 +58,9 @@ enum class ErrorType {
 using ErrorData = variant<monostate, string, int>;
+/**
+ * Base class for errors that arise from incorrect user input; they are divided into 4 sub-classes
+ */
 class UserError : public exception {
 public:
     explicit UserError(ErrorType type, CodePosition pos, ErrorData data = {})
@@ -100,6 +103,9 @@ public:
     const StackTrace trace;
 };
+/**
+ * Base class for exceptions used internally for control flow
+ */
 class InternalError : public exception {
 public:
     explicit InternalError(CodePosition pos = {})

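To make the split between the two documented base classes concrete, a hedged usage fragment: a top-level loop would report a UserError to the user and treat an InternalError as a control signal. The helpers below are assumptions made for illustration, not functions from this repository.

// Illustrative only -- run_line, print_user_error and handle_control_signal are assumed helpers.
try {
    run_line(line, memory);
} catch (const UserError& e) {
    // incorrect user input: report a diagnostic built from the error's type, position and data
    print_user_error(e);
} catch (const InternalError& e) {
    // control-flow exception that escaped its construct (e.g. a stray break/continue/return)
    handle_control_signal(e);
}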

@@ -16,6 +16,9 @@ typedef struct ExecArgs {
     bool dump_mem=false;
 } ExecArgs;
+/**
+ * Encapsulates interpretation steps from raw input to the result
+ */
 EvalResult execute(vector<string> input, Memory& memory, int initial_line=0, ExecArgs args={});
 #endif

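Given the documented signature, a hedged sketch of a caller: pass the raw source lines and a Memory instance to execute() and inspect the returned EvalResult. It assumes Memory is default-constructible; everything beyond the declarations shown in the hunk above is illustrative.

// Illustrative caller -- not code from the repository.
vector<string> input = { "1 + 2" };
Memory memory;
ExecArgs args;
args.dump_mem = true; // field shown in the hunk above

EvalResult result = execute(input, memory, /*initial_line=*/0, args);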

@@ -8,7 +8,7 @@
 using namespace std;
 /*
-    Parses a string into a vector of tokens
+    Parses a vector of strings (one for each line) into a vector of tokens
 */
 vector<Token> tokenize(vector<string> str, int initial_line=0);

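A hedged sketch of the updated contract: the caller splits the source into one string per line before handing it to tokenize(). split_string is the helper declared in the utilities header later in this diff; the sample source text is only an assumption about the language's syntax.

// Illustrative only.
string source = "1 + 2\n3.5 * 2";
vector<string> lines = split_string(source, '\n'); // one string per source line
vector<Token> tokens = tokenize(lines);            // initial_line defaults to 0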

@@ -17,7 +17,7 @@ class Memory {
         void remove_scope(void);
         MemoryVar& get(string identifier);
-        Scope& get_function_scope(void);
+        Scope& get_function_scope(void); // get closest function scope
         void declare(string identifier, Type type);
         void update(string identifier, EvalResult value);

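The new comment on get_function_scope suggests a walk from the innermost scope outward. A hedged sketch of that idea follows; the scopes container and the ScopeType::Function enumerator are assumptions, since only the method signature and the comment appear in this diff.

// Sketch only -- member names not shown in the diff are assumptions.
Scope& Memory::get_function_scope(void) {
    // walk from the innermost scope outward and return the first function scope found
    for (auto it = scopes.rbegin(); it != scopes.rend(); ++it) {
        if (it->type == ScopeType::Function) return *it;
    }
    return scopes.front(); // fall back to the global scope (the real code may throw instead)
}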

@@ -21,7 +21,7 @@ enum class TypeType {
 };
 using ArgDefinition = tuple<Type, string>;
-using FunctionPrototype = vector<ArgDefinition>;
+using FunctionPrototype = vector<ArgDefinition>; // The return type comes first; unnamed types are represented by an empty string
 using TypeData = variant<monostate, FunctionPrototype>;
 struct Type {
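To make the documented convention concrete, a hedged sketch of the prototype for something like `int f(double x, int)`: the return type occupies the first slot and unnamed entries carry an empty string. The `Type{TypeType::Int}` aggregate syntax and the `Int`/`Double` enumerators are assumptions, since neither appears in this hunk.

// Hypothetical layout following the convention described in the comment.
FunctionPrototype proto = {
    { Type{TypeType::Int},    "" },  // return type first, no name
    { Type{TypeType::Double}, "x" }, // named parameter
    { Type{TypeType::Int},    "" },  // unnamed parameter -> empty string
};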
@@ -39,7 +39,7 @@ enum class TokenType {
     Break, Continue, Return, Comma
 };
-using TokenData = variant<monostate, int, double, string, Type>;
+using TokenData = variant<monostate, int, double, string>;
 struct CodePosition {
     int line;
@@ -189,7 +189,7 @@ struct InnerNode {
     CodePosition pos;
 };
-// A Leaf is always corresponding to a Token
+// A leaf always corresponds to a token, which is why we re-use this type
 /**
  * Node: AST
@@ -205,13 +205,17 @@ struct ParseReturn {
 */
 struct MemoryVar;
-using Closure = unordered_map<string, reference_wrapper<MemoryVar>>;
+using Closure = unordered_map<string, reference_wrapper<MemoryVar>>; // map of references to memory vars
 using UserFunction = tuple<Node, Closure>;
 enum class InternalCall {
     ClearMemory, DumpMemory, DumpHistory
 };
+/**
+ * UserFunction: user-defined functions are represented by an AST and a closure
+ * InternalCall: for standard library functions
+ */
 using Function = variant<UserFunction, InternalCall>;
 using EvalResult = variant<monostate, int, double, Function>;
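A hedged sketch of how a call site might branch on the two documented alternatives of Function; the eval_user_function and run_internal_call helpers are assumptions made for illustration.

// Illustrative dispatch over the Function variant described above.
EvalResult call_function(const Function& fn) {
    if (holds_alternative<UserFunction>(fn)) {
        // user-defined: evaluate the stored AST in a scope built from the closure
        const auto& [body, closure] = get<UserFunction>(fn);
        return eval_user_function(body, closure);
    }
    // standard library: dispatch on the InternalCall enumerator
    return run_internal_call(get<InternalCall>(fn));
}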
@@ -229,8 +233,8 @@ struct Scope {
     unordered_map<string, MemoryVar> vars;
     int depth;
     ScopeType type;
-    MemoryVar* fn;
-    CodePosition entry_pos;
+    MemoryVar* fn; // For function scopes only
+    CodePosition entry_pos; // For function scopes only, call position
 };
 #endif


@@ -35,7 +35,7 @@ vector<string> split_string(const string& input, char delimiter);
 string type_type_to_string(TypeType type);
 /**
- * Check if two types are equal
+ * Check recursively if two types are equal
 */
 bool equal_types(Type type1, Type type2);

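The word "recursively" matters because function types nest: two function Types are equal only if their prototypes match entry by entry, which in turn compares each entry's Type. A hedged sketch of that shape, assuming Type exposes type and data members (they are not shown in this diff):

// Sketch of the recursion -- the type/data member names are assumptions.
bool equal_types(Type type1, Type type2) {
    if (type1.type != type2.type) return false;
    if (type1.data.index() != type2.data.index()) return false;
    // only function types carry a prototype that needs a deeper comparison
    if (!holds_alternative<FunctionPrototype>(type1.data)) return true;

    const auto& proto1 = get<FunctionPrototype>(type1.data);
    const auto& proto2 = get<FunctionPrototype>(type2.data);
    if (proto1.size() != proto2.size()) return false;

    for (size_t i = 0; i < proto1.size(); i++) {
        // compare each entry's Type recursively; argument names are ignored
        if (!equal_types(get<0>(proto1[i]), get<0>(proto2[i]))) return false;
    }
    return true;
}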

@@ -10,6 +10,7 @@ regex INT_REGEX ("\\d+");
 regex DOUBLE_REGEX ("\\d+\\.\\d*|\\d*\\.\\d+");
 regex IDENTIFIER_REGEX ("[A-Za-z_]\\w*");
+// A list of tokens with simple parsing logic
 vector<tuple<string, TokenType>> simple_tokens = {
     { "if", TokenType::If },
     { "else", TokenType::Else },


@@ -181,6 +181,7 @@ ParseReturn parse_statement(vector<Token> tokens) {
             type = NodeType::While;
             break;
         default:
+            throw exception();
             break; // Impossible
     }