diff --git a/src/include/tokenize.h b/src/include/tokenize.h
index b6daf01..300f040 100644
--- a/src/include/tokenize.h
+++ b/src/include/tokenize.h
@@ -13,7 +13,7 @@ using TokenData = variant;
 
 struct Token {
     TokenType type;
-    TokenData data;
+    TokenData data { };
 };
 
 /*
diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index d848438..cc55cde 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -128,11 +128,11 @@ vector<Token> tokenize(string str) {
             tokens.emplace_back(token);
             str.erase(0, 1);
         }
-        else if (isspace(str[0])) {
+        else if (isspace(str[0]) || str[0] == '\0') {
             str.erase(0, 1);
         }
         else {
-            cerr << "Unknown token: \"" << str << "\"" << endl;
+            cerr << "Unknown token: \"" << str << "\"" << endl;
             break;
         }
     }