X-Git-Url: http://git.localhorst.tv/?a=blobdiff_plain;f=src%2Floader%2FTokenizer.h;h=6dda20fad0b9744fe6578fdb203b5b50be2000b7;hb=32b5ea1b0f05283eb588b2b069d667f7c36e84da;hp=15eadf8710e20b4d42ed550ad9b72f7697d3e2e7;hpb=996552758ec3bc748081b65b26b4a61bcd619324;p=l2e.git

diff --git a/src/loader/Tokenizer.h b/src/loader/Tokenizer.h
index 15eadf8..6dda20f 100644
--- a/src/loader/Tokenizer.h
+++ b/src/loader/Tokenizer.h
@@ -10,6 +10,7 @@
 
 #include 
 #include 
+#include <stdexcept>
 #include 
 
 namespace loader {
@@ -17,7 +18,7 @@ namespace loader {
 class Tokenizer {
 
 public:
-	explicit Tokenizer(std::istream &in) : in(in), isPutback(false) { }
+	explicit Tokenizer(std::istream &in) : in(in), line(1), isPutback(false) { }
 	~Tokenizer() { }
 private:
 	Tokenizer(const Tokenizer &);
@@ -27,7 +28,7 @@ public:
 
 	struct Token {
 		enum Type {
-			UNKNOWN,
+			UNKNOWN = 0,
 			ANGLE_BRACKET_OPEN = '{',
 			ANGLE_BRACKET_CLOSE = '}',
 			CHEVRON_OPEN = '<',
@@ -36,14 +37,16 @@
 			COMMA = ',',
 			BRACKET_OPEN = '[',
 			BRACKET_CLOSE = ']',
-			NUMBER,
-			STRING,
-			KEYWORD_EXPORT,
-			KEYWORD_FALSE,
-			KEYWORD_INCLUDE,
-			KEYWORD_TRUE,
-			IDENTIFIER,
-			TYPE_NAME,
+			PARENTHESIS_OPEN = '(',
+			PARENTHESIS_CLOSE = ')',
+			NUMBER = '0',
+			STRING = '"',
+			KEYWORD_EXPORT = 'e',
+			KEYWORD_FALSE = 'f',
+			KEYWORD_INCLUDE = 'i',
+			KEYWORD_TRUE = 't',
+			IDENTIFIER = 'x',
+			TYPE_NAME = 'n',
 		};
 
 		Token() : type(UNKNOWN), number(0) { }
@@ -55,11 +58,23 @@
 
 	};
 
+	class LexerError: public std::runtime_error {
+	public:
+		LexerError(int line, const std::string &msg)
+		: std::runtime_error(msg), line(line) { }
+		int Line() const { return line; }
+	private:
+		int line;
+	};
+
 	bool HasMore();
 	Token GetNext();
+	const Token &Peek();
 	void Putback(const Token &);
+	int Line() const { return line; }
 
 private:
+	void ScanSpace();
 	Token ReadToken();
 
 	Token ReadNumber();
@@ -71,6 +86,7 @@
 private:
 	std::istream &in;
 	Token putback;
+	int line;
 	bool isPutback;
 
 };
@@ -93,6 +109,10 @@ inline const char *TokenTypeToString(Tokenizer::Token::Type t) {
 			return "BRACKET_OPEN";
 		case Tokenizer::Token::BRACKET_CLOSE:
 			return "BRACKET_CLOSE";
+		case Tokenizer::Token::PARENTHESIS_OPEN:
+			return "PARENTHESIS_OPEN";
+		case Tokenizer::Token::PARENTHESIS_CLOSE:
+			return "PARENTHESIS_CLOSE";
 		case Tokenizer::Token::NUMBER:
 			return "NUMBER";
 		case Tokenizer::Token::STRING:
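As a usage illustration (not part of the commit), the sketch below drives the revised interface from the diff: HasMore()/GetNext() pull tokens, the new Line() accessor reports the current input line, and the new nested LexerError carries the line on which lexing failed. The include path, the sample input string, and the assumption that the program is linked against the tokenizer's implementation file are all hypothetical.

#include <iostream>
#include <sstream>

#include "loader/Tokenizer.h"  // include path assumed from the diff's src/loader/ prefix

int main() {
	// Hypothetical input built only from characters that appear as token
	// values in the enum above ('{', '[', ',', digits, parentheses).
	std::istringstream src("{ [1, 2], (3) }");
	loader::Tokenizer t(src);
	try {
		// Pull every token and print its type together with the line it was read on.
		while (t.HasMore()) {
			loader::Tokenizer::Token token(t.GetNext());
			std::cout << t.Line() << ": "
					<< loader::TokenTypeToString(token.type) << '\n';
		}
	} catch (const loader::Tokenizer::LexerError &e) {
		// The new exception type pairs the message with the offending line number.
		std::cerr << "lexer error on line " << e.Line() << ": " << e.what() << '\n';
		return 1;
	}
	return 0;
}

A parser built on top of this presumably uses the new Peek() to inspect the upcoming token without consuming it and Putback() to hand one back, giving cheap single-token lookahead; assigning printable character codes to the enum values also lets a token type be reported or compared as the character that introduced it.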