added peek function to tokenizer
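The new Peek() lets calling code inspect the upcoming token without consuming it, in contrast to GetNext(). A minimal usage sketch under that assumption (the SkipOptionalComma helper below is hypothetical and not part of the repository; it only relies on the HasMore/Peek/GetNext/Token interface declared in this header):

#include "Tokenizer.h"

namespace loader {

// Hypothetical helper: consume a comma only if one is actually next.
// Peek() looks at the upcoming token; GetNext() is called only when the
// token should really be removed from the stream.
inline void SkipOptionalComma(Tokenizer &tok) {
	if (tok.HasMore() && tok.Peek().type == Tokenizer::Token::COMMA) {
		tok.GetNext();
	}
}

}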
diff --git a/src/loader/Tokenizer.h b/src/loader/Tokenizer.h
index 15eadf8710e20b4d42ed550ad9b72f7697d3e2e7..b7ca72f664c148c1f92029a27cc132fbdb7682ce 100644
--- a/src/loader/Tokenizer.h
+++ b/src/loader/Tokenizer.h
@@ -10,6 +10,7 @@
 
 #include <iosfwd>
 #include <ostream>
+#include <stdexcept>
 #include <string>
 
 namespace loader {
@@ -27,7 +28,7 @@ public:
        struct Token {
 
                enum Type {
-                       UNKNOWN,
+                       UNKNOWN = 0,
                        ANGLE_BRACKET_OPEN = '{',
                        ANGLE_BRACKET_CLOSE = '}',
                        CHEVRON_OPEN = '<',
@@ -36,14 +37,16 @@ public:
                        COMMA = ',',
                        BRACKET_OPEN = '[',
                        BRACKET_CLOSE = ']',
-                       NUMBER,
-                       STRING,
-                       KEYWORD_EXPORT,
-                       KEYWORD_FALSE,
-                       KEYWORD_INCLUDE,
-                       KEYWORD_TRUE,
-                       IDENTIFIER,
-                       TYPE_NAME,
+                       PARENTHESIS_OPEN = '(',
+                       PARENTHESIS_CLOSE = ')',
+                       NUMBER = '0',
+                       STRING = '"',
+                       KEYWORD_EXPORT = 'e',
+                       KEYWORD_FALSE = 'f',
+                       KEYWORD_INCLUDE = 'i',
+                       KEYWORD_TRUE = 't',
+                       IDENTIFIER = 'x',
+                       TYPE_NAME = 'n',
                };
 
                Token() : type(UNKNOWN), number(0) { }
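Worth noting: every enumerator now has an explicit value, and the single-character tokens use the character itself, so a delimiter read from the input can be mapped straight to its Token::Type. A small sketch under that assumption (MakeDelimiterToken is illustrative, not part of this change):

#include "Tokenizer.h"

namespace loader {

// Illustrative only: a single delimiter character doubles as its enum value,
// e.g. '{' yields ANGLE_BRACKET_OPEN and ',' yields COMMA.
inline Tokenizer::Token MakeDelimiterToken(char c) {
	Tokenizer::Token t;
	t.type = Tokenizer::Token::Type(c);
	return t;
}

}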
@@ -55,8 +58,14 @@ public:
 
        };
 
+       class LexerError: public std::runtime_error {
+       public:
+               explicit LexerError(const std::string &msg) : std::runtime_error(msg) { }
+       };
+
        bool HasMore();
        Token GetNext();
+       const Token &Peek();
        void Putback(const Token &);
 
 private:
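The new LexerError gives callers a dedicated exception type for malformed input. A hedged sketch of a driver loop that reports it (DumpTokens is hypothetical, and constructing the Tokenizer itself is not shown in this diff):

#include <iostream>
#include "Tokenizer.h"

// Hypothetical driver: print every token type and report lexical errors.
void DumpTokens(loader::Tokenizer &tok) {
	try {
		while (tok.HasMore()) {
			loader::Tokenizer::Token t(tok.GetNext());
			std::cout << loader::TokenTypeToString(t.type) << '\n';
		}
	} catch (const loader::Tokenizer::LexerError &e) {
		std::cerr << "lexer error: " << e.what() << std::endl;
	}
}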
@@ -93,6 +102,10 @@ inline const char *TokenTypeToString(Tokenizer::Token::Type t) {
                        return "BRACKET_OPEN";
                case Tokenizer::Token::BRACKET_CLOSE:
                        return "BRACKET_CLOSE";
+               case Tokenizer::Token::PARENTHESIS_OPEN:
+                       return "PARENTHESIS_OPEN";
+               case Tokenizer::Token::PARENTHESIS_CLOSE:
+                       return "PARENTHESIS_CLOSE";
                case Tokenizer::Token::NUMBER:
                        return "NUMBER";
                case Tokenizer::Token::STRING: