added peek function to tokenizer
diff --git a/src/loader/Tokenizer.h b/src/loader/Tokenizer.h
index f2ab4aaaadd2fb5883741f8641f24917e0cff5ab..b7ca72f664c148c1f92029a27cc132fbdb7682ce 100644
--- a/src/loader/Tokenizer.h
+++ b/src/loader/Tokenizer.h
@@ -28,7 +28,7 @@ public:
        struct Token {
 
                enum Type {
-                       UNKNOWN,
+                       UNKNOWN = 0,
                        ANGLE_BRACKET_OPEN = '{',
                        ANGLE_BRACKET_CLOSE = '}',
                        CHEVRON_OPEN = '<',
@@ -37,14 +37,16 @@ public:
                        COMMA = ',',
                        BRACKET_OPEN = '[',
                        BRACKET_CLOSE = ']',
-                       NUMBER,
-                       STRING,
-                       KEYWORD_EXPORT,
-                       KEYWORD_FALSE,
-                       KEYWORD_INCLUDE,
-                       KEYWORD_TRUE,
-                       IDENTIFIER,
-                       TYPE_NAME,
+                       PARENTHESIS_OPEN = '(',
+                       PARENTHESIS_CLOSE = ')',
+                       NUMBER = '0',
+                       STRING = '"',
+                       KEYWORD_EXPORT = 'e',
+                       KEYWORD_FALSE = 'f',
+                       KEYWORD_INCLUDE = 'i',
+                       KEYWORD_TRUE = 't',
+                       IDENTIFIER = 'x',
+                       TYPE_NAME = 'n',
                };
 
                Token() : type(UNKNOWN), number(0) { }
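
The token types now carry explicit character values: single-character tokens use the character itself, while NUMBER, STRING, and the keywords get mnemonic letters that cannot collide with the punctuation characters. Presumably this lets the tokenizer turn a punctuation character straight into its token type without a lookup table. A minimal sketch of that idea, not the actual Tokenizer.cpp code (helper name is hypothetical, namespace qualifiers omitted):

	// Hypothetical helper, not from this repository: after the enum
	// change, a punctuation character is its own Token::Type value.
	Tokenizer::Token MakePunctuationToken(char c) {
		switch (c) {
			case '{': case '}': case '<': case '>':
			case ':': case ',': case '[': case ']':
			case '(': case ')': {
				Tokenizer::Token t;
				t.type = Tokenizer::Token::Type(c);
				return t;
			}
			default:
				return Tokenizer::Token(); // stays UNKNOWN
		}
	}
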
@@ -63,6 +65,7 @@ public:
 
        bool HasMore();
        Token GetNext();
+       const Token &Peek();
        void Putback(const Token &);
 
 private:
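
Peek() returns the next token by const reference without consuming it, which implies the tokenizer keeps a lookahead token around. The implementation lives in Tokenizer.cpp and is not part of this diff; a sketch of how it could be layered on top of GetNext() and Putback(), assuming a cached current token and an isPutback flag as members:

	// Sketch only; the member names (current, isPutback) are assumptions,
	// the real implementation is in Tokenizer.cpp.
	const Tokenizer::Token &Tokenizer::Peek() {
		if (!isPutback) {
			current = GetNext(); // read ahead once ...
			isPutback = true;    // ... and mark it as not yet consumed
		}
		return current;          // GetNext() will hand this token out next
	}
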
@@ -99,6 +102,10 @@ inline const char *TokenTypeToString(Tokenizer::Token::Type t) {
                        return "BRACKET_OPEN";
                case Tokenizer::Token::BRACKET_CLOSE:
                        return "BRACKET_CLOSE";
+               case Tokenizer::Token::PARENTHESIS_OPEN:
+                       return "PARENTHESIS_OPEN";
+               case Tokenizer::Token::PARENTHESIS_CLOSE:
+                       return "PARENTHESIS_CLOSE";
                case Tokenizer::Token::NUMBER:
                        return "NUMBER";
                case Tokenizer::Token::STRING:
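
With Peek() in place, callers can inspect the next token without consuming it, and TokenTypeToString() provides readable names for diagnostics. A hypothetical usage sketch (function name, exception type, and include path are illustrative, not from this commit):

	#include <stdexcept>
	#include <string>
	#include "loader/Tokenizer.h"

	// Hypothetical parser helper: fail with a readable message if the
	// next token is not of the expected type, without consuming it.
	void AssertNext(Tokenizer &t, Tokenizer::Token::Type expected) {
		if (t.Peek().type != expected) {
			throw std::runtime_error(
					std::string("unexpected token ")
					+ TokenTypeToString(t.Peek().type)
					+ ", expected "
					+ TokenTypeToString(expected));
		}
	}
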