git.localhorst.tv Git - l2e.git/commitdiff
added comments (/*…*/ and //…\n)
author Daniel Karbach <daniel.karbach@localhorst.tv>
Wed, 29 Aug 2012 18:56:15 +0000 (20:56 +0200)
committer Daniel Karbach <daniel.karbach@localhost.tv>
Wed, 29 Aug 2012 18:56:15 +0000 (20:56 +0200)
src/loader/Tokenizer.cpp
src/loader/Tokenizer.h

index 995a7e2f20469d3ddb1f76231c0cae64c9523058..6acda5fce7ed93dc3df250fd95987b5559211c1f 100644 (file)
 namespace loader {
 
 bool Tokenizer::HasMore() {
+       if (isPutback) return true;
        ScanSpace();
-       return in;
+       if (!in) return false;
+
+	// prime the putback slot so callers never see COMMENT tokens
+	putback = ReadToken();
+	isPutback = true;
+	if (!skipComments || putback.type != Token::COMMENT) return true;
+
+	while (in && putback.type == Token::COMMENT) {
+		ScanSpace();
+		if (!in) break;
+		putback = ReadToken();
+	}
+	// the slot only counts as occupied if a real token was found
+	isPutback = putback.type != Token::COMMENT;
+	return isPutback;
 }
 
 void Tokenizer::ScanSpace() {
@@ -48,6 +59,9 @@ const Tokenizer::Token &Tokenizer::Peek() {
 }
 
 Tokenizer::Token Tokenizer::GetNext() {
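+	// refuse to read past the last token instead of returning garbage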
+       if (!HasMore()) {
+               throw LexerError(line, "read beyond last token");
+       }
        if (isPutback) {
                isPutback = false;
                return putback;
@@ -80,6 +94,19 @@ Tokenizer::Token Tokenizer::ReadToken() {
                case '"':
                        in.putback(c);
                        return ReadString();
+		case '/':
+			{
+				// distinguish "//" line comments from "/*" block comments
+				std::istream::char_type c2('\0');
+				in.get(c2);
+				if (c2 == '/') {
+					return ReadComment();
+				} else if (c2 == '*') {
+					return ReadMultilineComment();
+				} else {
+					throw LexerError(line, std::string("Tokenizer: cannot parse token: ") + c + c2 + ": expected / or *");
+				}
+			}
                default:
                        in.putback(c);
                        {
@@ -177,6 +204,28 @@ Tokenizer::Token Tokenizer::ReadIdentifier() {
        return t;
 }
 
+Tokenizer::Token Tokenizer::ReadComment() {
+	// consume the rest of the line, including the newline itself
+	std::istream::char_type c('\0');
+	while (in.get(c) && c != '\n');
+	if (c == '\n') ++line;
+	return Token(Token::COMMENT);
+}
+
+Tokenizer::Token Tokenizer::ReadMultilineComment() {
+	std::istream::char_type c;
+	while (in.get(c)) {
+		if (c == '*') {
+			std::istream::char_type c2;
+			if (in.get(c2)) {
+				if (c2 == '/') break;
+				// not terminated yet: re-examine c2 so "**/" and
+				// newlines right after '*' are handled correctly
+				in.putback(c2);
+			}
+		} else if (c == '\n') {
+			++line;
+		}
+	}
+	return Token(Token::COMMENT);
+}
+
 bool Tokenizer::CheckKeyword(Token &t) {
        if (t.str == "export") {
                t.type = Token::KEYWORD_EXPORT;
index 6dda20fad0b9744fe6578fdb203b5b50be2000b7..dff96fc6167495769ba82d5a56bd57d6dc9f497a 100644 (file)
@@ -18,7 +18,8 @@ namespace loader {
 class Tokenizer {
 
 public:
-       explicit Tokenizer(std::istream &in) : in(in), line(1), isPutback(false) { }
+       explicit Tokenizer(std::istream &in)
+       : in(in), line(1), isPutback(false), skipComments(true) { }
        ~Tokenizer() { }
 private:
        Tokenizer(const Tokenizer &);
@@ -47,6 +48,7 @@ public:
                        KEYWORD_TRUE = 't',
                        IDENTIFIER = 'x',
                        TYPE_NAME = 'n',
+                       COMMENT = 'c'
                };
 
                Token() : type(UNKNOWN), number(0) { }
@@ -81,6 +83,9 @@ private:
        Token ReadString();
        Token ReadIdentifier();
 
+       Token ReadComment();
+       Token ReadMultilineComment();
+
        bool CheckKeyword(Token &);
 
 private:
@@ -88,6 +93,7 @@ private:
        Token putback;
        int line;
        bool isPutback;
+	bool skipComments; // drop COMMENT tokens before they reach callers
 
 };
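
For illustration, a minimal sketch of how the new behaviour might be exercised, assuming the loader headers from this repository are on the include path; the input string and the main() wrapper below are invented for the example, and LexerError is the type used in the diff above:

#include <iostream>
#include <sstream>

#include "Tokenizer.h"

int main() {
	// with skipComments set (the default), HasMore()/GetNext() consume
	// both comment styles transparently and never yield a COMMENT token
	std::istringstream src(
		"export /* block comment,\n"
		"   may span lines **/ foo // line comment\n");
	loader::Tokenizer t(src);
	while (t.HasMore()) {
		loader::Tokenizer::Token tok(t.GetNext());
		std::cout << char(tok.type) << '\n';
	}
	// one more GetNext() here would throw LexerError ("read beyond last token")
	return 0;
}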