parse scripts

diff --git a/src/loader/Parser.cpp b/src/loader/Parser.cpp
index 01ddbe2bbdd19670bee0bcd0f88244063497d542..52585c4cdd0f11ba40829a7a039456a390674e6f 100644
--- a/src/loader/Parser.cpp
+++ b/src/loader/Parser.cpp
@@ -85,23 +85,24 @@ Declaration *Parser::ProbeDefinition() {
        string typeName(ParseTypeName());
        string identifier(ParseIdentifier());
 
-       Tokenizer::Token t(GetToken());
-       tok.Putback(t);
-       if (BeginOfPropertyList(t)) {
-               auto_ptr<PropertyList> propertyList(ParsePropertyList());
-               auto_ptr<Definition> dfn(new Definition(typeName, identifier));
-               dfn->SetValue(propertyList.release());
-               product.AddDefinition(dfn.get());
-               return dfn.release();
-       } else if (BeginningOfLiteral(t)) {
-               auto_ptr<Literal> literal(ParseLiteral());
-               auto_ptr<Definition> dfn(new Definition(typeName, identifier));
-               dfn->SetValue(literal.release());
-               product.AddDefinition(dfn.get());
-               return dfn.release();
-       } else {
-               return new Declaration(typeName, identifier);
+       if (tok.HasMore()) {
+               Tokenizer::Token t(GetToken());
+               tok.Putback(t);
+               if (BeginOfPropertyList(t)) {
+                       auto_ptr<PropertyList> propertyList(ParsePropertyList());
+                       auto_ptr<Definition> dfn(new Definition(typeName, identifier));
+                       dfn->SetValue(propertyList.release());
+                       product.AddDefinition(dfn.get());
+                       return dfn.release();
+               } else if (BeginningOfPrimitiveLiteral(t)) {
+                       auto_ptr<Literal> literal(ParseLiteral());
+                       auto_ptr<Definition> dfn(new Definition(typeName, identifier));
+                       dfn->SetValue(literal.release());
+                       product.AddDefinition(dfn.get());
+                       return dfn.release();
+               }
        }
+       return new Declaration(typeName, identifier);
 }
 
 bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
@@ -111,6 +112,7 @@ bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
                case Tokenizer::Token::BRACKET_OPEN:
                case Tokenizer::Token::PARENTHESIS_OPEN:
                case Tokenizer::Token::NUMBER:
+               case Tokenizer::Token::SCRIPT_BEGIN:
                case Tokenizer::Token::STRING:
                case Tokenizer::Token::KEYWORD_FALSE:
                case Tokenizer::Token::KEYWORD_TRUE:
@@ -121,10 +123,30 @@ bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
        }
 }
 
+bool Parser::BeginningOfPrimitiveLiteral(const Tokenizer::Token &t) const {
+       switch (t.type) {
+               case Tokenizer::Token::CHEVRON_OPEN:
+               case Tokenizer::Token::COLON:
+               case Tokenizer::Token::BRACKET_OPEN:
+               case Tokenizer::Token::PARENTHESIS_OPEN:
+               case Tokenizer::Token::NUMBER:
+               case Tokenizer::Token::STRING:
+               case Tokenizer::Token::KEYWORD_FALSE:
+               case Tokenizer::Token::KEYWORD_TRUE:
+                       return true;
+               default:
+                       return false;
+       }
+}
+
 bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
        return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
 }
 
+bool Parser::BeginningOfScriptLiteral(const Tokenizer::Token &t) const {
+       return t.type == Tokenizer::Token::SCRIPT_BEGIN;
+}
+
 Definition *Parser::ParseDefinition() {
        string typeName(ParseTypeName());
        string identifier(ParseIdentifier());
@@ -201,7 +223,10 @@ Literal *Parser::ParseLiteral() {
        if (t.type == Tokenizer::Token::TYPE_NAME) {
                PropertyList *props(ParsePropertyList());
                return new Literal(t.str, props);
-       } else if (BeginningOfLiteral(t)) {
+       } else if (BeginningOfScriptLiteral(t)) {
+               tok.Putback(t);
+               return ParseScript();
+       } else if (BeginningOfPrimitiveLiteral(t)) {
                switch (t.type) {
                        case Tokenizer::Token::CHEVRON_OPEN:
                                tok.Putback(t);
@@ -237,9 +262,8 @@ Literal *Parser::ParseArray() {
        AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
 
        Tokenizer::Token probe(GetToken());
-       tok.Putback(probe);
 
-       if (probe.type == Tokenizer::Token::ANGLE_BRACKET_OPEN) {
+       if (probe.type == Tokenizer::Token::TYPE_NAME) {
                vector<PropertyList *> values;
                while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
                        PropertyList *value(ParsePropertyList());
@@ -250,8 +274,10 @@ Literal *Parser::ParseArray() {
                                throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
                        }
                }
-               return new Literal(values);
+               return new Literal(probe.str, values);
        } else {
+               tok.Putback(probe);
+
                vector<Value *> values;
                while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
                        Value *value(ParseValue());
@@ -322,6 +348,57 @@ Literal *Parser::ParseVector() {
        return new Literal(x.number, y.number);
 }
 
+Literal *Parser::ParseScript() {
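+       // A script literal is the token sequence between SCRIPT_BEGIN and SCRIPT_END.
+       // Primitive literals, commands (a command token followed by an identifier),
+       // bare identifiers, and registers (a register token followed by an identifier)
+       // are collected in order as ScriptTokens.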
+       std::string msg("error parsing script");
+       Tokenizer::Token t(GetToken());
+       AssertTokenType(t.type, Tokenizer::Token::SCRIPT_BEGIN, msg);
+
+       vector<ScriptToken *> script;
+       try {
+               t = GetToken();
+               while (t.type != Tokenizer::Token::SCRIPT_END) {
+                       if (BeginningOfPrimitiveLiteral(t)) {
+                               tok.Putback(t);
+                               script.push_back(new ScriptToken(ParseLiteral()));
+                       } else {
+                               switch (t.type) {
+                                       case Tokenizer::Token::COMMAND: {
+                                               Tokenizer::Token t2(GetToken());
+                                               AssertTokenType(t2.type, Tokenizer::Token::IDENTIFIER, msg);
+                                               script.push_back(new ScriptToken(t2.str, ScriptToken::COMMAND));
+                                               break;
+                                       }
+                                       case Tokenizer::Token::IDENTIFIER: {
+                                               script.push_back(new ScriptToken(t.str, ScriptToken::IDENTIFIER));
+                                               break;
+                                       }
+                                       case Tokenizer::Token::REGISTER: {
+                                               Tokenizer::Token t2(GetToken());
+                                               AssertTokenType(t2.type, Tokenizer::Token::IDENTIFIER, msg);
+                                               script.push_back(new ScriptToken(t2.str, ScriptToken::REGISTER));
+                                               break;
+                                       }
+                                       default:
+                                               throw Error(file, tok.Line(), string("unexpected token in script: ") + TokenTypeToString(t.type));
+                               }
+                       }
+                       t = GetToken();
+               }
+       } catch (...) {
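+               // free the partially parsed script tokens before rethrowing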
+               for (vector<ScriptToken *>::const_iterator i(script.begin()), end(script.end()); i != end; ++i) {
+                       delete *i;
+               }
+               throw;
+       }
+       return new Literal(script);
+}
+
+
 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
        if (expected != actual) {
                throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));