X-Git-Url: http://git.localhorst.tv/?a=blobdiff_plain;f=src%2Floader%2FParser.cpp;h=183accbbb728f1cdca9c672b33a2a454c43d94e6;hb=40233fc8eea1c80e6c57a004dfe7e55dabf06edb;hp=68fb32674dcebcbae2464d6fc55c24c7d2ede687;hpb=4b3deb01b66307f5025938538d27e6ad86cfea13;p=l2e.git

diff --git a/src/loader/Parser.cpp b/src/loader/Parser.cpp
index 68fb326..183accb 100644
--- a/src/loader/Parser.cpp
+++ b/src/loader/Parser.cpp
@@ -7,6 +7,8 @@
 
 #include "Parser.h"
 
+#include "utility.h"
+
 #include 
 #include 
 
@@ -17,6 +19,17 @@ using std::vector;
 
 namespace loader {
 
+Parser::Parser(const string &file, ParsedSource &product)
+: file(file)
+, dirname(Dirname(file))
+, in(this->file.c_str())
+, tok(in)
+, product(product) {
+	if (!in) {
+		throw Error(file, 0, "unable to read file");
+	}
+}
+
 void Parser::Parse() {
 	while (tok.HasMore()) {
 		ParseStatement();
@@ -64,7 +77,7 @@ void Parser::ParseExportDirective() {
 void Parser::ParseIncludeDirective() {
 	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::STRING);
-	Parser sub(t.str.c_str(), product); // TODO: resolve path name
+	Parser sub(CatPath(dirname, t.str), product);
 	sub.Parse();
 }
 
@@ -72,26 +85,30 @@ Declaration *Parser::ProbeDefinition() {
 	string typeName(ParseTypeName());
 	string identifier(ParseIdentifier());
 
-	Tokenizer::Token t(GetToken());
-	tok.Putback(t);
-	if (BeginOfPropertyList(t)) {
-		PropertyList *propertyList(ParsePropertyList());
-		Definition *dfn(new Definition(typeName, identifier));
-		dfn->SetValue(propertyList);
-		return dfn;
-	} else if (BeginningOfLiteral(t)) {
-		Literal *literal(ParseLiteral());
-		Definition *dfn(new Definition(typeName, identifier));
-		dfn->SetValue(literal);
-		return dfn;
-	} else {
-		return new Declaration(typeName, identifier);
+	if (tok.HasMore()) {
+		Tokenizer::Token t(GetToken());
+		tok.Putback(t);
+		if (BeginOfPropertyList(t)) {
+			auto_ptr<PropertyList> propertyList(ParsePropertyList());
+			auto_ptr<Definition> dfn(new Definition(typeName, identifier));
+			dfn->SetValue(propertyList.release());
+			product.AddDefinition(dfn.get());
+			return dfn.release();
+		} else if (BeginningOfPrimitiveLiteral(t)) {
+			auto_ptr<Literal> literal(ParseLiteral());
+			auto_ptr<Definition> dfn(new Definition(typeName, identifier));
+			dfn->SetValue(literal.release());
+			product.AddDefinition(dfn.get());
+			return dfn.release();
+		}
 	}
+	return new Declaration(typeName, identifier);
 }
 
 bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
 	switch (t.type) {
 		case Tokenizer::Token::CHEVRON_OPEN:
+		case Tokenizer::Token::COLON:
 		case Tokenizer::Token::BRACKET_OPEN:
 		case Tokenizer::Token::PARENTHESIS_OPEN:
 		case Tokenizer::Token::NUMBER:
@@ -105,6 +122,22 @@ bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
 	}
 }
 
+bool Parser::BeginningOfPrimitiveLiteral(const Tokenizer::Token &t) const {
+	switch (t.type) {
+		case Tokenizer::Token::CHEVRON_OPEN:
+		case Tokenizer::Token::COLON:
+		case Tokenizer::Token::BRACKET_OPEN:
+		case Tokenizer::Token::PARENTHESIS_OPEN:
+		case Tokenizer::Token::NUMBER:
+		case Tokenizer::Token::STRING:
+		case Tokenizer::Token::KEYWORD_FALSE:
+		case Tokenizer::Token::KEYWORD_TRUE:
+			return true;
+		default:
+			return false;
+	}
+}
+
 bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
 	return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
 }
@@ -190,6 +223,10 @@ Literal *Parser::ParseLiteral() {
 		case Tokenizer::Token::CHEVRON_OPEN:
 			tok.Putback(t);
 			return ParseVector();
+		case Tokenizer::Token::COLON:
+			t = GetToken();
+			AssertTokenType(t.type, Tokenizer::Token::STRING);
+			return new Literal(dirname, t.str);
 		case Tokenizer::Token::BRACKET_OPEN:
 			tok.Putback(t);
 			return ParseArray();
@@ -217,9 +254,8 @@ Literal *Parser::ParseArray() {
 	AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
 
 	Tokenizer::Token probe(GetToken());
-	tok.Putback(probe);
 
-	if (probe.type == Tokenizer::Token::ANGLE_BRACKET_OPEN) {
+	if (probe.type == Tokenizer::Token::TYPE_NAME) {
 		vector<PropertyList *> values;
 		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
 			PropertyList *value(ParsePropertyList());
@@ -230,8 +266,10 @@
 				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
 			}
 		}
-		return new Literal(values);
+		return new Literal(probe.str, values);
 	} else {
+		tok.Putback(probe);
+
 		vector<Value *> values;
 		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
 			Value *value(ParseValue());
@@ -267,7 +305,7 @@ Literal *Parser::ParseColor() {
 	AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");
 
 	t = GetToken();
-	if (t.type == Tokenizer::Token::BRACKET_CLOSE) {
+	if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
 		return new Literal(red.number, green.number, blue.number);
 	} else if (t.type != Tokenizer::Token::COMMA) {
 		Tokenizer::Token alpha(GetToken());