X-Git-Url: http://git.localhorst.tv/?a=blobdiff_plain;f=src%2Floader%2FParser.cpp;h=183accbbb728f1cdca9c672b33a2a454c43d94e6;hb=40233fc8eea1c80e6c57a004dfe7e55dabf06edb;hp=2cf1a3486bc1d5f2fdc4633b28e7a6a8150796ce;hpb=af9e0b57dac45dc5591f16fb34236b1356cda8a2;p=l2e.git

diff --git a/src/loader/Parser.cpp b/src/loader/Parser.cpp
index 2cf1a34..183accb 100644
--- a/src/loader/Parser.cpp
+++ b/src/loader/Parser.cpp
@@ -7,6 +7,8 @@
 
 #include "Parser.h"
 
+#include "utility.h"
+
 #include <fstream>
 #include <memory>
 
@@ -17,6 +19,17 @@ using std::vector;
 
 namespace loader {
 
+Parser::Parser(const string &file, ParsedSource &product)
+: file(file)
+, dirname(Dirname(file))
+, in(this->file.c_str())
+, tok(in)
+, product(product) {
+	if (!in) {
+		throw Error(file, 0, "unable to read file");
+	}
+}
+
 void Parser::Parse() {
 	while (tok.HasMore()) {
 		ParseStatement();
@@ -24,7 +37,7 @@ void Parser::Parse() {
 }
 
 void Parser::ParseStatement() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	switch (t.type) {
 		case Tokenizer::Token::KEYWORD_EXPORT:
 			ParseExportDirective();
@@ -40,12 +53,18 @@ void Parser::ParseStatement() {
 			}
 			break;
 		default:
-			throw ParseError(string("unexpected token ") + TokenTypeToString(t.type));
+			throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type));
 	}
 }
 
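+// central token fetch: lexer errors are rethrown as parse errors carrying file name and line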
+Tokenizer::Token Parser::GetToken() try {
+	return tok.GetNext();
+} catch (Tokenizer::LexerError &e) {
+	throw Error(file, e.Line(), e.what());
+}
+
 void Parser::ParseExportDirective() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	if (t.type != Tokenizer::Token::IDENTIFIER) {
 		tok.Putback(t);
 		Declaration *decl(ProbeDefinition());
@@ -56,10 +75,9 @@ void Parser::ParseExportDirective() {
 }
 
 void Parser::ParseIncludeDirective() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::STRING);
-	ifstream file(t.str.c_str()); // TODO: resolve path name
-	Parser sub(file, product);
+	Parser sub(CatPath(dirname, t.str), product);
 	sub.Parse();
 }
 
@@ -67,26 +85,30 @@ Declaration *Parser::ProbeDefinition() {
 	string typeName(ParseTypeName());
 	string identifier(ParseIdentifier());
 
-	Tokenizer::Token t(tok.GetNext());
-	tok.Putback(t);
-	if (BeginOfPropertyList(t)) {
-		PropertyList *propertyList(ParsePropertyList());
-		Definition *dfn(new Definition(typeName, identifier));
-		dfn->SetValue(propertyList);
-		return dfn;
-	} else if (BeginningOfLiteral(t)) {
-		Literal *literal(ParseLiteral());
-		Definition *dfn(new Definition(typeName, identifier));
-		dfn->SetValue(literal);
-		return dfn;
-	} else {
-		return new Declaration(typeName, identifier);
+	if (tok.HasMore()) {
+		Tokenizer::Token t(GetToken());
+		tok.Putback(t);
+		if (BeginOfPropertyList(t)) {
+			auto_ptr<PropertyList> propertyList(ParsePropertyList());
+			auto_ptr<Definition> dfn(new Definition(typeName, identifier));
+			dfn->SetValue(propertyList.release());
+			product.AddDefinition(dfn.get());
+			return dfn.release();
+		} else if (BeginningOfPrimitiveLiteral(t)) {
+			auto_ptr<Literal> literal(ParseLiteral());
+			auto_ptr<Definition> dfn(new Definition(typeName, identifier));
+			dfn->SetValue(literal.release());
+			product.AddDefinition(dfn.get());
+			return dfn.release();
+		}
 	}
+	return new Declaration(typeName, identifier);
 }
 
 bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
 	switch (t.type) {
 		case Tokenizer::Token::CHEVRON_OPEN:
+		case Tokenizer::Token::COLON:
 		case Tokenizer::Token::BRACKET_OPEN:
 		case Tokenizer::Token::PARENTHESIS_OPEN:
 		case Tokenizer::Token::NUMBER:
@@ -100,6 +122,22 @@ bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
 	}
 }
 
+bool Parser::BeginningOfPrimitiveLiteral(const Tokenizer::Token &t) const {
+	switch (t.type) {
+		case Tokenizer::Token::CHEVRON_OPEN:
+		case Tokenizer::Token::COLON:
+		case Tokenizer::Token::BRACKET_OPEN:
+		case Tokenizer::Token::PARENTHESIS_OPEN:
+		case Tokenizer::Token::NUMBER:
+		case Tokenizer::Token::STRING:
+		case Tokenizer::Token::KEYWORD_FALSE:
+		case Tokenizer::Token::KEYWORD_TRUE:
+			return true;
+		default:
+			return false;
+	}
+}
+
 bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
 	return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
 }
@@ -108,7 +146,7 @@ Definition *Parser::ParseDefinition() {
 	string typeName(ParseTypeName());
 	string identifier(ParseIdentifier());
 
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	tok.Putback(t);
 	if (BeginOfPropertyList(t)) {
 		PropertyList *propertyList(ParsePropertyList());
@@ -121,41 +159,41 @@ Definition *Parser::ParseDefinition() {
 		dfn->SetValue(literal);
 		return dfn;
 	} else {
-		throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
+		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
 	}
 }
 
 string Parser::ParseIdentifier() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER);
 	return t.str;
 }
 
 string Parser::ParseTypeName() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::TYPE_NAME);
 	return t.str;
 }
 
 PropertyList *Parser::ParsePropertyList() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::ANGLE_BRACKET_OPEN);
 
 	auto_ptr<PropertyList> props(new PropertyList);
 
 	while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
-		Tokenizer::Token name(tok.GetNext());
+		Tokenizer::Token name(GetToken());
 		AssertTokenType(name.type, Tokenizer::Token::IDENTIFIER);
-		t = tok.GetNext();
+		t = GetToken();
 		AssertTokenType(t.type, Tokenizer::Token::COLON);
 		Value *value(ParseValue());
 		props->SetProperty(name.str, value);
-		t = tok.GetNext();
+		t = GetToken();
 		if (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
-			throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
+			throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
 		}
 	}
 
@@ -163,7 +201,7 @@ PropertyList *Parser::ParsePropertyList() {
 }
 
 Value *Parser::ParseValue() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	if (t.type == Tokenizer::Token::IDENTIFIER) {
 		return new Value(t.str);
 	} else if (BeginningOfLiteral(t)) {
@@ -171,12 +209,12 @@ Value *Parser::ParseValue() {
 		Literal *literal(ParseLiteral());
 		return new Value(literal);
 	} else {
-		throw new ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
+		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
 	}
 }
 
 Literal *Parser::ParseLiteral() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	if (t.type == Tokenizer::Token::TYPE_NAME) {
 		PropertyList *props(ParsePropertyList());
 		return new Literal(t.str, props);
@@ -185,6 +223,10 @@ Literal *Parser::ParseLiteral() {
 			case Tokenizer::Token::CHEVRON_OPEN:
 				tok.Putback(t);
 				return ParseVector();
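+			// a ':' introduces a path literal: the string after it is stored
+			// together with this file's directory so it can be resolved later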
+			case Tokenizer::Token::COLON:
+				t = GetToken();
+				AssertTokenType(t.type, Tokenizer::Token::STRING);
+				return new Literal(dirname, t.str);
 			case Tokenizer::Token::BRACKET_OPEN:
 				tok.Putback(t);
 				return ParseArray();
@@ -203,80 +245,96 @@
 				throw std::logic_error("literal switch reached impossible default branch oO");
 		}
 	} else {
-		throw new ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
+		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
 	}
 }
 
 Literal *Parser::ParseArray() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
 
-	vector<Value *> values;
+	Tokenizer::Token probe(GetToken());
 
-	while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
-		Value *value(ParseValue());
-		values.push_back(value);
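+	// a type name right after '[' marks a typed array whose elements are
+	// property lists; otherwise the probe is put back and plain values parsed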
+	if (probe.type == Tokenizer::Token::TYPE_NAME) {
+		vector<PropertyList *> values;
+		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
+			PropertyList *value(ParsePropertyList());
+			values.push_back(value);
 
-		t = tok.GetNext();
-		if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
-			throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+			t = GetToken();
+			if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+			}
 		}
-	}
+		return new Literal(probe.str, values);
+	} else {
+		tok.Putback(probe);
+
+		vector<Value *> values;
+		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
+			Value *value(ParseValue());
+			values.push_back(value);
 
-	return new Literal(values);
+			t = GetToken();
+			if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+			}
+		}
+		return new Literal(values);
+	}
 }
 
 Literal *Parser::ParseColor() {
 	string msg("error parsing color");
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_OPEN, msg);
 
-	Tokenizer::Token red(tok.GetNext());
+	Tokenizer::Token red(GetToken());
 	AssertTokenType(red.type, Tokenizer::Token::NUMBER, "error parsing red component of color");
 
-	t = tok.GetNext();
+	t = GetToken();
 	AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
 
-	Tokenizer::Token green(tok.GetNext());
+	Tokenizer::Token green(GetToken());
 	AssertTokenType(green.type, Tokenizer::Token::NUMBER, "error parsing green component of color");
 
-	t = tok.GetNext();
+	t = GetToken();
 	AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
 
-	Tokenizer::Token blue(tok.GetNext());
+	Tokenizer::Token blue(GetToken());
 	AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");
 
-	t = tok.GetNext();
-	if (t.type == Tokenizer::Token::BRACKET_CLOSE) {
+	t = GetToken();
+	if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
 		return new Literal(red.number, green.number, blue.number);
 	} else if (t.type != Tokenizer::Token::COMMA) {
-		Tokenizer::Token alpha(tok.GetNext());
+		Tokenizer::Token alpha(GetToken());
 		AssertTokenType(alpha.type, Tokenizer::Token::NUMBER, "error parsing alpha component of color");
 
-		t = tok.GetNext();
+		t = GetToken();
 		AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_CLOSE, msg);
 
 		return new Literal(red.number, green.number, blue.number, alpha.number);
 	} else {
-		throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or )");
 	}
 }
 
 Literal *Parser::ParseVector() {
 	std::string msg("error parsing vector");
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::CHEVRON_OPEN, msg);
 
-	Tokenizer::Token x(tok.GetNext());
+	Tokenizer::Token x(GetToken());
 	AssertTokenType(x.type, Tokenizer::Token::NUMBER, "error parsing x component of vector");
 
-	t = tok.GetNext();
+	t = GetToken();
 	AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
 
-	Tokenizer::Token y(tok.GetNext());
+	Tokenizer::Token y(GetToken());
 	AssertTokenType(y.type, Tokenizer::Token::NUMBER, "error parsing y component of vector");
 
-	t = tok.GetNext();
+	t = GetToken();
 	AssertTokenType(t.type, Tokenizer::Token::CHEVRON_CLOSE, msg);
 
 	return new Literal(x.number, y.number);
@@ -284,13 +342,13 @@
 
 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
 	if (expected != actual) {
-		throw ParseError(string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
+		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
	}
 }
 
 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected, const string &msg) {
 	if (expected != actual) {
-		throw ParseError(msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
+		throw Error(file, tok.Line(), msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
 	}
 }
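
For context, a minimal sketch of how the reworked parser is driven after this change. Only the constructor and Parse() shown above are actual API; the include paths and the data file name are hypothetical, and it is assumed that ParsedSource is default-constructible:

	#include "loader/ParsedSource.h"  // assumed include path
	#include "loader/Parser.h"        // assumed include path

	int main() {
		loader::ParsedSource product;
		// the constructor now opens the file itself and remembers its directory,
		// so include directives and ':' path literals resolve relative to it;
		// it throws the parser's Error type if the file cannot be read
		loader::Parser parser("data/test.l2s", product); // hypothetical file
		parser.Parse(); // malformed input is reported as Error with file and line
		return 0;
	}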