X-Git-Url: http://git.localhorst.tv/?a=blobdiff_plain;f=src%2Floader%2FParser.cpp;h=68fb32674dcebcbae2464d6fc55c24c7d2ede687;hb=4b3deb01b66307f5025938538d27e6ad86cfea13;hp=2cf1a3486bc1d5f2fdc4633b28e7a6a8150796ce;hpb=af9e0b57dac45dc5591f16fb34236b1356cda8a2;p=l2e.git

diff --git a/src/loader/Parser.cpp b/src/loader/Parser.cpp
index 2cf1a34..68fb326 100644
--- a/src/loader/Parser.cpp
+++ b/src/loader/Parser.cpp
@@ -24,7 +24,7 @@ void Parser::Parse() {
 }
 
 void Parser::ParseStatement() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	switch (t.type) {
 		case Tokenizer::Token::KEYWORD_EXPORT:
 			ParseExportDirective();
@@ -40,12 +40,18 @@ void Parser::ParseStatement() {
 			}
 			break;
 		default:
-			throw ParseError(string("unexpected token ") + TokenTypeToString(t.type));
+			throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type));
 	}
 }
 
+Tokenizer::Token Parser::GetToken() try {
+	return tok.GetNext();
+} catch (Tokenizer::LexerError &e) {
+	throw Error(file, e.Line(), e.what());
+}
+
 void Parser::ParseExportDirective() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	if (t.type != Tokenizer::Token::IDENTIFIER) {
 		tok.Putback(t);
 		Declaration *decl(ProbeDefinition());
@@ -56,10 +62,9 @@ void Parser::ParseExportDirective() {
 }
 
 void Parser::ParseIncludeDirective() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::STRING);
-	ifstream file(t.str.c_str()); // TODO: resolve path name
-	Parser sub(file, product);
+	Parser sub(t.str.c_str(), product); // TODO: resolve path name
 	sub.Parse();
 }
 
@@ -67,7 +72,7 @@ Declaration *Parser::ProbeDefinition() {
 	string typeName(ParseTypeName());
 	string identifier(ParseIdentifier());
 
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	tok.Putback(t);
 	if (BeginOfPropertyList(t)) {
 		PropertyList *propertyList(ParsePropertyList());
@@ -108,7 +113,7 @@ Definition *Parser::ParseDefinition() {
 	string typeName(ParseTypeName());
 	string identifier(ParseIdentifier());
 
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	tok.Putback(t);
 	if (BeginOfPropertyList(t)) {
 		PropertyList *propertyList(ParsePropertyList());
@@ -121,41 +126,41 @@ Definition *Parser::ParseDefinition() {
 
 		dfn->SetValue(literal);
 		return dfn;
 	} else {
-		throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
+		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
 	}
 }
 
 string Parser::ParseIdentifier() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER);
 	return t.str;
 }
 
 string Parser::ParseTypeName() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::TYPE_NAME);
 	return t.str;
 }
 
 PropertyList *Parser::ParsePropertyList() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::ANGLE_BRACKET_OPEN);
 
 	auto_ptr<PropertyList> props(new PropertyList);
 
 	while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
-		Tokenizer::Token name(tok.GetNext());
+		Tokenizer::Token name(GetToken());
 		AssertTokenType(name.type, Tokenizer::Token::IDENTIFIER);
 
-		t = tok.GetNext();
+		t = GetToken();
 		AssertTokenType(t.type, Tokenizer::Token::COLON);
 
 		Value *value(ParseValue());
 		props->SetProperty(name.str, value);
 
-		t = tok.GetNext();
+		t = GetToken();
 		if (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
-			throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
+			throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
 		}
 	}
@@ -163,7 +168,7 @@ PropertyList *Parser::ParsePropertyList() {
 }
 
 Value *Parser::ParseValue() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	if (t.type == Tokenizer::Token::IDENTIFIER) {
 		return new Value(t.str);
 	} else if (BeginningOfLiteral(t)) {
@@ -171,12 +176,12 @@ Value *Parser::ParseValue() {
 		Literal *literal(ParseLiteral());
 		return new Value(literal);
 	} else {
-		throw new ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
+		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
 	}
 }
 
 Literal *Parser::ParseLiteral() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	if (t.type == Tokenizer::Token::TYPE_NAME) {
 		PropertyList *props(ParsePropertyList());
 		return new Literal(t.str, props);
@@ -203,80 +208,95 @@ Literal *Parser::ParseLiteral() {
 			throw std::logic_error("literal switch reached impossible default branch oO");
 		}
 	} else {
-		throw new ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
+		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
 	}
 }
 
 Literal *Parser::ParseArray() {
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
 
-	vector<Value *> values;
+	Tokenizer::Token probe(GetToken());
+	tok.Putback(probe);
 
-	while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
-		Value *value(ParseValue());
-		values.push_back(value);
+	if (probe.type == Tokenizer::Token::ANGLE_BRACKET_OPEN) {
+		vector<PropertyList *> values;
+		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
+			PropertyList *value(ParsePropertyList());
+			values.push_back(value);
 
-		t = tok.GetNext();
-		if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
-			throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+			t = GetToken();
+			if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+			}
 		}
+		return new Literal(values);
+	} else {
+		vector<Value *> values;
+		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
+			Value *value(ParseValue());
+			values.push_back(value);
+
+			t = GetToken();
+			if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+			}
+		}
+		return new Literal(values);
 	}
-
-	return new Literal(values);
 }
 
 Literal *Parser::ParseColor() {
 	string msg("error parsing color");
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_OPEN, msg);
 
-	Tokenizer::Token red(tok.GetNext());
+	Tokenizer::Token red(GetToken());
 	AssertTokenType(red.type, Tokenizer::Token::NUMBER, "error parsing red component of color");
 
-	t = tok.GetNext();
+	t = GetToken();
 	AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
 
-	Tokenizer::Token green(tok.GetNext());
+	Tokenizer::Token green(GetToken());
 	AssertTokenType(green.type, Tokenizer::Token::NUMBER, "error parsing green component of color");
 
-	t = tok.GetNext();
+	t = GetToken();
 	AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
 
-	Tokenizer::Token blue(tok.GetNext());
+	Tokenizer::Token blue(GetToken());
 	AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");
 
-	t = tok.GetNext();
+	t = GetToken();
 	if (t.type == Tokenizer::Token::BRACKET_CLOSE) {
 		return new Literal(red.number, green.number, blue.number);
 	} else if (t.type != Tokenizer::Token::COMMA) {
-		Tokenizer::Token alpha(tok.GetNext());
+		Tokenizer::Token alpha(GetToken());
 		AssertTokenType(alpha.type, Tokenizer::Token::NUMBER, "error parsing alpha component of color");
 
-		t = tok.GetNext();
+		t = GetToken();
 		AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_CLOSE, msg);
 
 		return new Literal(red.number, green.number, blue.number, alpha.number);
 	} else {
-		throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
 	}
 }
 
 Literal *Parser::ParseVector() {
 	std::string msg("error parsing vector");
-	Tokenizer::Token t(tok.GetNext());
+	Tokenizer::Token t(GetToken());
 	AssertTokenType(t.type, Tokenizer::Token::CHEVRON_OPEN, msg);
 
-	Tokenizer::Token x(tok.GetNext());
+	Tokenizer::Token x(GetToken());
 	AssertTokenType(x.type, Tokenizer::Token::NUMBER, "error parsing x component of vector");
 
-	t = tok.GetNext();
+	t = GetToken();
 	AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
 
-	Tokenizer::Token y(tok.GetNext());
+	Tokenizer::Token y(GetToken());
 	AssertTokenType(y.type, Tokenizer::Token::NUMBER, "error parsing y component of vector");
 
-	t = tok.GetNext();
+	t = GetToken();
 	AssertTokenType(t.type, Tokenizer::Token::CHEVRON_CLOSE, msg);
 
 	return new Literal(x.number, y.number);
@@ -284,13 +304,13 @@ Literal *Parser::ParseVector() {
 
 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
 	if (expected != actual) {
-		throw ParseError(string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
+		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
 	}
 }
 
 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected, const string &msg) {
 	if (expected != actual) {
-		throw ParseError(msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
+		throw Error(file, tok.Line(), msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
 	}
 }
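
Note: the Error class thrown above is not part of this diff; it presumably lives in the parser's header next to GetToken(). As a rough, hypothetical sketch only: the call shape Error(file, tok.Line(), msg) and the LexerError Line()/what() accessors are taken from the diff, while everything else (the std::runtime_error base and the Format() helper) is an assumption for illustration.

#include <sstream>
#include <stdexcept>
#include <string>

// Hypothetical reconstruction; the real definition in l2e.git may differ.
class Error : public std::runtime_error {

public:
	// Matches the call sites in the diff: Error(file, tok.Line(), message).
	Error(const std::string &file, int line, const std::string &msg)
	: std::runtime_error(Format(file, line, msg))
	, line(line) { }

	// Mirrors Tokenizer::LexerError::Line(), which GetToken() translates
	// into this file/line-annotated error.
	int Line() const { return line; }

private:
	// Renders "file:line: message", the conventional compiler-style prefix.
	static std::string Format(const std::string &file, int line, const std::string &msg) {
		std::ostringstream s;
		s << file << ':' << line << ": " << msg;
		return s.str();
	}

	int line;

};

With GetToken() funneling every token read through one place, lexer errors and parse errors now surface with the same file and line information.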