}
void Parser::ParseStatement() {
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken()); // GetToken() wraps tok.GetNext() and adds file/line to lexer errors
switch (t.type) {
case Tokenizer::Token::KEYWORD_EXPORT:
ParseExportDirective();
}
break;
default:
- throw ParseError(string("unexpected token ") + TokenTypeToString(t.type));
+ throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type)); // diagnostics now carry source location
+ }
+}
+
+// Fetch the next token, rethrowing Tokenizer::LexerError as a parser Error
+// annotated with the file name and the lexer-reported line number.
+Tokenizer::Token Parser::GetToken() {
+ try {
+ return tok.GetNext();
+ } catch (const Tokenizer::LexerError &e) { // catch by const reference, rethrow by value
+ throw Error(file, e.Line(), e.what());
 }
}
void Parser::ParseExportDirective() {
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken()); // located error reporting on lexer failure
if (t.type != Tokenizer::Token::IDENTIFIER) {
tok.Putback(t);
Declaration *decl(ProbeDefinition());
}
void Parser::ParseIncludeDirective() {
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken());
AssertTokenType(t.type, Tokenizer::Token::STRING);
- ifstream file(t.str.c_str()); // TODO: resolve path name
- Parser sub(file, product);
+ Parser sub(t.str.c_str(), product); // TODO: resolve path name; sub-parser presumably opens the path itself — verify ctor
sub.Parse();
}
string typeName(ParseTypeName());
string identifier(ParseIdentifier());
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken()); // lookahead only; token is put back immediately below
tok.Putback(t);
if (BeginOfPropertyList(t)) {
PropertyList *propertyList(ParsePropertyList());
string typeName(ParseTypeName());
string identifier(ParseIdentifier());
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken()); // lookahead only; token is put back immediately below
tok.Putback(t);
if (BeginOfPropertyList(t)) {
PropertyList *propertyList(ParsePropertyList());
dfn->SetValue(literal);
return dfn;
} else {
- throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
+ throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
}
}
string Parser::ParseIdentifier() {
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken()); // located error reporting on lexer failure
AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER);
return t.str;
}
string Parser::ParseTypeName() {
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken()); // located error reporting on lexer failure
AssertTokenType(t.type, Tokenizer::Token::TYPE_NAME);
return t.str;
}
PropertyList *Parser::ParsePropertyList() {
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken()); // opening bracket of the property list
AssertTokenType(t.type, Tokenizer::Token::ANGLE_BRACKET_OPEN);
auto_ptr<PropertyList> props(new PropertyList);
while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
- Tokenizer::Token name(tok.GetNext());
+ Tokenizer::Token name(GetToken()); // property name
AssertTokenType(name.type, Tokenizer::Token::IDENTIFIER);
- t = tok.GetNext();
+ t = GetToken(); // colon separating name and value
AssertTokenType(t.type, Tokenizer::Token::COLON);
Value *value(ParseValue());
props->SetProperty(name.str, value);
- t = tok.GetNext();
+ t = GetToken(); // either a comma (more properties) or the closing bracket
if (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
- throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
+ throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
}
}
}
Value *Parser::ParseValue() {
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken());
if (t.type == Tokenizer::Token::IDENTIFIER) {
return new Value(t.str);
} else if (BeginningOfLiteral(t)) {
Literal *literal(ParseLiteral());
return new Value(literal);
} else {
- throw new ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
+ throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier"); // old code threw a heap pointer; now throws by value
}
}
Literal *Parser::ParseLiteral() {
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken());
if (t.type == Tokenizer::Token::TYPE_NAME) {
PropertyList *props(ParsePropertyList());
return new Literal(t.str, props);
throw std::logic_error("literal switch reached impossible default branch oO");
}
} else {
- throw new ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
+ throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive"); // throw by value — the leftover "new" threw a pointer, unlike every other Error site
}
}
Literal *Parser::ParseArray() {
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken());
AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
vector<Value *> values;
Value *value(ParseValue());
values.push_back(value);
- t = tok.GetNext();
+ t = GetToken(); // comma continues the array, closing bracket ends it
if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
- throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+ throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
}
}
Literal *Parser::ParseColor() {
string msg("error parsing color");
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken());
AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_OPEN, msg);
- Tokenizer::Token red(tok.GetNext());
+ Tokenizer::Token red(GetToken());
AssertTokenType(red.type, Tokenizer::Token::NUMBER, "error parsing red component of color");
- t = tok.GetNext();
+ t = GetToken();
AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
- Tokenizer::Token green(tok.GetNext());
+ Tokenizer::Token green(GetToken());
AssertTokenType(green.type, Tokenizer::Token::NUMBER, "error parsing green component of color");
- t = tok.GetNext();
+ t = GetToken();
AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
- Tokenizer::Token blue(tok.GetNext());
+ Tokenizer::Token blue(GetToken());
AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");
- t = tok.GetNext();
+ t = GetToken();
- if (t.type == Tokenizer::Token::BRACKET_CLOSE) {
+ // colors are parenthesis-delimited (PARENTHESIS_OPEN above, PARENTHESIS_CLOSE
+ // in the alpha branch), so the three-component form must end on ')' as well
+ if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
return new Literal(red.number, green.number, blue.number);
- } else if (t.type != Tokenizer::Token::COMMA) {
+ } else if (t.type == Tokenizer::Token::COMMA) { // comma means a fourth (alpha) component follows
- Tokenizer::Token alpha(tok.GetNext());
+ Tokenizer::Token alpha(GetToken());
AssertTokenType(alpha.type, Tokenizer::Token::NUMBER, "error parsing alpha component of color");
- t = tok.GetNext();
+ t = GetToken();
AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_CLOSE, msg);
return new Literal(red.number, green.number, blue.number, alpha.number);
} else {
- throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+ throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or )");
}
}
Literal *Parser::ParseVector() {
std::string msg("error parsing vector");
- Tokenizer::Token t(tok.GetNext());
+ Tokenizer::Token t(GetToken());
AssertTokenType(t.type, Tokenizer::Token::CHEVRON_OPEN, msg);
- Tokenizer::Token x(tok.GetNext());
+ Tokenizer::Token x(GetToken()); // x component
AssertTokenType(x.type, Tokenizer::Token::NUMBER, "error parsing x component of vector");
- t = tok.GetNext();
+ t = GetToken();
AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
- Tokenizer::Token y(tok.GetNext());
+ Tokenizer::Token y(GetToken()); // y component
AssertTokenType(y.type, Tokenizer::Token::NUMBER, "error parsing y component of vector");
- t = tok.GetNext();
+ t = GetToken();
AssertTokenType(t.type, Tokenizer::Token::CHEVRON_CLOSE, msg);
return new Literal(x.number, y.number);
void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
if (expected != actual) {
- throw ParseError(string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
+ throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected)); // mismatch now reported with file and line
}
}
void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected, const string &msg) {
if (expected != actual) {
- throw ParseError(msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
+ throw Error(file, tok.Line(), msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected)); // caller-supplied context prefixed, plus file/line
}
}