using std::string;
using std::vector;
+typedef loader::Tokenizer::Token Token;
+
namespace loader {
Parser::Parser(const string &file, ParsedSource &product)
}
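+// Reads one statement: an export or include directive, or a declaration starting with a type name.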
void Parser::ParseStatement() {
- Tokenizer::Token t(GetToken());
+ Token t(GetToken());
switch (t.type) {
- case Tokenizer::Token::KEYWORD_EXPORT:
+ case Token::KEYWORD_EXPORT:
ParseExportDirective();
break;
- case Tokenizer::Token::KEYWORD_INCLUDE:
+ case Token::KEYWORD_INCLUDE:
ParseIncludeDirective();
break;
- case Tokenizer::Token::TYPE_NAME:
+ case Token::TYPE_NAME:
tok.Putback(t);
{
Declaration *decl(ProbeDefinition());
}
}
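+// Fetches the next token, rethrowing lexer errors as parser errors with file and line context.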
-Tokenizer::Token Parser::GetToken() try {
+Token Parser::GetToken() try {
return tok.GetNext();
} catch (Tokenizer::LexerError &e) {
throw Error(file, e.Line(), e.what());
}
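+// Handles an export directive; a non-identifier token means an inline definition is exported.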
void Parser::ParseExportDirective() {
- Tokenizer::Token t(GetToken());
- if (t.type != Tokenizer::Token::IDENTIFIER) {
+ Token t(GetToken());
+ if (t.type != Token::IDENTIFIER) {
tok.Putback(t);
Declaration *decl(ProbeDefinition());
product.ExportDeclaration(decl);
}
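+// Handles an include directive: parses the referenced file, resolved relative to this one, into the same product.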
void Parser::ParseIncludeDirective() {
- Tokenizer::Token t(GetToken());
- AssertTokenType(t.type, Tokenizer::Token::STRING);
+ Token t(GetToken());
+ AssertTokenType(t.type, Token::STRING);
Parser sub(CatPath(dirname, t.str), product);
sub.Parse();
}
string identifier(ParseIdentifier());
if (tok.HasMore()) {
- Tokenizer::Token t(GetToken());
+ Token t(GetToken());
tok.Putback(t);
if (BeginOfPropertyList(t)) {
auto_ptr<PropertyList> propertyList(ParsePropertyList());
return new Declaration(typeName, identifier);
}
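+// True if t can begin any literal, including script and object literals.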
-bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
+bool Parser::BeginningOfLiteral(const Token &t) const {
switch (t.type) {
- case Tokenizer::Token::CHEVRON_OPEN:
- case Tokenizer::Token::COLON:
- case Tokenizer::Token::BRACKET_OPEN:
- case Tokenizer::Token::PARENTHESIS_OPEN:
- case Tokenizer::Token::NUMBER:
- case Tokenizer::Token::SCRIPT_BEGIN:
- case Tokenizer::Token::STRING:
- case Tokenizer::Token::KEYWORD_FALSE:
- case Tokenizer::Token::KEYWORD_TRUE:
- case Tokenizer::Token::TYPE_NAME:
+ case Token::CHEVRON_OPEN:
+ case Token::COLON:
+ case Token::BRACKET_OPEN:
+ case Token::PARENTHESIS_OPEN:
+ case Token::NUMBER:
+ case Token::SCRIPT_BEGIN:
+ case Token::STRING:
+ case Token::KEYWORD_FALSE:
+ case Token::KEYWORD_TRUE:
+ case Token::TYPE_NAME:
return true;
default:
return false;
}
}
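+// Like BeginningOfLiteral, but excludes script literals and object literals.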
-bool Parser::BeginningOfPrimitiveLiteral(const Tokenizer::Token &t) const {
+bool Parser::BeginningOfPrimitiveLiteral(const Token &t) const {
switch (t.type) {
- case Tokenizer::Token::CHEVRON_OPEN:
- case Tokenizer::Token::COLON:
- case Tokenizer::Token::BRACKET_OPEN:
- case Tokenizer::Token::PARENTHESIS_OPEN:
- case Tokenizer::Token::NUMBER:
- case Tokenizer::Token::STRING:
- case Tokenizer::Token::KEYWORD_FALSE:
- case Tokenizer::Token::KEYWORD_TRUE:
+ case Token::CHEVRON_OPEN:
+ case Token::COLON:
+ case Token::BRACKET_OPEN:
+ case Token::PARENTHESIS_OPEN:
+ case Token::NUMBER:
+ case Token::STRING:
+ case Token::KEYWORD_FALSE:
+ case Token::KEYWORD_TRUE:
return true;
default:
return false;
}
}
-bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
- return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
+bool Parser::BeginOfPropertyList(const Token &t) const {
+ return t.type == Token::ANGLE_BRACKET_OPEN;
}
-bool Parser::BeginningOfScriptLiteral(const Tokenizer::Token &t) const {
- return t.type == Tokenizer::Token::SCRIPT_BEGIN;
+bool Parser::BeginningOfScriptLiteral(const Token &t) const {
+ return t.type == Token::SCRIPT_BEGIN;
}
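+// Parses a definition: type name and identifier, optionally followed by a property list.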
Definition *Parser::ParseDefinition() {
string typeName(ParseTypeName());
string identifier(ParseIdentifier());
- Tokenizer::Token t(GetToken());
+ Token t(GetToken());
tok.Putback(t);
if (BeginOfPropertyList(t)) {
PropertyList *propertyList(ParsePropertyList());
}
string Parser::ParseIdentifier() {
- Tokenizer::Token t(GetToken());
- AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER);
+ Token t(GetToken());
+ AssertTokenType(t.type, Token::IDENTIFIER);
return t.str;
}
string Parser::ParseTypeName() {
- Tokenizer::Token t(GetToken());
- AssertTokenType(t.type, Tokenizer::Token::TYPE_NAME);
+ Token t(GetToken());
+ AssertTokenType(t.type, Token::TYPE_NAME);
return t.str;
}
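+// Parses a brace-enclosed, comma-separated list of name: value pairs.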
PropertyList *Parser::ParsePropertyList() {
- Tokenizer::Token t(GetToken());
- AssertTokenType(t.type, Tokenizer::Token::ANGLE_BRACKET_OPEN);
+ Token t(GetToken());
+ AssertTokenType(t.type, Token::ANGLE_BRACKET_OPEN);
auto_ptr<PropertyList> props(new PropertyList);
- while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
- Tokenizer::Token name(GetToken());
- AssertTokenType(name.type, Tokenizer::Token::IDENTIFIER);
+ while (t.type != Token::ANGLE_BRACKET_CLOSE) {
+ Token name(GetToken());
+ AssertTokenType(name.type, Token::IDENTIFIER);
t = GetToken();
- AssertTokenType(t.type, Tokenizer::Token::COLON);
+ AssertTokenType(t.type, Token::COLON);
Value *value(ParseValue());
props->SetProperty(name.str, value);
t = GetToken();
- if (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+ if (t.type != Token::ANGLE_BRACKET_CLOSE && t.type != Token::COMMA) {
throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
}
}
}
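+// A value is either an identifier reference or a literal.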
Value *Parser::ParseValue() {
- Tokenizer::Token t(GetToken());
- if (t.type == Tokenizer::Token::IDENTIFIER) {
+ Token t(GetToken());
+ if (t.type == Token::IDENTIFIER) {
return new Value(t.str);
} else if (BeginningOfLiteral(t)) {
tok.Putback(t);
}
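+// Parses a literal: an object literal (type name plus properties), a script, or a primitive.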
Literal *Parser::ParseLiteral() {
- Tokenizer::Token t(GetToken());
- if (t.type == Tokenizer::Token::TYPE_NAME) {
+ Token t(GetToken());
+ if (t.type == Token::TYPE_NAME) {
PropertyList *props(ParsePropertyList());
return new Literal(t.str, props);
} else if (BeginningOfScriptLiteral(t)) {
return ParseScript();
} else if (BeginningOfPrimitiveLiteral(t)) {
switch (t.type) {
- case Tokenizer::Token::CHEVRON_OPEN:
+ case Token::CHEVRON_OPEN:
tok.Putback(t);
return ParseVector();
- case Tokenizer::Token::COLON:
+ case Token::COLON:
t = GetToken();
- AssertTokenType(t.type, Tokenizer::Token::STRING);
+ AssertTokenType(t.type, Token::STRING);
return new Literal(dirname, t.str);
- case Tokenizer::Token::BRACKET_OPEN:
+ case Token::BRACKET_OPEN:
tok.Putback(t);
return ParseArray();
- case Tokenizer::Token::PARENTHESIS_OPEN:
+ case Token::PARENTHESIS_OPEN:
tok.Putback(t);
return ParseColor();
- case Tokenizer::Token::NUMBER:
+ case Token::NUMBER:
return new Literal(t.number);
- case Tokenizer::Token::STRING:
+ case Token::STRING:
return new Literal(t.str);
- case Tokenizer::Token::KEYWORD_FALSE:
+ case Token::KEYWORD_FALSE:
return new Literal(false);
- case Tokenizer::Token::KEYWORD_TRUE:
+ case Token::KEYWORD_TRUE:
return new Literal(true);
default:
throw std::logic_error("literal switch reached impossible default branch oO");
}
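+// Parses an array of property lists or of plain values; the first token decides which.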
Literal *Parser::ParseArray() {
- Tokenizer::Token t(GetToken());
- AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
+ Token t(GetToken());
+ AssertTokenType(t.type, Token::BRACKET_OPEN);
- Tokenizer::Token probe(GetToken());
+ Token probe(GetToken());
- if (probe.type == Tokenizer::Token::TYPE_NAME) {
+ if (probe.type == Token::TYPE_NAME) {
vector<PropertyList *> values;
- while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
+ while (t.type != Token::BRACKET_CLOSE) {
PropertyList *value(ParsePropertyList());
values.push_back(value);
t = GetToken();
- if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+ if (t.type != Token::BRACKET_CLOSE && t.type != Token::COMMA) {
throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
}
}
tok.Putback(probe);
vector<Value *> values;
- while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
+ while (t.type != Token::BRACKET_CLOSE) {
Value *value(ParseValue());
values.push_back(value);
t = GetToken();
- if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+ if (t.type != Token::BRACKET_CLOSE && t.type != Token::COMMA) {
throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
}
}
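+// Parses a color literal of three or four (alpha) number components in parentheses.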
Literal *Parser::ParseColor() {
string msg("error parsing color");
- Tokenizer::Token t(GetToken());
- AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_OPEN, msg);
+ Token t(GetToken());
+ AssertTokenType(t.type, Token::PARENTHESIS_OPEN, msg);
- Tokenizer::Token red(GetToken());
- AssertTokenType(red.type, Tokenizer::Token::NUMBER, "error parsing red component of color");
+ Token red(GetToken());
+ AssertTokenType(red.type, Token::NUMBER, "error parsing red component of color");
t = GetToken();
- AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
+ AssertTokenType(t.type, Token::COMMA, msg);
- Tokenizer::Token green(GetToken());
- AssertTokenType(green.type, Tokenizer::Token::NUMBER, "error parsing green component of color");
+ Token green(GetToken());
+ AssertTokenType(green.type, Token::NUMBER, "error parsing green component of color");
t = GetToken();
- AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
+ AssertTokenType(t.type, Token::COMMA, msg);
- Tokenizer::Token blue(GetToken());
- AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");
+ Token blue(GetToken());
+ AssertTokenType(blue.type, Token::NUMBER, "error parsing blue component of color");
t = GetToken();
- if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
+ if (t.type == Token::PARENTHESIS_CLOSE) {
return new Literal(red.number, green.number, blue.number);
- } else if (t.type == Tokenizer::Token::COMMA) {
- Tokenizer::Token alpha(GetToken());
- AssertTokenType(alpha.type, Tokenizer::Token::NUMBER, "error parsing alpha component of color");
+ } else if (t.type == Token::COMMA) {
+ Token alpha(GetToken());
+ AssertTokenType(alpha.type, Token::NUMBER, "error parsing alpha component of color");
t = GetToken();
- AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_CLOSE, msg);
+ AssertTokenType(t.type, Token::PARENTHESIS_CLOSE, msg);
return new Literal(red.number, green.number, blue.number, alpha.number);
} else {
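+// Parses a vector literal of two number components between chevrons.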
Literal *Parser::ParseVector() {
std::string msg("error parsing vector");
- Tokenizer::Token t(GetToken());
- AssertTokenType(t.type, Tokenizer::Token::CHEVRON_OPEN, msg);
+ Token t(GetToken());
+ AssertTokenType(t.type, Token::CHEVRON_OPEN, msg);
- Tokenizer::Token x(GetToken());
- AssertTokenType(x.type, Tokenizer::Token::NUMBER, "error parsing x component of vector");
+ Token x(GetToken());
+ AssertTokenType(x.type, Token::NUMBER, "error parsing x component of vector");
t = GetToken();
- AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
+ AssertTokenType(t.type, Token::COMMA, msg);
- Tokenizer::Token y(GetToken());
- AssertTokenType(y.type, Tokenizer::Token::NUMBER, "error parsing y component of vector");
+ Token y(GetToken());
+ AssertTokenType(y.type, Token::NUMBER, "error parsing y component of vector");
t = GetToken();
- AssertTokenType(t.type, Tokenizer::Token::CHEVRON_CLOSE, msg);
+ AssertTokenType(t.type, Token::CHEVRON_CLOSE, msg);
return new Literal(x.number, y.number);
}
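+// Parses a script literal: a stream of literals, commands, identifiers, and registers.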
Literal *Parser::ParseScript() {
std::string msg("error parsing script");
- Tokenizer::Token t(GetToken());
- AssertTokenType(t.type, Tokenizer::Token::SCRIPT_BEGIN, msg);
+ Token t(GetToken());
+ AssertTokenType(t.type, Token::SCRIPT_BEGIN, msg);
vector<ScriptToken *> script;
try {
- while (t.type != Tokenizer::Token::SCRIPT_END) {
+ while (t.type != Token::SCRIPT_END) {
if (BeginningOfPrimitiveLiteral(t)) {
tok.Putback(t);
script.push_back(new ScriptToken(ParseLiteral()));
} else {
switch (t.type) {
- case Tokenizer::Token::COMMAND: {
- Tokenizer::Token t2(GetToken());
- AssertTokenType(t2.type, Tokenizer::Token::IDENTIFIER, msg);
+ case Token::COMMAND: {
+ Token t2(GetToken());
+ AssertTokenType(t2.type, Token::IDENTIFIER, msg);
script.push_back(new ScriptToken(t2.str, ScriptToken::COMMAND));
break;
}
- case Tokenizer::Token::IDENTIFIER: {
+ case Token::IDENTIFIER: {
script.push_back(new ScriptToken(t.str, ScriptToken::IDENTIFIER));
break;
}
- case Tokenizer::Token::REGISTER: {
- Tokenizer::Token t2(GetToken());
- AssertTokenType(t2.type, Tokenizer::Token::IDENTIFIER, msg);
+ case Token::REGISTER: {
+ Token t2(GetToken());
+ AssertTokenType(t2.type, Token::IDENTIFIER, msg);
script.push_back(new ScriptToken(t2.str, ScriptToken::REGISTER));
break;
}
}
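+// Throws a parser error if the actual token type does not match the expected one.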
-void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
+void Parser::AssertTokenType(Token::Type actual, Token::Type expected) {
if (expected != actual) {
throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
}
}
-void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected, const string &msg) {
+void Parser::AssertTokenType(Token::Type actual, Token::Type expected, const string &msg) {
if (expected != actual) {
throw Error(file, tok.Line(), msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
}