#include "Parser.h"
+#include "utility.h"
+
#include <auto_ptr.h>
#include <fstream>
namespace loader {
+// Construct a parser for `file`, appending everything parsed into `product`.
+// The file's directory is remembered (dirname) so that include directives
+// and path literals can be resolved relative to the including file.
+// Throws Error(file, 0, ...) if the file cannot be opened for reading.
+Parser::Parser(const string &file, ParsedSource &product)
+// NOTE(review): initialization order follows member *declaration* order in
+// Parser.h, not this list; `in` reads this->file, so `file` must be declared
+// before `in` (and `in` before `tok`) — confirm against the header.
+: file(file)
+, dirname(Dirname(file))
+, in(this->file.c_str())
+, tok(in)
+, product(product) {
+	if (!in) {
+		throw Error(file, 0, "unable to read file");
+	}
+}
+
void Parser::Parse() {
while (tok.HasMore()) {
ParseStatement();
void Parser::ParseIncludeDirective() {
+// An include directive names the target file as a string token; the path is
+// resolved relative to this file's directory (CatPath with dirname) and the
+// sub-parser writes into the same shared product.
Tokenizer::Token t(GetToken());
AssertTokenType(t.type, Tokenizer::Token::STRING);
- Parser sub(t.str.c_str(), product); // TODO: resolve path name
+ Parser sub(CatPath(dirname, t.str), product);
sub.Parse();
}
string typeName(ParseTypeName());
string identifier(ParseIdentifier());
- Tokenizer::Token t(GetToken());
- tok.Putback(t);
- if (BeginOfPropertyList(t)) {
- PropertyList *propertyList(ParsePropertyList());
- Definition *dfn(new Definition(typeName, identifier));
- dfn->SetValue(propertyList);
- return dfn;
- } else if (BeginningOfLiteral(t)) {
- Literal *literal(ParseLiteral());
- Definition *dfn(new Definition(typeName, identifier));
- dfn->SetValue(literal);
- return dfn;
- } else {
- return new Declaration(typeName, identifier);
+ if (tok.HasMore()) {
+ Tokenizer::Token t(GetToken());
+ tok.Putback(t);
+ if (BeginOfPropertyList(t)) {
+ auto_ptr<PropertyList> propertyList(ParsePropertyList());
+ auto_ptr<Definition> dfn(new Definition(typeName, identifier));
+ dfn->SetValue(propertyList.release());
+ product.AddDefinition(dfn.get());
+ return dfn.release();
+ } else if (BeginningOfPrimitiveLiteral(t)) {
+ auto_ptr<Literal> literal(ParseLiteral());
+ auto_ptr<Definition> dfn(new Definition(typeName, identifier));
+ dfn->SetValue(literal.release());
+ product.AddDefinition(dfn.get());
+ return dfn.release();
+ }
}
+ return new Declaration(typeName, identifier);
}
bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
switch (t.type) {
case Tokenizer::Token::CHEVRON_OPEN:
+ case Tokenizer::Token::COLON:
case Tokenizer::Token::BRACKET_OPEN:
case Tokenizer::Token::PARENTHESIS_OPEN:
case Tokenizer::Token::NUMBER:
}
}
+// Tell whether `t` can open a primitive literal. This is a superset of the
+// structured-literal openers: besides the vector ('<'), path (':'), array
+// ('['), color ('(') and number starters, a bare string or boolean keyword
+// also forms a primitive literal on its own.
+bool Parser::BeginningOfPrimitiveLiteral(const Tokenizer::Token &t) const {
+	const bool structuredOpener(
+			t.type == Tokenizer::Token::CHEVRON_OPEN
+			|| t.type == Tokenizer::Token::COLON
+			|| t.type == Tokenizer::Token::BRACKET_OPEN
+			|| t.type == Tokenizer::Token::PARENTHESIS_OPEN
+			|| t.type == Tokenizer::Token::NUMBER);
+	const bool bareValue(
+			t.type == Tokenizer::Token::STRING
+			|| t.type == Tokenizer::Token::KEYWORD_TRUE
+			|| t.type == Tokenizer::Token::KEYWORD_FALSE);
+	return structuredOpener || bareValue;
+}
+
bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
+// A property list begins with an ANGLE_BRACKET_OPEN token (distinct from
+// CHEVRON_OPEN, which opens a vector literal).
return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
}
case Tokenizer::Token::CHEVRON_OPEN:
tok.Putback(t);
return ParseVector();
+ case Tokenizer::Token::COLON:
+ t = GetToken();
+ AssertTokenType(t.type, Tokenizer::Token::STRING);
+ return new Literal(dirname, t.str);
case Tokenizer::Token::BRACKET_OPEN:
tok.Putback(t);
return ParseArray();
Tokenizer::Token t(GetToken());
AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
- vector<Value *> values;
+ Tokenizer::Token probe(GetToken());
- while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
- Value *value(ParseValue());
- values.push_back(value);
+ if (probe.type == Tokenizer::Token::TYPE_NAME) {
+ vector<PropertyList *> values;
+ while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
+ PropertyList *value(ParsePropertyList());
+ values.push_back(value);
- t = GetToken();
- if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
- throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+ t = GetToken();
+ if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+ throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+ }
}
- }
+ return new Literal(probe.str, values);
+ } else {
+ tok.Putback(probe);
+
+ vector<Value *> values;
+ while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
+ Value *value(ParseValue());
+ values.push_back(value);
- return new Literal(values);
+ t = GetToken();
+ if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+ throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+ }
+ }
+ return new Literal(values);
+ }
}
Literal *Parser::ParseColor() {
AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");
t = GetToken();
- if (t.type == Tokenizer::Token::BRACKET_CLOSE) {
+ if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
return new Literal(red.number, green.number, blue.number);
} else if (t.type != Tokenizer::Token::COMMA) {
Tokenizer::Token alpha(GetToken());