case Tokenizer::Token::BRACKET_OPEN:
case Tokenizer::Token::PARENTHESIS_OPEN:
case Tokenizer::Token::NUMBER:
+ case Tokenizer::Token::SCRIPT_BEGIN:
case Tokenizer::Token::STRING:
case Tokenizer::Token::KEYWORD_FALSE:
case Tokenizer::Token::KEYWORD_TRUE:
return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
}
+bool Parser::BeginningOfScriptLiteral(const Tokenizer::Token &t) const {
+ return t.type == Tokenizer::Token::SCRIPT_BEGIN;
+}
+
// Parses one definition: a type name, an identifier, and a literal value
// (typed property list, script literal, or primitive literal).
// NOTE(review): this view is an elided diff hunk — context lines between
// several statements are missing, so the comments below are hedged.
Definition *Parser::ParseDefinition() {
string typeName(ParseTypeName());
string identifier(ParseIdentifier());
// presumably t was fetched from the tokenizer in elided lines — TODO confirm
if (t.type == Tokenizer::Token::TYPE_NAME) {
// a type name introduces a property-list literal of that type
PropertyList *props(ParsePropertyList());
return new Literal(t.str, props);
- } else if (BeginningOfLiteral(t)) {
// script literals are now dispatched separately from primitive literals
+ } else if (BeginningOfScriptLiteral(t)) {
+ tok.Putback(t);
+ return ParseScript();
+ } else if (BeginningOfPrimitiveLiteral(t)) {
switch (t.type) {
case Tokenizer::Token::CHEVRON_OPEN:
tok.Putback(t);
AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
// probe one token past '[' to decide between a list of typed
// property lists and a list of plain values
Tokenizer::Token probe(GetToken());
- tok.Putback(probe);
- if (probe.type == Tokenizer::Token::ANGLE_BRACKET_OPEN) {
+ if (probe.type == Tokenizer::Token::TYPE_NAME) {
// '[' TYPE_NAME ...: probe is consumed as the element type name,
// so it is no longer put back on this branch
vector<PropertyList *> values;
while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
PropertyList *value(ParsePropertyList());
// NOTE(review): this throw appears directly after the parse call only
// because intervening separator-handling lines are elided from this view
throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
}
}
// the element type name travels with the list into the literal
- return new Literal(values);
+ return new Literal(probe.str, values);
} else {
// probe was not a type name: return it to the tokenizer and
// parse a list of plain values instead
+ tok.Putback(probe);
+
vector<Value *> values;
while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
Value *value(ParseValue());
// NOTE(review): this return likely belongs to a different (elided)
// two-component literal path — TODO confirm against full file
return new Literal(x.number, y.number);
}
+Literal *Parser::ParseScript() {
+ std::string msg("error parsing script");
+ Tokenizer::Token t(GetToken());
+ AssertTokenType(t.type, Tokenizer::Token::SCRIPT_BEGIN, msg);
+
+ vector<ScriptToken *> script;
+ try {
+ while (t.type != Tokenizer::Token::SCRIPT_END) {
+ if (BeginningOfPrimitiveLiteral(t)) {
+ tok.Putback(t);
+ script.push_back(new ScriptToken(ParseLiteral()));
+ } else {
+ switch (t.type) {
+ case Tokenizer::Token::COMMAND: {
+ Tokenizer::Token t2(GetToken());
+ AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER, msg);
+ script.push_back(new ScriptToken(t2.str, ScriptToken::COMMAND));
+ break;
+ }
+ case Tokenizer::Token::IDENTIFIER: {
+ script.push_back(new ScriptToken(t.str, ScriptToken::IDENTIFIER));
+ break;
+ }
+ case Tokenizer::Token::REGISTER: {
+ Tokenizer::Token t2(GetToken());
+ AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER, msg);
+ script.push_back(new ScriptToken(t2.str, ScriptToken::REGISTER));
+ break;
+ }
+ default:
+ throw Error(file, tok.Line(), string("unexpected token in script: ") + TokenTypeToString(t.type));
+ }
+ }
+ t = GetToken();
+ }
+ } catch (...) {
+ for (vector<ScriptToken *>::const_iterator i(script.begin()), end(script.end()); i != end; ++i) {
+ delete *i;
+ }
+ throw;
+ }
+ return new Literal(script);
+}
+
+
void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
if (expected != actual) {
throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));