}
}
-Tokenizer::Token Parser::GetToken() {
-	try {
-		return tok.GetNext();
-	} catch (Tokenizer::LexerError &e) {
-		throw Error(file, e.Line(), e.what());
-	}
+// Fetch the next token, translating lexer failures into parser errors that
+// carry the source file name and the offending line number.
+// NOTE(review): rewritten as a function-try-block — the entire function body
+// is the try scope, behaviorally equivalent to the removed explicit try/catch.
+Tokenizer::Token Parser::GetToken() try {
+	return tok.GetNext();
+} catch (Tokenizer::LexerError &e) {
+	// Re-throw as Error so callers need only catch the parser's error type.
+	throw Error(file, e.Line(), e.what());
}
+// NOTE(review): this hunk sits under ParseExportDirective (declared void) yet
+// the branches below return Definition*/Declaration* — the diff appears to
+// have lost a hunk header, and these lines likely belong to a different,
+// definition-parsing function. Verify against the full file before applying.
void Parser::ParseExportDirective() {
	Tokenizer::Token t(GetToken());
	tok.Putback(t);
	if (BeginOfPropertyList(t)) {
-		PropertyList *propertyList(ParsePropertyList());
-		Definition *dfn(new Definition(typeName, identifier));
-		dfn->SetValue(propertyList);
-		return dfn;
+		// Hold intermediates in auto_ptr so a throw from the calls below
+		// cannot leak the partially built definition.
+		// NOTE(review): auto_ptr is deprecated since C++11 — prefer
+		// unique_ptr if the toolchain allows. TODO confirm language level.
+		auto_ptr<PropertyList> propertyList(ParsePropertyList());
+		auto_ptr<Definition> dfn(new Definition(typeName, identifier));
+		dfn->SetValue(propertyList.release());
+		// NOTE(review): product keeps a raw pointer while the caller takes
+		// ownership via release() — confirm AddDefinition does not delete,
+		// or the pointer will be double-freed / dangle.
+		product.AddDefinition(dfn.get());
+		return dfn.release();
	} else if (BeginningOfLiteral(t)) {
-		Literal *literal(ParseLiteral());
-		Definition *dfn(new Definition(typeName, identifier));
-		dfn->SetValue(literal);
-		return dfn;
+		// Same RAII pattern as the property-list branch above.
+		auto_ptr<Literal> literal(ParseLiteral());
+		auto_ptr<Definition> dfn(new Definition(typeName, identifier));
+		dfn->SetValue(literal.release());
+		product.AddDefinition(dfn.get());
+		return dfn.release();
	} else {
+		// No value follows the header: emit a bare declaration. The caller
+		// receives the raw owning pointer, as before this change.
		return new Declaration(typeName, identifier);
	}
	Tokenizer::Token t(GetToken());
	AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
-	vector<Value *> values;
+	// Peek one token past '[' to decide between an array of property lists
+	// ("[<...>, ...]") and an array of plain values; the putback leaves the
+	// stream unchanged for the element parsers below.
+	Tokenizer::Token probe(GetToken());
+	tok.Putback(probe);
-	while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
-		Value *value(ParseValue());
-		values.push_back(value);
+	if (probe.type == Tokenizer::Token::ANGLE_BRACKET_OPEN) {
+		// NOTE(review): raw owning pointers in `values` leak if a later
+		// ParsePropertyList()/GetToken() call throws — consider a guard.
+		vector<PropertyList *> values;
+		// NOTE(review): the loop now tests BRACKET_CLOSE; the removed code
+		// compared against ANGLE_BRACKET_CLOSE even though its own error
+		// message says ", expected , or ]" — this looks like the bug fix.
+		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
+			PropertyList *value(ParsePropertyList());
+			values.push_back(value);
-		t = GetToken();
-		if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
-			throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+			// Each element must be followed by ',' or the closing ']'.
+			t = GetToken();
+			if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+			}
		}
+		return new Literal(values);
+	} else {
+		// Array of plain values; same element/separator loop as above.
+		// NOTE(review): an empty array "[]" appears to reach ParseValue()
+		// with the ']' token (t is still '[' on loop entry) — confirm this
+		// is intended or rejected upstream.
+		vector<Value *> values;
+		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
+			Value *value(ParseValue());
+			values.push_back(value);
+
+			t = GetToken();
+			if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
+				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
+			}
+		}
+		return new Literal(values);
	}
-
-	return new Literal(values);
}
Literal *Parser::ParseColor() {