4 * Created on: Aug 26, 2012
// Constructor: remembers the source file path and its directory (the directory
// is later used to resolve include directives and path literals), then opens
// the input stream on the stored copy of the path.
// NOTE(review): listing is elided here — parts of the init list and the
// body (e.g. the stream check guarding the throw) are missing.
22 Parser::Parser(const string &file, ParsedSource &product)
24 , dirname(Dirname(file))
25 , in(this->file.c_str())
// Reported with line 0 since no token has been read yet.
29 throw Error(file, 0, "unable to read file");
// Top-level entry point: keeps consuming statements until the tokenizer
// reports no more input. Errors propagate as Parser::Error exceptions.
33 void Parser::Parse() {
34 while (tok.HasMore()) {
// Dispatches one top-level statement on its leading token:
// export directive, include directive, or a declaration/definition.
// NOTE(review): the switch header and break statements are elided in this
// listing; the cases below are the visible dispatch targets.
39 void Parser::ParseStatement() {
40 Tokenizer::Token t(GetToken());
42 case Tokenizer::Token::KEYWORD_EXPORT:
43 ParseExportDirective();
45 case Tokenizer::Token::KEYWORD_INCLUDE:
46 ParseIncludeDirective();
48 case Tokenizer::Token::TYPE_NAME:
// presumably the TYPE_NAME token is pushed back so ProbeDefinition can
// re-read the type name — TODO confirm against the full source
51 Declaration *decl(ProbeDefinition());
52 product.AddDeclaration(decl);
// Anything else at statement level is a syntax error.
56 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type));
// Fetches the next token, using a function-try-block to translate low-level
// lexer failures into the parser's own Error type with file context attached.
60 Tokenizer::Token Parser::GetToken() try {
62 } catch (Tokenizer::LexerError &e) {
63 throw Error(file, e.Line(), e.what());
// Handles "export ...": an identifier token exports by name; any other token
// means a full definition follows, which is parsed and exported directly.
// NOTE(review): the else branch and likely a token put-back before
// ProbeDefinition are elided in this listing — TODO confirm.
66 void Parser::ParseExportDirective() {
67 Tokenizer::Token t(GetToken());
68 if (t.type != Tokenizer::Token::IDENTIFIER) {
70 Declaration *decl(ProbeDefinition());
71 product.ExportDeclaration(decl);
73 product.ExportIdentifier(t.str);
// Handles "include <string>": the path is resolved relative to this file's
// directory and parsed by a nested Parser that writes into the same product.
77 void Parser::ParseIncludeDirective() {
78 Tokenizer::Token t(GetToken());
79 AssertTokenType(t.type, Tokenizer::Token::STRING);
80 Parser sub(CatPath(dirname, t.str), product);
// Reads "TypeName identifier" and then probes the next token: a property list
// or a primitive literal upgrades it to a Definition; otherwise it stays a
// plain (forward) Declaration.
// NOTE(review): listing is elided — the first branch's return and a likely
// token put-back after the probe are missing here.
84 Declaration *Parser::ProbeDefinition() {
85 string typeName(ParseTypeName());
86 string identifier(ParseIdentifier());
89 Tokenizer::Token t(GetToken());
91 if (BeginOfPropertyList(t)) {
// auto_ptr guards the temporaries until ownership is handed over; release()
// into SetValue transfers the property list to the definition.
92 auto_ptr<PropertyList> propertyList(ParsePropertyList());
93 auto_ptr<Definition> dfn(new Definition(typeName, identifier));
94 dfn->SetValue(propertyList.release());
95 product.AddDefinition(dfn.get());
97 } else if (BeginningOfPrimitiveLiteral(t)) {
98 auto_ptr<Literal> literal(ParseLiteral());
99 auto_ptr<Definition> dfn(new Definition(typeName, identifier));
100 dfn->SetValue(literal.release());
// product receives the raw pointer while ownership is released to the
// caller — presumably product does not own definitions; TODO confirm.
101 product.AddDefinition(dfn.get());
102 return dfn.release();
// No value followed: return a bare declaration owned by the caller.
105 return new Declaration(typeName, identifier);
// True if the token can start any literal: vector '<', path ':', array '[',
// color '(', number, script, string, boolean keyword, or a named (type-name)
// property-list literal.
// NOTE(review): the switch header and the true/false returns are elided here.
108 bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
110 case Tokenizer::Token::CHEVRON_OPEN:
111 case Tokenizer::Token::COLON:
112 case Tokenizer::Token::BRACKET_OPEN:
113 case Tokenizer::Token::PARENTHESIS_OPEN:
114 case Tokenizer::Token::NUMBER:
115 case Tokenizer::Token::SCRIPT_BEGIN:
116 case Tokenizer::Token::STRING:
117 case Tokenizer::Token::KEYWORD_FALSE:
118 case Tokenizer::Token::KEYWORD_TRUE:
119 case Tokenizer::Token::TYPE_NAME:
// Like BeginningOfLiteral, but restricted to primitives: excludes scripts
// (SCRIPT_BEGIN) and named property-list literals (TYPE_NAME).
126 bool Parser::BeginningOfPrimitiveLiteral(const Tokenizer::Token &t) const {
128 case Tokenizer::Token::CHEVRON_OPEN:
129 case Tokenizer::Token::COLON:
130 case Tokenizer::Token::BRACKET_OPEN:
131 case Tokenizer::Token::PARENTHESIS_OPEN:
132 case Tokenizer::Token::NUMBER:
133 case Tokenizer::Token::STRING:
134 case Tokenizer::Token::KEYWORD_FALSE:
135 case Tokenizer::Token::KEYWORD_TRUE:
// A property list opens with '{' (ANGLE_BRACKET_OPEN in this tokenizer;
// CHEVRON_* is used for '<'/'>').
142 bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
143 return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
// A script literal is introduced by the dedicated SCRIPT_BEGIN token.
146 bool Parser::BeginningOfScriptLiteral(const Tokenizer::Token &t) const {
147 return t.type == Tokenizer::Token::SCRIPT_BEGIN;
// Parses a mandatory definition: "TypeName identifier" followed by either a
// property list or a literal; anything else is a syntax error.
// NOTE(review): the return statements of both branches are elided in this
// listing. Unlike ProbeDefinition, raw pointers are used here — the new'd
// Definition would leak if SetValue or a later step threw; TODO confirm.
150 Definition *Parser::ParseDefinition() {
151 string typeName(ParseTypeName());
152 string identifier(ParseIdentifier());
154 Tokenizer::Token t(GetToken());
156 if (BeginOfPropertyList(t)) {
157 PropertyList *propertyList(ParsePropertyList());
158 Definition *dfn(new Definition(typeName, identifier));
159 dfn->SetValue(propertyList);
161 } else if (BeginningOfLiteral(t)) {
162 Literal *literal(ParseLiteral());
163 Definition *dfn(new Definition(typeName, identifier));
164 dfn->SetValue(literal);
167 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
// Reads exactly one IDENTIFIER token and returns its text
// (the return line is elided in this listing).
171 string Parser::ParseIdentifier() {
172 Tokenizer::Token t(GetToken());
173 AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER);
// Reads exactly one TYPE_NAME token and returns its text
// (the return line is elided in this listing).
177 string Parser::ParseTypeName() {
178 Tokenizer::Token t(GetToken());
179 AssertTokenType(t.type, Tokenizer::Token::TYPE_NAME);
// Parses "{ name: value, ... }" into a PropertyList. Entries are
// "IDENTIFIER : value" separated by commas; '}' closes the list.
// NOTE(review): the re-reads of t (before the COLON assert and after each
// value) are elided in this listing. auto_ptr guards props against throws,
// but a raw value would leak if SetProperty threw — TODO confirm.
183 PropertyList *Parser::ParsePropertyList() {
184 Tokenizer::Token t(GetToken());
185 AssertTokenType(t.type, Tokenizer::Token::ANGLE_BRACKET_OPEN);
187 auto_ptr<PropertyList> props(new PropertyList);
189 while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
190 Tokenizer::Token name(GetToken());
191 AssertTokenType(name.type, Tokenizer::Token::IDENTIFIER);
194 AssertTokenType(t.type, Tokenizer::Token::COLON);
196 Value *value(ParseValue());
197 props->SetProperty(name.str, value);
// After a value only ',' (more entries) or '}' (end of list) may follow.
200 if (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
201 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
// Ownership of the completed list passes to the caller.
205 return props.release();
// Parses a property value: a bare identifier becomes a reference Value,
// any literal opener becomes a literal Value; otherwise a syntax error.
// NOTE(review): a token put-back before ParseLiteral (which re-reads the
// token) appears to be elided in this listing — TODO confirm.
208 Value *Parser::ParseValue() {
209 Tokenizer::Token t(GetToken());
210 if (t.type == Tokenizer::Token::IDENTIFIER) {
211 return new Value(t.str);
212 } else if (BeginningOfLiteral(t)) {
214 Literal *literal(ParseLiteral());
215 return new Value(literal);
217 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
// Parses any literal form: a TYPE_NAME introduces a named property-list
// literal; SCRIPT_BEGIN a script; otherwise the primitive forms are
// dispatched on the opening token (vector '<', path ':', array '[',
// color '(', number, string, boolean keywords).
// NOTE(review): listing is elided — the switch header, token put-backs and
// the array/color dispatch bodies are missing here.
221 Literal *Parser::ParseLiteral() {
222 Tokenizer::Token t(GetToken());
223 if (t.type == Tokenizer::Token::TYPE_NAME) {
224 PropertyList *props(ParsePropertyList());
225 return new Literal(t.str, props);
226 } else if (BeginningOfScriptLiteral(t)) {
228 return ParseScript();
229 } else if (BeginningOfPrimitiveLiteral(t)) {
231 case Tokenizer::Token::CHEVRON_OPEN:
233 return ParseVector();
234 case Tokenizer::Token::COLON:
236 AssertTokenType(t.type, Tokenizer::Token::STRING);
// Path literal: the string is resolved against this file's directory.
237 return new Literal(dirname, t.str);
238 case Tokenizer::Token::BRACKET_OPEN:
241 case Tokenizer::Token::PARENTHESIS_OPEN:
244 case Tokenizer::Token::NUMBER:
245 return new Literal(t.number);
246 case Tokenizer::Token::STRING:
247 return new Literal(t.str);
248 case Tokenizer::Token::KEYWORD_FALSE:
249 return new Literal(false);
250 case Tokenizer::Token::KEYWORD_TRUE:
251 return new Literal(true);
// Unreachable if the switch covers everything BeginningOfPrimitiveLiteral
// accepts; logic_error flags a programming bug, not a parse error.
253 throw std::logic_error("literal switch reached impossible default branch oO");
// FIX: was "throw new Error(...)" — throwing a pointer. Every other site
// throws Error by value and GetToken's handler catches by reference, so a
// thrown pointer would bypass all handlers and leak. Throw by value.
256 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
// Parses "[ ... ]": a leading TYPE_NAME makes it an array of named property
// lists, otherwise an array of plain values; elements are comma-separated.
// NOTE(review): listing is elided — the probe put-back, the re-reads of t
// after each element, and the closing braces are missing. The raw pointers
// accumulated in `values` would leak if a later element threw — TODO confirm.
260 Literal *Parser::ParseArray() {
261 Tokenizer::Token t(GetToken());
262 AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
// Peek at the first element to decide which array flavor this is.
264 Tokenizer::Token probe(GetToken());
266 if (probe.type == Tokenizer::Token::TYPE_NAME) {
267 vector<PropertyList *> values;
268 while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
269 PropertyList *value(ParsePropertyList());
270 values.push_back(value);
273 if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
274 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
277 return new Literal(probe.str, values);
281 vector<Value *> values;
282 while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
283 Value *value(ParseValue());
284 values.push_back(value);
287 if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
288 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
291 return new Literal(values);
// Parses "(r, g, b)" or "(r, g, b, a)" into a color literal.
// NOTE(review): listing is elided — the "t = GetToken();" re-reads between
// the component reads and the separator asserts are missing here.
295 Literal *Parser::ParseColor() {
296 string msg("error parsing color");
297 Tokenizer::Token t(GetToken());
298 AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_OPEN, msg);
300 Tokenizer::Token red(GetToken());
301 AssertTokenType(red.type, Tokenizer::Token::NUMBER, "error parsing red component of color");
304 AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
306 Tokenizer::Token green(GetToken());
307 AssertTokenType(green.type, Tokenizer::Token::NUMBER, "error parsing green component of color");
310 AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
312 Tokenizer::Token blue(GetToken());
313 AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");
// After the blue component: ')' closes an RGB color, ',' means an alpha
// component follows; anything else falls through to the error below.
316 if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
317 return new Literal(red.number, green.number, blue.number);
// FIX: was "!= Tokenizer::Token::COMMA" — inverted. That parsed an alpha
// component on any garbage token and threw on a real comma. A comma is
// exactly what introduces the optional alpha component.
318 } else if (t.type == Tokenizer::Token::COMMA) {
319 Tokenizer::Token alpha(GetToken());
320 AssertTokenType(alpha.type, Tokenizer::Token::NUMBER, "error parsing alpha component of color");
323 AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_CLOSE, msg);
325 return new Literal(red.number, green.number, blue.number, alpha.number);
// FIX: message said "expected , or ]" — colors close with ')', not ']'.
327 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or )");
// Parses "<x, y>" into a two-component vector literal.
// NOTE(review): the "t = GetToken();" re-reads before the COMMA and
// CHEVRON_CLOSE asserts are elided in this listing.
331 Literal *Parser::ParseVector() {
332 std::string msg("error parsing vector");
333 Tokenizer::Token t(GetToken());
334 AssertTokenType(t.type, Tokenizer::Token::CHEVRON_OPEN, msg);
336 Tokenizer::Token x(GetToken());
337 AssertTokenType(x.type, Tokenizer::Token::NUMBER, "error parsing x component of vector");
340 AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
342 Tokenizer::Token y(GetToken());
343 AssertTokenType(y.type, Tokenizer::Token::NUMBER, "error parsing y component of vector");
346 AssertTokenType(t.type, Tokenizer::Token::CHEVRON_CLOSE, msg);
348 return new Literal(x.number, y.number);
// Parses a script literal: everything between SCRIPT_BEGIN and SCRIPT_END.
// Primitive literals, commands (with an identifier argument), bare
// identifiers and registers (with an identifier argument) each become a
// ScriptToken appended to the script vector.
// NOTE(review): listing is elided — the switch header, breaks, the loop's
// token re-read, and the body of the cleanup loop near the end are missing.
351 Literal *Parser::ParseScript() {
352 std::string msg("error parsing script");
353 Tokenizer::Token t(GetToken());
354 AssertTokenType(t.type, Tokenizer::Token::SCRIPT_BEGIN, msg);
356 vector<ScriptToken *> script;
358 while (t.type != Tokenizer::Token::SCRIPT_END) {
359 if (BeginningOfPrimitiveLiteral(t)) {
361 script.push_back(new ScriptToken(ParseLiteral()));
364 case Tokenizer::Token::COMMAND: {
365 Tokenizer::Token t2(GetToken());
// FIX: was AssertTokenType(t.type, ...) — t is the COMMAND marker we just
// matched on; the token that must be an IDENTIFIER is the freshly read t2
// (whose .str is what gets stored below).
366 AssertTokenType(t2.type, Tokenizer::Token::IDENTIFIER, msg);
367 script.push_back(new ScriptToken(t2.str, ScriptToken::COMMAND));
370 case Tokenizer::Token::IDENTIFIER: {
371 script.push_back(new ScriptToken(t.str, ScriptToken::IDENTIFIER));
374 case Tokenizer::Token::REGISTER: {
375 Tokenizer::Token t2(GetToken());
// FIX: same as the COMMAND case — validate t2, the register's identifier,
// not the REGISTER marker token.
376 AssertTokenType(t2.type, Tokenizer::Token::IDENTIFIER, msg);
377 script.push_back(new ScriptToken(t2.str, ScriptToken::REGISTER));
381 throw Error(file, tok.Line(), string("unexpected token in script: ") + TokenTypeToString(t.type));
// presumably cleanup of the accumulated ScriptTokens on the error path —
// the loop body is elided in this listing; TODO confirm
387 for (vector<ScriptToken *>::const_iterator i(script.begin()), end(script.end()); i != end; ++i) {
392 return new Literal(script);
// Throws a formatted Error (with file and current line) when the actual
// token type does not match the expected one; no-op otherwise.
396 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
397 if (expected != actual) {
398 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
// Overload of AssertTokenType that prefixes the error with a caller-supplied
// context message (e.g. "error parsing color").
402 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected, const string &msg) {
403 if (expected != actual) {
404 throw Error(file, tok.Line(), msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));