/*
 *  Created on: Aug 26, 2012
 */
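// Recursive-descent parser: reads one definition file through a Tokenizer
// and records declarations, definitions and exports in a shared ParsedSource.
//
// The project headers declaring Parser, ParsedSource, Tokenizer, the parsed
// node types (Declaration, Definition, Literal, PropertyList, Value) and the
// helpers Dirname, CatPath and TokenTypeToString are assumed to be included
// here; only the standard headers this file visibly needs are spelled out.
#include <memory>
#include <stdexcept>
#include <string>
#include <vector>

using std::auto_ptr;
using std::string;
using std::vector;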
Parser::Parser(const string &file, ParsedSource &product)
: file(file)
, dirname(Dirname(file))
, in(this->file.c_str())
, tok(in) // the tokenizer is assumed to read from the opened stream
, product(product) {
	if (!in) {
		throw Error(file, 0, "unable to read file");
	}
}
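// Parse consumes statements until the tokenizer reports no more input.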
void Parser::Parse() {
	while (tok.HasMore()) {
		ParseStatement();
	}
}
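// A statement is either an export directive, an include directive, or a
// declaration/definition introduced by a type name. Judging by the token
// names and diagnostics, a source file presumably looks something like the
// sketch below (punctuation and all identifiers are illustrative guesses):
//
//   include "some/other.file"
//   export SomeType someName
//   SomeType someName {
//       position: <1, 2>,
//       tint: (255, 0, 0),
//       label: "hello"
//   }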
void Parser::ParseStatement() {
	Tokenizer::Token t(GetToken());
	switch (t.type) {
		case Tokenizer::Token::KEYWORD_EXPORT:
			ParseExportDirective();
			break;
		case Tokenizer::Token::KEYWORD_INCLUDE:
			ParseIncludeDirective();
			break;
		case Tokenizer::Token::TYPE_NAME:
			{
				tok.Putback(t); // un-read the type name for ProbeDefinition (Putback name assumed)
				Declaration *decl(ProbeDefinition());
				product.AddDeclaration(decl);
			}
			break;
		default:
			throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type));
	}
}
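// GetToken pulls the next token and converts lexer failures into parser
// errors that carry the file name and line number.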
Tokenizer::Token Parser::GetToken() try {
	return tok.GetNext(); // fetch the next token (tokenizer method name assumed)
} catch (Tokenizer::LexerError &e) {
	throw Error(file, e.Line(), e.what());
}
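// "export" is followed either by an identifier naming something declared
// elsewhere or directly by a definition, which is then exported as a whole.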
void Parser::ParseExportDirective() {
	Tokenizer::Token t(GetToken());
	if (t.type != Tokenizer::Token::IDENTIFIER) {
		tok.Putback(t);
		Declaration *decl(ProbeDefinition());
		product.ExportDeclaration(decl);
	} else {
		product.ExportIdentifier(t.str);
	}
}
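// "include" expects a string literal naming another file, which is parsed by
// a nested Parser (relative to this file's directory) into the same product.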
void Parser::ParseIncludeDirective() {
	Tokenizer::Token t(GetToken());
	AssertTokenType(t.type, Tokenizer::Token::STRING);
	Parser sub(CatPath(dirname, t.str), product);
	sub.Parse();
}
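// ProbeDefinition reads "TypeName identifier" and then peeks ahead: a
// property list or primitive literal turns it into a full Definition (also
// registered with the product); otherwise only a Declaration is returned.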
Declaration *Parser::ProbeDefinition() {
	string typeName(ParseTypeName());
	string identifier(ParseIdentifier());

	if (tok.HasMore()) { // a trailing declaration may have no value at all
		Tokenizer::Token t(GetToken());
		tok.Putback(t);
		if (BeginOfPropertyList(t)) {
			auto_ptr<PropertyList> propertyList(ParsePropertyList());
			auto_ptr<Definition> dfn(new Definition(typeName, identifier));
			dfn->SetValue(propertyList.release());
			product.AddDefinition(dfn.get());
			return dfn.release();
		} else if (BeginningOfPrimitiveLiteral(t)) {
			auto_ptr<Literal> literal(ParseLiteral());
			auto_ptr<Definition> dfn(new Definition(typeName, identifier));
			dfn->SetValue(literal.release());
			product.AddDefinition(dfn.get());
			return dfn.release();
		}
	}
	return new Declaration(typeName, identifier);
}
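// Token classification helpers: a literal may begin with a vector, path,
// array, color, number, string, boolean keyword or (for object literals) a
// type name; the "primitive" variant excludes the type-name case, and a
// property list is recognised by its opening bracket token.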
bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
	switch (t.type) {
		case Tokenizer::Token::CHEVRON_OPEN:
		case Tokenizer::Token::COLON:
		case Tokenizer::Token::BRACKET_OPEN:
		case Tokenizer::Token::PARENTHESIS_OPEN:
		case Tokenizer::Token::NUMBER:
		case Tokenizer::Token::STRING:
		case Tokenizer::Token::KEYWORD_FALSE:
		case Tokenizer::Token::KEYWORD_TRUE:
		case Tokenizer::Token::TYPE_NAME:
			return true;
		default:
			return false;
	}
}
bool Parser::BeginningOfPrimitiveLiteral(const Tokenizer::Token &t) const {
	switch (t.type) {
		case Tokenizer::Token::CHEVRON_OPEN:
		case Tokenizer::Token::COLON:
		case Tokenizer::Token::BRACKET_OPEN:
		case Tokenizer::Token::PARENTHESIS_OPEN:
		case Tokenizer::Token::NUMBER:
		case Tokenizer::Token::STRING:
		case Tokenizer::Token::KEYWORD_FALSE:
		case Tokenizer::Token::KEYWORD_TRUE:
			return true;
		default:
			return false;
	}
}
bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
	return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
}
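// ParseDefinition is the strict counterpart of ProbeDefinition: the value
// part (property list or literal) is mandatory and the resulting Definition
// is handed back to the caller.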
Definition *Parser::ParseDefinition() {
	string typeName(ParseTypeName());
	string identifier(ParseIdentifier());

	Tokenizer::Token t(GetToken());
	tok.Putback(t);
	if (BeginOfPropertyList(t)) {
		PropertyList *propertyList(ParsePropertyList());
		Definition *dfn(new Definition(typeName, identifier));
		dfn->SetValue(propertyList);
		return dfn;
	} else if (BeginningOfLiteral(t)) {
		Literal *literal(ParseLiteral());
		Definition *dfn(new Definition(typeName, identifier));
		dfn->SetValue(literal);
		return dfn;
	} else {
		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
	}
}
string Parser::ParseIdentifier() {
	Tokenizer::Token t(GetToken());
	AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER);
	return t.str;
}
string Parser::ParseTypeName() {
	Tokenizer::Token t(GetToken());
	AssertTokenType(t.type, Tokenizer::Token::TYPE_NAME);
	return t.str;
}
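// A property list is a comma-separated sequence of "name: value" pairs
// between ANGLE_BRACKET tokens (rendered as '{' and '}' in the diagnostics).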
PropertyList *Parser::ParsePropertyList() {
	Tokenizer::Token t(GetToken());
	AssertTokenType(t.type, Tokenizer::Token::ANGLE_BRACKET_OPEN);

	auto_ptr<PropertyList> props(new PropertyList);

	while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
		Tokenizer::Token name(GetToken());
		AssertTokenType(name.type, Tokenizer::Token::IDENTIFIER);

		t = GetToken();
		AssertTokenType(t.type, Tokenizer::Token::COLON);

		Value *value(ParseValue());
		props->SetProperty(name.str, value);

		t = GetToken();
		if (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
			throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
		}
	}

	return props.release();
}
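// A value inside a property list or array is either a bare identifier or any
// literal.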
Value *Parser::ParseValue() {
	Tokenizer::Token t(GetToken());
	if (t.type == Tokenizer::Token::IDENTIFIER) {
		return new Value(t.str);
	} else if (BeginningOfLiteral(t)) {
		tok.Putback(t);
		Literal *literal(ParseLiteral());
		return new Value(literal);
	} else {
		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
	}
}
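// Literal forms, judging by the token names: "TypeName { ... }" object
// literals, "<x, y>" vectors, ":" followed by a string for paths resolved
// against this file's directory, "[...]" arrays, "(r, g, b[, a])" colors,
// plain numbers, strings and the boolean keywords.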
Literal *Parser::ParseLiteral() {
	Tokenizer::Token t(GetToken());
	if (t.type == Tokenizer::Token::TYPE_NAME) {
		PropertyList *props(ParsePropertyList());
		return new Literal(t.str, props);
	} else if (BeginningOfLiteral(t)) {
		switch (t.type) {
			case Tokenizer::Token::CHEVRON_OPEN:
				tok.Putback(t);
				return ParseVector();
			case Tokenizer::Token::COLON:
				t = GetToken();
				AssertTokenType(t.type, Tokenizer::Token::STRING);
				return new Literal(dirname, t.str);
			case Tokenizer::Token::BRACKET_OPEN:
				tok.Putback(t);
				return ParseArray();
			case Tokenizer::Token::PARENTHESIS_OPEN:
				tok.Putback(t);
				return ParseColor();
			case Tokenizer::Token::NUMBER:
				return new Literal(t.number);
			case Tokenizer::Token::STRING:
				return new Literal(t.str);
			case Tokenizer::Token::KEYWORD_FALSE:
				return new Literal(false);
			case Tokenizer::Token::KEYWORD_TRUE:
				return new Literal(true);
			default:
				throw std::logic_error("literal switch reached impossible default branch oO");
		}
	} else {
		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
	}
}
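// Arrays come in two flavours: if the first element starts with a type name,
// the array is a list of property lists sharing that type; otherwise it is a
// plain list of values.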
Literal *Parser::ParseArray() {
	Tokenizer::Token t(GetToken());
	AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);

	Tokenizer::Token probe(GetToken());

	if (probe.type == Tokenizer::Token::TYPE_NAME) {
		vector<PropertyList *> values;
		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
			PropertyList *value(ParsePropertyList());
			values.push_back(value);

			t = GetToken();
			if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
			}
		}
		return new Literal(probe.str, values);
	} else {
		tok.Putback(probe); // not a type name, so the probe is part of the first value
		vector<Value *> values;
		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
			Value *value(ParseValue());
			values.push_back(value);

			t = GetToken();
			if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
			}
		}
		return new Literal(values);
	}
}
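// Colors are three or four comma-separated numbers in parentheses, the
// fourth being an optional alpha component.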
Literal *Parser::ParseColor() {
	string msg("error parsing color");
	Tokenizer::Token t(GetToken());
	AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_OPEN, msg);

	Tokenizer::Token red(GetToken());
	AssertTokenType(red.type, Tokenizer::Token::NUMBER, "error parsing red component of color");

	t = GetToken();
	AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);

	Tokenizer::Token green(GetToken());
	AssertTokenType(green.type, Tokenizer::Token::NUMBER, "error parsing green component of color");

	t = GetToken();
	AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);

	Tokenizer::Token blue(GetToken());
	AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");

	t = GetToken();
	if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
		return new Literal(red.number, green.number, blue.number);
	} else if (t.type == Tokenizer::Token::COMMA) { // a fourth (alpha) component follows
		Tokenizer::Token alpha(GetToken());
		AssertTokenType(alpha.type, Tokenizer::Token::NUMBER, "error parsing alpha component of color");

		t = GetToken();
		AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_CLOSE, msg);

		return new Literal(red.number, green.number, blue.number, alpha.number);
	} else {
		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or )");
	}
}
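// Vectors are two comma-separated numbers between chevrons.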
Literal *Parser::ParseVector() {
	std::string msg("error parsing vector");
	Tokenizer::Token t(GetToken());
	AssertTokenType(t.type, Tokenizer::Token::CHEVRON_OPEN, msg);

	Tokenizer::Token x(GetToken());
	AssertTokenType(x.type, Tokenizer::Token::NUMBER, "error parsing x component of vector");

	t = GetToken();
	AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);

	Tokenizer::Token y(GetToken());
	AssertTokenType(y.type, Tokenizer::Token::NUMBER, "error parsing y component of vector");

	t = GetToken();
	AssertTokenType(t.type, Tokenizer::Token::CHEVRON_CLOSE, msg);

	return new Literal(x.number, y.number);
}
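// AssertTokenType throws a parser Error when a token does not have the
// expected type; the second overload prefixes a caller-supplied context
// message.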
void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
	if (expected != actual) {
		throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
	}
}
void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected, const string &msg) {
	if (expected != actual) {
		throw Error(file, tok.Line(), msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
	}
}