/*
 * Created on: Aug 26, 2012
 */
22 Parser::Parser(const string &file, ParsedSource &product)
24 , dirname(Dirname(file))
25 , in(this->file.c_str())
29 throw Error(file, 0, "unable to read file");
33 void Parser::Parse() {
34 while (tok.HasMore()) {
39 void Parser::ParseStatement() {
40 Tokenizer::Token t(GetToken());
42 case Tokenizer::Token::KEYWORD_EXPORT:
43 ParseExportDirective();
45 case Tokenizer::Token::KEYWORD_INCLUDE:
46 ParseIncludeDirective();
48 case Tokenizer::Token::TYPE_NAME:
51 Declaration *decl(ProbeDefinition());
52 product.AddDeclaration(decl);
56 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type));
60 Tokenizer::Token Parser::GetToken() try {
62 } catch (Tokenizer::LexerError &e) {
63 throw Error(file, e.Line(), e.what());
66 void Parser::ParseExportDirective() {
67 Tokenizer::Token t(GetToken());
68 if (t.type != Tokenizer::Token::IDENTIFIER) {
70 Declaration *decl(ProbeDefinition());
71 product.ExportDeclaration(decl);
73 product.ExportIdentifier(t.str);
77 void Parser::ParseIncludeDirective() {
78 Tokenizer::Token t(GetToken());
79 AssertTokenType(t.type, Tokenizer::Token::STRING);
80 Parser sub(CatPath(dirname, t.str), product);
84 Declaration *Parser::ProbeDefinition() {
85 string typeName(ParseTypeName());
86 string identifier(ParseIdentifier());
89 Tokenizer::Token t(GetToken());
91 if (BeginOfPropertyList(t)) {
92 auto_ptr<PropertyList> propertyList(ParsePropertyList());
93 auto_ptr<Definition> dfn(new Definition(typeName, identifier));
94 dfn->SetValue(propertyList.release());
95 product.AddDefinition(dfn.get());
97 } else if (BeginningOfPrimitiveLiteral(t)) {
98 auto_ptr<Literal> literal(ParseLiteral());
99 auto_ptr<Definition> dfn(new Definition(typeName, identifier));
100 dfn->SetValue(literal.release());
101 product.AddDefinition(dfn.get());
102 return dfn.release();
105 return new Declaration(typeName, identifier);
108 bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
110 case Tokenizer::Token::CHEVRON_OPEN:
111 case Tokenizer::Token::COLON:
112 case Tokenizer::Token::BRACKET_OPEN:
113 case Tokenizer::Token::PARENTHESIS_OPEN:
114 case Tokenizer::Token::NUMBER:
115 case Tokenizer::Token::STRING:
116 case Tokenizer::Token::KEYWORD_FALSE:
117 case Tokenizer::Token::KEYWORD_TRUE:
118 case Tokenizer::Token::TYPE_NAME:
125 bool Parser::BeginningOfPrimitiveLiteral(const Tokenizer::Token &t) const {
127 case Tokenizer::Token::CHEVRON_OPEN:
128 case Tokenizer::Token::COLON:
129 case Tokenizer::Token::BRACKET_OPEN:
130 case Tokenizer::Token::PARENTHESIS_OPEN:
131 case Tokenizer::Token::NUMBER:
132 case Tokenizer::Token::STRING:
133 case Tokenizer::Token::KEYWORD_FALSE:
134 case Tokenizer::Token::KEYWORD_TRUE:
141 bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
142 return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
145 Definition *Parser::ParseDefinition() {
146 string typeName(ParseTypeName());
147 string identifier(ParseIdentifier());
149 Tokenizer::Token t(GetToken());
151 if (BeginOfPropertyList(t)) {
152 PropertyList *propertyList(ParsePropertyList());
153 Definition *dfn(new Definition(typeName, identifier));
154 dfn->SetValue(propertyList);
156 } else if (BeginningOfLiteral(t)) {
157 Literal *literal(ParseLiteral());
158 Definition *dfn(new Definition(typeName, identifier));
159 dfn->SetValue(literal);
162 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
166 string Parser::ParseIdentifier() {
167 Tokenizer::Token t(GetToken());
168 AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER);
172 string Parser::ParseTypeName() {
173 Tokenizer::Token t(GetToken());
174 AssertTokenType(t.type, Tokenizer::Token::TYPE_NAME);
178 PropertyList *Parser::ParsePropertyList() {
179 Tokenizer::Token t(GetToken());
180 AssertTokenType(t.type, Tokenizer::Token::ANGLE_BRACKET_OPEN);
182 auto_ptr<PropertyList> props(new PropertyList);
184 while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
185 Tokenizer::Token name(GetToken());
186 AssertTokenType(name.type, Tokenizer::Token::IDENTIFIER);
189 AssertTokenType(t.type, Tokenizer::Token::COLON);
191 Value *value(ParseValue());
192 props->SetProperty(name.str, value);
195 if (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
196 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
200 return props.release();
203 Value *Parser::ParseValue() {
204 Tokenizer::Token t(GetToken());
205 if (t.type == Tokenizer::Token::IDENTIFIER) {
206 return new Value(t.str);
207 } else if (BeginningOfLiteral(t)) {
209 Literal *literal(ParseLiteral());
210 return new Value(literal);
212 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
216 Literal *Parser::ParseLiteral() {
217 Tokenizer::Token t(GetToken());
218 if (t.type == Tokenizer::Token::TYPE_NAME) {
219 PropertyList *props(ParsePropertyList());
220 return new Literal(t.str, props);
221 } else if (BeginningOfLiteral(t)) {
223 case Tokenizer::Token::CHEVRON_OPEN:
225 return ParseVector();
226 case Tokenizer::Token::COLON:
228 AssertTokenType(t.type, Tokenizer::Token::STRING);
229 return new Literal(dirname, t.str);
230 case Tokenizer::Token::BRACKET_OPEN:
233 case Tokenizer::Token::PARENTHESIS_OPEN:
236 case Tokenizer::Token::NUMBER:
237 return new Literal(t.number);
238 case Tokenizer::Token::STRING:
239 return new Literal(t.str);
240 case Tokenizer::Token::KEYWORD_FALSE:
241 return new Literal(false);
242 case Tokenizer::Token::KEYWORD_TRUE:
243 return new Literal(true);
245 throw std::logic_error("literal switch reached impossible default branch oO");
248 throw new Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
252 Literal *Parser::ParseArray() {
253 Tokenizer::Token t(GetToken());
254 AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);
256 Tokenizer::Token probe(GetToken());
259 if (probe.type == Tokenizer::Token::ANGLE_BRACKET_OPEN) {
260 vector<PropertyList *> values;
261 while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
262 PropertyList *value(ParsePropertyList());
263 values.push_back(value);
266 if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
267 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
270 return new Literal(values);
272 vector<Value *> values;
273 while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
274 Value *value(ParseValue());
275 values.push_back(value);
278 if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
279 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
282 return new Literal(values);
286 Literal *Parser::ParseColor() {
287 string msg("error parsing color");
288 Tokenizer::Token t(GetToken());
289 AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_OPEN, msg);
291 Tokenizer::Token red(GetToken());
292 AssertTokenType(red.type, Tokenizer::Token::NUMBER, "error parsing red component of color");
295 AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
297 Tokenizer::Token green(GetToken());
298 AssertTokenType(green.type, Tokenizer::Token::NUMBER, "error parsing green component of color");
301 AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
303 Tokenizer::Token blue(GetToken());
304 AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");
307 if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
308 return new Literal(red.number, green.number, blue.number);
309 } else if (t.type != Tokenizer::Token::COMMA) {
310 Tokenizer::Token alpha(GetToken());
311 AssertTokenType(alpha.type, Tokenizer::Token::NUMBER, "error parsing alpha component of color");
314 AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_CLOSE, msg);
316 return new Literal(red.number, green.number, blue.number, alpha.number);
318 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
322 Literal *Parser::ParseVector() {
323 std::string msg("error parsing vector");
324 Tokenizer::Token t(GetToken());
325 AssertTokenType(t.type, Tokenizer::Token::CHEVRON_OPEN, msg);
327 Tokenizer::Token x(GetToken());
328 AssertTokenType(x.type, Tokenizer::Token::NUMBER, "error parsing x component of vector");
331 AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
333 Tokenizer::Token y(GetToken());
334 AssertTokenType(y.type, Tokenizer::Token::NUMBER, "error parsing y component of vector");
337 AssertTokenType(t.type, Tokenizer::Token::CHEVRON_CLOSE, msg);
339 return new Literal(x.number, y.number);
342 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
343 if (expected != actual) {
344 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
348 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected, const string &msg) {
349 if (expected != actual) {
350 throw Error(file, tok.Line(), msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));