/*
 * Created on: Aug 26, 2012
 */
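
// Includes and using-declarations reconstructed; the project header name below
// is an assumption. It only needs to declare Parser, ParsedSource, Tokenizer,
// and the parsed-data types (Declaration, Definition, Literal, PropertyList,
// Value) plus Error and TokenTypeToString as used in this file.
#include "Parser.h"

#include <memory>    // std::auto_ptr, as used throughout the original
#include <stdexcept> // std::logic_error
#include <string>
#include <vector>

using std::auto_ptr;
using std::string;
using std::vector;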
Parser::Parser(const char *file, ParsedSource &product)
// NOTE: the initializer list is reconstructed; the input stream member ("in")
// and the Tokenizer-from-stream construction are assumptions.
: file(file)
, in(file)
, tok(in)
, product(product) {
    if (!in) {
        throw Error(file, 0, "unable to read file");
    }
}

void Parser::Parse() {
    while (tok.HasMore()) {
        ParseStatement();
    }
}

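// A statement is either an export directive, an include directive, or a
// declaration/definition introduced by a type name; anything else is an error.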
void Parser::ParseStatement() {
    Tokenizer::Token t(GetToken());
    switch (t.type) {
        case Tokenizer::Token::KEYWORD_EXPORT:
            ParseExportDirective();
            break;
        case Tokenizer::Token::KEYWORD_INCLUDE:
            ParseIncludeDirective();
            break;
        case Tokenizer::Token::TYPE_NAME:
            {
                // hand the type name back so ProbeDefinition() can re-read it
                // (Putback as the tokenizer's un-read operation is an assumption)
                tok.Putback(t);
                Declaration *decl(ProbeDefinition());
                product.AddDeclaration(decl);
            }
            break;
        default:
            throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type));
    }
}

Tokenizer::Token Parser::GetToken() try {
    return tok.GetNext(); // tokenizer accessor name assumed
} catch (Tokenizer::LexerError &e) {
    throw Error(file, e.Line(), e.what());
}

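// "export" is followed either by a plain identifier, which is exported by
// name, or by a full definition, which is parsed and exported as a whole.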
void Parser::ParseExportDirective() {
    Tokenizer::Token t(GetToken());
    if (t.type != Tokenizer::Token::IDENTIFIER) {
        tok.Putback(t);
        Declaration *decl(ProbeDefinition());
        product.ExportDeclaration(decl);
    } else {
        product.ExportIdentifier(t.str);
    }
}

void Parser::ParseIncludeDirective() {
    Tokenizer::Token t(GetToken());
    AssertTokenType(t.type, Tokenizer::Token::STRING);
    Parser sub(t.str.c_str(), product); // TODO: resolve path name
    sub.Parse();
}

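// Reads a type name and an identifier, then peeks at the next token: if a
// property list or literal follows, the result is a full Definition that is
// also registered with the product; otherwise only a bare Declaration is
// returned.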
Declaration *Parser::ProbeDefinition() {
    string typeName(ParseTypeName());
    string identifier(ParseIdentifier());

    Tokenizer::Token t(GetToken());
    tok.Putback(t);
    if (BeginOfPropertyList(t)) {
        auto_ptr<PropertyList> propertyList(ParsePropertyList());
        auto_ptr<Definition> dfn(new Definition(typeName, identifier));
        dfn->SetValue(propertyList.release());
        product.AddDefinition(dfn.get());
        return dfn.release();
    } else if (BeginningOfLiteral(t)) {
        auto_ptr<Literal> literal(ParseLiteral());
        auto_ptr<Definition> dfn(new Definition(typeName, identifier));
        dfn->SetValue(literal.release());
        product.AddDefinition(dfn.get());
        return dfn.release();
    }
    return new Declaration(typeName, identifier);
}

bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
    switch (t.type) {
        case Tokenizer::Token::CHEVRON_OPEN:
        case Tokenizer::Token::BRACKET_OPEN:
        case Tokenizer::Token::PARENTHESIS_OPEN:
        case Tokenizer::Token::NUMBER:
        case Tokenizer::Token::STRING:
        case Tokenizer::Token::KEYWORD_FALSE:
        case Tokenizer::Token::KEYWORD_TRUE:
        case Tokenizer::Token::TYPE_NAME:
            return true;
        default:
            return false;
    }
}

bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
    return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
}

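// Like ProbeDefinition(), but the value part is mandatory and the resulting
// Definition is not registered with the product.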
Definition *Parser::ParseDefinition() {
    string typeName(ParseTypeName());
    string identifier(ParseIdentifier());

    Tokenizer::Token t(GetToken());
    tok.Putback(t);
    if (BeginOfPropertyList(t)) {
        PropertyList *propertyList(ParsePropertyList());
        Definition *dfn(new Definition(typeName, identifier));
        dfn->SetValue(propertyList);
        return dfn;
    } else if (BeginningOfLiteral(t)) {
        Literal *literal(ParseLiteral());
        Definition *dfn(new Definition(typeName, identifier));
        dfn->SetValue(literal);
        return dfn;
    } else {
        throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
    }
}

string Parser::ParseIdentifier() {
    Tokenizer::Token t(GetToken());
    AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER);
    return t.str;
}

string Parser::ParseTypeName() {
    Tokenizer::Token t(GetToken());
    AssertTokenType(t.type, Tokenizer::Token::TYPE_NAME);
    return t.str;
}

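// Parses a property list: a brace-delimited, comma-separated sequence of
// name: value pairs (the tokenizer calls the braces ANGLE_BRACKET tokens;
// the '}' spelling is taken from the error message below).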
PropertyList *Parser::ParsePropertyList() {
    Tokenizer::Token t(GetToken());
    AssertTokenType(t.type, Tokenizer::Token::ANGLE_BRACKET_OPEN);

    auto_ptr<PropertyList> props(new PropertyList);

    while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
        Tokenizer::Token name(GetToken());
        AssertTokenType(name.type, Tokenizer::Token::IDENTIFIER);

        t = GetToken();
        AssertTokenType(t.type, Tokenizer::Token::COLON);

        Value *value(ParseValue());
        props->SetProperty(name.str, value);

        t = GetToken();
        if (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
            throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
        }
    }

    return props.release();
}

Value *Parser::ParseValue() {
    Tokenizer::Token t(GetToken());
    if (t.type == Tokenizer::Token::IDENTIFIER) {
        return new Value(t.str);
    } else if (BeginningOfLiteral(t)) {
        tok.Putback(t);
        Literal *literal(ParseLiteral());
        return new Value(literal);
    } else {
        throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
    }
}

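// A literal is either a type name followed by a property list, or one of the
// primitive forms: vector <x, y>, array [...], color (r, g, b[, a]), number,
// string, or boolean keyword.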
Literal *Parser::ParseLiteral() {
    Tokenizer::Token t(GetToken());
    if (t.type == Tokenizer::Token::TYPE_NAME) {
        PropertyList *props(ParsePropertyList());
        return new Literal(t.str, props);
    } else if (BeginningOfLiteral(t)) {
        switch (t.type) {
            case Tokenizer::Token::CHEVRON_OPEN:
                tok.Putback(t);
                return ParseVector();
            case Tokenizer::Token::BRACKET_OPEN:
                tok.Putback(t);
                return ParseArray();
            case Tokenizer::Token::PARENTHESIS_OPEN:
                tok.Putback(t);
                return ParseColor();
            case Tokenizer::Token::NUMBER:
                return new Literal(t.number);
            case Tokenizer::Token::STRING:
                return new Literal(t.str);
            case Tokenizer::Token::KEYWORD_FALSE:
                return new Literal(false);
            case Tokenizer::Token::KEYWORD_TRUE:
                return new Literal(true);
            default:
                throw std::logic_error("literal switch reached impossible default branch");
        }
    } else {
        throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
    }
}

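// Parses an array literal [elem, elem, ...]. The first token after '[' is
// probed to decide whether the elements are property lists or plain values;
// the two element kinds are not mixed within one array.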
Literal *Parser::ParseArray() {
    Tokenizer::Token t(GetToken());
    AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);

    Tokenizer::Token probe(GetToken());
    tok.Putback(probe);

    if (probe.type == Tokenizer::Token::ANGLE_BRACKET_OPEN) {
        vector<PropertyList *> values;
        while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
            PropertyList *value(ParsePropertyList());
            values.push_back(value);

            t = GetToken();
            if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
                throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
            }
        }
        return new Literal(values);
    } else {
        vector<Value *> values;
        while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
            Value *value(ParseValue());
            values.push_back(value);

            t = GetToken();
            if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
                throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
            }
        }
        return new Literal(values);
    }
}

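// Parses a color literal (r, g, b) or (r, g, b, a) with numeric components.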
Literal *Parser::ParseColor() {
    string msg("error parsing color");
    Tokenizer::Token t(GetToken());
    AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_OPEN, msg);

    Tokenizer::Token red(GetToken());
    AssertTokenType(red.type, Tokenizer::Token::NUMBER, "error parsing red component of color");

    t = GetToken();
    AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);

    Tokenizer::Token green(GetToken());
    AssertTokenType(green.type, Tokenizer::Token::NUMBER, "error parsing green component of color");

    t = GetToken();
    AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);

    Tokenizer::Token blue(GetToken());
    AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");

    t = GetToken();
    if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
        // an RGB color closes right after the blue component
        return new Literal(red.number, green.number, blue.number);
    } else if (t.type == Tokenizer::Token::COMMA) {
        // a comma means a fourth, alpha component follows
        Tokenizer::Token alpha(GetToken());
        AssertTokenType(alpha.type, Tokenizer::Token::NUMBER, "error parsing alpha component of color");

        t = GetToken();
        AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_CLOSE, msg);

        return new Literal(red.number, green.number, blue.number, alpha.number);
    } else {
        throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or )");
    }
}

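// Parses a two-component vector literal <x, y> with numeric components.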
Literal *Parser::ParseVector() {
    string msg("error parsing vector");
    Tokenizer::Token t(GetToken());
    AssertTokenType(t.type, Tokenizer::Token::CHEVRON_OPEN, msg);

    Tokenizer::Token x(GetToken());
    AssertTokenType(x.type, Tokenizer::Token::NUMBER, "error parsing x component of vector");

    t = GetToken();
    AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);

    Tokenizer::Token y(GetToken());
    AssertTokenType(y.type, Tokenizer::Token::NUMBER, "error parsing y component of vector");

    t = GetToken();
    AssertTokenType(t.type, Tokenizer::Token::CHEVRON_CLOSE, msg);

    return new Literal(x.number, y.number);
}

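// Both overloads throw an Error pointing at the current tokenizer line when
// the actual token type differs from the expected one.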
void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
    if (expected != actual) {
        throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
    }
}

void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected, const string &msg) {
    if (expected != actual) {
        throw Error(file, tok.Line(), msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
    }
}

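/* Usage sketch (illustrative; the file name below is made up):
 *
 *   ParsedSource source;
 *   Parser parser("definitions.txt", source);
 *   parser.Parse();
 *
 * After Parse() returns, the declarations, definitions, and exports collected
 * while parsing are available through the ParsedSource that was passed in.
 */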