/*
 * Parser.cpp
 *
 *  Created on: Aug 26, 2012
 *      Author: holy
 */

#include "Parser.h"

#include <fstream>
#include <memory>

using std::auto_ptr;
using std::ifstream;
using std::string;
using std::vector;


namespace loader {
void Parser::Parse() {
        while (tok.HasMore()) {
                ParseStatement();
        }
}

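// Read one statement: an export directive, an include directive, or a
// declaration/definition introduced by a type name.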
void Parser::ParseStatement() {
        Tokenizer::Token t(tok.GetNext());
        switch (t.type) {
                case Tokenizer::Token::KEYWORD_EXPORT:
                        ParseExportDirective();
                        break;
                case Tokenizer::Token::KEYWORD_INCLUDE:
                        ParseIncludeDirective();
                        break;
                case Tokenizer::Token::TYPE_NAME:
                        tok.Putback(t);
                        {
                                Declaration *decl(ProbeDefinition());
                                product.AddDeclaration(decl);
                        }
                        break;
                default:
                        throw ParseError(string("unexpected token ") + TokenTypeToString(t.type));
        }
}

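// An export directive is followed either by a bare identifier or by a
// declaration/definition that is exported directly.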
void Parser::ParseExportDirective() {
        Tokenizer::Token t(tok.GetNext());
        if (t.type != Tokenizer::Token::IDENTIFIER) {
                tok.Putback(t);
                Declaration *decl(ProbeDefinition());
                product.ExportDeclaration(decl);
        } else {
                product.ExportIdentifier(t.str);
        }
}

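// An include directive opens another file and parses it into the same
// product; the path is currently taken verbatim from the string token.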
void Parser::ParseIncludeDirective() {
        Tokenizer::Token t(tok.GetNext());
        AssertTokenType(t.type, Tokenizer::Token::STRING);
        ifstream file(t.str.c_str()); // TODO: resolve path name
        Parser sub(file, product);
        sub.Parse();
}

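// Parse a type name and identifier, then decide from the next token whether
// this is a full definition (a property list or literal follows) or a mere
// declaration.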
Declaration *Parser::ProbeDefinition() {
        string typeName(ParseTypeName());
        string identifier(ParseIdentifier());

        Tokenizer::Token t(tok.GetNext());
        tok.Putback(t);
        if (BeginOfPropertyList(t)) {
                PropertyList *propertyList(ParsePropertyList());
                Definition *dfn(new Definition(typeName, identifier));
                dfn->SetValue(propertyList);
                return dfn;
        } else if (BeginningOfLiteral(t)) {
                Literal *literal(ParseLiteral());
                Definition *dfn(new Definition(typeName, identifier));
                dfn->SetValue(literal);
                return dfn;
        } else {
                return new Declaration(typeName, identifier);
        }
}

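// Check whether a token can start a literal: a vector, array, color, number,
// string, boolean keyword, or a typed property list.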
bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
        switch (t.type) {
                case Tokenizer::Token::CHEVRON_OPEN:
                case Tokenizer::Token::BRACKET_OPEN:
                case Tokenizer::Token::PARENTHESIS_OPEN:
                case Tokenizer::Token::NUMBER:
                case Tokenizer::Token::STRING:
                case Tokenizer::Token::KEYWORD_FALSE:
                case Tokenizer::Token::KEYWORD_TRUE:
                case Tokenizer::Token::TYPE_NAME:
                        return true;
                default:
                        return false;
        }
}

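// Check whether a token opens a property list.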
bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
        return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
}

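// Like ProbeDefinition(), but a value is mandatory: a definition without a
// property list or literal is an error here.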
Definition *Parser::ParseDefinition() {
        string typeName(ParseTypeName());
        string identifier(ParseIdentifier());

        Tokenizer::Token t(tok.GetNext());
        tok.Putback(t);
        if (BeginOfPropertyList(t)) {
                PropertyList *propertyList(ParsePropertyList());
                Definition *dfn(new Definition(typeName, identifier));
                dfn->SetValue(propertyList);
                return dfn;
        } else if (BeginningOfLiteral(t)) {
                Literal *literal(ParseLiteral());
                Definition *dfn(new Definition(typeName, identifier));
                dfn->SetValue(literal);
                return dfn;
        } else {
                throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
        }
}

string Parser::ParseIdentifier() {
        Tokenizer::Token t(tok.GetNext());
        AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER);
        return t.str;
}

string Parser::ParseTypeName() {
        Tokenizer::Token t(tok.GetNext());
        AssertTokenType(t.type, Tokenizer::Token::TYPE_NAME);
        return t.str;
}

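// Parse a comma-separated list of name: value pairs into a new PropertyList;
// the auto_ptr keeps the list from leaking if parsing a value throws.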
PropertyList *Parser::ParsePropertyList() {
        Tokenizer::Token t(tok.GetNext());
        AssertTokenType(t.type, Tokenizer::Token::ANGLE_BRACKET_OPEN);

        auto_ptr<PropertyList> props(new PropertyList);

        while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
                Tokenizer::Token name(tok.GetNext());
                AssertTokenType(name.type, Tokenizer::Token::IDENTIFIER);

                t = tok.GetNext();
                AssertTokenType(t.type, Tokenizer::Token::COLON);

                Value *value(ParseValue());
                props->SetProperty(name.str, value);

                t = tok.GetNext();
                if (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
                        throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
                }
        }

        return props.release();
}

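// A value is either an identifier referring to another declaration or a
// literal parsed in place.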
Value *Parser::ParseValue() {
        Tokenizer::Token t(tok.GetNext());
        if (t.type == Tokenizer::Token::IDENTIFIER) {
                return new Value(t.str);
        } else if (BeginningOfLiteral(t)) {
                tok.Putback(t);
                Literal *literal(ParseLiteral());
                return new Value(literal);
        } else {
                throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
        }
}

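// Parse a literal: either a type name followed by a property list, or one of
// the primitive forms (vector, array, color, number, string, boolean).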
Literal *Parser::ParseLiteral() {
        Tokenizer::Token t(tok.GetNext());
        if (t.type == Tokenizer::Token::TYPE_NAME) {
                PropertyList *props(ParsePropertyList());
                return new Literal(t.str, props);
        } else if (BeginningOfLiteral(t)) {
                switch (t.type) {
                        case Tokenizer::Token::CHEVRON_OPEN:
                                tok.Putback(t);
                                return ParseVector();
                        case Tokenizer::Token::BRACKET_OPEN:
                                tok.Putback(t);
                                return ParseArray();
                        case Tokenizer::Token::PARENTHESIS_OPEN:
                                tok.Putback(t);
                                return ParseColor();
                        case Tokenizer::Token::NUMBER:
                                return new Literal(t.number);
                        case Tokenizer::Token::STRING:
                                return new Literal(t.str);
                        case Tokenizer::Token::KEYWORD_FALSE:
                                return new Literal(false);
                        case Tokenizer::Token::KEYWORD_TRUE:
                                return new Literal(true);
                        default:
                                throw std::logic_error("literal switch reached impossible default branch oO");
                }
        } else {
                throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
        }
}

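// Parse a bracket-enclosed, comma-separated list of values.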
Literal *Parser::ParseArray() {
        Tokenizer::Token t(tok.GetNext());
        AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);

        vector<Value *> values;

        while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
                Value *value(ParseValue());
                values.push_back(value);

                t = tok.GetNext();
                if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
                        throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
                }
        }

        return new Literal(values);
}

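// Parse a parenthesized color literal with three or four numeric components:
// (red, green, blue) or (red, green, blue, alpha).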
Literal *Parser::ParseColor() {
        string msg("error parsing color");
        Tokenizer::Token t(tok.GetNext());
        AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_OPEN, msg);

        Tokenizer::Token red(tok.GetNext());
        AssertTokenType(red.type, Tokenizer::Token::NUMBER, "error parsing red component of color");

        t = tok.GetNext();
        AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);

        Tokenizer::Token green(tok.GetNext());
        AssertTokenType(green.type, Tokenizer::Token::NUMBER, "error parsing green component of color");

        t = tok.GetNext();
        AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);

        Tokenizer::Token blue(tok.GetNext());
        AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");

        t = tok.GetNext();
        if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
                return new Literal(red.number, green.number, blue.number);
        } else if (t.type == Tokenizer::Token::COMMA) {
                Tokenizer::Token alpha(tok.GetNext());
                AssertTokenType(alpha.type, Tokenizer::Token::NUMBER, "error parsing alpha component of color");

                t = tok.GetNext();
                AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_CLOSE, msg);

                return new Literal(red.number, green.number, blue.number, alpha.number);
        } else {
                throw ParseError(string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or )");
        }
}

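// Parse a two-component vector literal of the form <x, y>.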
Literal *Parser::ParseVector() {
        string msg("error parsing vector");
        Tokenizer::Token t(tok.GetNext());
        AssertTokenType(t.type, Tokenizer::Token::CHEVRON_OPEN, msg);

        Tokenizer::Token x(tok.GetNext());
        AssertTokenType(x.type, Tokenizer::Token::NUMBER, "error parsing x component of vector");

        t = tok.GetNext();
        AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);

        Tokenizer::Token y(tok.GetNext());
        AssertTokenType(y.type, Tokenizer::Token::NUMBER, "error parsing y component of vector");

        t = tok.GetNext();
        AssertTokenType(t.type, Tokenizer::Token::CHEVRON_CLOSE, msg);

        return new Literal(x.number, y.number);
}

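// Throw a ParseError if the actual token type does not match the expected
// one, optionally prefixing the message with additional context.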
void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
        if (expected != actual) {
                throw ParseError(string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
        }
}

void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected, const string &msg) {
        if (expected != actual) {
                throw ParseError(msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
        }
}

}