]> git.localhorst.tv Git - l2e.git/blob - src/loader/Parser.cpp
added error message for missing definitions in PLAY mode
[l2e.git] / src / loader / Parser.cpp
1 /*
2  * Parser.cpp
3  *
4  *  Created on: Aug 26, 2012
5  *      Author: holy
6  */
7
8 #include "Parser.h"
9
10 #include "utility.h"
11
12 #include <auto_ptr.h>
13 #include <fstream>
14
15 using std::auto_ptr;
16 using std::ifstream;
17 using std::string;
18 using std::vector;
19
20 namespace loader {
21
/// Opens the given file and sets up a tokenizer over it; everything parsed
/// is accumulated into the shared @a product.
/// @throws Error (with line 0) if the file cannot be opened for reading.
Parser::Parser(const string &file, ParsedSource &product)
: file(file)
, dirname(Dirname(file))
// NOTE(review): "in" is initialized from the *member* this->file, so the
// class must declare "file" before "in" for this init order to be safe —
// confirm against Parser.h.
, in(this->file.c_str())
, tok(in)
, product(product) {
	if (!in) {
		throw Error(file, 0, "unable to read file");
	}
}
32
33 void Parser::Parse() {
34         while (tok.HasMore()) {
35                 ParseStatement();
36         }
37 }
38
39 void Parser::ParseStatement() {
40         Tokenizer::Token t(GetToken());
41         switch (t.type) {
42                 case Tokenizer::Token::KEYWORD_EXPORT:
43                         ParseExportDirective();
44                         break;
45                 case Tokenizer::Token::KEYWORD_INCLUDE:
46                         ParseIncludeDirective();
47                         break;
48                 case Tokenizer::Token::TYPE_NAME:
49                         tok.Putback(t);
50                         {
51                                 Declaration *decl(ProbeDefinition());
52                                 product.AddDeclaration(decl);
53                         }
54                         break;
55                 default:
56                         throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type));
57         }
58 }
59
60 Tokenizer::Token Parser::GetToken() try {
61         return tok.GetNext();
62 } catch (Tokenizer::LexerError &e) {
63         throw Error(file, e.Line(), e.what());
64 }
65
66 void Parser::ParseExportDirective() {
67         Tokenizer::Token t(GetToken());
68         if (t.type != Tokenizer::Token::IDENTIFIER) {
69                 tok.Putback(t);
70                 Declaration *decl(ProbeDefinition());
71                 product.ExportDeclaration(decl);
72         } else {
73                 product.ExportIdentifier(t.str);
74         }
75 }
76
77 void Parser::ParseIncludeDirective() {
78         Tokenizer::Token t(GetToken());
79         AssertTokenType(t.type, Tokenizer::Token::STRING);
80         Parser sub(CatPath(dirname, t.str), product);
81         sub.Parse();
82 }
83
/// Parses "<TypeName> <identifier>" and then peeks ahead: if a value body
/// follows (property list or primitive literal) this is a full Definition,
/// otherwise a bare Declaration.
/// The returned object is owned by the caller; Definitions are additionally
/// registered with the product via AddDefinition (raw pointer, not ownership
/// transfer — presumably the product only indexes it; confirm in ParsedSource).
Declaration *Parser::ProbeDefinition() {
	string typeName(ParseTypeName());
	string identifier(ParseIdentifier());

	if (tok.HasMore()) {
		// one-token lookahead: fetch, inspect, put back
		Tokenizer::Token t(GetToken());
		tok.Putback(t);
		if (BeginOfPropertyList(t)) {
			// auto_ptr guards keep this exception-safe until ownership is handed off
			auto_ptr<PropertyList> propertyList(ParsePropertyList());
			auto_ptr<Definition> dfn(new Definition(typeName, identifier));
			dfn->SetValue(propertyList.release());
			product.AddDefinition(dfn.get());
			return dfn.release();
		} else if (BeginningOfPrimitiveLiteral(t)) {
			auto_ptr<Literal> literal(ParseLiteral());
			auto_ptr<Definition> dfn(new Definition(typeName, identifier));
			dfn->SetValue(literal.release());
			product.AddDefinition(dfn.get());
			return dfn.release();
		}
	}
	// no value body (or end of input): plain declaration
	return new Declaration(typeName, identifier);
}
107
108 bool Parser::BeginningOfLiteral(const Tokenizer::Token &t) const {
109         switch (t.type) {
110                 case Tokenizer::Token::CHEVRON_OPEN:
111                 case Tokenizer::Token::COLON:
112                 case Tokenizer::Token::BRACKET_OPEN:
113                 case Tokenizer::Token::PARENTHESIS_OPEN:
114                 case Tokenizer::Token::NUMBER:
115                 case Tokenizer::Token::STRING:
116                 case Tokenizer::Token::KEYWORD_FALSE:
117                 case Tokenizer::Token::KEYWORD_TRUE:
118                 case Tokenizer::Token::TYPE_NAME:
119                         return true;
120                 default:
121                         return false;
122         }
123 }
124
125 bool Parser::BeginningOfPrimitiveLiteral(const Tokenizer::Token &t) const {
126         switch (t.type) {
127                 case Tokenizer::Token::CHEVRON_OPEN:
128                 case Tokenizer::Token::COLON:
129                 case Tokenizer::Token::BRACKET_OPEN:
130                 case Tokenizer::Token::PARENTHESIS_OPEN:
131                 case Tokenizer::Token::NUMBER:
132                 case Tokenizer::Token::STRING:
133                 case Tokenizer::Token::KEYWORD_FALSE:
134                 case Tokenizer::Token::KEYWORD_TRUE:
135                         return true;
136                 default:
137                         return false;
138         }
139 }
140
/// True if @a t opens a property list — the ANGLE_BRACKET_OPEN token
/// (presumably '{', given the ", or }" diagnostics elsewhere in this file;
/// confirm against the Tokenizer).
bool Parser::BeginOfPropertyList(const Tokenizer::Token &t) const {
	return t.type == Tokenizer::Token::ANGLE_BRACKET_OPEN;
}
144
145 Definition *Parser::ParseDefinition() {
146         string typeName(ParseTypeName());
147         string identifier(ParseIdentifier());
148
149         Tokenizer::Token t(GetToken());
150         tok.Putback(t);
151         if (BeginOfPropertyList(t)) {
152                 PropertyList *propertyList(ParsePropertyList());
153                 Definition *dfn(new Definition(typeName, identifier));
154                 dfn->SetValue(propertyList);
155                 return dfn;
156         } else if (BeginningOfLiteral(t)) {
157                 Literal *literal(ParseLiteral());
158                 Definition *dfn(new Definition(typeName, identifier));
159                 dfn->SetValue(literal);
160                 return dfn;
161         } else {
162                 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected property-list or literal");
163         }
164 }
165
166 string Parser::ParseIdentifier() {
167         Tokenizer::Token t(GetToken());
168         AssertTokenType(t.type, Tokenizer::Token::IDENTIFIER);
169         return t.str;
170 }
171
172 string Parser::ParseTypeName() {
173         Tokenizer::Token t(GetToken());
174         AssertTokenType(t.type, Tokenizer::Token::TYPE_NAME);
175         return t.str;
176 }
177
/// Parses "{ name: value, ... }" into a new PropertyList owned by the caller.
/// @throws Error on any malformed entry or separator.
/// NOTE(review): an empty list "{}" does not appear to be supported — the
/// loop body always expects a "name: value" entry first; confirm against the
/// data files' grammar.
PropertyList *Parser::ParsePropertyList() {
	Tokenizer::Token t(GetToken());
	AssertTokenType(t.type, Tokenizer::Token::ANGLE_BRACKET_OPEN);

	// auto_ptr guards the list against leaks if parsing throws mid-way
	auto_ptr<PropertyList> props(new PropertyList);

	// "t" is reused: it holds the most recent separator/closer and drives the loop
	while (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE) {
		Tokenizer::Token name(GetToken());
		AssertTokenType(name.type, Tokenizer::Token::IDENTIFIER);

		t = GetToken();
		AssertTokenType(t.type, Tokenizer::Token::COLON);

		// props takes ownership of the parsed value
		Value *value(ParseValue());
		props->SetProperty(name.str, value);

		// after each entry: either the closing brace or a comma
		t = GetToken();
		if (t.type != Tokenizer::Token::ANGLE_BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
			throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or }");
		}
	}

	return props.release();
}
202
203 Value *Parser::ParseValue() {
204         Tokenizer::Token t(GetToken());
205         if (t.type == Tokenizer::Token::IDENTIFIER) {
206                 return new Value(t.str);
207         } else if (BeginningOfLiteral(t)) {
208                 tok.Putback(t);
209                 Literal *literal(ParseLiteral());
210                 return new Value(literal);
211         } else {
212                 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected literal or identifier");
213         }
214 }
215
216 Literal *Parser::ParseLiteral() {
217         Tokenizer::Token t(GetToken());
218         if (t.type == Tokenizer::Token::TYPE_NAME) {
219                 PropertyList *props(ParsePropertyList());
220                 return new Literal(t.str, props);
221         } else if (BeginningOfLiteral(t)) {
222                 switch (t.type) {
223                         case Tokenizer::Token::CHEVRON_OPEN:
224                                 tok.Putback(t);
225                                 return ParseVector();
226                         case Tokenizer::Token::COLON:
227                                 t = GetToken();
228                                 AssertTokenType(t.type, Tokenizer::Token::STRING);
229                                 return new Literal(dirname, t.str);
230                         case Tokenizer::Token::BRACKET_OPEN:
231                                 tok.Putback(t);
232                                 return ParseArray();
233                         case Tokenizer::Token::PARENTHESIS_OPEN:
234                                 tok.Putback(t);
235                                 return ParseColor();
236                         case Tokenizer::Token::NUMBER:
237                                 return new Literal(t.number);
238                         case Tokenizer::Token::STRING:
239                                 return new Literal(t.str);
240                         case Tokenizer::Token::KEYWORD_FALSE:
241                                 return new Literal(false);
242                         case Tokenizer::Token::KEYWORD_TRUE:
243                                 return new Literal(true);
244                         default:
245                                 throw std::logic_error("literal switch reached impossible default branch oO");
246                 }
247         } else {
248                 throw new Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected type-name or primitive");
249         }
250 }
251
/// Parses an array literal "[...]" and returns a caller-owned Literal.
/// A probe token decides the element kind: a leading type name means an
/// array of typed property lists, anything else an array of plain values.
/// NOTE(review): the probed TYPE_NAME is consumed and only its text is kept,
/// so subsequent elements are bare property lists — all elements share the
/// first element's type; confirm this matches the intended grammar.
/// NOTE(review): the element vectors hold raw pointers and would leak if a
/// nested parse throws; left as-is since Literal's ownership contract for
/// the vector contents is not visible here.
Literal *Parser::ParseArray() {
	Tokenizer::Token t(GetToken());
	AssertTokenType(t.type, Tokenizer::Token::BRACKET_OPEN);

	Tokenizer::Token probe(GetToken());

	if (probe.type == Tokenizer::Token::TYPE_NAME) {
		vector<PropertyList *> values;
		// "t" (currently BRACKET_OPEN) is reused as the separator/closer cursor
		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
			PropertyList *value(ParsePropertyList());
			values.push_back(value);

			t = GetToken();
			if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
			}
		}
		return new Literal(probe.str, values);
	} else {
		// not a typed array: the probe belongs to the first value
		tok.Putback(probe);

		vector<Value *> values;
		while (t.type != Tokenizer::Token::BRACKET_CLOSE) {
			Value *value(ParseValue());
			values.push_back(value);

			t = GetToken();
			if (t.type != Tokenizer::Token::BRACKET_CLOSE && t.type != Tokenizer::Token::COMMA) {
				throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
			}
		}
		return new Literal(values);
	}
}
286
287 Literal *Parser::ParseColor() {
288         string msg("error parsing color");
289         Tokenizer::Token t(GetToken());
290         AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_OPEN, msg);
291
292         Tokenizer::Token red(GetToken());
293         AssertTokenType(red.type, Tokenizer::Token::NUMBER, "error parsing red component of color");
294
295         t = GetToken();
296         AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
297
298         Tokenizer::Token green(GetToken());
299         AssertTokenType(green.type, Tokenizer::Token::NUMBER, "error parsing green component of color");
300
301         t = GetToken();
302         AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
303
304         Tokenizer::Token blue(GetToken());
305         AssertTokenType(blue.type, Tokenizer::Token::NUMBER, "error parsing blue component of color");
306
307         t = GetToken();
308         if (t.type == Tokenizer::Token::PARENTHESIS_CLOSE) {
309                 return new Literal(red.number, green.number, blue.number);
310         } else if (t.type != Tokenizer::Token::COMMA) {
311                 Tokenizer::Token alpha(GetToken());
312                 AssertTokenType(alpha.type, Tokenizer::Token::NUMBER, "error parsing alpha component of color");
313
314                 t = GetToken();
315                 AssertTokenType(t.type, Tokenizer::Token::PARENTHESIS_CLOSE, msg);
316
317                 return new Literal(red.number, green.number, blue.number, alpha.number);
318         } else {
319                 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(t.type) + ", expected , or ]");
320         }
321 }
322
323 Literal *Parser::ParseVector() {
324         std::string msg("error parsing vector");
325         Tokenizer::Token t(GetToken());
326         AssertTokenType(t.type, Tokenizer::Token::CHEVRON_OPEN, msg);
327
328         Tokenizer::Token x(GetToken());
329         AssertTokenType(x.type, Tokenizer::Token::NUMBER, "error parsing x component of vector");
330
331         t = GetToken();
332         AssertTokenType(t.type, Tokenizer::Token::COMMA, msg);
333
334         Tokenizer::Token y(GetToken());
335         AssertTokenType(y.type, Tokenizer::Token::NUMBER, "error parsing y component of vector");
336
337         t = GetToken();
338         AssertTokenType(t.type, Tokenizer::Token::CHEVRON_CLOSE, msg);
339
340         return new Literal(x.number, y.number);
341 }
342
343 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected) {
344         if (expected != actual) {
345                 throw Error(file, tok.Line(), string("unexpected token ") + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
346         }
347 }
348
349 void Parser::AssertTokenType(Tokenizer::Token::Type actual, Tokenizer::Token::Type expected, const string &msg) {
350         if (expected != actual) {
351                 throw Error(file, tok.Line(), msg + ": unexpected token " + TokenTypeToString(actual) + ", expected " + TokenTypeToString(expected));
352         }
353 }
354
355 }