path: root/src/tests/bnfcompile.cpp
Diffstat:
-rw-r--r--  src/tests/bnfcompile.cpp  750
1 file changed, 375 insertions, 375 deletions
diff --git a/src/tests/bnfcompile.cpp b/src/tests/bnfcompile.cpp
index af7a0eb..40d410d 100644
--- a/src/tests/bnfcompile.cpp
+++ b/src/tests/bnfcompile.cpp
@@ -15,408 +15,408 @@ using namespace Bu;

enum TokenType
{
    tokIdentifier,
    tokColon,
    tokOr,
    tokSemiColon,
    tokTokens,
    tokEquals,
    tokOpenCurly,
    tokCloseCurly,
    tokOpenSquare,
    tokCloseSquare,

    tokEos=-1
};

class BnfLexer : public Lexer
{
public:
    BnfLexer( Stream &rSrc ) :
        rSrc( rSrc )
    {
    }

    virtual ~BnfLexer()
    {
    }

    virtual Token *nextToken()
    {
        char cBuf;

        for(;;)
        {
            if( qbIn.getSize() == 0 )
            {
                char buf[4096];
                qbIn.write( buf, rSrc.read( buf, 4096 ) );

                if( rSrc.isEos() && qbIn.getSize() == 0 )
                    return new Token( tokEos );
            }
            qbIn.peek( &cBuf, 1 );
            if( (cBuf >= 'a' && cBuf <= 'z') ||
                (cBuf >= 'A' && cBuf <= 'Z') ||
                (cBuf >= '0' && cBuf <= '9') ||
                cBuf == '_' )
            {
                sBuf.append( cBuf );
                qbIn.seek( 1 );
            }
            else if( sBuf.isSet() )
            {
                if( sBuf == "tokens" )
                {
                    sBuf.clear();
                    return new Token( tokTokens );
                }
                else
                {
                    Token *pRet = new Token( tokIdentifier, sBuf );
                    sBuf.clear();
                    return pRet;
                }
            }
            else
            {
                switch( cBuf )
                {
                    case ' ':
                    case '\t':
                    case '\n':
                    case '\r':
                        qbIn.seek( 1 );
                        continue;

                    case ':':
                        qbIn.seek( 1 );
                        return new Token( tokColon );

                    case ';':
                        qbIn.seek( 1 );
                        return new Token( tokSemiColon );

                    case '|':
                        qbIn.seek( 1 );
                        return new Token( tokOr );

                    case '=':
                        qbIn.seek( 1 );
                        return new Token( tokEquals );

                    case '[':
                        qbIn.seek( 1 );
                        return new Token( tokOpenSquare );

                    case ']':
                        qbIn.seek( 1 );
                        return new Token( tokCloseSquare );

                    case '{':
                        qbIn.seek( 1 );
                        return new Token( tokOpenCurly );

                    case '}':
                        qbIn.seek( 1 );
                        return new Token( tokCloseCurly );

                    default:
                        throw ExceptionBase("Unexpected character '%c'.",
                            cBuf );
                        break;
                }
            }
        }
    }

    virtual String tokenToString( const Token &t )
    {
        switch( (TokenType)t.iToken )
        {
            case tokIdentifier: return "tokIdentifier";
            case tokColon: return "tokColon";
            case tokOr: return "tokOr";
            case tokSemiColon: return "tokSemiColon";
            case tokTokens: return "tokTokens";
            case tokEquals: return "tokEquals";
            case tokOpenCurly: return "tokOpenCurly";
            case tokCloseCurly: return "tokCloseCurly";
            case tokOpenSquare: return "tokOpenSquare";
            case tokCloseSquare: return "tokCloseSquare";
            case tokEos: return "tokEos";
        }

        return "???";
    }

private:
    Stream &rSrc;
    QueueBuf qbIn;
    String sBuf;
};

class BnfParser
{
public:
    BnfParser( BnfLexer &l ) :
        l( l ),
        pCur( NULL ),
        iLastToken( 0 )
    {
    }

    virtual ~BnfParser()
    {
        delete pCur;
        pCur = NULL;
    }

    void parse()
    {
        for(;;)
        {
            next();
            switch( pCur->iToken )
            {
                case tokTokens:
                    tokens();
                    break;

                case tokIdentifier:
                    nonTerminal();
                    break;

                case tokEos:
                    return;
                    break;

                default:
                    tokenError("tokTokens, tokIdentifier, or tokEos");
            }
        }
    }

private:
    void tokens()
    {
        next();
        if( pCur->iToken != tokEquals )
            tokenError("tokEquals");
        for(;;)
        {
            next();
            if( pCur->iToken == tokIdentifier )
            {
                hTokens.insert( pCur->vExtra.get<Bu::String>(), ++iLastToken );
                sio << "Added token[" << iLastToken << "]: "
                    << pCur->vExtra.get<Bu::String>() << sio.nl;
            }
            else if( pCur->iToken == tokSemiColon )
                break;
            else
                tokenError("tokIdentifier or tokSemiColon");
        }
    }

    void nonTerminal()
    {
        Bu::String sNtName = pCur->vExtra.get<Bu::String>();
        Parser::NonTerminal nt;
        p.addNonTerminal( sNtName );
        sio.incIndent();
        sio << "Created non-terminal: " << sNtName << sio.nl;

        next();
        if( pCur->iToken != tokColon )
            tokenError("tokColon");
        production( nt );
        for(;;)
        {
            switch( pCur->iToken )
            {
                case tokOr:
                    production( nt );
                    break;

                case tokSemiColon:
                    p.setNonTerminal( sNtName, nt );
                    sio.decIndent();
                    sio << "Closing non-terminal." << sio.nl;
                    return;

                default:
                    tokenError("tokOr or tokSemiColon");
                    break;
            }
        }
    }

    void production( Parser::NonTerminal &nt )
    {
        sio.incIndent();
        sio << "Adding new production:" << sio.nl;
        Parser::Production pr;
        bool bAnything = false;
        for(;;)
        {
            next();
            switch( pCur->iToken )
            {
                case tokIdentifier:
                    {
                        const Bu::String &sName =
                            pCur->vExtra.get<Bu::String>();
                        if( hTokens.has( sName ) )
                        {
                            pr.append(
                                Parser::State(
                                    Parser::State::typeTerminal,
                                    hTokens.get( sName )
                                    )
                                );
                            sio << "Added terminal " << sName << sio.nl;
                        }
                        else
                        {
                            if( !p.hasNonTerminal( sName ) )
                            {
                                p.addNonTerminal( sName );
                            }
                            pr.append(
                                Parser::State(
                                    Parser::State::typeNonTerminal,
                                    p.getNonTerminalId( sName )
                                    )
                                );
                            sio << "Added non-terminal " << sName << sio.nl;
                        }
                    }
                    bAnything = true;   // this production now has content
                    break;

                case tokOpenSquare:
                    {
                        next();
                        if( pCur->iToken != tokIdentifier )
                            tokenError("tokIdentifier");
                        Bu::String sName =
                            pCur->vExtra.get<Bu::String>();
                        next();
                        if( pCur->iToken != tokCloseSquare )
                            tokenError("tokCloseSquare");

                        if( !hTokens.has( sName ) )
                            throw ExceptionBase("Only token names may be "
                                "enclosed in square brackets.");

                        pr.append(
                            Parser::State(
                                Parser::State::typeTerminalPush,
                                hTokens.get( sName )
                                )
                            );
                        sio << "Added terminal-push " << sName << sio.nl;
                    }
                    bAnything = true;
                    break;

                case tokOpenCurly:
                    {
                        next();
                        if( pCur->iToken != tokIdentifier )
                            tokenError("tokIdentifier");
                        Bu::String sName =
                            pCur->vExtra.get<Bu::String>();
                        next();
                        if( pCur->iToken != tokCloseCurly )
                            tokenError("tokCloseCurly");

                        if( !p.hasReduction( sName ) )
                            p.addReduction( sName );

                        pr.append(
                            Parser::State(
                                Parser::State::typeReduction,
                                p.getReductionId( sName )
                                )
                            );
                        sio << "Added reduction " << sName << sio.nl;
                    }
                    bAnything = true;
                    break;

                case tokOr:
                case tokSemiColon:
                    if( bAnything )
                    {
                        nt.addProduction( pr );
                        sio.decIndent();
                        sio << "Closing production." << sio.nl;
                    }
                    else
                    {
                        nt.setCanSkip();
                        sio.decIndent();
                        sio << "Closing empty production." << sio.nl;
                    }
                    return;

                default:
                    tokenError("tokIdentifier, tokOpenSquare, tokOr, "
                        "tokOpenCurly, or tokSemiColon");
            }
        }
    }

private:
    void next()
    {
        delete pCur;
        pCur = l.nextToken();
    }

    void tokenError( const String &s )
    {
        throw ExceptionBase( ("Expected " + s + " but found "
            + l.tokenToString( *pCur ) + ".").getStr() );
    }

private:
    typedef Bu::Hash<Bu::String, int> TokenHash;
    TokenHash hTokens;
    BnfLexer &l;
    BnfLexer::Token *pCur;
    int iLastToken;
    Parser p;
};

int main( int argc, char *argv[] )
{
    if( argc < 2 )
    {
        println("Provide an input filename as the first parameter.");
        return 0;
    }
    File fIn( argv[1], File::Read );

    BnfLexer bl( fIn );
    BnfParser parser( bl );

    parser.parse();

/*
    for(;;)
    {
        Lexer::Token *pTok = bl.nextToken();
        sio << bl.tokenToString(*pTok);
        if( pTok->vExtra.isSet() )
        {
            sio << " - " << pTok->vExtra;
        }
        sio << sio.nl;
        if( pTok->iToken == tokEos )
            break;
    }
*/

    return 0;
}
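For reference, a minimal sketch of the grammar format this test appears to accept, inferred from the lexer and parser above; the token names (tokPlus, tokTimes, tokNumber) and reduction names (add, mult) are hypothetical and chosen only for illustration:

    tokens = tokPlus tokTimes tokNumber;

    expr: expr [tokPlus] term {add}
        | term
        ;

    term: term [tokTimes] factor {mult}
        | factor
        ;

    factor: [tokNumber]
        ;

In a production, a bare identifier names a non-terminal (or matches a declared token without pushing it), square brackets push the matched token for a later reduction, curly braces invoke a named reduction, and a production with no symbols marks the non-terminal as skippable.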