author     Mike Buland <eichlan@xagasoft.com>    2011-01-10 21:04:17 +0000
committer  Mike Buland <eichlan@xagasoft.com>    2011-01-10 21:04:17 +0000
commit     2ba3f84ab559da02a11aa000b3cecb3b3668af61 (patch)
tree       266f450b512f607ec54d54af4fa8c13fdbe7ef91 /src/tools
parent     ea18007633b31901f2ae275cc0576c3f7ce99fc9 (diff)
parent     3611f253f6fdfa4954d374ab85ddaa7f799c130c (diff)
download   libbu++-2ba3f84ab559da02a11aa000b3cecb3b3668af61.tar.gz
           libbu++-2ba3f84ab559da02a11aa000b3cecb3b3668af61.tar.bz2
           libbu++-2ba3f84ab559da02a11aa000b3cecb3b3668af61.tar.xz
           libbu++-2ba3f84ab559da02a11aa000b3cecb3b3668af61.zip
Merged in the core branch. This is a major update that fixes many things and
changes many others, including source files that were deleted or renamed.
Before doing this update, I recommend a full clean, or even a fresh checkout.
The most notable changes in this update:
- Bu::Socket was changed to Bu::TcpSocket, and its default mode is now blocking.
- All templatized container classes are now SharedCore, which is good, but
SharedCore is inherently not reentrant-safe. However, every SharedCore class
has a "clone" function that returns a non-shared copy of the object, which is
safe to pass into reentrant code that would otherwise share memory with the
original; a short sketch follows below.
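
A rough sketch of the clone() pattern described above. The container choice,
its contents, and the worker function are illustrative assumptions, not
library API beyond what the note above states:

#include <bu/list.h>
#include <bu/fstring.h>
#include <bu/sio.h>

// Hypothetical worker; assume it may run on another thread, concurrently
// with the code that built the list.
void processNames( Bu::List<Bu::FString> lNames )
{
    // lNames is a detached, non-shared copy, so reading it here cannot race
    // with changes made to the original list by the caller's thread.
    Bu::sio << "Processing " << lNames.getSize() << " names." << Bu::sio.nl;
}

int main()
{
    Bu::List<Bu::FString> lShared;
    lShared.append("alpha");
    lShared.append("beta");

    // Passing lShared itself would only hand over the reference-counted
    // shared core; clone() produces a full copy that is safe to use from
    // reentrant code.
    processNames( lShared.clone() );

    return 0;
}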
Diffstat (limited to 'src/tools')
-rw-r--r--   src/tools/bnfcompile.cpp   410
-rw-r--r--   src/tools/myriadfs.cpp      38
-rw-r--r--   src/tools/parser.cpp        89
3 files changed, 528 insertions, 9 deletions
diff --git a/src/tools/bnfcompile.cpp b/src/tools/bnfcompile.cpp
new file mode 100644
index 0000000..16e75a5
--- /dev/null
+++ b/src/tools/bnfcompile.cpp
@@ -0,0 +1,410 @@
#include <bu/sio.h>
#include <bu/lexer.h>
#include <bu/parser.h>
#include <bu/file.h>
#include <bu/queuebuf.h>

using namespace Bu;

enum TokenType
{
    tokIdentifier,
    tokColon,
    tokOr,
    tokSemiColon,
    tokTokens,
    tokEquals,
    tokOpenCurly,
    tokCloseCurly,
    tokOpenSquare,
    tokCloseSquare,

    tokEos=-1
};

class BnfLexer : public Lexer
{
public:
    BnfLexer( Stream &rSrc ) :
        rSrc( rSrc )
    {
    }

    virtual ~BnfLexer()
    {
    }

    virtual Token *nextToken()
    {
        char cBuf;

        for(;;)
        {
            if( qbIn.getSize() == 0 )
            {
                char buf[4096];
                qbIn.write( buf, rSrc.read( buf, 4096 ) );

                if( rSrc.isEos() && qbIn.getSize() == 0 )
                    return new Token( tokEos );
            }
            qbIn.peek( &cBuf, 1 );
            if( (cBuf >= 'a' && cBuf <= 'z') ||
                (cBuf >= 'A' && cBuf <= 'Z') ||
                (cBuf >= '0' && cBuf <= '9') ||
                cBuf == '_' )
            {
                sBuf.append( cBuf );
                qbIn.seek( 1 );
            }
            else if( sBuf.isSet() )
            {
                if( sBuf == "tokens" )
                {
                    sBuf.clear();
                    return new Token( tokTokens );
                }
                else
                {
                    Token *pRet = new Token( tokIdentifier, sBuf );
                    sBuf.clear();
                    return pRet;
                }
            }
            else
            {
                switch( cBuf )
                {
                    case ' ':
                    case '\t':
                    case '\n':
                    case '\r':
                        qbIn.seek( 1 );
                        continue;

                    case ':':
                        qbIn.seek( 1 );
                        return new Token( tokColon );

                    case ';':
                        qbIn.seek( 1 );
                        return new Token( tokSemiColon );

                    case '|':
                        qbIn.seek( 1 );
                        return new Token( tokOr );

                    case '=':
                        qbIn.seek( 1 );
                        return new Token( tokEquals );

                    case '[':
                        qbIn.seek( 1 );
                        return new Token( tokOpenSquare );

                    case ']':
                        qbIn.seek( 1 );
                        return new Token( tokCloseSquare );

                    case '{':
                        qbIn.seek( 1 );
                        return new Token( tokOpenCurly );

                    case '}':
                        qbIn.seek( 1 );
                        return new Token( tokCloseCurly );

                    default:
                        throw ExceptionBase("Unexpected character '%c'.",
                            cBuf );
                        break;
                }
            }
        }
    }

    virtual FString tokenToString( const Token &t )
    {
        switch( (TokenType)t.iToken )
        {
            case tokIdentifier:  return "tokIdentifier";
            case tokColon:       return "tokColon";
            case tokOr:          return "tokOr";
            case tokSemiColon:   return "tokSemiColon";
            case tokTokens:      return "tokTokens";
            case tokEquals:      return "tokEquals";
            case tokOpenCurly:   return "tokOpenCurly";
            case tokCloseCurly:  return "tokCloseCurly";
            case tokOpenSquare:  return "tokOpenSquare";
            case tokCloseSquare: return "tokCloseSquare";
            case tokEos:         return "tokEos";
        }

        return "???";
    }

private:
    Stream &rSrc;
    QueueBuf qbIn;
    FString sBuf;
};

class BnfParser
{
public:
    BnfParser( BnfLexer &l ) :
        l( l ),
        pCur( NULL ),
        iLastToken( 0 )
    {
    }

    virtual ~BnfParser()
    {
        delete pCur;
        pCur = NULL;
    }

    void parse()
    {
        for(;;)
        {
            next();
            switch( pCur->iToken )
            {
                case tokTokens:
                    tokens();
                    break;

                case tokIdentifier:
                    nonTerminal();
                    break;

                case tokEos:
                    return;
                    break;

                default:
                    tokenError("tokTokens, tokIdentifier, or tokEos");
            }
        }
    }

private:
    void tokens()
    {
        next();
        if( pCur->iToken != tokEquals )
            tokenError("tokEquals");
        for(;;)
        {
            next();
            if( pCur->iToken == tokIdentifier )
            {
                hTokens.insert( pCur->vExtra.get<Bu::FString>(), ++iLastToken );
                sio << "Added token[" << iLastToken << "]: "
                    << pCur->vExtra.get<Bu::FString>() << sio.nl;
            }
            else if( pCur->iToken == tokSemiColon )
                break;
            else
                tokenError("tokIdentifier or tokSemiColon");
        }
    }

    void nonTerminal()
    {
        Bu::FString sNtName = pCur->vExtra.get<Bu::FString>();
        Parser::NonTerminal nt;
        p.addNonTerminal( sNtName );
        sio.incIndent();
        sio << "Created non-terminal: " << sNtName << sio.nl;

        next();
        if( pCur->iToken != tokColon )
            tokenError("tokColon");
        production( nt );
        for(;;)
        {
            switch( pCur->iToken )
            {
                case tokOr:
                    production( nt );
                    break;

                case tokSemiColon:
                    p.setNonTerminal( sNtName, nt );
                    sio.decIndent();
                    sio << "Closing non-terminal." << sio.nl;
                    return;

                default:
                    tokenError("tokOr or tokSemiColon");
                    break;
            }
        }
    }

    void production( Parser::NonTerminal &nt )
    {
        sio.incIndent();
        sio << "Adding new production:" << sio.nl;
        Parser::Production pr;
        bool bAnything = false; // set once the production has at least one state
        for(;;)
        {
            next();
            switch( pCur->iToken )
            {
                case tokIdentifier:
                    {
                        const Bu::FString &sName =
                            pCur->vExtra.get<Bu::FString>();
                        if( hTokens.has( sName ) )
                        {
                            pr.append(
                                Parser::State(
                                    Parser::State::typeTerminal,
                                    hTokens.get( sName )
                                    )
                                );
                            sio << "Added terminal " << sName << sio.nl;
                        }
                        else
                        {
                            if( !p.hasNonTerminal( sName ) )
                            {
                                p.addNonTerminal( sName );
                            }
                            pr.append(
                                Parser::State(
                                    Parser::State::typeNonTerminal,
                                    p.getNonTerminalId( sName )
                                    )
                                );
                            sio << "Added non-terminal " << sName << sio.nl;
                        }
                        bAnything = true;
                    }
                    break;

                case tokOpenSquare:
                    {
                        next();
                        if( pCur->iToken != tokIdentifier )
                            tokenError("tokIdentifier");
                        Bu::FString sName =
                            pCur->vExtra.get<Bu::FString>();
                        next();
                        if( pCur->iToken != tokCloseSquare )
                            tokenError("tokCloseSquare");

                        if( !hTokens.has( sName ) )
                            throw ExceptionBase("Only token names may be "
                                "enclosed in square brackets.");

                        pr.append(
                            Parser::State(
                                Parser::State::typeTerminalPush,
                                hTokens.get( sName )
                                )
                            );
                        sio << "Added terminal-push " << sName << sio.nl;
                        bAnything = true;
                    }
                    break;

                case tokOpenCurly:
                    {
                        next();
                        if( pCur->iToken != tokIdentifier )
                            tokenError("tokIdentifier");
                        Bu::FString sName =
                            pCur->vExtra.get<Bu::FString>();
                        next();
                        if( pCur->iToken != tokCloseCurly )
                            tokenError("tokCloseCurly");

                        if( !p.hasReduction( sName ) )
                            p.addReduction( sName );

                        pr.append(
                            Parser::State(
                                Parser::State::typeReduction,
                                p.getReductionId( sName )
                                )
                            );
                        sio << "Added reduction " << sName << sio.nl;
                        bAnything = true;
                    }
                    break;

                case tokOr:
                case tokSemiColon:
                    if( bAnything )
                    {
                        nt.addProduction( pr );
                        sio.decIndent();
                        sio << "Closing production." << sio.nl;
                    }
                    else
                    {
                        nt.setCanSkip();
                        sio.decIndent();
                        sio << "Closing empty production." << sio.nl;
                    }
                    return;

                default:
                    tokenError("tokIdentifier, tokOpenSquare, tokOr, "
                        "tokOpenCurly, or tokSemiColon");
            }
        }
    }

private:
    void next()
    {
        delete pCur;
        pCur = l.nextToken();
    }

    void tokenError( const FString &s )
    {
        throw ExceptionBase( ("Expected " + s + " but found "
            + l.tokenToString( *pCur ) + ".").getStr() );
    }

private:
    typedef Bu::Hash<Bu::FString, int> TokenHash;
    TokenHash hTokens;
    BnfLexer &l;
    BnfLexer::Token *pCur;
    int iLastToken;
    Parser p;
};

int main( int argc, char *argv[] )
{
    File fIn( argv[1], File::Read );

    BnfLexer bl( fIn );
    BnfParser parser( bl );

    parser.parse();

    /*
    for(;;)
    {
        Lexer::Token *pTok = bl.nextToken();
        sio << bl.tokenToString(*pTok);
        if( pTok->vExtra.isSet() )
        {
            sio << " - " << pTok->vExtra;
        }
        sio << sio.nl;
        if( pTok->iToken == tokEos )
            break;
    }
    */

    return 0;
}
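For reference, a hypothetical input file for the bnfcompile tool above; the
token and rule names are made up for illustration. Based on the lexer and
parser code: the "tokens = ... ;" line declares the terminal names, each rule
is written as "name: production | production ;", an empty production marks the
rule as skippable, square brackets push the matched token onto the parser, and
curly braces invoke a named reduction.

tokens = tokNumber tokPlus tokOpenParen tokCloseParen;

input: expr {print}
     |
     ;

expr: exprSub1 exprSub2
    ;

exprSub1: tokOpenParen expr tokCloseParen
        | [tokNumber]
        ;

exprSub2: tokPlus expr {add}
        |
        ;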
diff --git a/src/tools/myriadfs.cpp b/src/tools/myriadfs.cpp
new file mode 100644
index 0000000..66955a5
--- /dev/null
+++ b/src/tools/myriadfs.cpp
@@ -0,0 +1,38 @@
#define FUSE_USE_VERSION 26

#include <fuse.h>

#include <string.h>

extern "C" {
static int myriadfs_getattr( const char *sPath, struct stat *stbuf )
{

}

static int myriadfs_readdir( const char *sPath, void *buf,
    fuse_fill_dir_t filler, off_t offset, struct fuse_file_info *fi )
{
}

static int myriadfs_open( const char *sPath, struct fuse_file_info *fi )
{
}

static int myriadfs_read( const char *sPath, char *buf, size_t iSize,
    off_t iOffset, struct fuse_file_info *fi )
{
}

static struct fuse_operations myriadfs_oper;

int main( int argc, char *argv[] )
{
    memset( &myriadfs_oper, 0, sizeof(myriadfs_oper) );
    myriadfs_oper.getattr = myriadfs_getattr;
    myriadfs_oper.readdir = myriadfs_readdir;
    myriadfs_oper.open = myriadfs_open;
    myriadfs_oper.read = myriadfs_read;
    return fuse_main( argc, argv, &myriadfs_oper, NULL );
}
}
diff --git a/src/tools/parser.cpp b/src/tools/parser.cpp
index 76d4a72..7933f31 100644
--- a/src/tools/parser.cpp
+++ b/src/tools/parser.cpp
@@ -151,10 +151,43 @@ private:
 
 void redAdd( Bu::Parser &p )
 {
+    Lexer::Token *a = p.popToken();
+    Lexer::Token *b = p.popToken();
+
+    sio << "Add! " << b->vExtra.get<double>() << " + "
+        << a->vExtra.get<double>() << sio.nl;
+
+    Lexer::Token *c = new Lexer::Token( tokNumber,
+        b->vExtra.get<double>() + a->vExtra.get<double>()
+        );
+    p.pushToken( c );
+
+    delete a;
+    delete b;
+}
+
+void redSubtract( Bu::Parser &p )
+{
+    Lexer::Token *a = p.popToken();
+    Lexer::Token *b = p.popToken();
+
+    sio << "Subtract! " << b->vExtra.get<double>() << " - "
+        << a->vExtra.get<double>() << sio.nl;
+
+    Lexer::Token *c = new Lexer::Token( tokNumber,
+        b->vExtra.get<double>() - a->vExtra.get<double>()
+        );
+    p.pushToken( c );
+
+    delete a;
+    delete b;
 }
 
 void redPrint( Bu::Parser &p )
 {
+    Lexer::Token *a = p.popToken();
+    sio << "Print! = " << a->vExtra.get<double>() << sio.nl;
+    delete a;
 }
 
 /* Basic grammar example:
@@ -170,14 +203,15 @@ void redPrint( Bu::Parser &p )
  * The problem is that we can't actually make something left-hand recursive,
  * so we break it into two exprs:
  *
- * expr': '(' expr ')'
+ * expr-sub1: '(' expr ')'
  *     | NUMBER
  *     ;
  *
- * expr: expr' expr''
+ * expr: expr-sub1 expr-sub2
  *     ;
  *
- * expr'': '+' expr
+ * expr-sub2: '+' expr
+ *     | '-' expr
  *     |
  *     ;
  *
@@ -191,8 +225,8 @@ int main( int argc, char *argv[] )
     Parser p;
 
     p.addNonTerminal("expr");
-    p.addNonTerminal("expr'");
-    p.addNonTerminal("expr''");
+    p.addNonTerminal("expr-sub1");
+    p.addNonTerminal("expr-sub2");
     {
         Parser::NonTerminal nt;
         nt.addProduction(
@@ -215,10 +249,28 @@ int main( int argc, char *argv[] )
             );
         nt.addProduction(
             Parser::Production(
+                Parser::State(
+                    Parser::State::typeTerminal,
+                    tokMinus
+                )
+            ).append(
+                Parser::State(
+                    Parser::State::typeNonTerminal,
+                    p.getNonTerminalId("expr")
+                )
+            ).append(
+                Parser::State(
+                    Parser::State::typeReduction,
+                    p.addReduction("subtract")
+                )
+            )
+        );
+        nt.addProduction(
+            Parser::Production(
             )
         );
         nt.setCanSkip();
-        p.setNonTerminal("expr''", nt );
+        p.setNonTerminal("expr-sub2", nt );
     }
     {
         Parser::NonTerminal nt;
@@ -230,7 +282,25 @@ int main( int argc, char *argv[] )
                 )
             )
         );
-        p.setNonTerminal("expr'", nt );
+        nt.addProduction(
+            Parser::Production(
+                Parser::State(
+                    Parser::State::typeTerminal,
+                    tokOpenParen
+                )
+            ).append(
+                Parser::State(
+                    Parser::State::typeNonTerminal,
+                    p.getNonTerminalId("expr")
+                )
+            ).append(
+                Parser::State(
+                    Parser::State::typeTerminal,
+                    tokCloseParen
+                )
+            )
+        );
+        p.setNonTerminal("expr-sub1", nt );
     }
     {
         Parser::NonTerminal nt;
@@ -238,12 +308,12 @@ int main( int argc, char *argv[] )
             Parser::Production(
                 Parser::State(
                     Parser::State::typeNonTerminal,
-                    p.getNonTerminalId("expr'")
+                    p.getNonTerminalId("expr-sub1")
                 )
             ).append(
                 Parser::State(
                     Parser::State::typeNonTerminal,
-                    p.getNonTerminalId("expr''")
+                    p.getNonTerminalId("expr-sub2")
                 )
             )
         );
@@ -275,6 +345,7 @@ int main( int argc, char *argv[] )
     p.setRootNonTerminal("input");
 
     p.setReduction("add", Bu::slot( &redAdd ) );
+    p.setReduction("subtract", Bu::slot( &redSubtract ) );
     p.setReduction("print", Bu::slot( &redPrint ) );
 
     p.pushLexer( new MathLexer( fIn ) );