1
|
"use strict";
|
2
|
|
3
|
Object.defineProperty(exports, "__esModule", {
|
4
|
value: true
|
5
|
});
|
6
|
exports.parse = parse;
|
7
|
|
8
|
var _helperCodeFrame = require("@webassemblyjs/helper-code-frame");
|
9
|
|
10
|
var t = _interopRequireWildcard(require("@webassemblyjs/ast"));
|
11
|
|
12
|
var _numberLiterals = require("./number-literals");
|
13
|
|
14
|
var _stringLiterals = require("./string-literals");
|
15
|
|
16
|
var _tokenizer = require("./tokenizer");
|
17
|
|
18
|
/**
 * Babel interop helper: wraps a CommonJS export object so it can be
 * consumed as an ES-module namespace (`import * as t from ...`).
 *
 * - If the object is already an ES module (`__esModule` flag), it is
 *   returned unchanged.
 * - Otherwise a fresh namespace object is built: own enumerable
 *   properties are copied (accessor properties keep their getters and
 *   setters via their descriptors) and the original object becomes the
 *   `default` export.
 *
 * @param {*} obj - module exports object (may be null/undefined).
 * @returns {Object} the namespace-shaped object.
 */
function _interopRequireWildcard(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  var namespace = {};
  if (obj != null) {
    for (var prop in obj) {
      if (!Object.prototype.hasOwnProperty.call(obj, prop)) {
        continue;
      }
      var descriptor =
        Object.defineProperty && Object.getOwnPropertyDescriptor
          ? Object.getOwnPropertyDescriptor(obj, prop)
          : {};
      if (descriptor.get || descriptor.set) {
        // Preserve accessor semantics instead of snapshotting the value.
        Object.defineProperty(namespace, prop, descriptor);
      } else {
        namespace[prop] = obj[prop];
      }
    }
  }
  namespace.default = obj;
  return namespace;
}
|
19
|
|
20
|
/**
 * Babel typeof helper with Symbol support.
 *
 * On the first call it detects whether the engine reports native
 * symbols via `typeof` and replaces itself with the cheapest correct
 * implementation, so subsequent calls skip the feature check.
 *
 * @param {*} obj - value to classify.
 * @returns {string} the (symbol-aware) typeof string for `obj`.
 */
function _typeof(obj) {
  var nativeSymbolTypeof =
    typeof Symbol === "function" && typeof Symbol.iterator === "symbol";

  if (nativeSymbolTypeof) {
    // Engine already answers "symbol" natively; plain typeof suffices.
    _typeof = function _typeof(value) {
      return typeof value;
    };
  } else {
    // Polyfilled symbols: detect Symbol instances structurally.
    _typeof = function _typeof(value) {
      if (
        value &&
        typeof Symbol === "function" &&
        value.constructor === Symbol &&
        value !== Symbol.prototype
      ) {
        return "symbol";
      }
      return typeof value;
    };
  }

  return _typeof(obj);
}
|
21
|
|
22
|
/**
 * Babel spread helper: produces a fresh Array from `arr`.
 *
 * Arrays are shallow-copied element by element; any other iterable or
 * array-like (strings, Sets, arguments, ...) is converted with
 * `Array.from`.
 *
 * @param {*} arr - array, iterable, or array-like to spread.
 * @returns {Array} a new array containing the elements of `arr`.
 */
function _toConsumableArray(arr) {
  if (!Array.isArray(arr)) {
    return Array.from(arr);
  }
  var copy = new Array(arr.length);
  for (var i = 0; i < arr.length; i++) {
    copy[i] = arr[i];
  }
  return copy;
}
|
23
|
|
24
|
/**
 * Checks that the requested parser plugin is supported.
 *
 * Only the "wast" plugin exists in this parser; any other name is
 * rejected.
 *
 * @param {string} name - plugin identifier to check.
 * @returns {boolean} always `true` when the plugin is known.
 * @throws {Error} when `name` is not "wast".
 */
function hasPlugin(name) {
  // Fixed typo in the error message ("unknow" -> "unknown").
  if (name !== "wast") throw new Error("unknown plugin");
  return true;
}
|
28
|
|
29
|
/**
 * Tells whether `token` is the keyword token identified by `id`.
 *
 * @param {Object} token - tokenizer token ({ type, value, ... }).
 * @param {string} id - expected keyword value.
 * @returns {boolean} true when the token is a keyword with value `id`.
 */
function isKeyword(token, id) {
  var isKeywordToken = token.type === _tokenizer.tokens.keyword;
  return isKeywordToken && token.value === id;
}
|
32
|
|
33
|
/**
 * Renders a token as a short human-readable label for error messages.
 *
 * Keyword tokens include their value, e.g. `keyword (func)`; every
 * other token is represented by its type alone.
 *
 * @param {Object} token - tokenizer token ({ type, value, ... }).
 * @returns {string} display string for the token.
 */
function tokenToString(token) {
  if (token.type !== "keyword") {
    return token.type;
  }
  return "keyword (" + token.value + ")";
}
|
40
|
|
41
|
/**
 * Builds an AST identifier node from a tokenizer token, carrying over
 * the token's source location.
 *
 * @param {Object} token - tokenizer token with a `loc` ({ start, end }).
 * @returns {Object} identifier AST node with location info attached.
 */
function identifierFromToken(token) {
  var loc = token.loc;
  // Note: t.withLoc takes the end location before the start location,
  // matching its use elsewhere in this file.
  return t.withLoc(t.identifier(token.value), loc.end, loc.start);
}
|
47
|
|
48
|
function parse(tokensList, source) {
|
49
|
var current = 0;
|
50
|
var getUniqueName = t.getUniqueNameGenerator();
|
51
|
var state = {
|
52
|
registredExportedElements: []
|
53
|
}; // But this time we're going to use recursion instead of a `while` loop. So we
|
54
|
// define a `walk` function.
|
55
|
|
56
|
function walk() {
|
57
|
var token = tokensList[current];
|
58
|
|
59
|
function eatToken() {
|
60
|
token = tokensList[++current];
|
61
|
}
|
62
|
|
63
|
function getEndLoc() {
|
64
|
var currentToken = token;
|
65
|
|
66
|
if (typeof currentToken === "undefined") {
|
67
|
var lastToken = tokensList[tokensList.length - 1];
|
68
|
currentToken = lastToken;
|
69
|
}
|
70
|
|
71
|
return currentToken.loc.end;
|
72
|
}
|
73
|
|
74
|
function getStartLoc() {
|
75
|
return token.loc.start;
|
76
|
}
|
77
|
|
78
|
function eatTokenOfType(type) {
|
79
|
if (token.type !== type) {
|
80
|
throw new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "Assertion error: expected token of type " + type + ", given " + tokenToString(token));
|
81
|
}
|
82
|
|
83
|
eatToken();
|
84
|
}
|
85
|
|
86
|
function parseExportIndex(token) {
|
87
|
if (token.type === _tokenizer.tokens.identifier) {
|
88
|
var index = identifierFromToken(token);
|
89
|
eatToken();
|
90
|
return index;
|
91
|
} else if (token.type === _tokenizer.tokens.number) {
|
92
|
var _index = t.numberLiteralFromRaw(token.value);
|
93
|
|
94
|
eatToken();
|
95
|
return _index;
|
96
|
} else {
|
97
|
throw function () {
|
98
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "unknown export index" + ", given " + tokenToString(token));
|
99
|
}();
|
100
|
}
|
101
|
}
|
102
|
|
103
|
function lookaheadAndCheck() {
|
104
|
var len = arguments.length;
|
105
|
|
106
|
for (var i = 0; i < len; i++) {
|
107
|
var tokenAhead = tokensList[current + i];
|
108
|
var expectedToken = i < 0 || arguments.length <= i ? undefined : arguments[i];
|
109
|
|
110
|
if (tokenAhead.type === "keyword") {
|
111
|
if (isKeyword(tokenAhead, expectedToken) === false) {
|
112
|
return false;
|
113
|
}
|
114
|
} else if (expectedToken !== tokenAhead.type) {
|
115
|
return false;
|
116
|
}
|
117
|
}
|
118
|
|
119
|
return true;
|
120
|
} // TODO(sven): there is probably a better way to do this
|
121
|
// can refactor it if it get out of hands
|
122
|
|
123
|
|
124
|
function maybeIgnoreComment() {
|
125
|
if (typeof token === "undefined") {
|
126
|
// Ignore
|
127
|
return;
|
128
|
}
|
129
|
|
130
|
while (token.type === _tokenizer.tokens.comment) {
|
131
|
eatToken();
|
132
|
|
133
|
if (typeof token === "undefined") {
|
134
|
// Hit the end
|
135
|
break;
|
136
|
}
|
137
|
}
|
138
|
}
|
139
|
/**
|
140
|
* Parses a memory instruction
|
141
|
*
|
142
|
* WAST:
|
143
|
*
|
144
|
* memory: ( memory <name>? <memory_sig> )
|
145
|
* ( memory <name>? ( export <string> ) <...> )
|
146
|
* ( memory <name>? ( import <string> <string> ) <memory_sig> )
|
147
|
* ( memory <name>? ( export <string> )* ( data <string>* )
|
148
|
* memory_sig: <nat> <nat>?
|
149
|
*
|
150
|
*/
|
151
|
|
152
|
|
153
|
function parseMemory() {
|
154
|
var id = t.identifier(getUniqueName("memory"));
|
155
|
var limits = t.limit(0);
|
156
|
|
157
|
if (token.type === _tokenizer.tokens.string || token.type === _tokenizer.tokens.identifier) {
|
158
|
id = t.identifier(token.value);
|
159
|
eatToken();
|
160
|
} else {
|
161
|
id = t.withRaw(id, ""); // preserve anonymous
|
162
|
}
|
163
|
/**
|
164
|
* Maybe data
|
165
|
*/
|
166
|
|
167
|
|
168
|
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.data)) {
|
169
|
eatToken(); // (
|
170
|
|
171
|
eatToken(); // data
|
172
|
// TODO(sven): do something with the data collected here
|
173
|
|
174
|
var stringInitializer = token.value;
|
175
|
eatTokenOfType(_tokenizer.tokens.string); // Update limits accordingly
|
176
|
|
177
|
limits = t.limit(stringInitializer.length);
|
178
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
179
|
}
|
180
|
/**
|
181
|
* Maybe export
|
182
|
*/
|
183
|
|
184
|
|
185
|
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.export)) {
|
186
|
eatToken(); // (
|
187
|
|
188
|
eatToken(); // export
|
189
|
|
190
|
if (token.type !== _tokenizer.tokens.string) {
|
191
|
throw function () {
|
192
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Expected string in export" + ", given " + tokenToString(token));
|
193
|
}();
|
194
|
}
|
195
|
|
196
|
var _name = token.value;
|
197
|
eatToken();
|
198
|
state.registredExportedElements.push({
|
199
|
exportType: "Memory",
|
200
|
name: _name,
|
201
|
id: id
|
202
|
});
|
203
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
204
|
}
|
205
|
/**
|
206
|
* Memory signature
|
207
|
*/
|
208
|
|
209
|
|
210
|
if (token.type === _tokenizer.tokens.number) {
|
211
|
limits = t.limit((0, _numberLiterals.parse32I)(token.value));
|
212
|
eatToken();
|
213
|
|
214
|
if (token.type === _tokenizer.tokens.number) {
|
215
|
limits.max = (0, _numberLiterals.parse32I)(token.value);
|
216
|
eatToken();
|
217
|
}
|
218
|
}
|
219
|
|
220
|
return t.memory(limits, id);
|
221
|
}
|
222
|
/**
|
223
|
* Parses a data section
|
224
|
* https://webassembly.github.io/spec/core/text/modules.html#data-segments
|
225
|
*
|
226
|
* WAST:
|
227
|
*
|
228
|
* data: ( data <index>? <offset> <string> )
|
229
|
*/
|
230
|
|
231
|
|
232
|
function parseData() {
|
233
|
// optional memory index
|
234
|
var memidx = 0;
|
235
|
|
236
|
if (token.type === _tokenizer.tokens.number) {
|
237
|
memidx = token.value;
|
238
|
eatTokenOfType(_tokenizer.tokens.number); // .
|
239
|
}
|
240
|
|
241
|
eatTokenOfType(_tokenizer.tokens.openParen);
|
242
|
var offset;
|
243
|
|
244
|
if (token.type === _tokenizer.tokens.valtype) {
|
245
|
eatTokenOfType(_tokenizer.tokens.valtype); // i32
|
246
|
|
247
|
eatTokenOfType(_tokenizer.tokens.dot); // .
|
248
|
|
249
|
if (token.value !== "const") {
|
250
|
throw new Error("constant expression required");
|
251
|
}
|
252
|
|
253
|
eatTokenOfType(_tokenizer.tokens.name); // const
|
254
|
|
255
|
var numberLiteral = t.numberLiteralFromRaw(token.value, "i32");
|
256
|
offset = t.objectInstruction("const", "i32", [numberLiteral]);
|
257
|
eatToken();
|
258
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
259
|
} else {
|
260
|
eatTokenOfType(_tokenizer.tokens.name); // get_global
|
261
|
|
262
|
var _numberLiteral = t.numberLiteralFromRaw(token.value, "i32");
|
263
|
|
264
|
offset = t.instruction("get_global", [_numberLiteral]);
|
265
|
eatToken();
|
266
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
267
|
}
|
268
|
|
269
|
var byteArray = (0, _stringLiterals.parseString)(token.value);
|
270
|
eatToken(); // "string"
|
271
|
|
272
|
return t.data(t.memIndexLiteral(memidx), offset, t.byteArray(byteArray));
|
273
|
}
|
274
|
/**
|
275
|
* Parses a table instruction
|
276
|
*
|
277
|
* WAST:
|
278
|
*
|
279
|
* table: ( table <name>? <table_type> )
|
280
|
* ( table <name>? ( export <string> ) <...> )
|
281
|
* ( table <name>? ( import <string> <string> ) <table_type> )
|
282
|
* ( table <name>? ( export <string> )* <elem_type> ( elem <var>* ) )
|
283
|
*
|
284
|
* table_type: <nat> <nat>? <elem_type>
|
285
|
* elem_type: anyfunc
|
286
|
*
|
287
|
* elem: ( elem <var>? (offset <instr>* ) <var>* )
|
288
|
* ( elem <var>? <expr> <var>* )
|
289
|
*/
|
290
|
|
291
|
|
292
|
function parseTable() {
|
293
|
var name = t.identifier(getUniqueName("table"));
|
294
|
var limit = t.limit(0);
|
295
|
var elemIndices = [];
|
296
|
var elemType = "anyfunc";
|
297
|
|
298
|
if (token.type === _tokenizer.tokens.string || token.type === _tokenizer.tokens.identifier) {
|
299
|
name = identifierFromToken(token);
|
300
|
eatToken();
|
301
|
} else {
|
302
|
name = t.withRaw(name, ""); // preserve anonymous
|
303
|
}
|
304
|
|
305
|
while (token.type !== _tokenizer.tokens.closeParen) {
|
306
|
/**
|
307
|
* Maybe export
|
308
|
*/
|
309
|
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.elem)) {
|
310
|
eatToken(); // (
|
311
|
|
312
|
eatToken(); // elem
|
313
|
|
314
|
while (token.type === _tokenizer.tokens.identifier) {
|
315
|
elemIndices.push(t.identifier(token.value));
|
316
|
eatToken();
|
317
|
}
|
318
|
|
319
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
320
|
} else if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.export)) {
|
321
|
eatToken(); // (
|
322
|
|
323
|
eatToken(); // export
|
324
|
|
325
|
if (token.type !== _tokenizer.tokens.string) {
|
326
|
throw function () {
|
327
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Expected string in export" + ", given " + tokenToString(token));
|
328
|
}();
|
329
|
}
|
330
|
|
331
|
var exportName = token.value;
|
332
|
eatToken();
|
333
|
state.registredExportedElements.push({
|
334
|
exportType: "Table",
|
335
|
name: exportName,
|
336
|
id: name
|
337
|
});
|
338
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
339
|
} else if (isKeyword(token, _tokenizer.keywords.anyfunc)) {
|
340
|
// It's the default value, we can ignore it
|
341
|
eatToken(); // anyfunc
|
342
|
} else if (token.type === _tokenizer.tokens.number) {
|
343
|
/**
|
344
|
* Table type
|
345
|
*/
|
346
|
var min = parseInt(token.value);
|
347
|
eatToken();
|
348
|
|
349
|
if (token.type === _tokenizer.tokens.number) {
|
350
|
var max = parseInt(token.value);
|
351
|
eatToken();
|
352
|
limit = t.limit(min, max);
|
353
|
} else {
|
354
|
limit = t.limit(min);
|
355
|
}
|
356
|
|
357
|
eatToken();
|
358
|
} else {
|
359
|
throw function () {
|
360
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token" + ", given " + tokenToString(token));
|
361
|
}();
|
362
|
}
|
363
|
}
|
364
|
|
365
|
if (elemIndices.length > 0) {
|
366
|
return t.table(elemType, limit, name, elemIndices);
|
367
|
} else {
|
368
|
return t.table(elemType, limit, name);
|
369
|
}
|
370
|
}
|
371
|
/**
|
372
|
* Parses an import statement
|
373
|
*
|
374
|
* WAST:
|
375
|
*
|
376
|
* import: ( import <string> <string> <imkind> )
|
377
|
* imkind: ( func <name>? <func_sig> )
|
378
|
* ( global <name>? <global_sig> )
|
379
|
* ( table <name>? <table_sig> )
|
380
|
* ( memory <name>? <memory_sig> )
|
381
|
*
|
382
|
* global_sig: <type> | ( mut <type> )
|
383
|
*/
|
384
|
|
385
|
|
386
|
function parseImport() {
|
387
|
if (token.type !== _tokenizer.tokens.string) {
|
388
|
throw new Error("Expected a string, " + token.type + " given.");
|
389
|
}
|
390
|
|
391
|
var moduleName = token.value;
|
392
|
eatToken();
|
393
|
|
394
|
if (token.type !== _tokenizer.tokens.string) {
|
395
|
throw new Error("Expected a string, " + token.type + " given.");
|
396
|
}
|
397
|
|
398
|
var name = token.value;
|
399
|
eatToken();
|
400
|
eatTokenOfType(_tokenizer.tokens.openParen);
|
401
|
var descr;
|
402
|
|
403
|
if (isKeyword(token, _tokenizer.keywords.func)) {
|
404
|
eatToken(); // keyword
|
405
|
|
406
|
var fnParams = [];
|
407
|
var fnResult = [];
|
408
|
var typeRef;
|
409
|
var fnName = t.identifier(getUniqueName("func"));
|
410
|
|
411
|
if (token.type === _tokenizer.tokens.identifier) {
|
412
|
fnName = identifierFromToken(token);
|
413
|
eatToken();
|
414
|
}
|
415
|
|
416
|
while (token.type === _tokenizer.tokens.openParen) {
|
417
|
eatToken();
|
418
|
|
419
|
if (lookaheadAndCheck(_tokenizer.keywords.type) === true) {
|
420
|
eatToken();
|
421
|
typeRef = parseTypeReference();
|
422
|
} else if (lookaheadAndCheck(_tokenizer.keywords.param) === true) {
|
423
|
eatToken();
|
424
|
fnParams.push.apply(fnParams, _toConsumableArray(parseFuncParam()));
|
425
|
} else if (lookaheadAndCheck(_tokenizer.keywords.result) === true) {
|
426
|
eatToken();
|
427
|
fnResult.push.apply(fnResult, _toConsumableArray(parseFuncResult()));
|
428
|
} else {
|
429
|
throw function () {
|
430
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in import of type" + ", given " + tokenToString(token));
|
431
|
}();
|
432
|
}
|
433
|
|
434
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
435
|
}
|
436
|
|
437
|
if (typeof fnName === "undefined") {
|
438
|
throw new Error("Imported function must have a name");
|
439
|
}
|
440
|
|
441
|
descr = t.funcImportDescr(fnName, typeRef !== undefined ? typeRef : t.signature(fnParams, fnResult));
|
442
|
} else if (isKeyword(token, _tokenizer.keywords.global)) {
|
443
|
eatToken(); // keyword
|
444
|
|
445
|
if (token.type === _tokenizer.tokens.openParen) {
|
446
|
eatToken(); // (
|
447
|
|
448
|
eatTokenOfType(_tokenizer.tokens.keyword); // mut keyword
|
449
|
|
450
|
var valtype = token.value;
|
451
|
eatToken();
|
452
|
descr = t.globalType(valtype, "var");
|
453
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
454
|
} else {
|
455
|
var _valtype = token.value;
|
456
|
eatTokenOfType(_tokenizer.tokens.valtype);
|
457
|
descr = t.globalType(_valtype, "const");
|
458
|
}
|
459
|
} else if (isKeyword(token, _tokenizer.keywords.memory) === true) {
|
460
|
eatToken(); // Keyword
|
461
|
|
462
|
descr = parseMemory();
|
463
|
} else if (isKeyword(token, _tokenizer.keywords.table) === true) {
|
464
|
eatToken(); // Keyword
|
465
|
|
466
|
descr = parseTable();
|
467
|
} else {
|
468
|
throw new Error("Unsupported import type: " + tokenToString(token));
|
469
|
}
|
470
|
|
471
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
472
|
return t.moduleImport(moduleName, name, descr);
|
473
|
}
|
474
|
/**
|
475
|
* Parses a block instruction
|
476
|
*
|
477
|
* WAST:
|
478
|
*
|
479
|
* expr: ( block <name>? <block_sig> <instr>* )
|
480
|
* instr: block <name>? <block_sig> <instr>* end <name>?
|
481
|
* block_sig : ( result <type>* )*
|
482
|
*
|
483
|
*/
|
484
|
|
485
|
|
486
|
function parseBlock() {
|
487
|
var label = t.identifier(getUniqueName("block"));
|
488
|
var blockResult = null;
|
489
|
var instr = [];
|
490
|
|
491
|
if (token.type === _tokenizer.tokens.identifier) {
|
492
|
label = identifierFromToken(token);
|
493
|
eatToken();
|
494
|
} else {
|
495
|
label = t.withRaw(label, ""); // preserve anonymous
|
496
|
}
|
497
|
|
498
|
while (token.type === _tokenizer.tokens.openParen) {
|
499
|
eatToken();
|
500
|
|
501
|
if (lookaheadAndCheck(_tokenizer.keywords.result) === true) {
|
502
|
eatToken();
|
503
|
blockResult = token.value;
|
504
|
eatToken();
|
505
|
} else if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
|
506
|
) {
|
507
|
// Instruction
|
508
|
instr.push(parseFuncInstr());
|
509
|
} else {
|
510
|
throw function () {
|
511
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in block body of type" + ", given " + tokenToString(token));
|
512
|
}();
|
513
|
}
|
514
|
|
515
|
maybeIgnoreComment();
|
516
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
517
|
}
|
518
|
|
519
|
return t.blockInstruction(label, instr, blockResult);
|
520
|
}
|
521
|
/**
|
522
|
* Parses a if instruction
|
523
|
*
|
524
|
* WAST:
|
525
|
*
|
526
|
* expr:
|
527
|
* ( if <name>? <block_sig> ( then <instr>* ) ( else <instr>* )? )
|
528
|
* ( if <name>? <block_sig> <expr>+ ( then <instr>* ) ( else <instr>* )? )
|
529
|
*
|
530
|
* instr:
|
531
|
* if <name>? <block_sig> <instr>* end <name>?
|
532
|
* if <name>? <block_sig> <instr>* else <name>? <instr>* end <name>?
|
533
|
*
|
534
|
* block_sig : ( result <type>* )*
|
535
|
*
|
536
|
*/
|
537
|
|
538
|
|
539
|
function parseIf() {
|
540
|
var blockResult = null;
|
541
|
var label = t.identifier(getUniqueName("if"));
|
542
|
var testInstrs = [];
|
543
|
var consequent = [];
|
544
|
var alternate = [];
|
545
|
|
546
|
if (token.type === _tokenizer.tokens.identifier) {
|
547
|
label = identifierFromToken(token);
|
548
|
eatToken();
|
549
|
} else {
|
550
|
label = t.withRaw(label, ""); // preserve anonymous
|
551
|
}
|
552
|
|
553
|
while (token.type === _tokenizer.tokens.openParen) {
|
554
|
eatToken(); // (
|
555
|
|
556
|
/**
|
557
|
* Block signature
|
558
|
*/
|
559
|
|
560
|
if (isKeyword(token, _tokenizer.keywords.result) === true) {
|
561
|
eatToken();
|
562
|
blockResult = token.value;
|
563
|
eatTokenOfType(_tokenizer.tokens.valtype);
|
564
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
565
|
continue;
|
566
|
}
|
567
|
/**
|
568
|
* Then
|
569
|
*/
|
570
|
|
571
|
|
572
|
if (isKeyword(token, _tokenizer.keywords.then) === true) {
|
573
|
eatToken(); // then
|
574
|
|
575
|
while (token.type === _tokenizer.tokens.openParen) {
|
576
|
eatToken(); // Instruction
|
577
|
|
578
|
if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
|
579
|
) {
|
580
|
consequent.push(parseFuncInstr());
|
581
|
} else {
|
582
|
throw function () {
|
583
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in consequent body of type" + ", given " + tokenToString(token));
|
584
|
}();
|
585
|
}
|
586
|
|
587
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
588
|
}
|
589
|
|
590
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
591
|
continue;
|
592
|
}
|
593
|
/**
|
594
|
* Alternate
|
595
|
*/
|
596
|
|
597
|
|
598
|
if (isKeyword(token, _tokenizer.keywords.else)) {
|
599
|
eatToken(); // else
|
600
|
|
601
|
while (token.type === _tokenizer.tokens.openParen) {
|
602
|
eatToken(); // Instruction
|
603
|
|
604
|
if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
|
605
|
) {
|
606
|
alternate.push(parseFuncInstr());
|
607
|
} else {
|
608
|
throw function () {
|
609
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in alternate body of type" + ", given " + tokenToString(token));
|
610
|
}();
|
611
|
}
|
612
|
|
613
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
614
|
}
|
615
|
|
616
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
617
|
continue;
|
618
|
}
|
619
|
/**
|
620
|
* Test instruction
|
621
|
*/
|
622
|
|
623
|
|
624
|
if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
|
625
|
) {
|
626
|
testInstrs.push(parseFuncInstr());
|
627
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
628
|
continue;
|
629
|
}
|
630
|
|
631
|
throw function () {
|
632
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in if body" + ", given " + tokenToString(token));
|
633
|
}();
|
634
|
}
|
635
|
|
636
|
return t.ifInstruction(label, testInstrs, blockResult, consequent, alternate);
|
637
|
}
|
638
|
/**
|
639
|
* Parses a loop instruction
|
640
|
*
|
641
|
* WAT:
|
642
|
*
|
643
|
* blockinstr :: 'loop' I:label rt:resulttype (in:instr*) 'end' id?
|
644
|
*
|
645
|
* WAST:
|
646
|
*
|
647
|
* instr :: loop <name>? <block_sig> <instr>* end <name>?
|
648
|
* expr :: ( loop <name>? <block_sig> <instr>* )
|
649
|
* block_sig :: ( result <type>* )*
|
650
|
*
|
651
|
*/
|
652
|
|
653
|
|
654
|
function parseLoop() {
|
655
|
var label = t.identifier(getUniqueName("loop"));
|
656
|
var blockResult;
|
657
|
var instr = [];
|
658
|
|
659
|
if (token.type === _tokenizer.tokens.identifier) {
|
660
|
label = identifierFromToken(token);
|
661
|
eatToken();
|
662
|
} else {
|
663
|
label = t.withRaw(label, ""); // preserve anonymous
|
664
|
}
|
665
|
|
666
|
while (token.type === _tokenizer.tokens.openParen) {
|
667
|
eatToken();
|
668
|
|
669
|
if (lookaheadAndCheck(_tokenizer.keywords.result) === true) {
|
670
|
eatToken();
|
671
|
blockResult = token.value;
|
672
|
eatToken();
|
673
|
} else if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
|
674
|
) {
|
675
|
// Instruction
|
676
|
instr.push(parseFuncInstr());
|
677
|
} else {
|
678
|
throw function () {
|
679
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in loop body" + ", given " + tokenToString(token));
|
680
|
}();
|
681
|
}
|
682
|
|
683
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
684
|
}
|
685
|
|
686
|
return t.loopInstruction(label, blockResult, instr);
|
687
|
}
|
688
|
|
689
|
function parseCallIndirect() {
|
690
|
var typeRef;
|
691
|
var params = [];
|
692
|
var results = [];
|
693
|
var instrs = [];
|
694
|
|
695
|
while (token.type !== _tokenizer.tokens.closeParen) {
|
696
|
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.type)) {
|
697
|
eatToken(); // (
|
698
|
|
699
|
eatToken(); // type
|
700
|
|
701
|
typeRef = parseTypeReference();
|
702
|
} else if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.param)) {
|
703
|
eatToken(); // (
|
704
|
|
705
|
eatToken(); // param
|
706
|
|
707
|
/**
|
708
|
* Params can be empty:
|
709
|
* (params)`
|
710
|
*/
|
711
|
|
712
|
if (token.type !== _tokenizer.tokens.closeParen) {
|
713
|
params.push.apply(params, _toConsumableArray(parseFuncParam()));
|
714
|
}
|
715
|
} else if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.result)) {
|
716
|
eatToken(); // (
|
717
|
|
718
|
eatToken(); // result
|
719
|
|
720
|
/**
|
721
|
* Results can be empty:
|
722
|
* (result)`
|
723
|
*/
|
724
|
|
725
|
if (token.type !== _tokenizer.tokens.closeParen) {
|
726
|
results.push.apply(results, _toConsumableArray(parseFuncResult()));
|
727
|
}
|
728
|
} else {
|
729
|
eatTokenOfType(_tokenizer.tokens.openParen);
|
730
|
instrs.push(parseFuncInstr());
|
731
|
}
|
732
|
|
733
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
734
|
}
|
735
|
|
736
|
return t.callIndirectInstruction(typeRef !== undefined ? typeRef : t.signature(params, results), instrs);
|
737
|
}
|
738
|
/**
|
739
|
* Parses an export instruction
|
740
|
*
|
741
|
* WAT:
|
742
|
*
|
743
|
* export: ( export <string> <exkind> )
|
744
|
* exkind: ( func <var> )
|
745
|
* ( global <var> )
|
746
|
* ( table <var> )
|
747
|
* ( memory <var> )
|
748
|
* var: <nat> | <name>
|
749
|
*
|
750
|
*/
|
751
|
|
752
|
|
753
|
function parseExport() {
|
754
|
if (token.type !== _tokenizer.tokens.string) {
|
755
|
throw new Error("Expected string after export, got: " + token.type);
|
756
|
}
|
757
|
|
758
|
var name = token.value;
|
759
|
eatToken();
|
760
|
var moduleExportDescr = parseModuleExportDescr();
|
761
|
return t.moduleExport(name, moduleExportDescr);
|
762
|
}
|
763
|
|
764
|
function parseModuleExportDescr() {
|
765
|
var startLoc = getStartLoc();
|
766
|
var type = "";
|
767
|
var index;
|
768
|
eatTokenOfType(_tokenizer.tokens.openParen);
|
769
|
|
770
|
while (token.type !== _tokenizer.tokens.closeParen) {
|
771
|
if (isKeyword(token, _tokenizer.keywords.func)) {
|
772
|
type = "Func";
|
773
|
eatToken();
|
774
|
index = parseExportIndex(token);
|
775
|
} else if (isKeyword(token, _tokenizer.keywords.table)) {
|
776
|
type = "Table";
|
777
|
eatToken();
|
778
|
index = parseExportIndex(token);
|
779
|
} else if (isKeyword(token, _tokenizer.keywords.global)) {
|
780
|
type = "Global";
|
781
|
eatToken();
|
782
|
index = parseExportIndex(token);
|
783
|
} else if (isKeyword(token, _tokenizer.keywords.memory)) {
|
784
|
type = "Memory";
|
785
|
eatToken();
|
786
|
index = parseExportIndex(token);
|
787
|
}
|
788
|
|
789
|
eatToken();
|
790
|
}
|
791
|
|
792
|
if (type === "") {
|
793
|
throw new Error("Unknown export type");
|
794
|
}
|
795
|
|
796
|
if (index === undefined) {
|
797
|
throw new Error("Exported function must have a name");
|
798
|
}
|
799
|
|
800
|
var node = t.moduleExportDescr(type, index);
|
801
|
var endLoc = getEndLoc();
|
802
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
803
|
return t.withLoc(node, endLoc, startLoc);
|
804
|
}
|
805
|
|
806
|
function parseModule() {
|
807
|
var name = null;
|
808
|
var isBinary = false;
|
809
|
var isQuote = false;
|
810
|
var moduleFields = [];
|
811
|
|
812
|
if (token.type === _tokenizer.tokens.identifier) {
|
813
|
name = token.value;
|
814
|
eatToken();
|
815
|
}
|
816
|
|
817
|
if (hasPlugin("wast") && token.type === _tokenizer.tokens.name && token.value === "binary") {
|
818
|
eatToken();
|
819
|
isBinary = true;
|
820
|
}
|
821
|
|
822
|
if (hasPlugin("wast") && token.type === _tokenizer.tokens.name && token.value === "quote") {
|
823
|
eatToken();
|
824
|
isQuote = true;
|
825
|
}
|
826
|
|
827
|
if (isBinary === true) {
|
828
|
var blob = [];
|
829
|
|
830
|
while (token.type === _tokenizer.tokens.string) {
|
831
|
blob.push(token.value);
|
832
|
eatToken();
|
833
|
maybeIgnoreComment();
|
834
|
}
|
835
|
|
836
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
837
|
return t.binaryModule(name, blob);
|
838
|
}
|
839
|
|
840
|
if (isQuote === true) {
|
841
|
var string = [];
|
842
|
|
843
|
while (token.type === _tokenizer.tokens.string) {
|
844
|
string.push(token.value);
|
845
|
eatToken();
|
846
|
}
|
847
|
|
848
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
849
|
return t.quoteModule(name, string);
|
850
|
}
|
851
|
|
852
|
while (token.type !== _tokenizer.tokens.closeParen) {
|
853
|
moduleFields.push(walk());
|
854
|
|
855
|
if (state.registredExportedElements.length > 0) {
|
856
|
state.registredExportedElements.forEach(function (decl) {
|
857
|
moduleFields.push(t.moduleExport(decl.name, t.moduleExportDescr(decl.exportType, decl.id)));
|
858
|
});
|
859
|
state.registredExportedElements = [];
|
860
|
}
|
861
|
|
862
|
token = tokensList[current];
|
863
|
}
|
864
|
|
865
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
866
|
return t.module(name, moduleFields);
|
867
|
}
|
868
|
/**
|
869
|
* Parses the arguments of an instruction
|
870
|
*/
|
871
|
|
872
|
|
873
|
function parseFuncInstrArguments(signature) {
|
874
|
var args = [];
|
875
|
var namedArgs = {};
|
876
|
var signaturePtr = 0;
|
877
|
|
878
|
while (token.type === _tokenizer.tokens.name || isKeyword(token, _tokenizer.keywords.offset)) {
|
879
|
var key = token.value;
|
880
|
eatToken();
|
881
|
eatTokenOfType(_tokenizer.tokens.equal);
|
882
|
var value = void 0;
|
883
|
|
884
|
if (token.type === _tokenizer.tokens.number) {
|
885
|
value = t.numberLiteralFromRaw(token.value);
|
886
|
} else {
|
887
|
throw new Error("Unexpected type for argument: " + token.type);
|
888
|
}
|
889
|
|
890
|
namedArgs[key] = value;
|
891
|
eatToken();
|
892
|
} // $FlowIgnore
|
893
|
|
894
|
|
895
|
var signatureLength = signature.vector ? Infinity : signature.length;
|
896
|
|
897
|
while (token.type !== _tokenizer.tokens.closeParen && ( // $FlowIgnore
|
898
|
token.type === _tokenizer.tokens.openParen || signaturePtr < signatureLength)) {
|
899
|
if (token.type === _tokenizer.tokens.identifier) {
|
900
|
args.push(t.identifier(token.value));
|
901
|
eatToken();
|
902
|
} else if (token.type === _tokenizer.tokens.valtype) {
|
903
|
// Handle locals
|
904
|
args.push(t.valtypeLiteral(token.value));
|
905
|
eatToken();
|
906
|
} else if (token.type === _tokenizer.tokens.string) {
|
907
|
args.push(t.stringLiteral(token.value));
|
908
|
eatToken();
|
909
|
} else if (token.type === _tokenizer.tokens.number) {
|
910
|
args.push( // TODO(sven): refactor the type signature handling
|
911
|
// https://github.com/xtuc/webassemblyjs/pull/129 is a good start
|
912
|
t.numberLiteralFromRaw(token.value, // $FlowIgnore
|
913
|
signature[signaturePtr] || "f64")); // $FlowIgnore
|
914
|
|
915
|
if (!signature.vector) {
|
916
|
++signaturePtr;
|
917
|
}
|
918
|
|
919
|
eatToken();
|
920
|
} else if (token.type === _tokenizer.tokens.openParen) {
|
921
|
/**
|
922
|
* Maybe some nested instructions
|
923
|
*/
|
924
|
eatToken(); // Instruction
|
925
|
|
926
|
if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
|
927
|
) {
|
928
|
// $FlowIgnore
|
929
|
args.push(parseFuncInstr());
|
930
|
} else {
|
931
|
throw function () {
|
932
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in nested instruction" + ", given " + tokenToString(token));
|
933
|
}();
|
934
|
}
|
935
|
|
936
|
if (token.type === _tokenizer.tokens.closeParen) {
|
937
|
eatToken();
|
938
|
}
|
939
|
} else {
|
940
|
throw function () {
|
941
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in instruction argument" + ", given " + tokenToString(token));
|
942
|
}();
|
943
|
}
|
944
|
}
|
945
|
|
946
|
return {
|
947
|
args: args,
|
948
|
namedArgs: namedArgs
|
949
|
};
|
950
|
}
|
951
|
/**
|
952
|
* Parses an instruction
|
953
|
*
|
954
|
* WAT:
|
955
|
*
|
956
|
* instr :: plaininst
|
957
|
* blockinstr
|
958
|
*
|
959
|
* blockinstr :: 'block' I:label rt:resulttype (in:instr*) 'end' id?
|
960
|
* 'loop' I:label rt:resulttype (in:instr*) 'end' id?
|
961
|
* 'if' I:label rt:resulttype (in:instr*) 'else' id? (in2:intr*) 'end' id?
|
962
|
*
|
963
|
* plaininst :: 'unreachable'
|
964
|
* 'nop'
|
965
|
* 'br' l:labelidx
|
966
|
* 'br_if' l:labelidx
|
967
|
* 'br_table' l*:vec(labelidx) ln:labelidx
|
968
|
* 'return'
|
969
|
* 'call' x:funcidx
|
970
|
* 'call_indirect' x, I:typeuse
|
971
|
*
|
972
|
* WAST:
|
973
|
*
|
974
|
* instr:
|
975
|
* <expr>
|
976
|
* <op>
|
977
|
* block <name>? <block_sig> <instr>* end <name>?
|
978
|
* loop <name>? <block_sig> <instr>* end <name>?
|
979
|
* if <name>? <block_sig> <instr>* end <name>?
|
980
|
* if <name>? <block_sig> <instr>* else <name>? <instr>* end <name>?
|
981
|
*
|
982
|
* expr:
|
983
|
* ( <op> )
|
984
|
* ( <op> <expr>+ )
|
985
|
* ( block <name>? <block_sig> <instr>* )
|
986
|
* ( loop <name>? <block_sig> <instr>* )
|
987
|
* ( if <name>? <block_sig> ( then <instr>* ) ( else <instr>* )? )
|
988
|
* ( if <name>? <block_sig> <expr>+ ( then <instr>* ) ( else <instr>* )? )
|
989
|
*
|
990
|
* op:
|
991
|
* unreachable
|
992
|
* nop
|
993
|
* br <var>
|
994
|
* br_if <var>
|
995
|
* br_table <var>+
|
996
|
* return
|
997
|
* call <var>
|
998
|
* call_indirect <func_sig>
|
999
|
* drop
|
1000
|
* select
|
1001
|
* get_local <var>
|
1002
|
* set_local <var>
|
1003
|
* tee_local <var>
|
1004
|
* get_global <var>
|
1005
|
* set_global <var>
|
1006
|
* <type>.load((8|16|32)_<sign>)? <offset>? <align>?
|
1007
|
* <type>.store(8|16|32)? <offset>? <align>?
|
1008
|
* current_memory
|
1009
|
* grow_memory
|
1010
|
* <type>.const <value>
|
1011
|
* <type>.<unop>
|
1012
|
* <type>.<binop>
|
1013
|
* <type>.<testop>
|
1014
|
* <type>.<relop>
|
1015
|
* <type>.<cvtop>/<type>
|
1016
|
*
|
1017
|
* func_type: ( type <var> )? <param>* <result>*
|
1018
|
*/
|
1019
|
|
1020
|
|
1021
|
/**
 * Parses a single instruction inside a function body.
 *
 * Handles, in order:
 *  - plain instructions, with or without an object prefix ("i32.const",
 *    "nop", ...), delegating argument parsing to parseFuncInstrArguments;
 *  - block-like keywords: loop, block, if, call_indirect, call;
 *  - a nested module (WAST extension), cast into the instruction flow.
 *
 * @returns an AST instruction node (with source locations where available)
 * @throws when the current token cannot start an instruction
 */
function parseFuncInstr() {
  var startLoc = getStartLoc();
  maybeIgnoreComment();
  /**
   * A simple instruction
   */

  if (token.type === _tokenizer.tokens.name || token.type === _tokenizer.tokens.valtype) {
    var _name2 = token.value;
    var object;
    eatToken();

    // "i32.const"-style: the first name was the object, the dot is
    // followed by the actual instruction name.
    if (token.type === _tokenizer.tokens.dot) {
      object = _name2;
      eatToken();

      if (token.type !== _tokenizer.tokens.name) {
        throw new TypeError("Unknown token: " + token.type + ", name expected");
      }

      _name2 = token.value;
      eatToken();
    }

    // No arguments: the instruction ends right here.
    if (token.type === _tokenizer.tokens.closeParen) {
      var _endLoc = token.loc.end;

      if (typeof object === "undefined") {
        return t.withLoc(t.instruction(_name2), _endLoc, startLoc);
      } else {
        return t.withLoc(t.objectInstruction(_name2, object, []), _endLoc, startLoc);
      }
    }

    var signature = t.signatureForOpcode(object || "", _name2);

    var _parseFuncInstrArgume = parseFuncInstrArguments(signature),
        _args = _parseFuncInstrArgume.args,
        _namedArgs = _parseFuncInstrArgume.namedArgs;

    var endLoc = token.loc.end;

    if (typeof object === "undefined") {
      return t.withLoc(t.instruction(_name2, _args, _namedArgs), endLoc, startLoc);
    } else {
      return t.withLoc(t.objectInstruction(_name2, object, _args, _namedArgs), endLoc, startLoc);
    }
  } else if (isKeyword(token, _tokenizer.keywords.loop)) {
    /**
     * Else a instruction with a keyword (loop or block)
     */
    eatToken(); // keyword

    return parseLoop();
  } else if (isKeyword(token, _tokenizer.keywords.block)) {
    eatToken(); // keyword

    return parseBlock();
  } else if (isKeyword(token, _tokenizer.keywords.call_indirect)) {
    eatToken(); // keyword

    return parseCallIndirect();
  } else if (isKeyword(token, _tokenizer.keywords.call)) {
    eatToken(); // keyword

    var index;

    if (token.type === _tokenizer.tokens.identifier) {
      index = identifierFromToken(token);
      eatToken();
    } else if (token.type === _tokenizer.tokens.number) {
      index = t.indexLiteral(token.value);
      eatToken();
    }

    var instrArgs = []; // Nested instruction

    while (token.type === _tokenizer.tokens.openParen) {
      eatToken();
      instrArgs.push(parseFuncInstr());
      eatTokenOfType(_tokenizer.tokens.closeParen);
    }

    // `call` requires a function index or identifier.
    // (typo fix: previous message read "instruciton")
    if (typeof index === "undefined") {
      throw new Error("Missing argument in call instruction");
    }

    if (instrArgs.length > 0) {
      return t.callInstruction(index, instrArgs);
    } else {
      return t.callInstruction(index);
    }
  } else if (isKeyword(token, _tokenizer.keywords.if)) {
    eatToken(); // Keyword

    return parseIf();
  } else if (isKeyword(token, _tokenizer.keywords.module) && hasPlugin("wast")) {
    eatToken(); // In WAST you can have a module as an instruction's argument
    // we will cast it into a instruction to not break the flow
    // $FlowIgnore

    var module = parseModule();
    return module;
  } else {
    throw function () {
      return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected instruction in function body" + ", given " + tokenToString(token));
    }();
  }
}
|
1130
|
/*
|
1131
|
* Parses a function
|
1132
|
*
|
1133
|
* WAT:
|
1134
|
*
|
1135
|
* functype :: ( 'func' t1:vec(param) t2:vec(result) )
|
1136
|
* param :: ( 'param' id? t:valtype )
|
1137
|
* result :: ( 'result' t:valtype )
|
1138
|
*
|
1139
|
* WAST:
|
1140
|
*
|
1141
|
* func :: ( func <name>? <func_sig> <local>* <instr>* )
|
1142
|
* ( func <name>? ( export <string> ) <...> )
|
1143
|
* ( func <name>? ( import <string> <string> ) <func_sig> )
|
1144
|
* func_sig :: ( type <var> )? <param>* <result>*
|
1145
|
* param :: ( param <type>* ) | ( param <name> <type> )
|
1146
|
* result :: ( result <type>* )
|
1147
|
* local :: ( local <type>* ) | ( local <name> <type> )
|
1148
|
*
|
1149
|
*/
|
1150
|
|
1151
|
|
1152
|
/**
 * Parses a `func` definition.
 *
 * Reads an optional identifier, then consumes (param ...), (result ...),
 * (export ...) and (type ...) groups plus body instructions until the
 * token stream leaves the function.
 *
 * @returns a Func AST node carrying name, signature (or type reference)
 *          and body
 * @throws when a token inside the func body is not recognized
 */
function parseFunc() {
  var funcName = t.identifier(getUniqueName("func"));
  var signatureRef;
  var body = [];
  var params = [];
  var results = []; // name

  if (token.type === _tokenizer.tokens.identifier) {
    funcName = identifierFromToken(token);
    eatToken();
  } else {
    funcName = t.withRaw(funcName, ""); // preserve anonymous
  }

  maybeIgnoreComment();

  while (token.type === _tokenizer.tokens.openParen || token.type === _tokenizer.tokens.name || token.type === _tokenizer.tokens.valtype) {
    // Paren-less instructions are parsed directly into the body.
    if (token.type === _tokenizer.tokens.name || token.type === _tokenizer.tokens.valtype) {
      body.push(parseFuncInstr());
      continue;
    }

    eatToken();

    if (lookaheadAndCheck(_tokenizer.keywords.param) === true) {
      eatToken();
      params = params.concat(parseFuncParam());
    } else if (lookaheadAndCheck(_tokenizer.keywords.result) === true) {
      eatToken();
      results = results.concat(parseFuncResult());
    } else if (lookaheadAndCheck(_tokenizer.keywords.export) === true) {
      eatToken();
      parseFuncExport(funcName);
    } else if (lookaheadAndCheck(_tokenizer.keywords.type) === true) {
      eatToken();
      signatureRef = parseTypeReference();
    } else if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
    ) {
      // Parenthesized instruction
      body.push(parseFuncInstr());
    } else {
      throw new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in func body" + ", given " + tokenToString(token));
    }

    eatTokenOfType(_tokenizer.tokens.closeParen);
  }

  // A (type $x) reference wins over an inline signature.
  return t.func(funcName, signatureRef !== undefined ? signatureRef : t.signature(params, results), body);
}
|
1204
|
/**
|
1205
|
* Parses shorthand export in func
|
1206
|
*
|
1207
|
* export :: ( export <string> )
|
1208
|
*/
|
1209
|
|
1210
|
|
1211
|
/**
 * Parses the shorthand export form inside a func:
 *
 *   export :: ( export <string> )
 *
 * The export is syntactic sugar; a ModuleField export is registred on
 * the parser state and materialized later, pointing at the (possibly
 * generated) function identifier.
 *
 * @param funcId identifier node of the enclosing function
 * @throws when the export name is not a string token
 */
function parseFuncExport(funcId) {
  if (token.type !== _tokenizer.tokens.string) {
    throw new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Function export expected a string" + ", given " + tokenToString(token));
  }

  var exportName = token.value;
  eatToken();

  // Re-wrap the function's identifier so the deferred export points at it.
  var exportedId = t.identifier(funcId.value);

  state.registredExportedElements.push({
    exportType: "Func",
    name: exportName,
    id: exportedId
  });
}
|
1234
|
/**
|
1235
|
* Parses a type instruction
|
1236
|
*
|
1237
|
* WAST:
|
1238
|
*
|
1239
|
* typedef: ( type <name>? ( func <param>* <result>* ) )
|
1240
|
*/
|
1241
|
|
1242
|
|
1243
|
/**
 * Parses a type definition:
 *
 *   typedef: ( type <name>? ( func <param>* <result>* ) )
 *
 * @returns a TypeInstruction node with an optional identifier and the
 *          parsed (possibly empty) signature
 */
function parseType() {
  var typeId;
  var paramTypes = [];
  var resultTypes = [];

  if (token.type === _tokenizer.tokens.identifier) {
    typeId = identifierFromToken(token);
    eatToken();
  }

  if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.func)) {
    eatToken(); // (
    eatToken(); // func

    // "(func)" — empty signature, nothing more to read.
    if (token.type === _tokenizer.tokens.closeParen) {
      eatToken();
      return t.typeInstruction(typeId, t.signature([], []));
    }

    if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.param)) {
      eatToken(); // (
      eatToken(); // param
      paramTypes = parseFuncParam();
      eatTokenOfType(_tokenizer.tokens.closeParen);
    }

    if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.result)) {
      eatToken(); // (
      eatToken(); // result
      resultTypes = parseFuncResult();
      eatTokenOfType(_tokenizer.tokens.closeParen);
    }

    eatTokenOfType(_tokenizer.tokens.closeParen);
  }

  return t.typeInstruction(typeId, t.signature(paramTypes, resultTypes));
}
|
1287
|
/**
|
1288
|
* Parses a function result
|
1289
|
*
|
1290
|
* WAST:
|
1291
|
*
|
1292
|
* result :: ( result <type>* )
|
1293
|
*/
|
1294
|
|
1295
|
|
1296
|
/**
 * Parses a function result list:
 *
 *   result :: ( result <type>* )
 *
 * Consumes valtype tokens until the closing paren (not consumed here).
 *
 * @returns array of valtype strings
 * @throws when a non-valtype token appears before the closing paren
 */
function parseFuncResult() {
  var types = [];

  while (token.type !== _tokenizer.tokens.closeParen) {
    if (token.type !== _tokenizer.tokens.valtype) {
      throw new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in func result" + ", given " + tokenToString(token));
    }

    types.push(token.value);
    eatToken();
  }

  return types;
}
|
1313
|
/**
|
1314
|
* Parses a type reference
|
1315
|
*
|
1316
|
*/
|
1317
|
|
1318
|
|
1319
|
/**
 * Parses a type reference — either an identifier ($name) or a numeric
 * index.
 *
 * @returns an Identifier or NumberLiteral node, or undefined when the
 *          current token is neither
 */
function parseTypeReference() {
  var reference;

  if (token.type === _tokenizer.tokens.identifier) {
    reference = identifierFromToken(token);
    eatToken();
  } else if (token.type === _tokenizer.tokens.number) {
    reference = t.numberLiteralFromRaw(token.value);
    eatToken();
  }

  return reference;
}
|
1332
|
/**
|
1333
|
* Parses a global instruction
|
1334
|
*
|
1335
|
* WAST:
|
1336
|
*
|
1337
|
* global: ( global <name>? <global_sig> <instr>* )
|
1338
|
* ( global <name>? ( export <string> ) <...> )
|
1339
|
* ( global <name>? ( import <string> <string> ) <global_sig> )
|
1340
|
*
|
1341
|
* global_sig: <type> | ( mut <type> )
|
1342
|
*
|
1343
|
*/
|
1344
|
|
1345
|
|
1346
|
/**
 * Parses a global definition:
 *
 *   global: ( global <name>? <global_sig> <instr>* )
 *           ( global <name>? ( export <string> ) <...> )
 *           ( global <name>? ( import <string> <string> ) <global_sig> )
 *
 *   global_sig: <type> | ( mut <type> )
 *
 * Export shorthands are registred on the parser state; import shorthands
 * become a ModuleImport node prepended to the init expression.
 *
 * @returns a Global AST node
 * @throws when the global type is missing or not mut-wrapped
 */
function parseGlobal() {
  var globalName = t.identifier(getUniqueName("global"));
  var descr;

  // Filled in when the shorthand import form is present.
  var importInfo = null;
  maybeIgnoreComment();

  if (token.type === _tokenizer.tokens.identifier) {
    globalName = identifierFromToken(token);
    eatToken();
  } else {
    globalName = t.withRaw(globalName, ""); // preserve anonymous
  }

  /**
   * maybe export
   */
  if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.export)) {
    eatToken(); // (
    eatToken(); // export

    var exportName = token.value;
    eatTokenOfType(_tokenizer.tokens.string);
    state.registredExportedElements.push({
      exportType: "Global",
      name: exportName,
      id: globalName
    });
    eatTokenOfType(_tokenizer.tokens.closeParen);
  }

  /**
   * maybe import
   */
  if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.import)) {
    eatToken(); // (
    eatToken(); // import

    var moduleName = token.value;
    eatTokenOfType(_tokenizer.tokens.string);

    var importName = token.value;
    eatTokenOfType(_tokenizer.tokens.string);

    importInfo = {
      module: moduleName,
      name: importName,
      descr: undefined
    };
    eatTokenOfType(_tokenizer.tokens.closeParen);
  }

  /**
   * global_sig
   */
  if (token.type === _tokenizer.tokens.valtype) {
    // Bare valtype => immutable ("const") global.
    descr = t.globalType(token.value, "const");
    eatToken();
  } else if (token.type === _tokenizer.tokens.openParen) {
    eatToken(); // (

    // Only "(mut <type>)" is supported inside parens.
    if (isKeyword(token, _tokenizer.keywords.mut) === false) {
      throw new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unsupported global type, expected mut" + ", given " + tokenToString(token));
    }

    eatToken(); // mut

    descr = t.globalType(token.value, "var");
    eatToken();
    eatTokenOfType(_tokenizer.tokens.closeParen);
  }

  if (descr === undefined) {
    throw new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Could not determine global type" + ", given " + tokenToString(token));
  }

  maybeIgnoreComment();
  var initInstrs = [];

  if (importInfo != null) {
    importInfo.descr = descr;
    initInstrs.push(t.moduleImport(importInfo.module, importInfo.name, importInfo.descr));
  }

  /**
   * instr*
   */
  while (token.type === _tokenizer.tokens.openParen) {
    eatToken();
    initInstrs.push(parseFuncInstr());
    eatTokenOfType(_tokenizer.tokens.closeParen);
  }

  return t.global(descr, initInstrs, globalName);
}
|
1449
|
/**
|
1450
|
* Parses a function param
|
1451
|
*
|
1452
|
* WAST:
|
1453
|
*
|
1454
|
* param :: ( param <type>* ) | ( param <name> <type> )
|
1455
|
*/
|
1456
|
|
1457
|
|
1458
|
/**
 * Parses a function param group:
 *
 *   param :: ( param <type>* ) | ( param <name> <type> )
 *
 * @returns array of { id, valtype } records; empty when no valtype
 *          follows (such a param is silently ignored, matching the
 *          original behavior)
 */
function parseFuncParam() {
  var parsed = [];
  var paramId;

  if (token.type === _tokenizer.tokens.identifier) {
    paramId = token.value;
    eatToken();
  }

  // No valtype after the optional name: nothing to record.
  if (token.type !== _tokenizer.tokens.valtype) {
    return parsed;
  }

  parsed.push({
    id: paramId,
    valtype: token.value
  });
  eatToken();

  /**
   * Shorthand notation for multiple anonymous parameters
   * @see https://webassembly.github.io/spec/core/text/types.html#function-types
   * @see https://github.com/xtuc/webassemblyjs/issues/6
   */
  if (paramId === undefined) {
    while (token.type === _tokenizer.tokens.valtype) {
      parsed.push({
        id: undefined,
        valtype: token.value
      });
      eatToken();
    }
  }

  return parsed;
}
|
1496
|
/**
|
1497
|
* Parses an element segments instruction
|
1498
|
*
|
1499
|
* WAST:
|
1500
|
*
|
1501
|
* elem: ( elem <var>? (offset <instr>* ) <var>* )
|
1502
|
* ( elem <var>? <expr> <var>* )
|
1503
|
*
|
1504
|
* var: <nat> | <name>
|
1505
|
*/
|
1506
|
|
1507
|
|
1508
|
/**
 * Parses an element segment:
 *
 *   elem: ( elem <var>? (offset <instr>* ) <var>* )
 *         ( elem <var>? <expr> <var>* )
 *
 *   var: <nat> | <name>
 *
 * @returns an Elem AST node (table index, offset expression, func refs)
 * @throws on any token that fits none of the forms above
 */
function parseElem() {
  var table = t.indexLiteral(0);
  var offsetInstrs = [];
  var funcRefs = [];

  if (token.type === _tokenizer.tokens.identifier) {
    table = identifierFromToken(token);
    eatToken();
  }

  if (token.type === _tokenizer.tokens.number) {
    table = t.indexLiteral(token.value);
    eatToken();
  }

  while (token.type !== _tokenizer.tokens.closeParen) {
    if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.offset)) {
      eatToken(); // (
      eatToken(); // offset

      // Each offset entry is itself a parenthesized instruction.
      while (token.type !== _tokenizer.tokens.closeParen) {
        eatTokenOfType(_tokenizer.tokens.openParen);
        offsetInstrs.push(parseFuncInstr());
        eatTokenOfType(_tokenizer.tokens.closeParen);
      }

      eatTokenOfType(_tokenizer.tokens.closeParen);
    } else if (token.type === _tokenizer.tokens.identifier) {
      funcRefs.push(t.identifier(token.value));
      eatToken();
    } else if (token.type === _tokenizer.tokens.number) {
      funcRefs.push(t.indexLiteral(token.value));
      eatToken();
    } else if (token.type === _tokenizer.tokens.openParen) {
      eatToken(); // (

      // Bare expression form: treated as part of the offset.
      offsetInstrs.push(parseFuncInstr());
      eatTokenOfType(_tokenizer.tokens.closeParen);
    } else {
      throw new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unsupported token in elem" + ", given " + tokenToString(token));
    }
  }

  return t.elem(table, offsetInstrs, funcRefs);
}
|
1556
|
/**
|
1557
|
* Parses the start instruction in a module
|
1558
|
*
|
1559
|
* WAST:
|
1560
|
*
|
1561
|
* start: ( start <var> )
|
1562
|
* var: <nat> | <name>
|
1563
|
*
|
1564
|
* WAT:
|
1565
|
* start ::= ‘(’ ‘start’ x:funcidx ‘)’
|
1566
|
*/
|
1567
|
|
1568
|
|
1569
|
/**
 * Parses the start section of a module:
 *
 *   start ::= '(' 'start' x:funcidx ')'
 *
 * The function index may be given as an identifier or a number.
 *
 * @returns a Start AST node
 * @throws when the token is neither an identifier nor a number
 */
function parseStart() {
  if (token.type === _tokenizer.tokens.identifier) {
    var funcId = identifierFromToken(token);
    eatToken();
    return t.start(funcId);
  }

  if (token.type === _tokenizer.tokens.number) {
    var funcIndex = t.indexLiteral(token.value);
    eatToken();
    return t.start(funcIndex);
  }

  throw new Error("Unknown start, token: " + tokenToString(token));
}
|
1585
|
|
1586
|
if (token.type === _tokenizer.tokens.openParen) {
|
1587
|
eatToken();
|
1588
|
var startLoc = getStartLoc();
|
1589
|
|
1590
|
if (isKeyword(token, _tokenizer.keywords.export)) {
|
1591
|
eatToken();
|
1592
|
var node = parseExport();
|
1593
|
|
1594
|
var _endLoc2 = getEndLoc();
|
1595
|
|
1596
|
return t.withLoc(node, _endLoc2, startLoc);
|
1597
|
}
|
1598
|
|
1599
|
if (isKeyword(token, _tokenizer.keywords.loop)) {
|
1600
|
eatToken();
|
1601
|
|
1602
|
var _node = parseLoop();
|
1603
|
|
1604
|
var _endLoc3 = getEndLoc();
|
1605
|
|
1606
|
return t.withLoc(_node, _endLoc3, startLoc);
|
1607
|
}
|
1608
|
|
1609
|
if (isKeyword(token, _tokenizer.keywords.func)) {
|
1610
|
eatToken();
|
1611
|
|
1612
|
var _node2 = parseFunc();
|
1613
|
|
1614
|
var _endLoc4 = getEndLoc();
|
1615
|
|
1616
|
maybeIgnoreComment();
|
1617
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1618
|
return t.withLoc(_node2, _endLoc4, startLoc);
|
1619
|
}
|
1620
|
|
1621
|
if (isKeyword(token, _tokenizer.keywords.module)) {
|
1622
|
eatToken();
|
1623
|
|
1624
|
var _node3 = parseModule();
|
1625
|
|
1626
|
var _endLoc5 = getEndLoc();
|
1627
|
|
1628
|
return t.withLoc(_node3, _endLoc5, startLoc);
|
1629
|
}
|
1630
|
|
1631
|
if (isKeyword(token, _tokenizer.keywords.import)) {
|
1632
|
eatToken();
|
1633
|
|
1634
|
var _node4 = parseImport();
|
1635
|
|
1636
|
var _endLoc6 = getEndLoc();
|
1637
|
|
1638
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1639
|
return t.withLoc(_node4, _endLoc6, startLoc);
|
1640
|
}
|
1641
|
|
1642
|
if (isKeyword(token, _tokenizer.keywords.block)) {
|
1643
|
eatToken();
|
1644
|
|
1645
|
var _node5 = parseBlock();
|
1646
|
|
1647
|
var _endLoc7 = getEndLoc();
|
1648
|
|
1649
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1650
|
return t.withLoc(_node5, _endLoc7, startLoc);
|
1651
|
}
|
1652
|
|
1653
|
if (isKeyword(token, _tokenizer.keywords.memory)) {
|
1654
|
eatToken();
|
1655
|
|
1656
|
var _node6 = parseMemory();
|
1657
|
|
1658
|
var _endLoc8 = getEndLoc();
|
1659
|
|
1660
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1661
|
return t.withLoc(_node6, _endLoc8, startLoc);
|
1662
|
}
|
1663
|
|
1664
|
if (isKeyword(token, _tokenizer.keywords.data)) {
|
1665
|
eatToken();
|
1666
|
|
1667
|
var _node7 = parseData();
|
1668
|
|
1669
|
var _endLoc9 = getEndLoc();
|
1670
|
|
1671
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1672
|
return t.withLoc(_node7, _endLoc9, startLoc);
|
1673
|
}
|
1674
|
|
1675
|
if (isKeyword(token, _tokenizer.keywords.table)) {
|
1676
|
eatToken();
|
1677
|
|
1678
|
var _node8 = parseTable();
|
1679
|
|
1680
|
var _endLoc10 = getEndLoc();
|
1681
|
|
1682
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1683
|
return t.withLoc(_node8, _endLoc10, startLoc);
|
1684
|
}
|
1685
|
|
1686
|
if (isKeyword(token, _tokenizer.keywords.global)) {
|
1687
|
eatToken();
|
1688
|
|
1689
|
var _node9 = parseGlobal();
|
1690
|
|
1691
|
var _endLoc11 = getEndLoc();
|
1692
|
|
1693
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1694
|
return t.withLoc(_node9, _endLoc11, startLoc);
|
1695
|
}
|
1696
|
|
1697
|
if (isKeyword(token, _tokenizer.keywords.type)) {
|
1698
|
eatToken();
|
1699
|
|
1700
|
var _node10 = parseType();
|
1701
|
|
1702
|
var _endLoc12 = getEndLoc();
|
1703
|
|
1704
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1705
|
return t.withLoc(_node10, _endLoc12, startLoc);
|
1706
|
}
|
1707
|
|
1708
|
if (isKeyword(token, _tokenizer.keywords.start)) {
|
1709
|
eatToken();
|
1710
|
|
1711
|
var _node11 = parseStart();
|
1712
|
|
1713
|
var _endLoc13 = getEndLoc();
|
1714
|
|
1715
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1716
|
return t.withLoc(_node11, _endLoc13, startLoc);
|
1717
|
}
|
1718
|
|
1719
|
if (isKeyword(token, _tokenizer.keywords.elem)) {
|
1720
|
eatToken();
|
1721
|
|
1722
|
var _node12 = parseElem();
|
1723
|
|
1724
|
var _endLoc14 = getEndLoc();
|
1725
|
|
1726
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1727
|
return t.withLoc(_node12, _endLoc14, startLoc);
|
1728
|
}
|
1729
|
|
1730
|
var instruction = parseFuncInstr();
|
1731
|
var endLoc = getEndLoc();
|
1732
|
maybeIgnoreComment();
|
1733
|
|
1734
|
if (_typeof(instruction) === "object") {
|
1735
|
if (typeof token !== "undefined") {
|
1736
|
eatTokenOfType(_tokenizer.tokens.closeParen);
|
1737
|
}
|
1738
|
|
1739
|
return t.withLoc(instruction, endLoc, startLoc);
|
1740
|
}
|
1741
|
}
|
1742
|
|
1743
|
if (token.type === _tokenizer.tokens.comment) {
|
1744
|
var _startLoc = getStartLoc();
|
1745
|
|
1746
|
var builder = token.opts.type === "leading" ? t.leadingComment : t.blockComment;
|
1747
|
|
1748
|
var _node13 = builder(token.value);
|
1749
|
|
1750
|
eatToken(); // comment
|
1751
|
|
1752
|
var _endLoc15 = getEndLoc();
|
1753
|
|
1754
|
return t.withLoc(_node13, _endLoc15, _startLoc);
|
1755
|
}
|
1756
|
|
1757
|
throw function () {
|
1758
|
return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unknown token" + ", given " + tokenToString(token));
|
1759
|
}();
|
1760
|
}
|
1761
|
|
1762
|
var body = [];
|
1763
|
|
1764
|
while (current < tokensList.length) {
|
1765
|
body.push(walk());
|
1766
|
}
|
1767
|
|
1768
|
return t.program(body);
|
1769
|
}
|