// Copyright (c) 2015, the Dartino project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE.md file.

library fletchc.bytecode_assembler;

import '../bytecodes.dart';

const int IMPLICIT_STACK_OVERFLOW_LIMIT = 32;
const int frameDescriptorSize = 3;

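/**
 * A label in the bytecode stream. Until the label is bound to a position,
 * it records the indices of the bytecodes that reference it, so those
 * bytecodes can be backpatched once the label's position is known.
 */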
class BytecodeLabel {
  int position = -1;
  final List<int> usage = <int>[];

  void addUsage(int bytecodePosition) {
    usage.add(bytecodePosition);
  }

  void forEach(f(int index)) {
    usage.forEach(f);
  }

  int get lastIndex {
    if (usage.isEmpty) return -1;
    return usage.last;
  }

  void removeLastUsage() {
    usage.removeLast();
  }

  void bind(int value) {
    position = value;
    usage.clear();
  }

  bool get isBound => position != -1;

  bool get isUsed => usage.isNotEmpty;
}

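/**
 * Assembles the bytecodes of a single function, tracking byte size, stack
 * height, and catch ranges as bytecodes are emitted.
 */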
class BytecodeAssembler {
  final List<Bytecode> bytecodes = <Bytecode>[];
  final List<int> catchRanges = <int>[];

  final int functionArity;

  int byteSize = 0;
  int stackSize = 0;
  int maxStackSize = 0;

  // After a bind, the last bytecode may still look like a terminator;
  // because of the bind, it no longer terminates the block.
  bool hasBindAfterTerminator = false;

  // After a bind, the last bytecode may still look like a pop; because
  // of the bind, further pops cannot be collapsed into it.
  bool hasBindAfterPop = false;

  BytecodeAssembler(this.functionArity);

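  /**
   * Computes the frame-relative slot of [parameter]. Parameters live below
   * the frame descriptor, so the result is always negative (see loadSlot
   * for the slot numbering).
   */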
  int computeParameterSlot(int parameter) {
    assert(parameter >= 0 && parameter < functionArity);
    return parameter - frameDescriptorSize - functionArity;
  }

  void reuse() {
    bytecodes.clear();
    catchRanges.clear();
    byteSize = 0;
    stackSize = 0;
    maxStackSize = 0;
    // Also reset the bind flags, so a reused assembler starts clean.
    hasBindAfterTerminator = false;
    hasBindAfterPop = false;
  }

  /**
   * Apply a fix to the currently known stack size.
   */
  void applyStackSizeFix(int diff) {
    stackSize += diff;
    if (stackSize > maxStackSize) maxStackSize = stackSize;
  }

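  /**
   * Records a catch-frame range: the start and end bytecode offsets,
   * together with the stack size at the point of registration.
   */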
  void addCatchFrameRange(int start, int end) {
    catchRanges
        ..add(start)
        ..add(end)
        ..add(stackSize);
  }

  void loadConst(int id) {
    internalAdd(new LoadConst(id));
  }

  void loadLocal(int offset) {
    assert(offset < stackSize);
    loadLocalHelper(offset);
  }

  void loadLocalHelper(int offset) {
    assert(offset >= 0);
    Bytecode bytecode;
    switch (offset) {
      case 0:
        bytecode = const LoadLocal0();
        break;
      case 1:
        bytecode = const LoadLocal1();
        break;
      case 2:
        bytecode = const LoadLocal2();
        break;
      case 3:
        bytecode = const LoadLocal3();
        break;
      case 4:
        bytecode = const LoadLocal4();
        break;
      case 5:
        bytecode = const LoadLocal5();
        break;
      default:
        if (offset >= 256) {
          bytecode = new LoadLocalWide(offset);
        } else {
          bytecode = new LoadLocal(offset);
        }
        break;
    }
    internalAdd(bytecode);
  }

  void loadBoxed(int offset) {
    assert(offset < stackSize);
    loadBoxedHelper(offset);
  }

  void loadBoxedHelper(int offset) {
    assert(offset >= 0 && offset <= 255);
    internalAdd(new LoadBoxed(offset));
  }

  void dup() {
    loadLocal(0);
  }

  /**
   * A 'slot' is an artificial, frame-relative index. The current frame is
   * indexed by 0 .. frameSize - 1, slots -1 .. -frameDescriptorSize hold
   * the frame descriptor (including the return address), the first
   * argument is at slot -frameDescriptorSize - functionArity, and the last
   * argument is at slot -frameDescriptorSize - 1 (see computeParameterSlot).
   *
   * This kind of indexing is sometimes easier to use than stack-relative
   * offsets, as locals and parameters have fixed slot values.
   */
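  // For example, with functionArity == 2 and stackSize == 3:
  //   loadSlot(0) computes offset 3 - 0 - 1 == 2 and emits LoadLocal2;
  //   computeParameterSlot(0) yields 0 - 3 - 2 == -5, so parameter 0
  //   lives at slot -5.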
  void loadSlot(int slot) {
    int offset = stackSize - slot - 1;
    loadLocal(offset);
  }

  void loadBoxedSlot(int slot) {
    int offset = stackSize - slot - 1;
    loadBoxed(offset);
  }

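  /**
   * Translates [parameter] into a stack-relative offset: the inverse of
   * computeParameterSlot, folded through the slot-to-offset conversion
   * stackSize - slot - 1.
   */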
  int computeParameterOffset(int parameter) {
    return frameDescriptorSize + stackSize + functionArity - parameter - 1;
  }

  void loadParameter(int parameter) {
    assert(parameter >= 0 && parameter < functionArity);
    loadLocalHelper(computeParameterOffset(parameter));
  }

  void loadBoxedParameter(int parameter) {
    assert(parameter >= 0 && parameter < functionArity);
    loadBoxedHelper(computeParameterOffset(parameter));
  }

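  // Parameter slots are negative (see computeParameterSlot), so the
  // resulting offset points below the frame base; that is why the helpers
  // are called directly, bypassing the offset < stackSize assert.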
  void loadParameterSlot(int parameterSlot) {
    int offset = stackSize - parameterSlot - 1;
    loadLocalHelper(offset);
  }

  void loadBoxedParameterSlot(int parameterSlot) {
    int offset = stackSize - parameterSlot - 1;
    loadBoxedHelper(offset);
  }

  void loadStatic(int index) {
    internalAdd(new LoadStatic(index));
  }

  void loadStaticInit(int index) {
    internalAdd(new LoadStaticInit(index));
  }

  void loadField(int index) {
    if (index >= 256) {
      internalAdd(new LoadFieldWide(index));
    } else {
      internalAdd(new LoadField(index));
    }
  }

  void loadLiteralNull() {
    internalAdd(new LoadLiteralNull());
  }

  void loadLiteralTrue() {
    internalAdd(new LoadLiteralTrue());
  }

  void loadLiteralFalse() {
    internalAdd(new LoadLiteralFalse());
  }

  void loadLiteral(int value) {
    if (value == 0) {
      internalAdd(const LoadLiteral0());
    } else if (value == 1) {
      internalAdd(const LoadLiteral1());
    } else if (value < 256) {
      internalAdd(new LoadLiteral(value));
    } else {
      internalAdd(new LoadLiteralWide(value));
    }
  }

  void storeLocal(int offset) {
    assert(offset < stackSize);
    storeLocalHelper(offset);
  }

  void storeLocalHelper(int offset) {
    assert(offset >= 0 && offset <= 255);
    internalAdd(new StoreLocal(offset));
  }

  void storeBoxed(int offset) {
    assert(offset < stackSize);
    storeBoxedHelper(offset);
  }

  void storeBoxedHelper(int offset) {
    assert(offset >= 0 && offset <= 255);
    internalAdd(new StoreBoxed(offset));
  }

  /**
   * See loadSlot for information about 'slots'.
   */
  void storeSlot(int slot) {
    int offset = stackSize - slot - 1;
    storeLocal(offset);
  }

  void storeBoxedSlot(int slot) {
    int offset = stackSize - slot - 1;
    storeBoxed(offset);
  }

  void storeParameter(int parameter) {
    assert(parameter >= 0 && parameter < functionArity);
    storeLocalHelper(computeParameterOffset(parameter));
  }

  void storeBoxedParameter(int parameter) {
    assert(parameter >= 0 && parameter < functionArity);
    storeBoxedHelper(computeParameterOffset(parameter));
  }

  void storeParameterSlot(int parameterSlot) {
    int offset = stackSize - parameterSlot - 1;
    storeLocalHelper(offset);
  }

  void storeBoxedParameterSlot(int parameterSlot) {
    int offset = stackSize - parameterSlot - 1;
    storeBoxedHelper(offset);
  }

  void storeStatic(int index) {
    internalAdd(new StoreStatic(index));
  }

  void storeField(int index) {
    if (index >= 256) {
      internalAdd(new StoreFieldWide(index));
    } else {
      internalAdd(new StoreField(index));
    }
  }

  void invokeStatic(int id, int arity) {
    internalAddStackPointerDifference(
        new InvokeStatic(id),
        1 - arity);
  }

  void invokeFactory(int id, int arity) {
    internalAddStackPointerDifference(
        new InvokeFactory(id),
        1 - arity);
  }

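  // Well-known operator selectors are emitted as specialized 'unfold'
  // bytecodes; anything else falls back to InvokeMethodUnfold. All forms
  // consume the receiver plus [arity] arguments and push one result,
  // hence the net stack difference of -arity.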
  void invokeMethod(int selector, int arity, [String name]) {
    Bytecode bytecode;
    switch (name) {
      case '==':
        bytecode = new InvokeEqUnfold(selector);
        break;

      case '<':
        bytecode = new InvokeLtUnfold(selector);
        break;

      case '<=':
        bytecode = new InvokeLeUnfold(selector);
        break;

      case '>':
        bytecode = new InvokeGtUnfold(selector);
        break;

      case '>=':
        bytecode = new InvokeGeUnfold(selector);
        break;

      case '+':
        bytecode = new InvokeAddUnfold(selector);
        break;

      case '-':
        bytecode = new InvokeSubUnfold(selector);
        break;

      case '*':
        bytecode = new InvokeMulUnfold(selector);
        break;

      case '~/':
        bytecode = new InvokeTruncDivUnfold(selector);
        break;

      case '%':
        bytecode = new InvokeModUnfold(selector);
        break;

      case '~':
        bytecode = new InvokeBitNotUnfold(selector);
        break;

      case '&':
        bytecode = new InvokeBitAndUnfold(selector);
        break;

      case '|':
        bytecode = new InvokeBitOrUnfold(selector);
        break;

      case '^':
        bytecode = new InvokeBitXorUnfold(selector);
        break;

      case '<<':
        bytecode = new InvokeBitShlUnfold(selector);
        break;

      case '>>':
        bytecode = new InvokeBitShrUnfold(selector);
        break;

      default:
        bytecode = new InvokeMethodUnfold(selector);
        break;
    }
    internalAddStackPointerDifference(bytecode, -arity);
  }

  void invokeTest(int selector, int arity) {
    internalAddStackPointerDifference(new InvokeTestUnfold(selector), -arity);
  }

  void invokeSelector(int slot) {
    internalAddStackPointerDifference(new InvokeSelector(slot), 0);
  }

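  // Pops one value, folding adjacent pops into a single Drop where
  // possible. A bind between pops blocks the folding, since another branch
  // may target the existing instruction.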
  void pop() {
    if (hasBindAfterPop) {
      internalAdd(new Pop());
      hasBindAfterPop = false;
      return;
    }
    Bytecode last = bytecodes.last;
    if (last.opcode == Opcode.Drop) {
      Drop drop = last;
      int amount = drop.uint8Argument0 + 1;
      if (amount <= 255) {
        bytecodes[bytecodes.length - 1] = new Drop(amount);
        applyStackSizeFix(-1);
      } else {
        internalAdd(new Pop());
      }
    } else if (last.opcode == Opcode.Pop) {
      bytecodes[bytecodes.length - 1] = new Drop(2);
      byteSize += 1;
      applyStackSizeFix(-1);
    } else {
      internalAdd(new Pop());
    }
  }

  void popMany(int count) {
    while (count > 255) {
      internalAddStackPointerDifference(new Drop(255), -255);
      count -= 255;
    }
    if (count > 1) {
      internalAddStackPointerDifference(new Drop(count), -count);
    } else if (count == 1) {
      internalAdd(new Pop());
    }
    hasBindAfterPop = false;
  }

  void ret() {
    hasBindAfterTerminator = false;
    if (stackSize <= 0) throw "Bad stackSize for return bytecode: $stackSize";
    internalAdd(const Return());
  }

  void returnNull() {
    hasBindAfterTerminator = false;
    internalAdd(const ReturnNull());
  }

  void identical() {
    internalAdd(const Identical());
  }

  void identicalNonNumeric() {
    internalAdd(const IdenticalNonNumeric());
  }

  void negate() {
    internalAdd(const Negate());
  }

  void bind(BytecodeLabel label) {
    internalBind(label, false);
  }

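  /**
   * Binds [label] to the current position and backpatches every forward
   * branch recorded against it. Each pending bytecode stored the byte
   * offset at which it was emitted, so the branch distance is the bind
   * position minus that stored offset.
   */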
  void internalBind(BytecodeLabel label, bool isSubroutineReturn) {
    if (label.isUsed) hasBindAfterTerminator = true;
    hasBindAfterPop = true;
    assert(label.position == -1);
    // TODO(ajohnsen): If the previous bytecode is a branch to this label,
    // consider popping it - if no other binds have happened at this
    // bytecode index.
    int position = byteSize;
    label.forEach((int index) {
      var bytecode = bytecodes[index];
      switch (bytecode.opcode) {
        case Opcode.BranchIfTrueWide:
          int offset = position - bytecode.uint32Argument0;
          bytecodes[index] = new BranchIfTrueWide(offset);
          break;

        case Opcode.BranchIfFalseWide:
          int offset = position - bytecode.uint32Argument0;
          bytecodes[index] = new BranchIfFalseWide(offset);
          break;

        case Opcode.BranchWide:
          int offset = position - bytecode.uint32Argument0;
          bytecodes[index] = new BranchWide(offset);
          break;

        case Opcode.PopAndBranchWide:
          int offset = position - bytecode.uint32Argument1;
          bytecodes[index] = new PopAndBranchWide(
              bytecode.uint8Argument0,
              offset);
          break;

        case Opcode.EnterNoSuchMethod:
          int offset = position - bytecode.uint8Argument0;
          bytecodes[index] = new EnterNoSuchMethod(offset);
          break;

        case Opcode.SubroutineCall:
          if (isSubroutineReturn) {
            int offset = position - bytecode.uint32Argument1;
            offset -= bytecode.size;
            bytecodes[index] = new SubroutineCall(
                bytecode.uint32Argument0,
                offset);
          } else {
            int offset = position - bytecode.uint32Argument0;
            bytecodes[index] = new SubroutineCall(
                offset,
                bytecode.uint32Argument1);
          }
          break;

        default:
          throw "Unhandled bind bytecode: $bytecode";
      }
    });
    label.bind(position);
  }

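  // Forward branches are emitted in their wide form with the current byte
  // offset as a placeholder argument; internalBind rewrites them with the
  // real distance once the target label is bound.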
  void branchIfTrue(BytecodeLabel label) {
    if (label.isBound) {
      internalBranchBack(
          label,
          (v) => new BranchBackIfTrue(v),
          (v) => new BranchBackIfTrueWide(v));
    } else {
      label.addUsage(bytecodes.length);
      internalAdd(new BranchIfTrueWide(byteSize));
    }
  }

  void branchIfFalse(BytecodeLabel label) {
    if (label.isBound) {
      internalBranchBack(
          label,
          (v) => new BranchBackIfFalse(v),
          (v) => new BranchBackIfFalseWide(v));
    } else {
      label.addUsage(bytecodes.length);
      internalAdd(new BranchIfFalseWide(byteSize));
    }
  }

  void branch(BytecodeLabel label) {
    if (label.isBound) {
      internalBranchBack(
          label,
          (v) => new BranchBack(v),
          (v) => new BranchBackWide(v));
    } else {
      label.addUsage(bytecodes.length);
      internalAdd(new BranchWide(byteSize));
    }
  }

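  // There is no short pop-and-branch-back encoding, so both callbacks
  // produce the wide form.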
  void popAndBranch(int diff, BytecodeLabel label) {
    assert(diff >= 0 && diff <= 255);
    if (label.isBound) {
      internalBranchBack(
          label,
          (v) => new PopAndBranchBackWide(diff, v),
          (v) => new PopAndBranchBackWide(diff, v));
    } else {
      label.addUsage(bytecodes.length);
      internalAdd(new PopAndBranchWide(diff, byteSize));
    }
  }

  void internalBranchBack(
      BytecodeLabel label,
      Bytecode short(int offset),
      Bytecode long(int offset)) {
    int offset = byteSize - label.position;
    if (offset < 255) {
      internalAdd(short(offset));
    } else {
      internalAdd(long(offset));
    }
  }

  void allocate(int classId, int fields, {bool immutable: false}) {
    Bytecode instruction = immutable
        ? new AllocateImmutable(classId)
        : new Allocate(classId);
    internalAddStackPointerDifference(instruction, 1 - fields);
  }

  void allocateBoxed() {
    internalAdd(const AllocateBoxed());
  }

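  /**
   * Emits a subroutine call whose target and return offsets are both
   * still unknown: each placeholder holds the current byte offset, and
   * internalBind later patches the target ([label]) and the return delta
   * ([returnLabel]) independently.
   */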
  void subroutineCall(BytecodeLabel label, BytecodeLabel returnLabel) {
    assert(!label.isBound);
    assert(!returnLabel.isBound);
    label.addUsage(bytecodes.length);
    returnLabel.addUsage(bytecodes.length);
    internalAddStackPointerDifference(
        new SubroutineCall(byteSize, byteSize),
        0);
  }

  void subroutineReturn(BytecodeLabel returnLabel) {
    internalBind(returnLabel, true);
    internalAdd(const SubroutineReturn());
  }

  bool get endsWithTerminator {
    if (bytecodes.isEmpty) return false;
    if (hasBindAfterTerminator) return false;
    Opcode opcode = bytecodes.last.opcode;
    return opcode == Opcode.Return || opcode == Opcode.Throw;
  }

  void enterNoSuchMethod(BytecodeLabel skipGetterLabel) {
    assert(!skipGetterLabel.isBound);
    skipGetterLabel.addUsage(bytecodes.length);
    internalAddStackPointerDifference(new EnterNoSuchMethod(byteSize), 0);
  }

  void exitNoSuchMethod() {
    internalAdd(const ExitNoSuchMethod());
  }

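  /**
   * Finishes the method: prepends an explicit StackOverflowCheck when the
   * maximum stack size exceeds the implicit limit, then emits MethodEnd
   * carrying the final byte size shifted left by one, with the low bit set
   * when catch ranges are present.
   */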
  void methodEnd() {
    if (maxStackSize > IMPLICIT_STACK_OVERFLOW_LIMIT) {
      var bytecode = new StackOverflowCheck(
          maxStackSize - IMPLICIT_STACK_OVERFLOW_LIMIT);
      bytecodes.insert(0, bytecode);
      byteSize += bytecode.size;
    }
    int value = (byteSize << 1) | (catchRanges.isNotEmpty ? 1 : 0);
    internalAdd(new MethodEnd(value));
  }

  void processYield() {
    internalAdd(const ProcessYield());
  }

  void coroutineChange() {
    internalAdd(const CoroutineChange());
  }

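  // All emission funnels through these two methods, which append the
  // bytecode, grow byteSize, and keep stackSize/maxStackSize in sync.
  // Bytecodes with a variable stack effect must supply an explicit
  // difference via internalAddStackPointerDifference.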
  void internalAdd(Bytecode bytecode) {
    internalAddStackPointerDifference(
        bytecode,
        bytecode.stackPointerDifference);
  }

  void internalAddStackPointerDifference(
      Bytecode bytecode,
      int stackPointerDifference) {
    assert(stackPointerDifference != VAR_DIFF);
    assert(bytecodes.isEmpty || bytecodes.last.opcode != Opcode.MethodEnd);
    bytecodes.add(bytecode);
    byteSize += bytecode.size;
    applyStackSizeFix(stackPointerDifference);
  }

  void invokeNative(int arity, int index) {
    internalAdd(new InvokeNative(arity, index));
  }

  void invokeDetachableNative(int arity, int index) {
    internalAdd(new InvokeDetachableNative(arity, index));
  }

  void invokeNativeYield(int arity, int index) {
    internalAdd(new InvokeNativeYield(arity, index));
  }

  void emitThrow() {
    hasBindAfterTerminator = false;
    internalAdd(const Throw());
  }
}