
Unified diff: runtime/vm/stub_code_mips.cc

Issue 23672011: Fix the previously ineffective assert checking the number of arguments passed to (Closed)
Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 7 years, 3 months ago
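
What changed: every __ CallRuntime(<entry>) call site in this stub file now also passes the number of arguments the stub pushed for that runtime entry (for example, __ CallRuntime(kPrintStopMessageRuntimeEntry, 1)), so the argument-count assert can actually compare the caller's stated count against the entry's declaration. The standalone C++ sketch below only illustrates that idea; the RuntimeEntry and Assembler types, the argument_count field, and the overloads are simplified stand-ins, not the VM's real declarations.

    // Minimal standalone sketch (not the VM's real code): it models why passing an
    // explicit argument count to CallRuntime makes the check enforceable. The
    // RuntimeEntry and Assembler types below are simplified stand-ins.
    #include <cassert>
    #include <cstdio>

    struct RuntimeEntry {
      const char* name;
      int argument_count;  // What the runtime entry declares it expects.
    };

    struct Assembler {
      // Old form: the call site never states how many arguments it pushed,
      // so no cross-check against the entry's declaration is possible.
      void CallRuntime(const RuntimeEntry& entry) {
        std::printf("call %s\n", entry.name);
      }
      // New form: the stub states how many arguments it pushed; a mismatch
      // with the entry's declared count trips the assert.
      void CallRuntime(const RuntimeEntry& entry, int argument_count) {
        assert(entry.argument_count == argument_count &&
               "stub pushed a different number of arguments than declared");
        std::printf("call %s with %d argument(s)\n", entry.name, argument_count);
      }
    };

    int main() {
      const RuntimeEntry print_stop_message = {"PrintStopMessage", 1};
      Assembler assembler;
      assembler.CallRuntime(print_stop_message, 1);  // Counts match: no assert.
      return 0;
    }

In the unified diff that follows, unchanged context lines keep a leading space and the modified CallRuntime call sites are marked with - (old) and + (new).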
 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.

 #include "vm/globals.h"
 #if defined(TARGET_ARCH_MIPS)

 #include "vm/assembler.h"
 #include "vm/code_generator.h"
 #include "vm/compiler.h"
(...skipping 107 matching lines...)
 }
 END_LEAF_RUNTIME_ENTRY


 // Input parameters:
 // A0 : stop message (const char*).
 // Must preserve all registers.
 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) {
   __ EnterCallRuntimeFrame(0);
   // Call the runtime leaf function. A0 already contains the parameter.
-  __ CallRuntime(kPrintStopMessageRuntimeEntry);
+  __ CallRuntime(kPrintStopMessageRuntimeEntry, 1);
   __ LeaveCallRuntimeFrame();
   __ Ret();
 }


 // Input parameters:
 // RA : return address.
 // SP : address of return value.
 // T5 : address of the native function to call.
 // A2 : address of first argument in argument array.
(...skipping 182 matching lines...)
 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) {
   __ TraceSimMsg("CallStaticFunctionStub");
   __ EnterStubFrame();
   // Setup space on stack for return value and preserve arguments descriptor.

   __ addiu(SP, SP, Immediate(-2 * kWordSize));
   __ sw(S4, Address(SP, 1 * kWordSize));
   __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null()));
   __ sw(TMP, Address(SP, 0 * kWordSize));

-  __ CallRuntime(kPatchStaticCallRuntimeEntry);
+  __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);
   __ TraceSimMsg("CallStaticFunctionStub return");

   // Get Code object result and restore arguments descriptor array.
   __ lw(T0, Address(SP, 0 * kWordSize));
   __ lw(S4, Address(SP, 1 * kWordSize));
   __ addiu(SP, SP, Immediate(2 * kWordSize));

   __ lw(T0, FieldAddress(T0, Code::instructions_offset()));
   __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag);

   // Remove the stub frame as we are about to jump to the dart function.
   __ LeaveStubFrameAndReturn(T0);
 }


 // Called from a static call only when an invalid code has been entered
 // (invalid because its function was optimized or deoptimized).
 // S4: arguments descriptor array.
 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) {
   // Create a stub frame as we are pushing some objects on the stack before
   // calling into the runtime.
   __ TraceSimMsg("FixCallersTarget");
   __ EnterStubFrame();
   // Setup space on stack for return value and preserve arguments descriptor.
   __ addiu(SP, SP, Immediate(-2 * kWordSize));
   __ sw(S4, Address(SP, 1 * kWordSize));
   __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null()));
   __ sw(TMP, Address(SP, 0 * kWordSize));
-  __ CallRuntime(kFixCallersTargetRuntimeEntry);
+  __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);
   // Get Code object result and restore arguments descriptor array.
   __ lw(T0, Address(SP, 0 * kWordSize));
   __ lw(S4, Address(SP, 1 * kWordSize));
   __ addiu(SP, SP, Immediate(2 * kWordSize));

   // Jump to the dart function.
   __ lw(T0, FieldAddress(T0, Code::instructions_offset()));
   __ AddImmediate(T0, T0, Instructions::HeaderSize() - kHeapObjectTag);

   // Remove the stub frame.
(...skipping 60 matching lines...)
   __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null()));
   __ sw(TMP, Address(SP, 3 * kWordSize));
   __ sw(T1, Address(SP, 2 * kWordSize));
   __ sw(S5, Address(SP, 1 * kWordSize));
   __ sw(S4, Address(SP, 0 * kWordSize));

   // A1: Smi-tagged arguments array length.
   PushArgumentsArray(assembler);
   __ TraceSimMsg("InstanceFunctionLookupStub return");

-  __ CallRuntime(kInstanceFunctionLookupRuntimeEntry);
+  __ CallRuntime(kInstanceFunctionLookupRuntimeEntry, 4);

   __ lw(V0, Address(SP, 4 * kWordSize));  // Get result into V0.
   __ addiu(SP, SP, Immediate(5 * kWordSize));  // Remove arguments.

   __ LeaveStubFrameAndReturn();
 }


 DECLARE_LEAF_RUNTIME_ENTRY(intptr_t, DeoptimizeCopyFrame,
                            intptr_t deopt_reason,
(...skipping 62 matching lines...)
   }
   for (int i = 0; i < kNumberOfFRegisters; i++) {
     // These go below the CPU registers.
     const int slot = 4 + kNumberOfCpuRegisters + kNumberOfFRegisters - i;
     FRegister reg = static_cast<FRegister>(i);
     __ swc1(reg, Address(SP, kPushedRegistersSize - slot * kWordSize));
   }

   __ mov(A0, SP);  // Pass address of saved registers block.
   __ ReserveAlignedFrameSpace(1 * kWordSize);
-  __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry);
+  __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 1);
   // Result (V0) is stack-size (FP - SP) in bytes, incl. the return address.

   if (preserve_result) {
     // Restore result into T1 temporarily.
     __ lw(T1, Address(FP, saved_result_slot_from_fp * kWordSize));
   }

   __ addiu(SP, FP, Immediate(-kWordSize));
   __ lw(RA, Address(SP, 2 * kWordSize));
   __ lw(FP, Address(SP, 1 * kWordSize));
   __ lw(PP, Address(SP, 0 * kWordSize));
   __ subu(SP, FP, V0);

   // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there
   // is no need to set the correct PC marker or load PP, since they get patched.
   __ addiu(SP, SP, Immediate(-4 * kWordSize));
   __ sw(ZR, Address(SP, 3 * kWordSize));
   __ sw(RA, Address(SP, 2 * kWordSize));
   __ sw(FP, Address(SP, 1 * kWordSize));
   __ sw(PP, Address(SP, 0 * kWordSize));
   __ addiu(FP, SP, Immediate(kWordSize));

   __ mov(A0, FP);  // Get last FP address.
   if (preserve_result) {
     __ Push(T1);  // Preserve result as first local.
   }
   __ ReserveAlignedFrameSpace(1 * kWordSize);
-  __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry);  // Pass last FP in A0.
+  __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1);  // Pass last FP in A0.
   if (preserve_result) {
     // Restore result into T1.
     __ lw(T1, Address(FP, kFirstLocalSlotFromFp * kWordSize));
   }
   // Code above cannot cause GC.
   __ addiu(SP, FP, Immediate(-kWordSize));
   __ lw(RA, Address(SP, 2 * kWordSize));
   __ lw(FP, Address(SP, 1 * kWordSize));
   __ lw(PP, Address(SP, 0 * kWordSize));
   __ addiu(SP, SP, Immediate(4 * kWordSize));

   // Frame is fully rewritten at this point and it is safe to perform a GC.
   // Materialize any objects that were deferred by FillFrame because they
   // require allocation.
   __ EnterStubFrame();
   if (preserve_result) {
     __ Push(T1);  // Preserve result, it will be GC-d here.
   }
   __ PushObject(Smi::ZoneHandle());  // Space for the result.
-  __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry);
+  __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0);
   // Result tells stub how many bytes to remove from the expression stack
   // of the bottom-most frame. They were used as materialization arguments.
   __ Pop(T1);
   if (preserve_result) {
     __ Pop(V0);  // Restore result.
   }
   __ LeaveStubFrame();
   // Remove materialization arguments.
   __ SmiUntag(T1);
   __ addu(SP, SP, T1);
(...skipping 31 matching lines...)
   // Push space for the return value.
   // Push the receiver.
   // Push IC data object.
   // Push arguments descriptor array.
   __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null()));
   __ sw(TMP, Address(SP, 3 * kWordSize));
   __ sw(T6, Address(SP, 2 * kWordSize));
   __ sw(S5, Address(SP, 1 * kWordSize));
   __ sw(S4, Address(SP, 0 * kWordSize));

-  __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry);
+  __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3);

   __ lw(T0, Address(SP, 3 * kWordSize));  // Get result.
   __ lw(S4, Address(SP, 4 * kWordSize));  // Restore argument descriptor.
   __ lw(S5, Address(SP, 5 * kWordSize));  // Restore IC data.
   __ addiu(SP, SP, Immediate(6 * kWordSize));

   __ LeaveStubFrame();

   Label nonnull;
   __ BranchNotEqual(T0, reinterpret_cast<int32_t>(Object::null()), &nonnull);
(...skipping 126 matching lines...)
   // Create a stub frame as we are pushing some objects on the stack before
   // calling into the runtime.
   __ EnterStubFrame();
   // Setup space on stack for return value.
   // Push array length as Smi and element type.
   __ addiu(SP, SP, Immediate(-3 * kWordSize));
   __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null()));
   __ sw(TMP, Address(SP, 2 * kWordSize));
   __ sw(A1, Address(SP, 1 * kWordSize));
   __ sw(A0, Address(SP, 0 * kWordSize));
-  __ CallRuntime(kAllocateArrayRuntimeEntry);
+  __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
   __ TraceSimMsg("AllocateArrayStub return");
   // Pop arguments; result is popped in IP.
   __ lw(V0, Address(SP, 2 * kWordSize));
   __ lw(A1, Address(SP, 1 * kWordSize));
   __ lw(A0, Address(SP, 0 * kWordSize));
   __ addiu(SP, SP, Immediate(3 * kWordSize));

   __ LeaveStubFrameAndReturn();
 }

(...skipping 49 matching lines...)
   __ bne(T0, T7, &function_compiled);

   // Create a stub frame as we are pushing some objects on the stack before
   // calling into the runtime.
   __ EnterStubFrame();

   // Preserve arguments descriptor array and read-only function object argument.
   __ addiu(SP, SP, Immediate(-2 * kWordSize));
   __ sw(S4, Address(SP, 1 * kWordSize));
   __ sw(T2, Address(SP, 0 * kWordSize));
-  __ CallRuntime(kCompileFunctionRuntimeEntry);
+  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
   __ TraceSimMsg("GenerateCallClosureFunctionStub return");
   // Restore arguments descriptor array and read-only function object argument.
   __ lw(T2, Address(SP, 0 * kWordSize));
   __ lw(S4, Address(SP, 1 * kWordSize));
   __ addiu(SP, SP, Immediate(2 * kWordSize));
   // Restore T0.
   __ lw(T0, FieldAddress(T2, Function::code_offset()));

   // Remove the stub frame as we are about to jump to the closure function.
   __ LeaveStubFrame();
(...skipping 29 matching lines...)

   // Stack:
   // TOS + 0: Argument array.
   // TOS + 1: Arguments descriptor array.
   // TOS + 2: Place for result from the call.
   // TOS + 3: Saved FP of previous frame.
   // TOS + 4: Dart code return address.
   // TOS + 5: PC marker (0 for stub).
   // TOS + 6: Last argument of caller.
   // ....
-  __ CallRuntime(kInvokeNonClosureRuntimeEntry);
+  __ CallRuntime(kInvokeNonClosureRuntimeEntry, 2);
   __ lw(V0, Address(SP, 2 * kWordSize));  // Get result into V0.
   __ addiu(SP, SP, Immediate(3 * kWordSize));  // Remove arguments.

   // Remove the stub frame as we are about to return.
   __ LeaveStubFrameAndReturn();
 }


 // Called when invoking Dart code from C++ (VM code).
 // Input parameters:
(...skipping 220 matching lines...)
   }
   // Create a stub frame as we are pushing some objects on the stack before
   // calling into the runtime.
   __ EnterStubFrame();
   // Setup space on stack for return value.
   __ SmiTag(T1);
   __ addiu(SP, SP, Immediate(-2 * kWordSize));
   __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null()));
   __ sw(TMP, Address(SP, 1 * kWordSize));  // Store null.
   __ sw(T1, Address(SP, 0 * kWordSize));
-  __ CallRuntime(kAllocateContextRuntimeEntry);  // Allocate context.
+  __ CallRuntime(kAllocateContextRuntimeEntry, 1);  // Allocate context.
   __ lw(V0, Address(SP, 1 * kWordSize));  // Get the new context.
   __ addiu(SP, SP, Immediate(2 * kWordSize));  // Pop argument and return.

   // V0: new object
   // Restore the frame pointer.
   __ LeaveStubFrameAndReturn();
 }


 DECLARE_LEAF_RUNTIME_ENTRY(void, StoreBufferBlockProcess, Isolate* isolate);
(...skipping 56 matching lines...)
   __ beq(CMPRES, ZR, &L);
   __ delay_slot()->addiu(SP, SP, Immediate(3 * kWordSize));
   __ Ret();

   // Handle overflow: Call the runtime leaf function.
   __ Bind(&L);
   // Setup frame, push callee-saved registers.

   __ EnterCallRuntimeFrame(1 * kWordSize);
   __ lw(A0, FieldAddress(CTX, Context::isolate_offset()));
-  __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry);
+  __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1);
   __ TraceSimMsg("UpdateStoreBufferStub return");
   // Restore callee-saved registers, tear down frame.
   __ LeaveCallRuntimeFrame();
   __ Ret();
 }


 // Called for inline allocation of objects.
 // Input parameters:
 // RA : return address.
(...skipping 152 matching lines...)
   if (is_cls_parameterized) {
     // Push type arguments of object to be allocated and of instantiator.
     __ sw(T1, Address(SP, 1 * kWordSize));
     __ sw(T0, Address(SP, 0 * kWordSize));
   } else {
     // Push null type arguments and kNoInstantiator.
     __ LoadImmediate(T1, Smi::RawValue(StubCode::kNoInstantiator));
     __ sw(T7, Address(SP, 1 * kWordSize));
     __ sw(T1, Address(SP, 0 * kWordSize));
   }
-  __ CallRuntime(kAllocateObjectRuntimeEntry);  // Allocate object.
+  __ CallRuntime(kAllocateObjectRuntimeEntry, 3);  // Allocate object.
   __ TraceSimMsg("AllocationStubForClass return");
   // Pop result (newly allocated object).
   __ lw(V0, Address(SP, 3 * kWordSize));
   __ addiu(SP, SP, Immediate(4 * kWordSize));  // Pop arguments.
   // V0: new object
   // Restore the frame pointer and return.
   __ LeaveStubFrameAndReturn(RA, true);
 }

(...skipping 118 matching lines...)
   if (is_implicit_instance_closure) {
     __ lw(T1, Address(FP, kReceiverFPOffset));
     __ sw(T1, Address(SP, (num_slots - 3) * kWordSize));  // Receiver.
   }
   if (has_type_arguments) {
     __ lw(T2, Address(FP, kTypeArgumentsFPOffset));
   }
   __ sw(T2, Address(SP, 0 * kWordSize));

   if (is_implicit_instance_closure) {
-    __ CallRuntime(kAllocateImplicitInstanceClosureRuntimeEntry);
+    __ CallRuntime(kAllocateImplicitInstanceClosureRuntimeEntry, 3);
     __ TraceSimMsg("AllocationStubForClosure return");
   } else {
     ASSERT(func.IsNonImplicitClosureFunction());
-    __ CallRuntime(kAllocateClosureRuntimeEntry);
+    __ CallRuntime(kAllocateClosureRuntimeEntry, 2);
     __ TraceSimMsg("AllocationStubForClosure return");
   }
   __ lw(V0, Address(SP, (num_slots - 1) * kWordSize));  // Pop function object.
   __ addiu(SP, SP, Immediate(num_slots * kWordSize));

   // V0: new object
   // Restore the frame pointer.
   __ LeaveStubFrameAndReturn(RA, true);
 }

(...skipping 22 matching lines...)
   __ addiu(SP, SP, Immediate(-4 * kWordSize));
   __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null()));
   __ sw(TMP, Address(SP, 3 * kWordSize));
   __ sw(T6, Address(SP, 2 * kWordSize));
   __ sw(S5, Address(SP, 1 * kWordSize));
   __ sw(S4, Address(SP, 0 * kWordSize));

   // A1: Smi-tagged arguments array length.
   PushArgumentsArray(assembler);

-  __ CallRuntime(kInvokeNoSuchMethodFunctionRuntimeEntry);
+  __ CallRuntime(kInvokeNoSuchMethodFunctionRuntimeEntry, 4);

   __ lw(V0, Address(SP, 4 * kWordSize));  // Get result into V0.
   __ LeaveStubFrameAndReturn();
 }


 // T0: function object.
 // S5: inline cache data object.
 // Cannot use function object from ICData as it may be the inlined
 // function and not the top-scope function.
 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) {
   __ TraceSimMsg("OptimizedUsageCounterIncrement");
   Register ic_reg = S5;
   Register func_reg = T0;
   if (FLAG_trace_optimized_ic_calls) {
     __ EnterStubFrame();
     __ addiu(SP, SP, Immediate(-4 * kWordSize));
     __ sw(T0, Address(SP, 3 * kWordSize));
     __ sw(S5, Address(SP, 2 * kWordSize));
     __ sw(ic_reg, Address(SP, 1 * kWordSize));  // Argument.
     __ sw(func_reg, Address(SP, 0 * kWordSize));  // Argument.
-    __ CallRuntime(kTraceICCallRuntimeEntry);
+    __ CallRuntime(kTraceICCallRuntimeEntry, 2);
     __ lw(S5, Address(SP, 2 * kWordSize));
     __ lw(T0, Address(SP, 3 * kWordSize));
     __ addiu(SP, SP, Immediate(4 * kWordSize));  // Discard argument;
     __ LeaveStubFrame();
   }
   __ lw(T7, FieldAddress(func_reg, Function::usage_counter_offset()));
   __ addiu(T7, T7, Immediate(1));
   __ sw(T7, FieldAddress(func_reg, Function::usage_counter_offset()));
 }

(...skipping 42 matching lines...)

   // Check single stepping.
   Label not_stepping;
   __ lw(T0, FieldAddress(CTX, Context::isolate_offset()));
   __ lbu(T0, Address(T0, Isolate::single_step_offset()));
   __ BranchEqual(T0, 0, &not_stepping);
   // Call single step callback in debugger.
   __ addiu(SP, SP, Immediate(-2 * kWordSize));
   __ sw(S5, Address(SP, 1 * kWordSize));  // Preserve IC data.
   __ sw(RA, Address(SP, 0 * kWordSize));  // Return address.
-  __ CallRuntime(kSingleStepHandlerRuntimeEntry);
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
   __ lw(RA, Address(SP, 0 * kWordSize));
   __ lw(S5, Address(SP, 1 * kWordSize));
   __ addiu(SP, SP, Immediate(2 * kWordSize));
   __ Bind(&not_stepping);

   // Load argument descriptor into S4.
   __ lw(S4, FieldAddress(S5, ICData::arguments_descriptor_offset()));
   // Preserve return address, since RA is needed for subroutine call.
   __ mov(T2, RA);
   // Loop that checks if there is an IC data match.
(...skipping 80 matching lines...)
   __ sw(S4, Address(SP, (num_slots - 2) * kWordSize));
   __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null()));
   __ sw(TMP, Address(SP, (num_slots - 3) * kWordSize));
   // Push call arguments.
   for (intptr_t i = 0; i < num_args; i++) {
     __ lw(TMP1, Address(T1, -i * kWordSize));
     __ sw(TMP1, Address(SP, (num_slots - i - 4) * kWordSize));
   }
   // Pass IC data object.
   __ sw(S5, Address(SP, (num_slots - num_args - 4) * kWordSize));
-  __ CallRuntime(handle_ic_miss);
+  __ CallRuntime(handle_ic_miss, num_args + 1);
   __ TraceSimMsg("NArgsCheckInlineCacheStub return");
   // Pop returned code object into T3 (null if not found).
   // Restore arguments descriptor array and IC data array.
   __ lw(T3, Address(SP, (num_slots - 3) * kWordSize));
   __ lw(S4, Address(SP, (num_slots - 2) * kWordSize));
   __ lw(S5, Address(SP, (num_slots - 1) * kWordSize));
   // Remove the call arguments pushed earlier, including the IC data object
   // and the arguments descriptor array.
   __ addiu(SP, SP, Immediate(num_slots * kWordSize));
   __ LeaveStubFrame();
(...skipping 132 matching lines...)

   // Check single stepping.
   Label not_stepping;
   __ lw(T0, FieldAddress(CTX, Context::isolate_offset()));
   __ lbu(T0, Address(T0, Isolate::single_step_offset()));
   __ BranchEqual(T0, 0, &not_stepping);
   // Call single step callback in debugger.
   __ addiu(SP, SP, Immediate(-2 * kWordSize));
   __ sw(S5, Address(SP, 1 * kWordSize));  // Preserve IC data.
   __ sw(RA, Address(SP, 0 * kWordSize));  // Return address.
-  __ CallRuntime(kSingleStepHandlerRuntimeEntry);
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
   __ lw(RA, Address(SP, 0 * kWordSize));
   __ lw(S5, Address(SP, 1 * kWordSize));
   __ addiu(SP, SP, Immediate(2 * kWordSize));
   __ Bind(&not_stepping);


   // S5: IC data object (preserved).
   __ lw(T0, FieldAddress(S5, ICData::ic_data_offset()));
   // T0: ic_data_array with entries: target functions and count.
   __ AddImmediate(T0, Array::data_offset() - kHeapObjectTag);
(...skipping 20 matching lines...)
   __ LoadImmediate(CMPRES1, reinterpret_cast<intptr_t>(Object::null()));
   __ bne(T4, CMPRES1, &target_is_compiled);

   __ EnterStubFrame();
   // Preserve target function and IC data object.
   // Two preserved registers, one argument (function) => 3 slots.
   __ addiu(SP, SP, Immediate(-3 * kWordSize));
   __ sw(S5, Address(SP, 2 * kWordSize));  // Preserve IC data.
   __ sw(T3, Address(SP, 1 * kWordSize));  // Preserve function.
   __ sw(T3, Address(SP, 0 * kWordSize));  // Function argument.
-  __ CallRuntime(kCompileFunctionRuntimeEntry);
+  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
   __ lw(T3, Address(SP, 1 * kWordSize));  // Restore function.
   __ lw(S5, Address(SP, 2 * kWordSize));  // Restore IC data.
   __ addiu(SP, SP, Immediate(3 * kWordSize));
   // T3: target function.
   __ lw(T4, FieldAddress(T3, Function::code_offset()));
   __ LeaveStubFrame();

   __ Bind(&target_is_compiled);
   // T4: target code.
   __ lw(T3, FieldAddress(T4, Code::instructions_offset()));
(...skipping 22 matching lines...)
 void StubCode::GenerateBreakpointStaticStub(Assembler* assembler) {
   __ TraceSimMsg("BreakpointStaticStub");
   // Create a stub frame as we are pushing some objects on the stack before
   // calling into the runtime.
   __ EnterStubFrame();
   // Preserve arguments descriptor and make room for result.
   __ addiu(SP, SP, Immediate(-2 * kWordSize));
   __ sw(S5, Address(SP, 1 * kWordSize));
   __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null()));
   __ sw(TMP, Address(SP, 0 * kWordSize));
-  __ CallRuntime(kBreakpointStaticHandlerRuntimeEntry);
+  __ CallRuntime(kBreakpointStaticHandlerRuntimeEntry, 0);
   // Pop code object result and restore arguments descriptor.
   __ lw(T0, Address(SP, 0 * kWordSize));
   __ lw(S5, Address(SP, 1 * kWordSize));
   __ addiu(SP, SP, Immediate(2 * kWordSize));
   __ LeaveStubFrame();

   // Now call the static function. The breakpoint handler function
   // ensures that the call target is compiled.
   __ lw(T0, FieldAddress(T0, Code::instructions_offset()));
   __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag);
   // Load arguments descriptor into S4.
   __ lw(S4, FieldAddress(S5, ICData::arguments_descriptor_offset()));
   __ jr(T0);
 }


 // V0: return value.
 void StubCode::GenerateBreakpointReturnStub(Assembler* assembler) {
   __ TraceSimMsg("BreakpoingReturnStub");
   // Create a stub frame as we are pushing some objects on the stack before
   // calling into the runtime.
   __ EnterStubFrame();
   __ Push(V0);
-  __ CallRuntime(kBreakpointReturnHandlerRuntimeEntry);
+  __ CallRuntime(kBreakpointReturnHandlerRuntimeEntry, 0);
   __ Pop(V0);
   __ LeaveStubFrame();

   // Instead of returning to the patched Dart function, emulate the
   // smashed return code pattern and return to the function's caller.
   __ LeaveDartFrameAndReturn();
 }


 // RA: return address (Dart code).
 // S5: Inline cache data array.
 void StubCode::GenerateBreakpointDynamicStub(Assembler* assembler) {
   // Create a stub frame as we are pushing some objects on the stack before
   // calling into the runtime.
   __ TraceSimMsg("BreakpointDynamicStub");
   __ EnterStubFrame();
   __ Push(S5);
-  __ CallRuntime(kBreakpointDynamicHandlerRuntimeEntry);
+  __ CallRuntime(kBreakpointDynamicHandlerRuntimeEntry, 0);
   __ Pop(S5);
   __ LeaveStubFrame();

   // Find out which dispatch stub to call.
   __ lw(T1, FieldAddress(S5, ICData::num_args_tested_offset()));

   Label one_arg, two_args, three_args;
   __ BranchEqual(T1, 1, &one_arg);
   __ BranchEqual(T1, 2, &two_args);
   __ BranchEqual(T1, 3, &three_args);
(...skipping 234 matching lines...)
   __ delay_slot()->SmiTag(T2);

   __ Bind(&update_ic_data);
   // T0: ICData
   __ addiu(SP, SP, Immediate(-4 * kWordSize));
   __ sw(A1, Address(SP, 3 * kWordSize));
   __ sw(A0, Address(SP, 2 * kWordSize));
   __ LoadObject(TMP1, Symbols::EqualOperator());  // Target's name.
   __ sw(TMP1, Address(SP, 1 * kWordSize));
   __ sw(T0, Address(SP, 0 * kWordSize));  // ICData.
-  __ CallRuntime(kUpdateICDataTwoArgsRuntimeEntry);
+  __ CallRuntime(kUpdateICDataTwoArgsRuntimeEntry, 4);
   __ lw(A0, Address(SP, 2 * kWordSize));
   __ lw(A1, Address(SP, 3 * kWordSize));
   __ b(&compute_result);
   __ delay_slot()->addiu(SP, SP, Immediate(4 * kWordSize));
 }


 // Calls to the runtime to optimize the given function.
 // T0: function to be reoptimized.
 // S4: argument descriptor (preserved).
 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) {
   __ TraceSimMsg("OptimizeFunctionStub");
   __ EnterStubFrame();
   __ addiu(SP, SP, Immediate(-3 * kWordSize));
   __ sw(S4, Address(SP, 2 * kWordSize));
   // Setup space on stack for return value.
   __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null()));
   __ sw(TMP, Address(SP, 1 * kWordSize));
   __ sw(T0, Address(SP, 0 * kWordSize));
-  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry);
+  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
   __ TraceSimMsg("OptimizeFunctionStub return");
   __ lw(T0, Address(SP, 1 * kWordSize));  // Get Code object
   __ lw(S4, Address(SP, 2 * kWordSize));  // Restore argument descriptor.
   __ addiu(SP, SP, Immediate(3 * kWordSize));  // Discard argument.

   __ lw(T0, FieldAddress(T0, Code::instructions_offset()));
   __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag);
   __ LeaveStubFrameAndReturn(T0);
   __ break_(0);
 }
(...skipping 66 matching lines...)
   __ LoadClassId(temp2, right);
   __ subu(CMPRES, temp1, temp2);
   __ bne(CMPRES, ZR, &done);

   __ EnterStubFrame();
   __ ReserveAlignedFrameSpace(2 * kWordSize);
   __ sw(left, Address(SP, 1 * kWordSize));
   __ sw(right, Address(SP, 0 * kWordSize));
   __ mov(A0, left);
   __ mov(A1, right);
-  __ CallRuntime(kBigintCompareRuntimeEntry);
+  __ CallRuntime(kBigintCompareRuntimeEntry, 2);
   __ TraceSimMsg("IdenticalWithNumberCheckStub return");
   // Result in V0, 0 means equal.
   __ LeaveStubFrame();
   __ b(&done);
   __ delay_slot()->mov(CMPRES, V0);

   __ Bind(&reference_compare);
   __ subu(CMPRES, left, right);
   __ Bind(&done);
   // A branch or test after this comparison will check CMPRES1 == CMPRES2.
   __ mov(CMPRES2, ZR);
 }


 // Called only from unoptimized code. All relevant registers have been saved.
 // RA: return address.
 // SP + 4: left operand.
 // SP + 0: right operand.
 // Returns: CMPRES is zero if equal, non-zero otherwise.
 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
     Assembler* assembler) {
   // Check single stepping.
   Label not_stepping;
   __ lw(T0, FieldAddress(CTX, Context::isolate_offset()));
   __ lbu(T0, Address(T0, Isolate::single_step_offset()));
   __ BranchEqual(T0, 0, &not_stepping);
   // Call single step callback in debugger.
   __ addiu(SP, SP, Immediate(-1 * kWordSize));
   __ sw(RA, Address(SP, 0 * kWordSize));  // Return address.
-  __ CallRuntime(kSingleStepHandlerRuntimeEntry);
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
   __ lw(RA, Address(SP, 0 * kWordSize));
   __ addiu(SP, SP, Immediate(1 * kWordSize));
   __ Bind(&not_stepping);

   const Register temp1 = T2;
   const Register temp2 = T3;
   const Register left = T1;
   const Register right = T0;
   // Preserve left, right.
   __ lw(left, Address(SP, 1 * kWordSize));
(...skipping 27 matching lines...)
   __ lw(left, Address(SP, 1 * kWordSize));
   __ lw(temp2, Address(SP, 2 * kWordSize));
   __ lw(temp1, Address(SP, 3 * kWordSize));
   __ Ret();
   __ delay_slot()->addiu(SP, SP, Immediate(4 * kWordSize));
 }

 }  // namespace dart

 #endif  // defined TARGET_ARCH_MIPS