OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
(...skipping 77 matching lines...)
88 // Retval is next to 1st argument. | 88 // Retval is next to 1st argument. |
89 __ delay_slot()->addiu(A3, A2, Immediate(kWordSize)); | 89 __ delay_slot()->addiu(A3, A2, Immediate(kWordSize)); |
90 __ TraceSimMsg("CallToRuntimeStub return"); | 90 __ TraceSimMsg("CallToRuntimeStub return"); |
91 | 91 |
92 // Reset exit frame information in Isolate structure. | 92 // Reset exit frame information in Isolate structure. |
93 __ sw(ZR, Address(CTX, Isolate::top_exit_frame_info_offset())); | 93 __ sw(ZR, Address(CTX, Isolate::top_exit_frame_info_offset())); |
94 | 94 |
95 // Load Context pointer from Isolate structure into A2. | 95 // Load Context pointer from Isolate structure into A2. |
96 __ lw(A2, Address(CTX, Isolate::top_context_offset())); | 96 __ lw(A2, Address(CTX, Isolate::top_context_offset())); |
97 | 97 |
98 // Reload NULLREG. | 98 // Load null. |
99 __ LoadImmediate(NULLREG, reinterpret_cast<intptr_t>(Object::null())); | 99 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
100 | 100 |
101 // Reset Context pointer in Isolate structure. | 101 // Reset Context pointer in Isolate structure. |
102 __ sw(NULLREG, Address(CTX, Isolate::top_context_offset())); | 102 __ sw(TMP, Address(CTX, Isolate::top_context_offset())); |
103 | 103 |
104 // Cache Context pointer into CTX while executing Dart code. | 104 // Cache Context pointer into CTX while executing Dart code. |
105 __ mov(CTX, A2); | 105 __ mov(CTX, A2); |
106 | 106 |
107 __ mov(SP, FP); | 107 __ mov(SP, FP); |
108 __ lw(RA, Address(SP, 1 * kWordSize)); | 108 __ lw(RA, Address(SP, 1 * kWordSize)); |
109 __ lw(FP, Address(SP, 0 * kWordSize)); | 109 __ lw(FP, Address(SP, 0 * kWordSize)); |
110 __ Ret(); | 110 __ Ret(); |
111 __ delay_slot()->addiu(SP, SP, Immediate(2 * kWordSize)); | 111 __ delay_slot()->addiu(SP, SP, Immediate(2 * kWordSize)); |
112 } | 112 } |
(...skipping 82 matching lines...)
195 // Call native function or redirection via simulator. | 195 // Call native function or redirection via simulator. |
196 __ jalr(T5); | 196 __ jalr(T5); |
197 __ TraceSimMsg("CallNativeCFunctionStub return"); | 197 __ TraceSimMsg("CallNativeCFunctionStub return"); |
198 | 198 |
199 // Reset exit frame information in Isolate structure. | 199 // Reset exit frame information in Isolate structure. |
200 __ sw(ZR, Address(CTX, Isolate::top_exit_frame_info_offset())); | 200 __ sw(ZR, Address(CTX, Isolate::top_exit_frame_info_offset())); |
201 | 201 |
202 // Load Context pointer from Isolate structure into A2. | 202 // Load Context pointer from Isolate structure into A2. |
203 __ lw(A2, Address(CTX, Isolate::top_context_offset())); | 203 __ lw(A2, Address(CTX, Isolate::top_context_offset())); |
204 | 204 |
205 // Reload NULLREG. | 205 // Load null. |
206 __ LoadImmediate(NULLREG, reinterpret_cast<intptr_t>(Object::null())); | 206 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
207 | 207 |
208 // Reset Context pointer in Isolate structure. | 208 // Reset Context pointer in Isolate structure. |
209 __ sw(NULLREG, Address(CTX, Isolate::top_context_offset())); | 209 __ sw(TMP, Address(CTX, Isolate::top_context_offset())); |
210 | 210 |
211 // Cache Context pointer into CTX while executing Dart code. | 211 // Cache Context pointer into CTX while executing Dart code. |
212 __ mov(CTX, A2); | 212 __ mov(CTX, A2); |
213 | 213 |
214 __ mov(SP, FP); | 214 __ mov(SP, FP); |
215 __ lw(RA, Address(SP, 1 * kWordSize)); | 215 __ lw(RA, Address(SP, 1 * kWordSize)); |
216 __ lw(FP, Address(SP, 0 * kWordSize)); | 216 __ lw(FP, Address(SP, 0 * kWordSize)); |
217 __ Ret(); | 217 __ Ret(); |
218 __ delay_slot()->addiu(SP, SP, Immediate(2 * kWordSize)); | 218 __ delay_slot()->addiu(SP, SP, Immediate(2 * kWordSize)); |
219 } | 219 } |
220 | 220 |
221 | 221 |
222 // Input parameters: | 222 // Input parameters: |
223 // S4: arguments descriptor array. | 223 // S4: arguments descriptor array. |
224 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { | 224 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { |
225 __ TraceSimMsg("CallStaticFunctionStub"); | 225 __ TraceSimMsg("CallStaticFunctionStub"); |
226 __ EnterStubFrame(); | 226 __ EnterStubFrame(); |
227 // Setup space on stack for return value and preserve arguments descriptor. | 227 // Setup space on stack for return value and preserve arguments descriptor. |
228 | 228 |
229 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 229 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
230 __ sw(S4, Address(SP, 1 * kWordSize)); | 230 __ sw(S4, Address(SP, 1 * kWordSize)); |
231 __ sw(NULLREG, Address(SP, 0 * kWordSize)); | 231 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 232 __ sw(TMP, Address(SP, 0 * kWordSize)); |
232 | 233 |
233 __ CallRuntime(kPatchStaticCallRuntimeEntry); | 234 __ CallRuntime(kPatchStaticCallRuntimeEntry); |
234 __ TraceSimMsg("CallStaticFunctionStub return"); | 235 __ TraceSimMsg("CallStaticFunctionStub return"); |
235 | 236 |
236 // Get Code object result and restore arguments descriptor array. | 237 // Get Code object result and restore arguments descriptor array. |
237 __ lw(T0, Address(SP, 0 * kWordSize)); | 238 __ lw(T0, Address(SP, 0 * kWordSize)); |
238 __ lw(S4, Address(SP, 1 * kWordSize)); | 239 __ lw(S4, Address(SP, 1 * kWordSize)); |
239 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 240 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
240 | 241 |
241 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); | 242 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); |
242 __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag); | 243 __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag); |
243 | 244 |
244 // Remove the stub frame as we are about to jump to the dart function. | 245 // Remove the stub frame as we are about to jump to the dart function. |
245 __ LeaveStubFrameAndReturn(T0); | 246 __ LeaveStubFrameAndReturn(T0); |
246 } | 247 } |
247 | 248 |
248 | 249 |
249 // Called from a static call only when an invalid code has been entered | 250 // Called from a static call only when an invalid code has been entered |
250 // (invalid because its function was optimized or deoptimized). | 251 // (invalid because its function was optimized or deoptimized). |
251 // S4: arguments descriptor array. | 252 // S4: arguments descriptor array. |
252 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 253 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { |
253 // Create a stub frame as we are pushing some objects on the stack before | 254 // Create a stub frame as we are pushing some objects on the stack before |
254 // calling into the runtime. | 255 // calling into the runtime. |
255 __ TraceSimMsg("FixCallersTarget"); | 256 __ TraceSimMsg("FixCallersTarget"); |
256 __ EnterStubFrame(); | 257 __ EnterStubFrame(); |
257 // Setup space on stack for return value and preserve arguments descriptor. | 258 // Setup space on stack for return value and preserve arguments descriptor. |
258 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 259 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
259 __ sw(S4, Address(SP, 1 * kWordSize)); | 260 __ sw(S4, Address(SP, 1 * kWordSize)); |
260 __ sw(NULLREG, Address(SP, 0 * kWordSize)); | 261 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 262 __ sw(TMP, Address(SP, 0 * kWordSize)); |
261 __ CallRuntime(kFixCallersTargetRuntimeEntry); | 263 __ CallRuntime(kFixCallersTargetRuntimeEntry); |
262 // Get Code object result and restore arguments descriptor array. | 264 // Get Code object result and restore arguments descriptor array. |
263 __ lw(T0, Address(SP, 0 * kWordSize)); | 265 __ lw(T0, Address(SP, 0 * kWordSize)); |
264 __ lw(S4, Address(SP, 1 * kWordSize)); | 266 __ lw(S4, Address(SP, 1 * kWordSize)); |
265 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 267 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
266 | 268 |
267 // Jump to the dart function. | 269 // Jump to the dart function. |
268 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); | 270 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); |
269 __ AddImmediate(T0, T0, Instructions::HeaderSize() - kHeapObjectTag); | 271 __ AddImmediate(T0, T0, Instructions::HeaderSize() - kHeapObjectTag); |
270 | 272 |
271 // Remove the stub frame. | 273 // Remove the stub frame. |
272 __ LeaveStubFrameAndReturn(T0); | 274 __ LeaveStubFrameAndReturn(T0); |
273 } | 275 } |
274 | 276 |
275 | 277 |
276 // Input parameters: | 278 // Input parameters: |
277 // A1: Smi-tagged argument count, may be zero. | 279 // A1: Smi-tagged argument count, may be zero. |
278 // FP[kParamEndSlotFromFp + 1]: Last argument. | 280 // FP[kParamEndSlotFromFp + 1]: Last argument. |
279 static void PushArgumentsArray(Assembler* assembler) { | 281 static void PushArgumentsArray(Assembler* assembler) { |
280 __ TraceSimMsg("PushArgumentsArray"); | 282 __ TraceSimMsg("PushArgumentsArray"); |
281 // Allocate array to store arguments of caller. | 283 // Allocate array to store arguments of caller. |
282 __ mov(A0, NULLREG); | 284 __ LoadImmediate(A0, reinterpret_cast<intptr_t>(Object::null())); |
283 // A0: Null element type for raw Array. | 285 // A0: Null element type for raw Array. |
284 // A1: Smi-tagged argument count, may be zero. | 286 // A1: Smi-tagged argument count, may be zero. |
285 __ BranchLink(&StubCode::AllocateArrayLabel()); | 287 __ BranchLink(&StubCode::AllocateArrayLabel()); |
286 __ TraceSimMsg("PushArgumentsArray return"); | 288 __ TraceSimMsg("PushArgumentsArray return"); |
287 // V0: newly allocated array. | 289 // V0: newly allocated array. |
288 // A1: Smi-tagged argument count, may be zero (was preserved by the stub). | 290 // A1: Smi-tagged argument count, may be zero (was preserved by the stub). |
289 __ Push(V0); // Array is in V0 and on top of stack. | 291 __ Push(V0); // Array is in V0 and on top of stack. |
290 __ sll(T1, A1, 1); | 292 __ sll(T1, A1, 1); |
291 __ addu(T1, FP, T1); | 293 __ addu(T1, FP, T1); |
292 __ AddImmediate(T1, kParamEndSlotFromFp * kWordSize); | 294 __ AddImmediate(T1, kParamEndSlotFromFp * kWordSize); |
(...skipping 29 matching lines...)
322 __ lw(A1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 324 __ lw(A1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
323 __ sll(TMP1, A1, 1); // A1 is Smi. | 325 __ sll(TMP1, A1, 1); // A1 is Smi. |
324 __ addu(TMP1, FP, TMP1); | 326 __ addu(TMP1, FP, TMP1); |
325 __ lw(T1, Address(TMP1, kParamEndSlotFromFp * kWordSize)); | 327 __ lw(T1, Address(TMP1, kParamEndSlotFromFp * kWordSize)); |
326 | 328 |
327 // Push space for the return value. | 329 // Push space for the return value. |
328 // Push the receiver. | 330 // Push the receiver. |
329 // Push TMP1 data object. | 331 // Push TMP1 data object. |
330 // Push arguments descriptor array. | 332 // Push arguments descriptor array. |
331 __ addiu(SP, SP, Immediate(-4 * kWordSize)); | 333 __ addiu(SP, SP, Immediate(-4 * kWordSize)); |
332 __ sw(NULLREG, Address(SP, 3 * kWordSize)); | 334 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 335 __ sw(TMP, Address(SP, 3 * kWordSize)); |
333 __ sw(T1, Address(SP, 2 * kWordSize)); | 336 __ sw(T1, Address(SP, 2 * kWordSize)); |
334 __ sw(S5, Address(SP, 1 * kWordSize)); | 337 __ sw(S5, Address(SP, 1 * kWordSize)); |
335 __ sw(S4, Address(SP, 0 * kWordSize)); | 338 __ sw(S4, Address(SP, 0 * kWordSize)); |
336 | 339 |
337 // A1: Smi-tagged arguments array length. | 340 // A1: Smi-tagged arguments array length. |
338 PushArgumentsArray(assembler); | 341 PushArgumentsArray(assembler); |
339 __ TraceSimMsg("InstanceFunctionLookupStub return"); | 342 __ TraceSimMsg("InstanceFunctionLookupStub return"); |
340 | 343 |
341 __ CallRuntime(kInstanceFunctionLookupRuntimeEntry); | 344 __ CallRuntime(kInstanceFunctionLookupRuntimeEntry); |
342 | 345 |
(...skipping 148 matching lines...)
491 GenerateDeoptimizationSequence(assembler, true); // Preserve V0. | 494 GenerateDeoptimizationSequence(assembler, true); // Preserve V0. |
492 } | 495 } |
493 | 496 |
494 | 497 |
495 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 498 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
496 GenerateDeoptimizationSequence(assembler, false); // Don't preserve V0. | 499 GenerateDeoptimizationSequence(assembler, false); // Don't preserve V0. |
497 } | 500 } |
498 | 501 |
499 | 502 |
500 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { | 503 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { |
501 __ Unimplemented("MegamorphicMiss stub"); | 504 __ EnterStubFrame(); |
502 return; | 505 |
| 506 // Load the receiver. |
| 507 __ lw(T2, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
| 508 __ sll(T2, T2, 1); // T2 is a Smi. |
| 509 __ addu(TMP, FP, T2); |
| 510 __ lw(T6, Address(TMP, kParamEndSlotFromFp * kWordSize)); |
| 511 |
| 512 // Preserve IC data and arguments descriptor. |
| 513 __ addiu(SP, SP, Immediate(-6 * kWordSize)); |
| 514 __ sw(S5, Address(SP, 5 * kWordSize)); |
| 515 __ sw(S4, Address(SP, 4 * kWordSize)); |
| 516 |
| 517 // Push space for the return value. |
| 518 // Push the receiver. |
| 519 // Push IC data object. |
| 520 // Push arguments descriptor array. |
| 521 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 522 __ sw(TMP, Address(SP, 3 * kWordSize)); |
| 523 __ sw(T6, Address(SP, 2 * kWordSize)); |
| 524 __ sw(S5, Address(SP, 1 * kWordSize)); |
| 525 __ sw(S4, Address(SP, 0 * kWordSize)); |
| 526 |
| 527 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry); |
| 528 |
| 529 __ lw(T0, Address(SP, 3 * kWordSize)); // Get result. |
| 530 __ lw(S4, Address(SP, 4 * kWordSize)); // Restore argument descriptor. |
| 531 __ lw(S5, Address(SP, 5 * kWordSize)); // Restore IC data. |
| 532 __ addiu(SP, SP, Immediate(6 * kWordSize)); |
| 533 |
| 534 __ LeaveStubFrame(); |
| 535 |
| 536 Label nonnull; |
| 537 __ BranchNotEqual(T0, reinterpret_cast<int32_t>(Object::null()), &nonnull); |
| 538 __ Branch(&StubCode::InstanceFunctionLookupLabel()); |
| 539 __ Bind(&nonnull); |
| 540 __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag); |
| 541 __ jr(T0); |
503 } | 542 } |
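The MegamorphicMissStub body above replaces the previous Unimplemented placeholder. A rough sketch of the control flow it implements, assuming only the registers and offsets shown in the hunk:

    // Push a null return slot, the receiver, the IC data (S5) and the
    // arguments descriptor (S4), then call the cache-miss handler.
    // if (result == null)  -> branch to the InstanceFunctionLookup stub.
    // else                 -> jump to the returned code's instructions entry.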
504 | 543 |
505 | 544 |
506 // Called for inline allocation of arrays. | 545 // Called for inline allocation of arrays. |
507 // Input parameters: | 546 // Input parameters: |
508 // RA: return address. | 547 // RA: return address. |
509 // A1: Array length as Smi. | 548 // A1: Array length as Smi. |
510 // A0: array element type (either NULL or an instantiated type). | 549 // A0: array element type (either NULL or an instantiated type). |
511 // NOTE: A1 cannot be clobbered here as the caller relies on it being saved. | 550 // NOTE: A1 cannot be clobbered here as the caller relies on it being saved. |
512 // The newly allocated object is returned in V0. | 551 // The newly allocated object is returned in V0. |
(...skipping 84 matching lines...)
597 __ sw(T3, FieldAddress(V0, Array::tags_offset())); | 636 __ sw(T3, FieldAddress(V0, Array::tags_offset())); |
598 | 637 |
599 // Initialize all array elements to raw_null. | 638 // Initialize all array elements to raw_null. |
600 // V0: new object start as a tagged pointer. | 639 // V0: new object start as a tagged pointer. |
601 // T2: new object end address. | 640 // T2: new object end address. |
602 // A1: Array length as Smi. | 641 // A1: Array length as Smi. |
603 __ AddImmediate(T3, V0, Array::data_offset() - kHeapObjectTag); | 642 __ AddImmediate(T3, V0, Array::data_offset() - kHeapObjectTag); |
604 // T3: iterator which initially points to the start of the variable | 643 // T3: iterator which initially points to the start of the variable |
605 // data area to be initialized. | 644 // data area to be initialized. |
606 | 645 |
| 646 __ LoadImmediate(T7, reinterpret_cast<intptr_t>(Object::null())); |
607 Label loop, loop_exit; | 647 Label loop, loop_exit; |
608 __ BranchUnsignedGreaterEqual(T3, T2, &loop_exit); | 648 __ BranchUnsignedGreaterEqual(T3, T2, &loop_exit); |
609 __ Bind(&loop); | 649 __ Bind(&loop); |
610 __ addiu(T3, T3, Immediate(kWordSize)); | 650 __ addiu(T3, T3, Immediate(kWordSize)); |
611 __ bne(T3, T2, &loop); | 651 __ bne(T3, T2, &loop); |
612 __ delay_slot()->sw(NULLREG, Address(T3, -kWordSize)); | 652 __ delay_slot()->sw(T7, Address(T3, -kWordSize)); |
613 __ Bind(&loop_exit); | 653 __ Bind(&loop_exit); |
614 | 654 |
615 // Done allocating and initializing the array. | 655 // Done allocating and initializing the array. |
616 // V0: new object. | 656 // V0: new object. |
617 // A1: Array length as Smi (preserved for the caller.) | 657 // A1: Array length as Smi (preserved for the caller.) |
618 __ Ret(); | 658 __ Ret(); |
619 } | 659 } |
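The element-initialization loop above relies on the MIPS branch delay slot: the store that textually follows the bne still executes on every iteration, so each pass advances T3 and then null-fills the word it just stepped over. A minimal annotated sketch of the same idiom, with T7 preloaded with Object::null() before the loop:

    __ Bind(&loop);
    __ addiu(T3, T3, Immediate(kWordSize));             // advance to next word
    __ bne(T3, T2, &loop);                              // loop until end address
    __ delay_slot()->sw(T7, Address(T3, -kWordSize));   // delay slot: write null to previous word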
620 | 660 |
621 // Unable to allocate the array using the fast inline code, just call | 661 // Unable to allocate the array using the fast inline code, just call |
622 // into the runtime. | 662 // into the runtime. |
623 __ Bind(&slow_case); | 663 __ Bind(&slow_case); |
624 // Create a stub frame as we are pushing some objects on the stack before | 664 // Create a stub frame as we are pushing some objects on the stack before |
625 // calling into the runtime. | 665 // calling into the runtime. |
626 __ EnterStubFrame(); | 666 __ EnterStubFrame(); |
627 // Setup space on stack for return value. | 667 // Setup space on stack for return value. |
628 // Push array length as Smi and element type. | 668 // Push array length as Smi and element type. |
629 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 669 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
630 __ sw(NULLREG, Address(SP, 2 * kWordSize)); | 670 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 671 __ sw(TMP, Address(SP, 2 * kWordSize)); |
631 __ sw(A1, Address(SP, 1 * kWordSize)); | 672 __ sw(A1, Address(SP, 1 * kWordSize)); |
632 __ sw(T3, Address(SP, 0 * kWordSize)); | 673 __ sw(A0, Address(SP, 0 * kWordSize)); |
633 __ CallRuntime(kAllocateArrayRuntimeEntry); | 674 __ CallRuntime(kAllocateArrayRuntimeEntry); |
634 __ TraceSimMsg("AllocateArrayStub return"); | 675 __ TraceSimMsg("AllocateArrayStub return"); |
635 // Pop arguments; result is popped in IP. | 676 // Pop arguments; result is popped in IP. |
636 __ lw(TMP1, Address(SP, 2 * kWordSize)); | 677 __ lw(V0, Address(SP, 2 * kWordSize)); |
637 __ lw(A1, Address(SP, 1 * kWordSize)); | 678 __ lw(A1, Address(SP, 1 * kWordSize)); |
638 __ lw(T3, Address(SP, 0 * kWordSize)); | 679 __ lw(A0, Address(SP, 0 * kWordSize)); |
639 __ addiu(SP, SP, Immediate(3 * kWordSize)); | 680 __ addiu(SP, SP, Immediate(3 * kWordSize)); |
640 __ mov(V0, TMP1); | |
641 | 681 |
642 __ LeaveStubFrameAndReturn(); | 682 __ LeaveStubFrameAndReturn(); |
643 } | 683 } |
644 | 684 |
645 | 685 |
646 // Input parameters: | 686 // Input parameters: |
647 // RA: return address. | 687 // RA: return address. |
648 // SP: address of last argument. | 688 // SP: address of last argument. |
649 // S4: Arguments descriptor array. | 689 // S4: Arguments descriptor array. |
650 // Return: V0. | 690 // Return: V0. |
651 // Note: The closure object is the first argument to the function being | 691 // Note: The closure object is the first argument to the function being |
652 // called, the stub accesses the closure from this location directly | 692 // called, the stub accesses the closure from this location directly |
653 // when trying to resolve the call. | 693 // when trying to resolve the call. |
654 void StubCode::GenerateCallClosureFunctionStub(Assembler* assembler) { | 694 void StubCode::GenerateCallClosureFunctionStub(Assembler* assembler) { |
655 // Load num_args. | 695 // Load num_args. |
656 __ TraceSimMsg("GenerateCallClosureFunctionStub"); | 696 __ TraceSimMsg("GenerateCallClosureFunctionStub"); |
657 __ lw(T0, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 697 __ lw(T0, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
658 __ LoadImmediate(TMP1, Smi::RawValue(1)); | 698 __ LoadImmediate(TMP1, Smi::RawValue(1)); |
659 __ subu(T0, T0, TMP1); | 699 __ subu(T0, T0, TMP1); |
660 | 700 |
661 // Load closure object in T1. | 701 // Load closure object in T1. |
662 __ sll(T1, T0, 1); // T0 (num_args - 1) is a Smi. | 702 __ sll(T1, T0, 1); // T0 (num_args - 1) is a Smi. |
663 __ addu(T1, SP, T1); | 703 __ addu(T1, SP, T1); |
664 __ lw(T1, Address(T1)); | 704 __ lw(T1, Address(T1)); |
665 | 705 |
666 // Verify that T1 is a closure by checking its class. | 706 // Verify that T1 is a closure by checking its class. |
667 Label not_closure; | 707 Label not_closure; |
668 | 708 |
| 709 __ LoadImmediate(T7, reinterpret_cast<intptr_t>(Object::null())); |
| 710 |
669 // See if it is not a closure, but null object. | 711 // See if it is not a closure, but null object. |
670 __ beq(T1, NULLREG, ¬_closure); | 712 __ beq(T1, T7, ¬_closure); |
671 | 713 |
672 __ andi(CMPRES, T1, Immediate(kSmiTagMask)); | 714 __ andi(CMPRES, T1, Immediate(kSmiTagMask)); |
673 __ beq(CMPRES, ZR, ¬_closure); // Not a closure, but a smi. | 715 __ beq(CMPRES, ZR, ¬_closure); // Not a closure, but a smi. |
674 | 716 |
675 // Verify that the class of the object is a closure class by checking that | 717 // Verify that the class of the object is a closure class by checking that |
676 // class.signature_function() is not null. | 718 // class.signature_function() is not null. |
677 __ LoadClass(T0, T1); | 719 __ LoadClass(T0, T1); |
678 __ lw(T0, FieldAddress(T0, Class::signature_function_offset())); | 720 __ lw(T0, FieldAddress(T0, Class::signature_function_offset())); |
679 | 721 |
680 // See if actual class is not a closure class. | 722 // See if actual class is not a closure class. |
681 __ beq(T0, NULLREG, ¬_closure); | 723 __ beq(T0, T7, ¬_closure); |
682 | 724 |
683 // T0 is just the signature function. Load the actual closure function. | 725 // T0 is just the signature function. Load the actual closure function. |
684 __ lw(T2, FieldAddress(T1, Closure::function_offset())); | 726 __ lw(T2, FieldAddress(T1, Closure::function_offset())); |
685 | 727 |
686 // Load closure context in CTX; note that CTX has already been preserved. | 728 // Load closure context in CTX; note that CTX has already been preserved. |
687 __ lw(CTX, FieldAddress(T1, Closure::context_offset())); | 729 __ lw(CTX, FieldAddress(T1, Closure::context_offset())); |
688 | 730 |
689 Label function_compiled; | 731 Label function_compiled; |
690 // Load closure function code in T0. | 732 // Load closure function code in T0. |
691 __ lw(T0, FieldAddress(T2, Function::code_offset())); | 733 __ lw(T0, FieldAddress(T2, Function::code_offset())); |
692 __ bne(T0, NULLREG, &function_compiled); | 734 __ bne(T0, T7, &function_compiled); |
693 | 735 |
694 // Create a stub frame as we are pushing some objects on the stack before | 736 // Create a stub frame as we are pushing some objects on the stack before |
695 // calling into the runtime. | 737 // calling into the runtime. |
696 __ EnterStubFrame(); | 738 __ EnterStubFrame(); |
697 | 739 |
698 // Preserve arguments descriptor array and read-only function object argument. | 740 // Preserve arguments descriptor array and read-only function object argument. |
699 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 741 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
700 __ sw(S4, Address(SP, 1 * kWordSize)); | 742 __ sw(S4, Address(SP, 1 * kWordSize)); |
701 __ sw(T2, Address(SP, 0 * kWordSize)); | 743 __ sw(T2, Address(SP, 0 * kWordSize)); |
702 __ CallRuntime(kCompileFunctionRuntimeEntry); | 744 __ CallRuntime(kCompileFunctionRuntimeEntry); |
(...skipping 21 matching lines...)
724 // returning here. | 766 // returning here. |
725 // If no call method exists, throw a NoSuchMethodError. | 767 // If no call method exists, throw a NoSuchMethodError. |
726 // T1: non-closure object. | 768 // T1: non-closure object. |
727 // S4: arguments descriptor array. | 769 // S4: arguments descriptor array. |
728 | 770 |
729 // Create a stub frame as we are pushing some objects on the stack before | 771 // Create a stub frame as we are pushing some objects on the stack before |
730 // calling into the runtime. | 772 // calling into the runtime. |
731 __ EnterStubFrame(); | 773 __ EnterStubFrame(); |
732 | 774 |
733 // Setup space on stack for result from error reporting. | 775 // Setup space on stack for result from error reporting. |
734 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 776 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
735 // Arguments descriptor and raw null. | 777 // Arguments descriptor and raw null. |
736 __ sw(NULLREG, Address(SP, 1 * kWordSize)); | 778 __ sw(T7, Address(SP, 1 * kWordSize)); |
737 __ sw(S4, Address(SP, 0 * kWordSize)); | 779 __ sw(S4, Address(SP, 0 * kWordSize)); |
738 | 780 |
739 // Load smi-tagged arguments array length, including the non-closure. | 781 // Load smi-tagged arguments array length, including the non-closure. |
740 __ lw(A1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 782 __ lw(A1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
741 PushArgumentsArray(assembler); | 783 PushArgumentsArray(assembler); |
742 | 784 |
743 // Stack: | 785 // Stack: |
744 // TOS + 0: Argument array. | 786 // TOS + 0: Argument array. |
745 // TOS + 1: Arguments descriptor array. | 787 // TOS + 1: Arguments descriptor array. |
746 // TOS + 2: Place for result from the call. | 788 // TOS + 2: Place for result from the call. |
(...skipping 71 matching lines...)
818 // Load arguments descriptor array into S4, which is passed to Dart code. | 860 // Load arguments descriptor array into S4, which is passed to Dart code. |
819 __ lw(S4, Address(A1, VMHandles::kOffsetOfRawPtrInHandle)); | 861 __ lw(S4, Address(A1, VMHandles::kOffsetOfRawPtrInHandle)); |
820 | 862 |
821 // Load number of arguments into S5. | 863 // Load number of arguments into S5. |
822 __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); | 864 __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
823 __ SmiUntag(T1); | 865 __ SmiUntag(T1); |
824 | 866 |
825 // Compute address of 'arguments array' data area into A2. | 867 // Compute address of 'arguments array' data area into A2. |
826 __ lw(A2, Address(A2, VMHandles::kOffsetOfRawPtrInHandle)); | 868 __ lw(A2, Address(A2, VMHandles::kOffsetOfRawPtrInHandle)); |
827 | 869 |
828 // Load the null Object into NULLREG for easy comparisons. | |
829 __ LoadImmediate(NULLREG, reinterpret_cast<intptr_t>(Object::null())); | |
830 | |
831 // Set up arguments for the Dart call. | 870 // Set up arguments for the Dart call. |
832 Label push_arguments; | 871 Label push_arguments; |
833 Label done_push_arguments; | 872 Label done_push_arguments; |
834 __ beq(T1, ZR, &done_push_arguments); // check if there are arguments. | 873 __ beq(T1, ZR, &done_push_arguments); // check if there are arguments. |
835 __ delay_slot()->addiu(A2, A2, | 874 __ delay_slot()->addiu(A2, A2, |
836 Immediate(Array::data_offset() - kHeapObjectTag)); | 875 Immediate(Array::data_offset() - kHeapObjectTag)); |
837 __ mov(A1, ZR); | 876 __ mov(A1, ZR); |
838 __ Bind(&push_arguments); | 877 __ Bind(&push_arguments); |
839 __ lw(A3, Address(A2)); | 878 __ lw(A3, Address(A2)); |
840 __ Push(A3); | 879 __ Push(A3); |
(...skipping 112 matching lines...)
953 __ sw(T1, FieldAddress(V0, Context::num_variables_offset())); | 992 __ sw(T1, FieldAddress(V0, Context::num_variables_offset())); |
954 | 993 |
955 // Setup isolate field. | 994 // Setup isolate field. |
956 // Load Isolate pointer from Context structure into R2. | 995 // Load Isolate pointer from Context structure into R2. |
957 // V0: new object. | 996 // V0: new object. |
958 // T1: number of context variables. | 997 // T1: number of context variables. |
959 __ lw(T2, FieldAddress(CTX, Context::isolate_offset())); | 998 __ lw(T2, FieldAddress(CTX, Context::isolate_offset())); |
960 // T2: isolate, not an object. | 999 // T2: isolate, not an object. |
961 __ sw(T2, FieldAddress(V0, Context::isolate_offset())); | 1000 __ sw(T2, FieldAddress(V0, Context::isolate_offset())); |
962 | 1001 |
| 1002 __ LoadImmediate(T7, reinterpret_cast<intptr_t>(Object::null())); |
| 1003 |
963 // Initialize the context variables. | 1004 // Initialize the context variables. |
964 // V0: new object. | 1005 // V0: new object. |
965 // T1: number of context variables. | 1006 // T1: number of context variables. |
966 Label loop, loop_exit; | 1007 Label loop, loop_exit; |
967 __ blez(T1, &loop_exit); | 1008 __ blez(T1, &loop_exit); |
968 // Setup the parent field. | 1009 // Setup the parent field. |
969 __ delay_slot()->sw(NULLREG, FieldAddress(V0, Context::parent_offset())); | 1010 __ delay_slot()->sw(T7, FieldAddress(V0, Context::parent_offset())); |
970 __ AddImmediate(T3, V0, Context::variable_offset(0) - kHeapObjectTag); | 1011 __ AddImmediate(T3, V0, Context::variable_offset(0) - kHeapObjectTag); |
971 __ sll(T1, T1, 2); | 1012 __ sll(T1, T1, 2); |
972 __ Bind(&loop); | 1013 __ Bind(&loop); |
973 __ addiu(T1, T1, Immediate(-kWordSize)); | 1014 __ addiu(T1, T1, Immediate(-kWordSize)); |
974 __ addu(TMP1, T3, T1); | 1015 __ addu(TMP1, T3, T1); |
975 __ bgtz(T1, &loop); | 1016 __ bgtz(T1, &loop); |
976 __ delay_slot()->sw(NULLREG, Address(TMP1)); | 1017 __ delay_slot()->sw(T7, Address(TMP1)); |
977 __ Bind(&loop_exit); | 1018 __ Bind(&loop_exit); |
978 | 1019 |
979 // Done allocating and initializing the context. | 1020 // Done allocating and initializing the context. |
980 // V0: new object. | 1021 // V0: new object. |
981 __ Ret(); | 1022 __ Ret(); |
982 | 1023 |
983 __ Bind(&slow_case); | 1024 __ Bind(&slow_case); |
984 } | 1025 } |
985 // Create a stub frame as we are pushing some objects on the stack before | 1026 // Create a stub frame as we are pushing some objects on the stack before |
986 // calling into the runtime. | 1027 // calling into the runtime. |
987 __ EnterStubFrame(); | 1028 __ EnterStubFrame(); |
988 // Setup space on stack for return value. | 1029 // Setup space on stack for return value. |
989 __ SmiTag(T1); | 1030 __ SmiTag(T1); |
990 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 1031 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
991 __ sw(NULLREG, Address(SP, 1 * kWordSize)); | 1032 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 1033 __ sw(TMP, Address(SP, 1 * kWordSize)); // Store null. |
992 __ sw(T1, Address(SP, 0 * kWordSize)); | 1034 __ sw(T1, Address(SP, 0 * kWordSize)); |
993 __ CallRuntime(kAllocateContextRuntimeEntry); // Allocate context. | 1035 __ CallRuntime(kAllocateContextRuntimeEntry); // Allocate context. |
994 __ lw(V0, Address(SP, 1 * kWordSize)); // Get the new context. | 1036 __ lw(V0, Address(SP, 1 * kWordSize)); // Get the new context. |
995 __ addiu(SP, SP, Immediate(2 * kWordSize)); // Pop argument and return. | 1037 __ addiu(SP, SP, Immediate(2 * kWordSize)); // Pop argument and return. |
996 | 1038 |
997 // V0: new object | 1039 // V0: new object |
998 // Restore the frame pointer. | 1040 // Restore the frame pointer. |
999 __ LeaveStubFrameAndReturn(); | 1041 __ LeaveStubFrameAndReturn(); |
1000 } | 1042 } |
1001 | 1043 |
(...skipping 163 matching lines...)
1165 // T3: next object start. | 1207 // T3: next object start. |
1166 // T1: new object type arguments (if is_cls_parameterized). | 1208 // T1: new object type arguments (if is_cls_parameterized). |
1167 // Set the tags. | 1209 // Set the tags. |
1168 uword tags = 0; | 1210 uword tags = 0; |
1169 tags = RawObject::SizeTag::update(instance_size, tags); | 1211 tags = RawObject::SizeTag::update(instance_size, tags); |
1170 ASSERT(cls.id() != kIllegalCid); | 1212 ASSERT(cls.id() != kIllegalCid); |
1171 tags = RawObject::ClassIdTag::update(cls.id(), tags); | 1213 tags = RawObject::ClassIdTag::update(cls.id(), tags); |
1172 __ LoadImmediate(T0, tags); | 1214 __ LoadImmediate(T0, tags); |
1173 __ sw(T0, Address(T2, Instance::tags_offset())); | 1215 __ sw(T0, Address(T2, Instance::tags_offset())); |
1174 | 1216 |
| 1217 __ LoadImmediate(T7, reinterpret_cast<intptr_t>(Object::null())); |
| 1218 |
1175 // Initialize the remaining words of the object. | 1219 // Initialize the remaining words of the object. |
1176 // T2: new object start. | 1220 // T2: new object start. |
1177 // T3: next object start. | 1221 // T3: next object start. |
1178 // T1: new object type arguments (if is_cls_parameterized). | 1222 // T1: new object type arguments (if is_cls_parameterized). |
1179 // First try inlining the initialization without a loop. | 1223 // First try inlining the initialization without a loop. |
1180 if (instance_size < (kInlineInstanceSize * kWordSize)) { | 1224 if (instance_size < (kInlineInstanceSize * kWordSize)) { |
1181 // Check if the object contains any non-header fields. | 1225 // Check if the object contains any non-header fields. |
1182 // Small objects are initialized using a consecutive set of writes. | 1226 // Small objects are initialized using a consecutive set of writes. |
1183 for (intptr_t current_offset = sizeof(RawObject); | 1227 for (intptr_t current_offset = sizeof(RawObject); |
1184 current_offset < instance_size; | 1228 current_offset < instance_size; |
1185 current_offset += kWordSize) { | 1229 current_offset += kWordSize) { |
1186 __ sw(NULLREG, Address(T2, current_offset)); | 1230 __ sw(T7, Address(T2, current_offset)); |
1187 } | 1231 } |
1188 } else { | 1232 } else { |
1189 __ addiu(T4, T2, Immediate(sizeof(RawObject))); | 1233 __ addiu(T4, T2, Immediate(sizeof(RawObject))); |
1190 // Loop until the whole object is initialized. | 1234 // Loop until the whole object is initialized. |
1191 // T2: new object. | 1235 // T2: new object. |
1192 // T3: next object start. | 1236 // T3: next object start. |
1193 // T4: next word to be initialized. | 1237 // T4: next word to be initialized. |
1194 // T1: new object type arguments (if is_cls_parameterized). | 1238 // T1: new object type arguments (if is_cls_parameterized). |
1195 Label loop, loop_exit; | 1239 Label loop, loop_exit; |
1196 __ BranchUnsignedGreaterEqual(T4, T3, &loop_exit); | 1240 __ BranchUnsignedGreaterEqual(T4, T3, &loop_exit); |
1197 __ Bind(&loop); | 1241 __ Bind(&loop); |
1198 __ addiu(T4, T4, Immediate(kWordSize)); | 1242 __ addiu(T4, T4, Immediate(kWordSize)); |
1199 __ bne(T4, T3, &loop); | 1243 __ bne(T4, T3, &loop); |
1200 __ delay_slot()->sw(NULLREG, Address(T4, -kWordSize)); | 1244 __ delay_slot()->sw(T7, Address(T4, -kWordSize)); |
1201 __ Bind(&loop_exit); | 1245 __ Bind(&loop_exit); |
1202 } | 1246 } |
1203 if (is_cls_parameterized) { | 1247 if (is_cls_parameterized) { |
1204 // R1: new object type arguments. | 1248 // R1: new object type arguments. |
1205 // Set the type arguments in the new object. | 1249 // Set the type arguments in the new object. |
1206 __ sw(T1, Address(T2, cls.type_arguments_field_offset())); | 1250 __ sw(T1, Address(T2, cls.type_arguments_field_offset())); |
1207 } | 1251 } |
1208 // Done allocating and initializing the instance. | 1252 // Done allocating and initializing the instance. |
1209 // T2: new object still missing its heap tag. | 1253 // T2: new object still missing its heap tag. |
1210 __ Ret(); | 1254 __ Ret(); |
1211 __ delay_slot()->addiu(V0, T2, Immediate(kHeapObjectTag)); | 1255 __ delay_slot()->addiu(V0, T2, Immediate(kHeapObjectTag)); |
1212 | 1256 |
1213 __ Bind(&slow_case); | 1257 __ Bind(&slow_case); |
1214 } | 1258 } |
1215 if (is_cls_parameterized) { | 1259 if (is_cls_parameterized) { |
1216 __ lw(T1, Address(SP, 1 * kWordSize)); | 1260 __ lw(T1, Address(SP, 1 * kWordSize)); |
1217 __ lw(T0, Address(SP, 0 * kWordSize)); | 1261 __ lw(T0, Address(SP, 0 * kWordSize)); |
1218 } | 1262 } |
1219 // Create a stub frame as we are pushing some objects on the stack before | 1263 // Create a stub frame as we are pushing some objects on the stack before |
1220 // calling into the runtime. | 1264 // calling into the runtime. |
1221 __ EnterStubFrame(true); // Uses pool pointer to pass cls to runtime. | 1265 __ EnterStubFrame(true); // Uses pool pointer to pass cls to runtime. |
1222 __ LoadObject(TMP1, cls); | 1266 __ LoadObject(TMP1, cls); |
1223 | 1267 |
1224 __ addiu(SP, SP, Immediate(-4 * kWordSize)); | 1268 __ addiu(SP, SP, Immediate(-4 * kWordSize)); |
1225 // Space on stack for return value. | 1269 // Space on stack for return value. |
1226 __ sw(NULLREG, Address(SP, 3 * kWordSize)); | 1270 __ LoadImmediate(T7, reinterpret_cast<intptr_t>(Object::null())); |
| 1271 __ sw(T7, Address(SP, 3 * kWordSize)); |
1227 __ sw(TMP1, Address(SP, 2 * kWordSize)); // Class of object to be allocated. | 1272 __ sw(TMP1, Address(SP, 2 * kWordSize)); // Class of object to be allocated. |
1228 | 1273 |
1229 if (is_cls_parameterized) { | 1274 if (is_cls_parameterized) { |
1230 // Push type arguments of object to be allocated and of instantiator. | 1275 // Push type arguments of object to be allocated and of instantiator. |
1231 __ sw(T1, Address(SP, 1 * kWordSize)); | 1276 __ sw(T1, Address(SP, 1 * kWordSize)); |
1232 __ sw(T0, Address(SP, 0 * kWordSize)); | 1277 __ sw(T0, Address(SP, 0 * kWordSize)); |
1233 } else { | 1278 } else { |
1234 // Push null type arguments and kNoInstantiator. | 1279 // Push null type arguments and kNoInstantiator. |
1235 __ LoadImmediate(T1, Smi::RawValue(StubCode::kNoInstantiator)); | 1280 __ LoadImmediate(T1, Smi::RawValue(StubCode::kNoInstantiator)); |
1236 __ sw(NULLREG, Address(SP, 1 * kWordSize)); | 1281 __ sw(T7, Address(SP, 1 * kWordSize)); |
1237 __ sw(T1, Address(SP, 0 * kWordSize)); | 1282 __ sw(T1, Address(SP, 0 * kWordSize)); |
1238 } | 1283 } |
1239 __ CallRuntime(kAllocateObjectRuntimeEntry); // Allocate object. | 1284 __ CallRuntime(kAllocateObjectRuntimeEntry); // Allocate object. |
1240 __ TraceSimMsg("AllocationStubForClass return"); | 1285 __ TraceSimMsg("AllocationStubForClass return"); |
1241 // Pop result (newly allocated object). | 1286 // Pop result (newly allocated object). |
1242 __ lw(V0, Address(SP, 3 * kWordSize)); | 1287 __ lw(V0, Address(SP, 3 * kWordSize)); |
1243 __ addiu(SP, SP, Immediate(4 * kWordSize)); // Pop arguments. | 1288 __ addiu(SP, SP, Immediate(4 * kWordSize)); // Pop arguments. |
1244 // V0: new object | 1289 // V0: new object |
1245 // Restore the frame pointer and return. | 1290 // Restore the frame pointer and return. |
1246 __ LeaveStubFrameAndReturn(RA, true); | 1291 __ LeaveStubFrameAndReturn(RA, true); |
(...skipping 10 matching lines...)
1257 ASSERT(func.IsClosureFunction()); | 1302 ASSERT(func.IsClosureFunction()); |
1258 const bool is_implicit_static_closure = | 1303 const bool is_implicit_static_closure = |
1259 func.IsImplicitStaticClosureFunction(); | 1304 func.IsImplicitStaticClosureFunction(); |
1260 const bool is_implicit_instance_closure = | 1305 const bool is_implicit_instance_closure = |
1261 func.IsImplicitInstanceClosureFunction(); | 1306 func.IsImplicitInstanceClosureFunction(); |
1262 const Class& cls = Class::ZoneHandle(func.signature_class()); | 1307 const Class& cls = Class::ZoneHandle(func.signature_class()); |
1263 const bool has_type_arguments = cls.HasTypeArguments(); | 1308 const bool has_type_arguments = cls.HasTypeArguments(); |
1264 | 1309 |
1265 __ TraceSimMsg("AllocationStubForClosure"); | 1310 __ TraceSimMsg("AllocationStubForClosure"); |
1266 __ EnterStubFrame(true); // Uses pool pointer to refer to function. | 1311 __ EnterStubFrame(true); // Uses pool pointer to refer to function. |
1267 const intptr_t kTypeArgumentsFPOffset = 4 * kWordSize; | 1312 const intptr_t kTypeArgumentsFPOffset = 3 * kWordSize; |
1268 const intptr_t kReceiverFPOffset = 5 * kWordSize; | 1313 const intptr_t kReceiverFPOffset = 4 * kWordSize; |
1269 const intptr_t closure_size = Closure::InstanceSize(); | 1314 const intptr_t closure_size = Closure::InstanceSize(); |
1270 const intptr_t context_size = Context::InstanceSize(1); // Captured receiver. | 1315 const intptr_t context_size = Context::InstanceSize(1); // Captured receiver. |
1271 if (FLAG_inline_alloc && | 1316 if (FLAG_inline_alloc && |
1272 Heap::IsAllocatableInNewSpace(closure_size + context_size)) { | 1317 Heap::IsAllocatableInNewSpace(closure_size + context_size)) { |
1273 Label slow_case; | 1318 Label slow_case; |
1274 Heap* heap = Isolate::Current()->heap(); | 1319 Heap* heap = Isolate::Current()->heap(); |
1275 __ LoadImmediate(T5, heap->TopAddress()); | 1320 __ LoadImmediate(T5, heap->TopAddress()); |
1276 __ lw(T2, Address(T5)); | 1321 __ lw(T2, Address(T5)); |
1277 __ AddImmediate(T3, T2, closure_size); | 1322 __ AddImmediate(T3, T2, closure_size); |
1278 if (is_implicit_instance_closure) { | 1323 if (is_implicit_instance_closure) { |
(...skipping 51 matching lines...)
1330 | 1375 |
1331 // Set number of variables field to 1 (for captured receiver). | 1376 // Set number of variables field to 1 (for captured receiver). |
1332 __ LoadImmediate(T0, 1); | 1377 __ LoadImmediate(T0, 1); |
1333 __ sw(T0, Address(T4, Context::num_variables_offset())); | 1378 __ sw(T0, Address(T4, Context::num_variables_offset())); |
1334 | 1379 |
1335 // Set isolate field to isolate of current context. | 1380 // Set isolate field to isolate of current context. |
1336 __ lw(T0, FieldAddress(CTX, Context::isolate_offset())); | 1381 __ lw(T0, FieldAddress(CTX, Context::isolate_offset())); |
1337 __ sw(T0, Address(T4, Context::isolate_offset())); | 1382 __ sw(T0, Address(T4, Context::isolate_offset())); |
1338 | 1383 |
1339 // Set the parent to null. | 1384 // Set the parent to null. |
1340 __ sw(NULLREG, Address(T4, Context::parent_offset())); | 1385 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 1386 __ sw(TMP, Address(T4, Context::parent_offset())); |
1341 | 1387 |
1342 // Initialize the context variable to the receiver. | 1388 // Initialize the context variable to the receiver. |
1343 __ lw(T0, Address(FP, kReceiverFPOffset)); | 1389 __ lw(T0, Address(FP, kReceiverFPOffset)); |
1344 __ sw(T0, Address(T4, Context::variable_offset(0))); | 1390 __ sw(T0, Address(T4, Context::variable_offset(0))); |
1345 | 1391 |
1346 // Set the newly allocated context in the newly allocated closure. | 1392 // Set the newly allocated context in the newly allocated closure. |
1347 __ AddImmediate(T1, T4, kHeapObjectTag); | 1393 __ AddImmediate(T1, T4, kHeapObjectTag); |
1348 __ sw(T1, Address(T2, Closure::context_offset())); | 1394 __ sw(T1, Address(T2, Closure::context_offset())); |
1349 } else { | 1395 } else { |
1350 __ sw(CTX, Address(T2, Closure::context_offset())); | 1396 __ sw(CTX, Address(T2, Closure::context_offset())); |
(...skipping 12 matching lines...)
1363 __ Bind(&slow_case); | 1409 __ Bind(&slow_case); |
1364 } | 1410 } |
1365 | 1411 |
1366 // If it's an implicit static closure we need 2 stack slots. Otherwise, | 1412 // If it's an implicit static closure we need 2 stack slots. Otherwise, |
1367 // If it's an implicit instance closure we need 4 stack slots, o/w only 3. | 1413 // If it's an implicit instance closure we need 4 stack slots, o/w only 3. |
1368 int num_slots = 2; | 1414 int num_slots = 2; |
1369 if (!is_implicit_static_closure) { | 1415 if (!is_implicit_static_closure) { |
1370 num_slots = is_implicit_instance_closure ? 4 : 3; | 1416 num_slots = is_implicit_instance_closure ? 4 : 3; |
1371 } | 1417 } |
1372 __ addiu(SP, SP, Immediate(-num_slots * kWordSize)); | 1418 __ addiu(SP, SP, Immediate(-num_slots * kWordSize)); |
| 1419 // Setup space on stack for return value. |
| 1420 __ LoadImmediate(T7, reinterpret_cast<intptr_t>(Object::null())); |
| 1421 __ sw(T7, Address(SP, (num_slots - 1) * kWordSize)); |
1373 __ LoadObject(TMP1, func); | 1422 __ LoadObject(TMP1, func); |
1374 // Setup space on stack for return value. | |
1375 __ sw(NULLREG, Address(SP, (num_slots - 1) * kWordSize)); | |
1376 __ sw(TMP1, Address(SP, (num_slots - 2) * kWordSize)); | 1423 __ sw(TMP1, Address(SP, (num_slots - 2) * kWordSize)); |
1377 if (is_implicit_static_closure) { | 1424 if (is_implicit_static_closure) { |
1378 __ CallRuntime(kAllocateImplicitStaticClosureRuntimeEntry); | 1425 __ CallRuntime(kAllocateImplicitStaticClosureRuntimeEntry); |
1379 __ TraceSimMsg("AllocationStubForClosure return"); | 1426 __ TraceSimMsg("AllocationStubForClosure return"); |
1380 } else { | 1427 } else { |
| 1428 __ mov(T2, T7); |
1381 if (is_implicit_instance_closure) { | 1429 if (is_implicit_instance_closure) { |
1382 __ lw(T1, Address(FP, kReceiverFPOffset)); | 1430 __ lw(T1, Address(FP, kReceiverFPOffset)); |
1383 __ sw(T1, Address(SP, (num_slots - 3) * kWordSize)); // Receiver. | 1431 __ sw(T1, Address(SP, (num_slots - 3) * kWordSize)); // Receiver. |
1384 __ sw(NULLREG, Address(SP, (num_slots - 4) * kWordSize)); // Push null. | |
1385 } | 1432 } |
1386 if (has_type_arguments) { | 1433 if (has_type_arguments) { |
1387 __ lw(V0, Address(FP, kTypeArgumentsFPOffset)); | 1434 __ lw(T2, Address(FP, kTypeArgumentsFPOffset)); |
1388 // Push type arguments of closure. | |
1389 __ sw(V0, Address(SP, (num_slots - 3) * kWordSize)); | |
1390 } | 1435 } |
| 1436 __ sw(T2, Address(SP, 0 * kWordSize)); |
1391 | 1437 |
1392 if (is_implicit_instance_closure) { | 1438 if (is_implicit_instance_closure) { |
1393 __ CallRuntime(kAllocateImplicitInstanceClosureRuntimeEntry); | 1439 __ CallRuntime(kAllocateImplicitInstanceClosureRuntimeEntry); |
1394 __ TraceSimMsg("AllocationStubForClosure return"); | 1440 __ TraceSimMsg("AllocationStubForClosure return"); |
1395 } else { | 1441 } else { |
1396 ASSERT(func.IsNonImplicitClosureFunction()); | 1442 ASSERT(func.IsNonImplicitClosureFunction()); |
1397 __ CallRuntime(kAllocateClosureRuntimeEntry); | 1443 __ CallRuntime(kAllocateClosureRuntimeEntry); |
1398 __ TraceSimMsg("AllocationStubForClosure return"); | 1444 __ TraceSimMsg("AllocationStubForClosure return"); |
1399 } | 1445 } |
1400 } | 1446 } |
1401 __ lw(V0, Address(SP, (num_slots - 1) * kWordSize)); // Pop function object. | 1447 __ lw(V0, Address(SP, (num_slots - 1) * kWordSize)); // Pop function object. |
1402 __ addiu(SP, SP, Immediate(num_slots * kWordSize)); | 1448 __ addiu(SP, SP, Immediate(num_slots * kWordSize)); |
1403 | 1449 |
1404 // V0: new object | 1450 // V0: new object |
1405 // Restore the frame pointer. | 1451 // Restore the frame pointer. |
1406 __ LeaveStubFrameAndReturn(RA, true); | 1452 __ LeaveStubFrameAndReturn(RA, true); |
1407 } | 1453 } |
1408 | 1454 |
1409 | 1455 |
| 1456 // The target function was not found, so invoke method |
| 1457 // "dynamic noSuchMethod(Invocation invocation)". |
| 1458 // S5: inline cache data object. |
| 1459 // S4: arguments descriptor array. |
1410 void StubCode::GenerateCallNoSuchMethodFunctionStub(Assembler* assembler) { | 1460 void StubCode::GenerateCallNoSuchMethodFunctionStub(Assembler* assembler) { |
1411 __ Unimplemented("CallNoSuchMethodFunction stub"); | 1461 __ EnterStubFrame(); |
| 1462 |
| 1463 // Load the receiver. |
| 1464 __ lw(A1, FieldAddress(S4, ArgumentsDescriptor::count_offset())); |
| 1465 __ sll(TMP, A1, 1); // A1 is a Smi. |
| 1466 __ addu(TMP, FP, TMP); |
| 1467 __ lw(T6, Address(TMP, kParamEndSlotFromFp * kWordSize)); |
| 1468 |
| 1469 // Push space for the return value. |
| 1470 // Push the receiver. |
| 1471 // Push IC data object. |
| 1472 // Push arguments descriptor array. |
| 1473 __ addiu(SP, SP, Immediate(-4 * kWordSize)); |
| 1474 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 1475 __ sw(TMP, Address(SP, 3 * kWordSize)); |
| 1476 __ sw(T6, Address(SP, 2 * kWordSize)); |
| 1477 __ sw(S5, Address(SP, 1 * kWordSize)); |
| 1478 __ sw(S4, Address(SP, 0 * kWordSize)); |
| 1479 |
| 1480 // A1: Smi-tagged arguments array length. |
| 1481 PushArgumentsArray(assembler); |
| 1482 |
| 1483 __ CallRuntime(kInvokeNoSuchMethodFunctionRuntimeEntry); |
| 1484 |
| 1485 __ lw(V0, Address(SP, 4 * kWordSize)); // Get result into V0. |
| 1486 __ LeaveStubFrameAndReturn(); |
1412 } | 1487 } |
1413 | 1488 |
1414 | 1489 |
1415 // T0: function object. | 1490 // T0: function object. |
1416 // S5: inline cache data object. | 1491 // S5: inline cache data object. |
1417 // S4: arguments descriptor array. | 1492 // S4: arguments descriptor array. |
1418 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { | 1493 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { |
1419 __ TraceSimMsg("OptimizedUsageCounterIncrement"); | 1494 __ TraceSimMsg("OptimizedUsageCounterIncrement"); |
1420 Register ic_reg = S5; | 1495 Register ic_reg = S5; |
1421 Register func_reg = T0; | 1496 Register func_reg = T0; |
(...skipping 156 matching lines...)
1578 // T1: address of receiver. | 1653 // T1: address of receiver. |
1579 // Create a stub frame as we are pushing some objects on the stack before | 1654 // Create a stub frame as we are pushing some objects on the stack before |
1580 // calling into the runtime. | 1655 // calling into the runtime. |
1581 __ EnterStubFrame(); | 1656 __ EnterStubFrame(); |
1582 // Preserve IC data object and arguments descriptor array and | 1657 // Preserve IC data object and arguments descriptor array and |
1583 // setup space on stack for result (target code object). | 1658 // setup space on stack for result (target code object). |
1584 int num_slots = num_args + 5; | 1659 int num_slots = num_args + 5; |
1585 __ addiu(SP, SP, Immediate(-num_slots * kWordSize)); | 1660 __ addiu(SP, SP, Immediate(-num_slots * kWordSize)); |
1586 __ sw(S5, Address(SP, (num_slots - 1) * kWordSize)); | 1661 __ sw(S5, Address(SP, (num_slots - 1) * kWordSize)); |
1587 __ sw(S4, Address(SP, (num_slots - 2) * kWordSize)); | 1662 __ sw(S4, Address(SP, (num_slots - 2) * kWordSize)); |
1588 __ sw(NULLREG, Address(SP, (num_slots - 3) * kWordSize)); | 1663 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 1664 __ sw(TMP, Address(SP, (num_slots - 3) * kWordSize)); |
1589 // Push call arguments. | 1665 // Push call arguments. |
1590 for (intptr_t i = 0; i < num_args; i++) { | 1666 for (intptr_t i = 0; i < num_args; i++) { |
1591 __ lw(TMP1, Address(T1, -i * kWordSize)); | 1667 __ lw(TMP1, Address(T1, -i * kWordSize)); |
1592 __ sw(TMP1, Address(SP, (num_slots - i - 4) * kWordSize)); | 1668 __ sw(TMP1, Address(SP, (num_slots - i - 4) * kWordSize)); |
1593 } | 1669 } |
1594 // Pass IC data object and arguments descriptor array. | 1670 // Pass IC data object and arguments descriptor array. |
1595 __ sw(S5, Address(SP, (num_slots - num_args - 4) * kWordSize)); | 1671 __ sw(S5, Address(SP, (num_slots - num_args - 4) * kWordSize)); |
1596 __ sw(S4, Address(SP, (num_slots - num_args - 5) * kWordSize)); | 1672 __ sw(S4, Address(SP, (num_slots - num_args - 5) * kWordSize)); |
1597 | 1673 |
1598 if (num_args == 1) { | 1674 if (num_args == 1) { |
1599 __ CallRuntime(kInlineCacheMissHandlerOneArgRuntimeEntry); | 1675 __ CallRuntime(kInlineCacheMissHandlerOneArgRuntimeEntry); |
1600 } else if (num_args == 2) { | 1676 } else if (num_args == 2) { |
1601 __ CallRuntime(kInlineCacheMissHandlerTwoArgsRuntimeEntry); | 1677 __ CallRuntime(kInlineCacheMissHandlerTwoArgsRuntimeEntry); |
1602 } else if (num_args == 3) { | 1678 } else if (num_args == 3) { |
1603 __ CallRuntime(kInlineCacheMissHandlerThreeArgsRuntimeEntry); | 1679 __ CallRuntime(kInlineCacheMissHandlerThreeArgsRuntimeEntry); |
1604 } else { | 1680 } else { |
1605 UNIMPLEMENTED(); | 1681 UNIMPLEMENTED(); |
1606 } | 1682 } |
1607 __ TraceSimMsg("NArgsCheckInlineCacheStub return"); | 1683 __ TraceSimMsg("NArgsCheckInlineCacheStub return"); |
1608 // Pop returned code object into T3 (null if not found). | 1684 // Pop returned code object into T3 (null if not found). |
1609 // Restore arguments descriptor array and IC data array. | 1685 // Restore arguments descriptor array and IC data array. |
1610 __ lw(T3, Address(SP, (num_slots - 3) * kWordSize)); | 1686 __ lw(T3, Address(SP, (num_slots - 3) * kWordSize)); |
1611 __ lw(S4, Address(SP, (num_slots - 2) * kWordSize)); | 1687 __ lw(S4, Address(SP, (num_slots - 2) * kWordSize)); |
1612 __ lw(S5, Address(SP, (num_slots - 1) * kWordSize)); | 1688 __ lw(S5, Address(SP, (num_slots - 1) * kWordSize)); |
1613 // Remove the call arguments pushed earlier, including the IC data object | 1689 // Remove the call arguments pushed earlier, including the IC data object |
1614 // and the arguments descriptor array. | 1690 // and the arguments descriptor array. |
1615 __ addiu(SP, SP, Immediate(num_slots * kWordSize)); | 1691 __ addiu(SP, SP, Immediate(num_slots * kWordSize)); |
1616 __ LeaveStubFrame(); | 1692 __ LeaveStubFrame(); |
1617 Label call_target_function; | 1693 Label call_target_function; |
1618 __ bne(T3, NULLREG, &call_target_function); | 1694 __ BranchNotEqual(T3, reinterpret_cast<int32_t>(Object::null()), |
| 1695 &call_target_function); |
1619 | 1696 |
1620 // NoSuchMethod or closure. | 1697 // NoSuchMethod or closure. |
1621 // Mark IC call that it may be a closure call that does not collect | 1698 // Mark IC call that it may be a closure call that does not collect |
1622 // type feedback. | 1699 // type feedback. |
1623 __ LoadImmediate(TMP1, 1); | 1700 __ LoadImmediate(TMP1, 1); |
1624 __ Branch(&StubCode::InstanceFunctionLookupLabel()); | 1701 __ Branch(&StubCode::InstanceFunctionLookupLabel()); |
1625 __ delay_slot()->sb(TMP1, FieldAddress(S5, ICData::is_closure_call_offset())); | 1702 __ delay_slot()->sb(TMP1, FieldAddress(S5, ICData::is_closure_call_offset())); |
1626 | 1703 |
1627 __ Bind(&found); | 1704 __ Bind(&found); |
1628 // T0: Pointer to an IC data check group. | 1705 // T0: Pointer to an IC data check group. |
(...skipping 101 matching lines...)
1730 // RA: return address (Dart code). | 1807 // RA: return address (Dart code). |
1731 // S4: Arguments descriptor array. | 1808 // S4: Arguments descriptor array. |
1732 void StubCode::GenerateBreakpointStaticStub(Assembler* assembler) { | 1809 void StubCode::GenerateBreakpointStaticStub(Assembler* assembler) { |
1733 __ TraceSimMsg("BreakpointStaticStub"); | 1810 __ TraceSimMsg("BreakpointStaticStub"); |
1734 // Create a stub frame as we are pushing some objects on the stack before | 1811 // Create a stub frame as we are pushing some objects on the stack before |
1735 // calling into the runtime. | 1812 // calling into the runtime. |
1736 __ EnterStubFrame(); | 1813 __ EnterStubFrame(); |
1737 // Preserve arguments descriptor and make room for result. | 1814 // Preserve arguments descriptor and make room for result. |
1738 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 1815 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
1739 __ sw(S4, Address(SP, 1 * kWordSize)); | 1816 __ sw(S4, Address(SP, 1 * kWordSize)); |
1740 __ sw(NULLREG, Address(SP, 0 * kWordSize)); | 1817 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 1818 __ sw(TMP, Address(SP, 0 * kWordSize)); |
1741 __ CallRuntime(kBreakpointStaticHandlerRuntimeEntry); | 1819 __ CallRuntime(kBreakpointStaticHandlerRuntimeEntry); |
1742 // Pop code object result and restore arguments descriptor. | 1820 // Pop code object result and restore arguments descriptor. |
1743 __ lw(T0, Address(SP, 0 * kWordSize)); | 1821 __ lw(T0, Address(SP, 0 * kWordSize)); |
1744 __ lw(S4, Address(SP, 1 * kWordSize)); | 1822 __ lw(S4, Address(SP, 1 * kWordSize)); |
1745 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 1823 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
1746 __ LeaveStubFrame(); | 1824 __ LeaveStubFrame(); |
1747 | 1825 |
1748 // Now call the static function. The breakpoint handler function | 1826 // Now call the static function. The breakpoint handler function |
1749 // ensures that the call target is compiled. | 1827 // ensures that the call target is compiled. |
1750 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); | 1828 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); |
(...skipping 79 matching lines...)
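As an aside on the pattern used by GenerateBreakpointStaticStub above: the stub enters a stub frame, pushes a null-initialized result slot plus the preserved arguments descriptor, calls the runtime, and pops the compiled Code object back out before jumping to its entry point. Below is a minimal C++ sketch of that shape, not VM code; the names CallBreakpointHandler, BreakpointStaticHandler, and kNullObject are invented stand-ins for the real runtime entry and tagged-pointer types.

#include <cstdint>

typedef intptr_t RawObject;               // stand-in for a tagged VM pointer
static const RawObject kNullObject = 0;   // stand-in for Object::null()

// Stand-in for the kBreakpointStaticHandler runtime entry: it fills the
// result slot with the compiled Code object for the call target.
static void BreakpointStaticHandler(RawObject* slots) {
  slots[0] = 42;  // placeholder for a real Code object
}

// Mirrors the stub: reserve two slots, preserve the arguments descriptor,
// initialize the result slot to null, call the runtime, read both back.
static RawObject CallBreakpointHandler(RawObject* args_descriptor) {
  RawObject slots[2];
  slots[1] = *args_descriptor;     // sw S4, Address(SP, 1 * kWordSize)
  slots[0] = kNullObject;          // sw TMP (null), Address(SP, 0 * kWordSize)
  BreakpointStaticHandler(slots);
  RawObject code = slots[0];       // lw T0, Address(SP, 0 * kWordSize)
  *args_descriptor = slots[1];     // lw S4, Address(SP, 1 * kWordSize)
  return code;                     // stub then jumps to code's instructions
}

GenerateOptimizeFunctionStub further down follows the same shape, with one extra slot for the function being reoptimized.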
1830 } | 1908 } |
1831 __ LoadClassId(T0, A0); | 1909 __ LoadClassId(T0, A0); |
1832 // A0: instance. | 1910 // A0: instance. |
1833 // A1: instantiator type arguments or NULL. | 1911 // A1: instantiator type arguments or NULL. |
1834 // A2: SubtypeTestCache. | 1912 // A2: SubtypeTestCache. |
1835 // T0: instance class id. | 1913 // T0: instance class id. |
1836 // T1: instance type arguments (null if none), used only if n > 1. | 1914 // T1: instance type arguments (null if none), used only if n > 1. |
1837 __ lw(T2, FieldAddress(A2, SubtypeTestCache::cache_offset())); | 1915 __ lw(T2, FieldAddress(A2, SubtypeTestCache::cache_offset())); |
1838 __ AddImmediate(T2, Array::data_offset() - kHeapObjectTag); | 1916 __ AddImmediate(T2, Array::data_offset() - kHeapObjectTag); |
1839 | 1917 |
| 1918 __ LoadImmediate(T7, reinterpret_cast<intptr_t>(Object::null())); |
| 1919 |
1840 Label loop, found, not_found, next_iteration; | 1920 Label loop, found, not_found, next_iteration; |
1841 // T0: instance class id. | 1921 // T0: instance class id. |
1842 // T1: instance type arguments. | 1922 // T1: instance type arguments. |
1843 // T2: Entry start. | 1923 // T2: Entry start. |
| 1924 // T7: null. |
1844 __ SmiTag(T0); | 1925 __ SmiTag(T0); |
1845 __ Bind(&loop); | 1926 __ Bind(&loop); |
1846 __ lw(T3, Address(T2, kWordSize * SubtypeTestCache::kInstanceClassId)); | 1927 __ lw(T3, Address(T2, kWordSize * SubtypeTestCache::kInstanceClassId)); |
1847 __ beq(T3, NULLREG, ¬_found); | 1928 __ beq(T3, T7, ¬_found); |
1848 | 1929 |
1849 if (n == 1) { | 1930 if (n == 1) { |
1850 __ beq(T3, T0, &found); | 1931 __ beq(T3, T0, &found); |
1851 } else { | 1932 } else { |
1852 __ bne(T3, T0, &next_iteration); | 1933 __ bne(T3, T0, &next_iteration); |
1853 __ lw(T3, | 1934 __ lw(T3, |
1854 Address(T2, kWordSize * SubtypeTestCache::kInstanceTypeArguments)); | 1935 Address(T2, kWordSize * SubtypeTestCache::kInstanceTypeArguments)); |
1855 if (n == 2) { | 1936 if (n == 2) { |
1856 __ beq(T3, T1, &found); | 1937 __ beq(T3, T1, &found); |
1857 } else { | 1938 } else { |
1858 __ bne(T3, T1, &next_iteration); | 1939 __ bne(T3, T1, &next_iteration); |
1859 __ lw(T3, Address(T2, kWordSize * | 1940 __ lw(T3, Address(T2, kWordSize * |
1860 SubtypeTestCache::kInstantiatorTypeArguments)); | 1941 SubtypeTestCache::kInstantiatorTypeArguments)); |
1861 __ beq(T3, A1, &found); | 1942 __ beq(T3, A1, &found); |
1862 } | 1943 } |
1863 } | 1944 } |
1864 __ Bind(&next_iteration); | 1945 __ Bind(&next_iteration); |
1865 __ b(&loop); | 1946 __ b(&loop); |
1866 __ delay_slot()->addiu(T2, T2, | 1947 __ delay_slot()->addiu(T2, T2, |
1867 Immediate(kWordSize * SubtypeTestCache::kTestEntryLength)); | 1948 Immediate(kWordSize * SubtypeTestCache::kTestEntryLength)); |
1868 // Fall through to not found. | 1949 // Fall through to not found. |
1869 __ Bind(¬_found); | 1950 __ Bind(¬_found); |
1870 __ Ret(); | 1951 __ Ret(); |
1871 __ delay_slot()->mov(V0, NULLREG); | 1952 __ delay_slot()->mov(V0, T7); |
1872 | 1953 |
1873 __ Bind(&found); | 1954 __ Bind(&found); |
1874 __ Ret(); | 1955 __ Ret(); |
1875 __ delay_slot()->lw(V0, | 1956 __ delay_slot()->lw(V0, |
1876 Address(T2, kWordSize * SubtypeTestCache::kTestResult)); | 1957 Address(T2, kWordSize * SubtypeTestCache::kTestResult)); |
1877 } | 1958 } |
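For readability, here is a rough C++ sketch (not VM code) of the scan GenerateSubtypeNTestCacheStub performs above: walk the entry array until a null class id, compare one, two, or three words per entry depending on n, and return the cached result or null. The CacheEntry struct, the 0 sentinel, and the function name are simplifications for illustration; the real cache is a Dart Array scanned in kTestEntryLength strides with a Smi-tagged class id.

#include <cstdint>

struct CacheEntry {
  intptr_t instance_cid;             // 0 stands in for the null terminator
  intptr_t instance_type_args;
  intptr_t instantiator_type_args;
  intptr_t result;                   // value the stub returns in V0
};

// n is 1, 2 or 3: how many words of each entry participate in the match.
// Returns the cached result, or 0 ("null") when no entry matches.
intptr_t SubtypeTestCacheLookup(const CacheEntry* entries, int n,
                                intptr_t cid, intptr_t type_args,
                                intptr_t instantiator_args) {
  for (const CacheEntry* e = entries; e->instance_cid != 0; ++e) {
    if (e->instance_cid != cid) continue;                        // compare T0
    if (n > 1 && e->instance_type_args != type_args) continue;   // compare T1
    if (n > 2 && e->instantiator_type_args != instantiator_args) continue;  // compare A1
    return e->result;                                            // &found
  }
  return 0;  // &not_found: stub returns null
}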
1878 | 1959 |
1879 | 1960 |
1880 // Used to check class and type arguments. Arguments passed in registers: | 1961 // Used to check class and type arguments. Arguments passed in registers: |
1881 // RA: return address. | 1962 // RA: return address. |
(...skipping 58 matching lines...)
1940 // Implements equality operator when one of the arguments is null | 2021 // Implements equality operator when one of the arguments is null |
1941 // (identity check) and updates ICData if necessary. | 2022 // (identity check) and updates ICData if necessary. |
1942 // RA: return address. | 2023 // RA: return address. |
1943 // A1: left argument. | 2024 // A1: left argument. |
1944 // A0: right argument. | 2025 // A0: right argument. |
1945 // T0: ICData. | 2026 // T0: ICData. |
1946 // V0: result. | 2027 // V0: result. |
1947 // TODO(srdjan): Move to VM stubs once Boolean objects become VM objects. | 2028 // TODO(srdjan): Move to VM stubs once Boolean objects become VM objects. |
1948 void StubCode::GenerateEqualityWithNullArgStub(Assembler* assembler) { | 2029 void StubCode::GenerateEqualityWithNullArgStub(Assembler* assembler) { |
1949 __ TraceSimMsg("EqualityWithNullArgStub"); | 2030 __ TraceSimMsg("EqualityWithNullArgStub"); |
| 2031 __ Comment("EqualityWithNullArgStub"); |
1950 __ EnterStubFrame(); | 2032 __ EnterStubFrame(); |
1951 static const intptr_t kNumArgsTested = 2; | 2033 static const intptr_t kNumArgsTested = 2; |
1952 #if defined(DEBUG) | 2034 #if defined(DEBUG) |
1953 { Label ok; | 2035 { Label ok; |
1954 __ lw(TMP1, FieldAddress(T0, ICData::num_args_tested_offset())); | 2036 __ lw(TMP1, FieldAddress(T0, ICData::num_args_tested_offset())); |
1955 __ BranchEqual(TMP1, kNumArgsTested, &ok); | 2037 __ BranchEqual(TMP1, kNumArgsTested, &ok); |
1956 __ Stop("Incorrect ICData for equality"); | 2038 __ Stop("Incorrect ICData for equality"); |
1957 __ Bind(&ok); | 2039 __ Bind(&ok); |
1958 } | 2040 } |
1959 #endif // DEBUG | 2041 #endif // DEBUG |
(...skipping 81 matching lines...)
2041 | 2123 |
2042 // Calls to the runtime to optimize the given function. | 2124 // Calls to the runtime to optimize the given function. |
2043 // T0: function to be reoptimized. | 2125 // T0: function to be reoptimized. |
2044 // S4: argument descriptor (preserved). | 2126 // S4: argument descriptor (preserved). |
2045 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 2127 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
2046 __ TraceSimMsg("OptimizeFunctionStub"); | 2128 __ TraceSimMsg("OptimizeFunctionStub"); |
2047 __ EnterStubFrame(); | 2129 __ EnterStubFrame(); |
2048 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 2130 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
2049 __ sw(S4, Address(SP, 2 * kWordSize)); | 2131 __ sw(S4, Address(SP, 2 * kWordSize)); |
2050 // Setup space on stack for return value. | 2132 // Setup space on stack for return value. |
2051 __ sw(NULLREG, Address(SP, 1 * kWordSize)); | 2133 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 2134 __ sw(TMP, Address(SP, 1 * kWordSize)); |
2052 __ sw(T0, Address(SP, 0 * kWordSize)); | 2135 __ sw(T0, Address(SP, 0 * kWordSize)); |
2053 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry); | 2136 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry); |
2054 __ TraceSimMsg("OptimizeFunctionStub return"); | 2137 __ TraceSimMsg("OptimizeFunctionStub return"); |
2055 __ lw(T0, Address(SP, 1 * kWordSize)); // Get Code object | 2138 __ lw(T0, Address(SP, 1 * kWordSize)); // Get Code object |
2056 __ lw(S4, Address(SP, 2 * kWordSize)); // Restore argument descriptor. | 2139 __ lw(S4, Address(SP, 2 * kWordSize)); // Restore argument descriptor. |
2057 __ addiu(SP, SP, Immediate(3 * kWordSize)); // Discard argument. | 2140 __ addiu(SP, SP, Immediate(3 * kWordSize)); // Discard argument. |
2058 | 2141 |
2059 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); | 2142 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); |
2060 __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag); | 2143 __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag); |
2061 __ LeaveStubFrameAndReturn(T0); | 2144 __ LeaveStubFrameAndReturn(T0); |
(...skipping 10 matching lines...)
2072 // Does identical check (object references are equal or not equal) with special | 2155 // Does identical check (object references are equal or not equal) with special |
2073 // checks for boxed numbers. | 2156 // checks for boxed numbers. |
2074 // RA: return address. | 2157 // RA: return address. |
2075 // SP + 4: left operand. | 2158 // SP + 4: left operand. |
2076 // SP + 0: right operand. | 2159 // SP + 0: right operand. |
2077 // Return: CMPRES is zero if equal, non-zero otherwise. | 2160 // Return: CMPRES is zero if equal, non-zero otherwise. |
2078 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint | 2161 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint |
2079 // cannot contain a value that fits in Mint or Smi. | 2162 // cannot contain a value that fits in Mint or Smi. |
2080 void StubCode::GenerateIdenticalWithNumberCheckStub(Assembler* assembler) { | 2163 void StubCode::GenerateIdenticalWithNumberCheckStub(Assembler* assembler) { |
2081 __ TraceSimMsg("IdenticalWithNumberCheckStub"); | 2164 __ TraceSimMsg("IdenticalWithNumberCheckStub"); |
2082 const Register ret = CMPRES; | 2165 __ Comment("IdenticalWithNumberCheckStub"); |
2083 const Register temp1 = T2; | 2166 const Register temp1 = T2; |
2084 const Register temp2 = T3; | 2167 const Register temp2 = T3; |
2085 const Register left = T1; | 2168 const Register left = T1; |
2086 const Register right = T0; | 2169 const Register right = T0; |
2087 // Preserve left, right. | 2170 // Preserve left, right. |
2088 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 2171 __ addiu(SP, SP, Immediate(-4 * kWordSize)); |
2089 __ sw(T1, Address(SP, 1 * kWordSize)); | 2172 __ sw(temp1, Address(SP, 3 * kWordSize)); |
2090 __ sw(T0, Address(SP, 0 * kWordSize)); | 2173 __ sw(temp2, Address(SP, 2 * kWordSize)); |
| 2174 __ sw(left, Address(SP, 1 * kWordSize)); |
| 2175 __ sw(right, Address(SP, 0 * kWordSize)); |
2091 // TOS + 3: left argument. | 2176 // TOS + 3: left argument. |
2092 // TOS + 2: right argument. | 2177 // TOS + 2: right argument. |
2093 // TOS + 1: saved left | 2178 // TOS + 1: saved left |
2094 // TOS + 0: saved right | 2179 // TOS + 0: saved right |
2095 __ lw(left, Address(SP, 3 * kWordSize)); | 2180 __ lw(left, Address(SP, 5 * kWordSize)); |
2096 __ lw(right, Address(SP, 2 * kWordSize)); | 2181 __ lw(right, Address(SP, 4 * kWordSize)); |
2097 Label reference_compare, done, check_mint, check_bigint; | 2182 Label reference_compare, done, check_mint, check_bigint; |
2098 // If any of the arguments is Smi do reference compare. | 2183 // If any of the arguments is Smi do reference compare. |
2099 __ andi(temp1, left, Immediate(kSmiTagMask)); | 2184 __ andi(temp1, left, Immediate(kSmiTagMask)); |
2100 __ beq(temp1, ZR, &reference_compare); | 2185 __ beq(temp1, ZR, &reference_compare); |
2101 __ andi(temp1, right, Immediate(kSmiTagMask)); | 2186 __ andi(temp1, right, Immediate(kSmiTagMask)); |
2102 __ beq(temp1, ZR, &reference_compare); | 2187 __ beq(temp1, ZR, &reference_compare); |
2103 | 2188 |
2104 // Value compare for two doubles. | 2189 // Value compare for two doubles. |
2105 __ LoadImmediate(temp1, kDoubleCid); | 2190 __ LoadImmediate(temp1, kDoubleCid); |
2106 __ LoadClassId(temp2, left); | 2191 __ LoadClassId(temp2, left); |
2107 __ bne(temp1, temp2, &check_mint); | 2192 __ bne(temp1, temp2, &check_mint); |
2108 __ LoadClassId(temp2, right); | 2193 __ LoadClassId(temp2, right); |
2109 __ subu(ret, temp1, temp2); | 2194 __ subu(CMPRES, temp1, temp2); |
2110 __ bne(ret, ZR, &done); | 2195 __ bne(CMPRES, ZR, &done); |
2111 | 2196 |
2112 // Double values bitwise compare. | 2197 // Double values bitwise compare. |
2113 __ lw(temp1, FieldAddress(left, Double::value_offset() + 0 * kWordSize)); | 2198 __ lw(temp1, FieldAddress(left, Double::value_offset() + 0 * kWordSize)); |
2114 __ lw(temp1, FieldAddress(right, Double::value_offset() + 0 * kWordSize)); | 2199 __ lw(temp2, FieldAddress(right, Double::value_offset() + 0 * kWordSize)); |
2115 __ subu(ret, temp1, temp2); | 2200 __ subu(CMPRES, temp1, temp2); |
2116 __ bne(ret, ZR, &done); | 2201 __ bne(CMPRES, ZR, &done); |
2117 __ lw(temp1, FieldAddress(left, Double::value_offset() + 1 * kWordSize)); | 2202 __ lw(temp1, FieldAddress(left, Double::value_offset() + 1 * kWordSize)); |
2118 __ lw(temp2, FieldAddress(right, Double::value_offset() + 1 * kWordSize)); | 2203 __ lw(temp2, FieldAddress(right, Double::value_offset() + 1 * kWordSize)); |
2119 __ b(&done); | 2204 __ b(&done); |
2120 __ delay_slot()->subu(ret, temp1, temp2); | 2205 __ delay_slot()->subu(CMPRES, temp1, temp2); |
2121 | 2206 |
2122 __ Bind(&check_mint); | 2207 __ Bind(&check_mint); |
2123 __ LoadImmediate(temp1, kMintCid); | 2208 __ LoadImmediate(temp1, kMintCid); |
2124 __ LoadClassId(temp2, left); | 2209 __ LoadClassId(temp2, left); |
2125 __ bne(temp1, temp2, &check_bigint); | 2210 __ bne(temp1, temp2, &check_bigint); |
2126 __ LoadClassId(temp2, right); | 2211 __ LoadClassId(temp2, right); |
2127 __ subu(ret, temp1, temp2); | 2212 __ subu(CMPRES, temp1, temp2); |
2128 __ bne(ret, ZR, &done); | 2213 __ bne(CMPRES, ZR, &done); |
2129 | 2214 |
2130 __ lw(temp1, FieldAddress(left, Mint::value_offset() + 0 * kWordSize)); | 2215 __ lw(temp1, FieldAddress(left, Mint::value_offset() + 0 * kWordSize)); |
2131 __ lw(temp2, FieldAddress(right, Mint::value_offset() + 0 * kWordSize)); | 2216 __ lw(temp2, FieldAddress(right, Mint::value_offset() + 0 * kWordSize)); |
2132 __ subu(ret, temp1, temp2); | 2217 __ subu(CMPRES, temp1, temp2); |
2133 __ bne(ret, ZR, &done); | 2218 __ bne(CMPRES, ZR, &done); |
2134 __ lw(temp1, FieldAddress(left, Mint::value_offset() + 1 * kWordSize)); | 2219 __ lw(temp1, FieldAddress(left, Mint::value_offset() + 1 * kWordSize)); |
2135 __ lw(temp2, FieldAddress(right, Mint::value_offset() + 1 * kWordSize)); | 2220 __ lw(temp2, FieldAddress(right, Mint::value_offset() + 1 * kWordSize)); |
2136 __ b(&done); | 2221 __ b(&done); |
2137 __ delay_slot()->subu(ret, temp1, temp2); | 2222 __ delay_slot()->subu(CMPRES, temp1, temp2); |
2138 | 2223 |
2139 __ Bind(&check_bigint); | 2224 __ Bind(&check_bigint); |
2140 __ LoadImmediate(temp1, kBigintCid); | 2225 __ LoadImmediate(temp1, kBigintCid); |
2141 __ LoadClassId(temp2, left); | 2226 __ LoadClassId(temp2, left); |
2142 __ bne(temp1, temp2, &reference_compare); | 2227 __ bne(temp1, temp2, &reference_compare); |
2143 __ LoadClassId(temp2, right); | 2228 __ LoadClassId(temp2, right); |
2144 __ subu(ret, temp1, temp2); | 2229 __ subu(CMPRES, temp1, temp2); |
2145 __ bne(ret, ZR, &done); | 2230 __ bne(CMPRES, ZR, &done); |
2146 | 2231 |
2147 __ EnterStubFrame(0); | 2232 __ EnterStubFrame(); |
2148 __ ReserveAlignedFrameSpace(2 * kWordSize); | 2233 __ ReserveAlignedFrameSpace(2 * kWordSize); |
2149 __ sw(T1, Address(SP, 1 * kWordSize)); | 2234 __ sw(left, Address(SP, 1 * kWordSize)); |
2150 __ sw(T0, Address(SP, 0 * kWordSize)); | 2235 __ sw(right, Address(SP, 0 * kWordSize)); |
| 2236 __ mov(A0, left); |
| 2237 __ mov(A1, right); |
2151 __ CallRuntime(kBigintCompareRuntimeEntry); | 2238 __ CallRuntime(kBigintCompareRuntimeEntry); |
2152 __ TraceSimMsg("IdenticalWithNumberCheckStub return"); | 2239 __ TraceSimMsg("IdenticalWithNumberCheckStub return"); |
2153 // Result in V0, 0 means equal. | 2240 // Result in V0, 0 means equal. |
2154 __ LeaveStubFrame(); | 2241 __ LeaveStubFrame(); |
2155 __ b(&done); | 2242 __ b(&done); |
2156 __ delay_slot()->mov(CMPRES, V0); | 2243 __ delay_slot()->mov(CMPRES, V0); |
2157 | 2244 |
2158 __ Bind(&reference_compare); | 2245 __ Bind(&reference_compare); |
2159 __ subu(ret, left, right); | 2246 __ subu(CMPRES, left, right); |
2160 __ Bind(&done); | 2247 __ Bind(&done); |
2161 // A branch or test after this comparison will check CMPRES == TMP1. | 2248 // A branch or test after this comparison will check CMPRES == TMP1. |
2162 __ mov(TMP1, ZR); | 2249 __ mov(TMP1, ZR); |
2163 __ lw(T0, Address(SP, 0 * kWordSize)); | 2250 __ lw(right, Address(SP, 0 * kWordSize)); |
2164 __ lw(T1, Address(SP, 1 * kWordSize)); | 2251 __ lw(left, Address(SP, 1 * kWordSize)); |
| 2252 __ lw(temp2, Address(SP, 2 * kWordSize)); |
| 2253 __ lw(temp1, Address(SP, 3 * kWordSize)); |
2165 __ Ret(); | 2254 __ Ret(); |
2166 __ delay_slot()->addiu(SP, SP, Immediate(2 * kWordSize)); | 2255 __ delay_slot()->addiu(SP, SP, Immediate(4 * kWordSize)); |
2167 } | 2256 } |
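To summarize what GenerateIdenticalWithNumberCheckStub computes above, here is an illustrative C++ sketch, not VM code: Smis fall back to reference identity, Doubles and Mints compare their 64-bit payloads bitwise only when both sides share the class id, Bigints go through the BigintCompare runtime entry, and everything else is a plain reference compare. The Obj struct, the class-id enum, and the placeholder BigintCompare body are inventions for this sketch.

#include <cstdint>

// Simplified object model for illustration only.
enum ClassId { kSmiCid, kDoubleCid, kMintCid, kBigintCid, kOtherCid };

struct Obj {
  ClassId cid;
  uint64_t bits;     // Double/Mint payload in this sketch
  const void* self;  // object identity
};

// Stand-in for the kBigintCompare runtime entry; the real logic is in the VM.
static intptr_t BigintCompare(const Obj& a, const Obj& b) {
  return (a.bits == b.bits) ? 0 : 1;  // placeholder
}

// Returns 0 when "identical", non-zero otherwise (mirrors CMPRES).
intptr_t IdenticalWithNumberCheck(const Obj& left, const Obj& right) {
  if (left.cid == kSmiCid || right.cid == kSmiCid)
    return (left.self == right.self) ? 0 : 1;       // reference compare
  if (left.cid == kDoubleCid || left.cid == kMintCid) {
    if (right.cid != left.cid) return 1;            // class ids differ
    return (left.bits == right.bits) ? 0 : 1;       // bitwise value compare
  }
  if (left.cid == kBigintCid) {
    if (right.cid != kBigintCid) return 1;          // class ids differ
    return BigintCompare(left, right);              // runtime call
  }
  return (left.self == right.self) ? 0 : 1;         // reference compare
}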
2168 | 2257 |
2169 } // namespace dart | 2258 } // namespace dart |
2170 | 2259 |
2171 #endif // defined TARGET_ARCH_MIPS | 2260 #endif // defined TARGET_ARCH_MIPS |