| OLD | NEW |
| 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
| 6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
| 7 | 7 |
| 8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
| 9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
| 10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
| (...skipping 105 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 116 __ LoadImmediate(R2, VMTag::kDartTagId); | 116 __ LoadImmediate(R2, VMTag::kDartTagId); |
| 117 __ StoreToOffset(R2, THR, Thread::vm_tag_offset()); | 117 __ StoreToOffset(R2, THR, Thread::vm_tag_offset()); |
| 118 | 118 |
| 119 // Reset exit frame information in Isolate structure. | 119 // Reset exit frame information in Isolate structure. |
| 120 __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset()); | 120 __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset()); |
| 121 | 121 |
| 122 __ LeaveStubFrame(); | 122 __ LeaveStubFrame(); |
| 123 __ ret(); | 123 __ ret(); |
| 124 } | 124 } |
| 125 | 125 |
| 126 | |
| 127 // Print the stop message. | 126 // Print the stop message. |
| 128 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) { | 127 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) { |
| 129 OS::Print("Stop message: %s\n", message); | 128 OS::Print("Stop message: %s\n", message); |
| 130 } | 129 } |
| 131 END_LEAF_RUNTIME_ENTRY | 130 END_LEAF_RUNTIME_ENTRY |
| 132 | 131 |
| 133 | |
| 134 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) { | 132 void StubCode::GeneratePrintStopMessageStub(Assembler* assembler) { |
| 135 __ Stop("GeneratePrintStopMessageStub"); | 133 __ Stop("GeneratePrintStopMessageStub"); |
| 136 } | 134 } |
| 137 | 135 |
| 138 | |
| 139 // Input parameters: | 136 // Input parameters: |
| 140 // LR : return address. | 137 // LR : return address. |
| 141 // SP : address of return value. | 138 // SP : address of return value. |
| 142 // R5 : address of the native function to call. | 139 // R5 : address of the native function to call. |
| 143 // R2 : address of first argument in argument array. | 140 // R2 : address of first argument in argument array. |
| 144 // R1 : argc_tag including number of arguments and function kind. | 141 // R1 : argc_tag including number of arguments and function kind. |
| 145 static void GenerateCallNativeWithWrapperStub(Assembler* assembler, | 142 static void GenerateCallNativeWithWrapperStub(Assembler* assembler, |
| 146 Address wrapper) { | 143 Address wrapper) { |
| 147 const intptr_t thread_offset = NativeArguments::thread_offset(); | 144 const intptr_t thread_offset = NativeArguments::thread_offset(); |
| 148 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); | 145 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); |
| (...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 223 __ LoadImmediate(R2, VMTag::kDartTagId); | 220 __ LoadImmediate(R2, VMTag::kDartTagId); |
| 224 __ StoreToOffset(R2, THR, Thread::vm_tag_offset()); | 221 __ StoreToOffset(R2, THR, Thread::vm_tag_offset()); |
| 225 | 222 |
| 226 // Reset exit frame information in Isolate structure. | 223 // Reset exit frame information in Isolate structure. |
| 227 __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset()); | 224 __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset()); |
| 228 | 225 |
| 229 __ LeaveStubFrame(); | 226 __ LeaveStubFrame(); |
| 230 __ ret(); | 227 __ ret(); |
| 231 } | 228 } |
| 232 | 229 |
| 233 | |
| 234 void StubCode::GenerateCallNoScopeNativeStub(Assembler* assembler) { | 230 void StubCode::GenerateCallNoScopeNativeStub(Assembler* assembler) { |
| 235 GenerateCallNativeWithWrapperStub( | 231 GenerateCallNativeWithWrapperStub( |
| 236 assembler, | 232 assembler, |
| 237 Address(THR, Thread::no_scope_native_wrapper_entry_point_offset())); | 233 Address(THR, Thread::no_scope_native_wrapper_entry_point_offset())); |
| 238 } | 234 } |
| 239 | 235 |
| 240 | |
| 241 void StubCode::GenerateCallAutoScopeNativeStub(Assembler* assembler) { | 236 void StubCode::GenerateCallAutoScopeNativeStub(Assembler* assembler) { |
| 242 GenerateCallNativeWithWrapperStub( | 237 GenerateCallNativeWithWrapperStub( |
| 243 assembler, | 238 assembler, |
| 244 Address(THR, Thread::auto_scope_native_wrapper_entry_point_offset())); | 239 Address(THR, Thread::auto_scope_native_wrapper_entry_point_offset())); |
| 245 } | 240 } |
| 246 | 241 |
| 247 | |
| 248 // Input parameters: | 242 // Input parameters: |
| 249 // LR : return address. | 243 // LR : return address. |
| 250 // SP : address of return value. | 244 // SP : address of return value. |
| 251 // R5 : address of the native function to call. | 245 // R5 : address of the native function to call. |
| 252 // R2 : address of first argument in argument array. | 246 // R2 : address of first argument in argument array. |
| 253 // R1 : argc_tag including number of arguments and function kind. | 247 // R1 : argc_tag including number of arguments and function kind. |
| 254 void StubCode::GenerateCallBootstrapNativeStub(Assembler* assembler) { | 248 void StubCode::GenerateCallBootstrapNativeStub(Assembler* assembler) { |
| 255 const intptr_t thread_offset = NativeArguments::thread_offset(); | 249 const intptr_t thread_offset = NativeArguments::thread_offset(); |
| 256 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); | 250 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); |
| 257 const intptr_t argv_offset = NativeArguments::argv_offset(); | 251 const intptr_t argv_offset = NativeArguments::argv_offset(); |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 328 __ LoadImmediate(R2, VMTag::kDartTagId); | 322 __ LoadImmediate(R2, VMTag::kDartTagId); |
| 329 __ StoreToOffset(R2, THR, Thread::vm_tag_offset()); | 323 __ StoreToOffset(R2, THR, Thread::vm_tag_offset()); |
| 330 | 324 |
| 331 // Reset exit frame information in Isolate structure. | 325 // Reset exit frame information in Isolate structure. |
| 332 __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset()); | 326 __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset()); |
| 333 | 327 |
| 334 __ LeaveStubFrame(); | 328 __ LeaveStubFrame(); |
| 335 __ ret(); | 329 __ ret(); |
| 336 } | 330 } |
| 337 | 331 |
| 338 | |
| 339 // Input parameters: | 332 // Input parameters: |
| 340 // R4: arguments descriptor array. | 333 // R4: arguments descriptor array. |
| 341 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { | 334 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { |
| 342 // Create a stub frame as we are pushing some objects on the stack before | 335 // Create a stub frame as we are pushing some objects on the stack before |
| 343 // calling into the runtime. | 336 // calling into the runtime. |
| 344 __ EnterStubFrame(); | 337 __ EnterStubFrame(); |
| 345 // Setup space on stack for return value and preserve arguments descriptor. | 338 // Setup space on stack for return value and preserve arguments descriptor. |
| 346 __ Push(R4); | 339 __ Push(R4); |
| 347 __ Push(ZR); | 340 __ Push(ZR); |
| 348 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); | 341 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); |
| 349 // Get Code object result and restore arguments descriptor array. | 342 // Get Code object result and restore arguments descriptor array. |
| 350 __ Pop(CODE_REG); | 343 __ Pop(CODE_REG); |
| 351 __ Pop(R4); | 344 __ Pop(R4); |
| 352 // Remove the stub frame. | 345 // Remove the stub frame. |
| 353 __ LeaveStubFrame(); | 346 __ LeaveStubFrame(); |
| 354 // Jump to the dart function. | 347 // Jump to the dart function. |
| 355 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); | 348 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
| 356 __ br(R0); | 349 __ br(R0); |
| 357 } | 350 } |
| 358 | 351 |
| 359 | |
| 360 // Called from a static call only when an invalid code has been entered | 352 // Called from a static call only when an invalid code has been entered |
| 361 // (invalid because its function was optimized or deoptimized). | 353 // (invalid because its function was optimized or deoptimized). |
| 362 // R4: arguments descriptor array. | 354 // R4: arguments descriptor array. |
| 363 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 355 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { |
| 364 // Load code pointer to this stub from the thread: | 356 // Load code pointer to this stub from the thread: |
| 365 // The one that is passed in, is not correct - it points to the code object | 357 // The one that is passed in, is not correct - it points to the code object |
| 366 // that needs to be replaced. | 358 // that needs to be replaced. |
| 367 __ ldr(CODE_REG, Address(THR, Thread::fix_callers_target_code_offset())); | 359 __ ldr(CODE_REG, Address(THR, Thread::fix_callers_target_code_offset())); |
| 368 // Create a stub frame as we are pushing some objects on the stack before | 360 // Create a stub frame as we are pushing some objects on the stack before |
| 369 // calling into the runtime. | 361 // calling into the runtime. |
| 370 __ EnterStubFrame(); | 362 __ EnterStubFrame(); |
| 371 // Setup space on stack for return value and preserve arguments descriptor. | 363 // Setup space on stack for return value and preserve arguments descriptor. |
| 372 __ Push(R4); | 364 __ Push(R4); |
| 373 __ Push(ZR); | 365 __ Push(ZR); |
| 374 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); | 366 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); |
| 375 // Get Code object result and restore arguments descriptor array. | 367 // Get Code object result and restore arguments descriptor array. |
| 376 __ Pop(CODE_REG); | 368 __ Pop(CODE_REG); |
| 377 __ Pop(R4); | 369 __ Pop(R4); |
| 378 // Remove the stub frame. | 370 // Remove the stub frame. |
| 379 __ LeaveStubFrame(); | 371 __ LeaveStubFrame(); |
| 380 // Jump to the dart function. | 372 // Jump to the dart function. |
| 381 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); | 373 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
| 382 __ br(R0); | 374 __ br(R0); |
| 383 } | 375 } |
| 384 | 376 |
| 385 | |
| 386 // Called from object allocate instruction when the allocation stub has been | 377 // Called from object allocate instruction when the allocation stub has been |
| 387 // disabled. | 378 // disabled. |
| 388 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { | 379 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { |
| 389 // Load code pointer to this stub from the thread: | 380 // Load code pointer to this stub from the thread: |
| 390 // The one that is passed in, is not correct - it points to the code object | 381 // The one that is passed in, is not correct - it points to the code object |
| 391 // that needs to be replaced. | 382 // that needs to be replaced. |
| 392 __ ldr(CODE_REG, Address(THR, Thread::fix_allocation_stub_code_offset())); | 383 __ ldr(CODE_REG, Address(THR, Thread::fix_allocation_stub_code_offset())); |
| 393 __ EnterStubFrame(); | 384 __ EnterStubFrame(); |
| 394 // Setup space on stack for return value. | 385 // Setup space on stack for return value. |
| 395 __ Push(ZR); | 386 __ Push(ZR); |
| 396 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); | 387 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); |
| 397 // Get Code object result. | 388 // Get Code object result. |
| 398 __ Pop(CODE_REG); | 389 __ Pop(CODE_REG); |
| 399 // Remove the stub frame. | 390 // Remove the stub frame. |
| 400 __ LeaveStubFrame(); | 391 __ LeaveStubFrame(); |
| 401 // Jump to the dart function. | 392 // Jump to the dart function. |
| 402 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); | 393 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
| 403 __ br(R0); | 394 __ br(R0); |
| 404 } | 395 } |
| 405 | 396 |
| 406 | |
| 407 // Input parameters: | 397 // Input parameters: |
| 408 // R2: smi-tagged argument count, may be zero. | 398 // R2: smi-tagged argument count, may be zero. |
| 409 // FP[kParamEndSlotFromFp + 1]: last argument. | 399 // FP[kParamEndSlotFromFp + 1]: last argument. |
| 410 static void PushArgumentsArray(Assembler* assembler) { | 400 static void PushArgumentsArray(Assembler* assembler) { |
| 411 // Allocate array to store arguments of caller. | 401 // Allocate array to store arguments of caller. |
| 412 __ LoadObject(R1, Object::null_object()); | 402 __ LoadObject(R1, Object::null_object()); |
| 413 // R1: null element type for raw Array. | 403 // R1: null element type for raw Array. |
| 414 // R2: smi-tagged argument count, may be zero. | 404 // R2: smi-tagged argument count, may be zero. |
| 415 __ BranchLink(*StubCode::AllocateArray_entry()); | 405 __ BranchLink(*StubCode::AllocateArray_entry()); |
| 416 // R0: newly allocated array. | 406 // R0: newly allocated array. |
| (...skipping 11 matching lines...) Expand all Loading... |
| 428 __ Bind(&loop); | 418 __ Bind(&loop); |
| 429 __ ldr(R7, Address(R1)); | 419 __ ldr(R7, Address(R1)); |
| 430 __ AddImmediate(R1, -kWordSize); | 420 __ AddImmediate(R1, -kWordSize); |
| 431 __ AddImmediate(R3, kWordSize); | 421 __ AddImmediate(R3, kWordSize); |
| 432 __ AddImmediateSetFlags(R2, R2, -Smi::RawValue(1)); | 422 __ AddImmediateSetFlags(R2, R2, -Smi::RawValue(1)); |
| 433 __ str(R7, Address(R3, -kWordSize)); | 423 __ str(R7, Address(R3, -kWordSize)); |
| 434 __ b(&loop, GE); | 424 __ b(&loop, GE); |
| 435 __ Bind(&loop_exit); | 425 __ Bind(&loop_exit); |
| 436 } | 426 } |
| 437 | 427 |
| 438 | |
| 439 // Used by eager and lazy deoptimization. Preserve result in RAX if necessary. | 428 // Used by eager and lazy deoptimization. Preserve result in RAX if necessary. |
| 440 // This stub translates optimized frame into unoptimized frame. The optimized | 429 // This stub translates optimized frame into unoptimized frame. The optimized |
| 441 // frame can contain values in registers and on stack, the unoptimized | 430 // frame can contain values in registers and on stack, the unoptimized |
| 442 // frame contains all values on stack. | 431 // frame contains all values on stack. |
| 443 // Deoptimization occurs in following steps: | 432 // Deoptimization occurs in following steps: |
| 444 // - Push all registers that can contain values. | 433 // - Push all registers that can contain values. |
| 445 // - Call C routine to copy the stack and saved registers into temporary buffer. | 434 // - Call C routine to copy the stack and saved registers into temporary buffer. |
| 446 // - Adjust caller's frame to correct unoptimized frame size. | 435 // - Adjust caller's frame to correct unoptimized frame size. |
| 447 // - Fill the unoptimized frame. | 436 // - Fill the unoptimized frame. |
| 448 // - Materialize objects that require allocation (e.g. Double instances). | 437 // - Materialize objects that require allocation (e.g. Double instances). |
| (...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 569 } else if (kind == kLazyDeoptFromThrow) { | 558 } else if (kind == kLazyDeoptFromThrow) { |
| 570 __ Pop(R1); // Restore stacktrace. | 559 __ Pop(R1); // Restore stacktrace. |
| 571 __ Pop(R0); // Restore exception. | 560 __ Pop(R0); // Restore exception. |
| 572 } | 561 } |
| 573 __ LeaveStubFrame(); | 562 __ LeaveStubFrame(); |
| 574 // Remove materialization arguments. | 563 // Remove materialization arguments. |
| 575 __ add(SP, SP, Operand(R2)); | 564 __ add(SP, SP, Operand(R2)); |
| 576 // The caller is responsible for emitting the return instruction. | 565 // The caller is responsible for emitting the return instruction. |
| 577 } | 566 } |
| 578 | 567 |
| 579 | |
| 580 // R0: result, must be preserved | 568 // R0: result, must be preserved |
| 581 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 569 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
| 582 // Push zap value instead of CODE_REG for lazy deopt. | 570 // Push zap value instead of CODE_REG for lazy deopt. |
| 583 __ LoadImmediate(TMP, kZapCodeReg); | 571 __ LoadImmediate(TMP, kZapCodeReg); |
| 584 __ Push(TMP); | 572 __ Push(TMP); |
| 585 // Return address for "call" to deopt stub. | 573 // Return address for "call" to deopt stub. |
| 586 __ LoadImmediate(LR, kZapReturnAddress); | 574 __ LoadImmediate(LR, kZapReturnAddress); |
| 587 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); | 575 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); |
| 588 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); | 576 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
| 589 __ ret(); | 577 __ ret(); |
| 590 } | 578 } |
| 591 | 579 |
| 592 | |
| 593 // R0: exception, must be preserved | 580 // R0: exception, must be preserved |
| 594 // R1: stacktrace, must be preserved | 581 // R1: stacktrace, must be preserved |
| 595 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { | 582 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
| 596 // Push zap value instead of CODE_REG for lazy deopt. | 583 // Push zap value instead of CODE_REG for lazy deopt. |
| 597 __ LoadImmediate(TMP, kZapCodeReg); | 584 __ LoadImmediate(TMP, kZapCodeReg); |
| 598 __ Push(TMP); | 585 __ Push(TMP); |
| 599 // Return address for "call" to deopt stub. | 586 // Return address for "call" to deopt stub. |
| 600 __ LoadImmediate(LR, kZapReturnAddress); | 587 __ LoadImmediate(LR, kZapReturnAddress); |
| 601 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); | 588 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); |
| 602 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); | 589 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); |
| 603 __ ret(); | 590 __ ret(); |
| 604 } | 591 } |
| 605 | 592 |
| 606 | |
| 607 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 593 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
| 608 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | 594 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
| 609 __ ret(); | 595 __ ret(); |
| 610 } | 596 } |
| 611 | 597 |
| 612 | |
| 613 static void GenerateDispatcherCode(Assembler* assembler, | 598 static void GenerateDispatcherCode(Assembler* assembler, |
| 614 Label* call_target_function) { | 599 Label* call_target_function) { |
| 615 __ Comment("NoSuchMethodDispatch"); | 600 __ Comment("NoSuchMethodDispatch"); |
| 616 // When lazily generated invocation dispatchers are disabled, the | 601 // When lazily generated invocation dispatchers are disabled, the |
| 617 // miss-handler may return null. | 602 // miss-handler may return null. |
| 618 __ CompareObject(R0, Object::null_object()); | 603 __ CompareObject(R0, Object::null_object()); |
| 619 __ b(call_target_function, NE); | 604 __ b(call_target_function, NE); |
| 620 __ EnterStubFrame(); | 605 __ EnterStubFrame(); |
| 621 | 606 |
| 622 // Load the receiver. | 607 // Load the receiver. |
| (...skipping 14 matching lines...) Expand all Loading... |
| 637 // R2: Smi-tagged arguments array length. | 622 // R2: Smi-tagged arguments array length. |
| 638 PushArgumentsArray(assembler); | 623 PushArgumentsArray(assembler); |
| 639 const intptr_t kNumArgs = 4; | 624 const intptr_t kNumArgs = 4; |
| 640 __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs); | 625 __ CallRuntime(kInvokeNoSuchMethodDispatcherRuntimeEntry, kNumArgs); |
| 641 __ Drop(4); | 626 __ Drop(4); |
| 642 __ Pop(R0); // Return value. | 627 __ Pop(R0); // Return value. |
| 643 __ LeaveStubFrame(); | 628 __ LeaveStubFrame(); |
| 644 __ ret(); | 629 __ ret(); |
| 645 } | 630 } |
| 646 | 631 |
| 647 | |
| 648 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { | 632 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { |
| 649 __ EnterStubFrame(); | 633 __ EnterStubFrame(); |
| 650 | 634 |
| 651 // Load the receiver. | 635 // Load the receiver. |
| 652 __ LoadFieldFromOffset(R2, R4, ArgumentsDescriptor::count_offset()); | 636 __ LoadFieldFromOffset(R2, R4, ArgumentsDescriptor::count_offset()); |
| 653 __ add(TMP, FP, Operand(R2, LSL, 2)); // R2 is Smi. | 637 __ add(TMP, FP, Operand(R2, LSL, 2)); // R2 is Smi. |
| 654 __ LoadFromOffset(R6, TMP, kParamEndSlotFromFp * kWordSize); | 638 __ LoadFromOffset(R6, TMP, kParamEndSlotFromFp * kWordSize); |
| 655 | 639 |
| 656 // Preserve IC data and arguments descriptor. | 640 // Preserve IC data and arguments descriptor. |
| 657 __ Push(R5); | 641 __ Push(R5); |
| (...skipping 24 matching lines...) Expand all Loading... |
| 682 GenerateDispatcherCode(assembler, &call_target_function); | 666 GenerateDispatcherCode(assembler, &call_target_function); |
| 683 __ Bind(&call_target_function); | 667 __ Bind(&call_target_function); |
| 684 } | 668 } |
| 685 | 669 |
| 686 // Tail-call to target function. | 670 // Tail-call to target function. |
| 687 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); | 671 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
| 688 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); | 672 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
| 689 __ br(R2); | 673 __ br(R2); |
| 690 } | 674 } |
| 691 | 675 |
| 692 | |
| 693 // Called for inline allocation of arrays. | 676 // Called for inline allocation of arrays. |
| 694 // Input parameters: | 677 // Input parameters: |
| 695 // LR: return address. | 678 // LR: return address. |
| 696 // R2: array length as Smi. | 679 // R2: array length as Smi. |
| 697 // R1: array element type (either NULL or an instantiated type). | 680 // R1: array element type (either NULL or an instantiated type). |
| 698 // NOTE: R2 cannot be clobbered here as the caller relies on it being saved. | 681 // NOTE: R2 cannot be clobbered here as the caller relies on it being saved. |
| 699 // The newly allocated object is returned in R0. | 682 // The newly allocated object is returned in R0. |
| 700 void StubCode::GenerateAllocateArrayStub(Assembler* assembler) { | 683 void StubCode::GenerateAllocateArrayStub(Assembler* assembler) { |
| 701 Label slow_case; | 684 Label slow_case; |
| 702 // Compute the size to be allocated, it is based on the array length | 685 // Compute the size to be allocated, it is based on the array length |
| (...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 827 __ Push(R1); | 810 __ Push(R1); |
| 828 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); | 811 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); |
| 829 // Pop arguments; result is popped in IP. | 812 // Pop arguments; result is popped in IP. |
| 830 __ Pop(R1); | 813 __ Pop(R1); |
| 831 __ Pop(R2); | 814 __ Pop(R2); |
| 832 __ Pop(R0); | 815 __ Pop(R0); |
| 833 __ LeaveStubFrame(); | 816 __ LeaveStubFrame(); |
| 834 __ ret(); | 817 __ ret(); |
| 835 } | 818 } |
| 836 | 819 |
| 837 | |
| 838 // Called when invoking Dart code from C++ (VM code). | 820 // Called when invoking Dart code from C++ (VM code). |
| 839 // Input parameters: | 821 // Input parameters: |
| 840 // LR : points to return address. | 822 // LR : points to return address. |
| 841 // R0 : code object of the Dart function to call. | 823 // R0 : code object of the Dart function to call. |
| 842 // R1 : arguments descriptor array. | 824 // R1 : arguments descriptor array. |
| 843 // R2 : arguments array. | 825 // R2 : arguments array. |
| 844 // R3 : current thread. | 826 // R3 : current thread. |
| 845 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { | 827 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { |
| 846 __ Comment("InvokeDartCodeStub"); | 828 __ Comment("InvokeDartCodeStub"); |
| 847 | 829 |
| (...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 960 // using it as the stack pointer. | 942 // using it as the stack pointer. |
| 961 __ ldr(r, Address(SP, 1 * kWordSize, Address::PostIndex)); | 943 __ ldr(r, Address(SP, 1 * kWordSize, Address::PostIndex)); |
| 962 } | 944 } |
| 963 | 945 |
| 964 // Restore the frame pointer and C stack pointer and return. | 946 // Restore the frame pointer and C stack pointer and return. |
| 965 __ LeaveFrame(); | 947 __ LeaveFrame(); |
| 966 __ RestoreCSP(); | 948 __ RestoreCSP(); |
| 967 __ ret(); | 949 __ ret(); |
| 968 } | 950 } |
| 969 | 951 |
| 970 | |
| 971 // Called for inline allocation of contexts. | 952 // Called for inline allocation of contexts. |
| 972 // Input: | 953 // Input: |
| 973 // R1: number of context variables. | 954 // R1: number of context variables. |
| 974 // Output: | 955 // Output: |
| 975 // R0: new allocated RawContext object. | 956 // R0: new allocated RawContext object. |
| 976 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { | 957 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { |
| 977 if (FLAG_inline_alloc) { | 958 if (FLAG_inline_alloc) { |
| 978 Label slow_case; | 959 Label slow_case; |
| 979 // First compute the rounded instance size. | 960 // First compute the rounded instance size. |
| 980 // R1: number of context variables. | 961 // R1: number of context variables. |
| (...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1075 __ Push(R1); | 1056 __ Push(R1); |
| 1076 __ CallRuntime(kAllocateContextRuntimeEntry, 1); // Allocate context. | 1057 __ CallRuntime(kAllocateContextRuntimeEntry, 1); // Allocate context. |
| 1077 __ Drop(1); // Pop number of context variables argument. | 1058 __ Drop(1); // Pop number of context variables argument. |
| 1078 __ Pop(R0); // Pop the new context object. | 1059 __ Pop(R0); // Pop the new context object. |
| 1079 // R0: new object | 1060 // R0: new object |
| 1080 // Restore the frame pointer. | 1061 // Restore the frame pointer. |
| 1081 __ LeaveStubFrame(); | 1062 __ LeaveStubFrame(); |
| 1082 __ ret(); | 1063 __ ret(); |
| 1083 } | 1064 } |
| 1084 | 1065 |
| 1085 | |
| 1086 // Helper stub to implement Assembler::StoreIntoObject. | 1066 // Helper stub to implement Assembler::StoreIntoObject. |
| 1087 // Input parameters: | 1067 // Input parameters: |
| 1088 // R0: Address being stored | 1068 // R0: Address being stored |
| 1089 void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) { | 1069 void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) { |
| 1090 Label add_to_buffer; | 1070 Label add_to_buffer; |
| 1091 // Check whether this object has already been remembered. Skip adding to the | 1071 // Check whether this object has already been remembered. Skip adding to the |
| 1092 // store buffer if the object is in the store buffer already. | 1072 // store buffer if the object is in the store buffer already. |
| 1093 __ LoadFieldFromOffset(TMP, R0, Object::tags_offset(), kWord); | 1073 __ LoadFieldFromOffset(TMP, R0, Object::tags_offset(), kWord); |
| 1094 __ tsti(TMP, Immediate(1 << RawObject::kRememberedBit)); | 1074 __ tsti(TMP, Immediate(1 << RawObject::kRememberedBit)); |
| 1095 __ b(&add_to_buffer, EQ); | 1075 __ b(&add_to_buffer, EQ); |
| (...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1141 // Setup frame, push callee-saved registers. | 1121 // Setup frame, push callee-saved registers. |
| 1142 | 1122 |
| 1143 __ EnterCallRuntimeFrame(0 * kWordSize); | 1123 __ EnterCallRuntimeFrame(0 * kWordSize); |
| 1144 __ mov(R0, THR); | 1124 __ mov(R0, THR); |
| 1145 __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1); | 1125 __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1); |
| 1146 // Restore callee-saved registers, tear down frame. | 1126 // Restore callee-saved registers, tear down frame. |
| 1147 __ LeaveCallRuntimeFrame(); | 1127 __ LeaveCallRuntimeFrame(); |
| 1148 __ ret(); | 1128 __ ret(); |
| 1149 } | 1129 } |
| 1150 | 1130 |
| 1151 | |
| 1152 // Called for inline allocation of objects. | 1131 // Called for inline allocation of objects. |
| 1153 // Input parameters: | 1132 // Input parameters: |
| 1154 // LR : return address. | 1133 // LR : return address. |
| 1155 // SP + 0 : type arguments object (only if class is parameterized). | 1134 // SP + 0 : type arguments object (only if class is parameterized). |
| 1156 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, | 1135 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, |
| 1157 const Class& cls) { | 1136 const Class& cls) { |
| 1158 // The generated code is different if the class is parameterized. | 1137 // The generated code is different if the class is parameterized. |
| 1159 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; | 1138 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; |
| 1160 ASSERT(!is_cls_parameterized || | 1139 ASSERT(!is_cls_parameterized || |
| 1161 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); | 1140 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); |
| (...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1268 } | 1247 } |
| 1269 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. | 1248 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. |
| 1270 __ Drop(2); // Pop arguments. | 1249 __ Drop(2); // Pop arguments. |
| 1271 __ Pop(R0); // Pop result (newly allocated object). | 1250 __ Pop(R0); // Pop result (newly allocated object). |
| 1272 // R0: new object | 1251 // R0: new object |
| 1273 // Restore the frame pointer. | 1252 // Restore the frame pointer. |
| 1274 __ LeaveStubFrame(); | 1253 __ LeaveStubFrame(); |
| 1275 __ ret(); | 1254 __ ret(); |
| 1276 } | 1255 } |
| 1277 | 1256 |
| 1278 | |
| 1279 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function | 1257 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function |
| 1280 // from the entry code of a dart function after an error in passed argument | 1258 // from the entry code of a dart function after an error in passed argument |
| 1281 // name or number is detected. | 1259 // name or number is detected. |
| 1282 // Input parameters: | 1260 // Input parameters: |
| 1283 // LR : return address. | 1261 // LR : return address. |
| 1284 // SP : address of last argument. | 1262 // SP : address of last argument. |
| 1285 // R4: arguments descriptor array. | 1263 // R4: arguments descriptor array. |
| 1286 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { | 1264 void StubCode::GenerateCallClosureNoSuchMethodStub(Assembler* assembler) { |
| 1287 __ EnterStubFrame(); | 1265 __ EnterStubFrame(); |
| 1288 | 1266 |
| (...skipping 17 matching lines...) Expand all Loading... |
| 1306 | 1284 |
| 1307 // R2: Smi-tagged arguments array length. | 1285 // R2: Smi-tagged arguments array length. |
| 1308 PushArgumentsArray(assembler); | 1286 PushArgumentsArray(assembler); |
| 1309 | 1287 |
| 1310 const intptr_t kNumArgs = 3; | 1288 const intptr_t kNumArgs = 3; |
| 1311 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); | 1289 __ CallRuntime(kInvokeClosureNoSuchMethodRuntimeEntry, kNumArgs); |
| 1312 // noSuchMethod on closures always throws an error, so it will never return. | 1290 // noSuchMethod on closures always throws an error, so it will never return. |
| 1313 __ brk(0); | 1291 __ brk(0); |
| 1314 } | 1292 } |
| 1315 | 1293 |
| 1316 | |
// Increments the usage counter of the function in R6. Used on call paths
// from optimized code, where the top-scope function is already in a register.
// R6: function object.
// R5: inline cache data object.
// Cannot use function object from ICData as it may be the inlined
// function and not the top-scope function.
void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) {
  Register ic_reg = R5;
  Register func_reg = R6;
  if (FLAG_trace_optimized_ic_calls) {
    // Tracing: call the runtime with (ic-data, function); preserve both
    // registers across the call since the caller still needs them.
    __ EnterStubFrame();
    __ Push(R6);  // Preserve.
    __ Push(R5);  // Preserve.
    __ Push(ic_reg);  // Argument.
    __ Push(func_reg);  // Argument.
    __ CallRuntime(kTraceICCallRuntimeEntry, 2);
    __ Drop(2);  // Discard argument;
    __ Pop(R5);  // Restore.
    __ Pop(R6);  // Restore.
    __ LeaveStubFrame();
  }
  // Bump the counter field; kWord accesses suggest a 32-bit field —
  // confirm against Function::usage_counter_offset() declaration.
  __ LoadFieldFromOffset(R7, func_reg, Function::usage_counter_offset(), kWord);
  __ add(R7, R7, Operand(1));
  __ StoreFieldToOffset(R7, func_reg, Function::usage_counter_offset(), kWord);
}
| 1340 | 1317 |
| 1341 | |
// Increments the usage counter of the function owning the ICData in R5.
// Loads function into 'temp_reg' (clobbered; must be R6). No-op when
// optimization is disabled (negative FLAG_optimization_counter_threshold).
void StubCode::GenerateUsageCounterIncrement(Assembler* assembler,
                                             Register temp_reg) {
  if (FLAG_optimization_counter_threshold >= 0) {
    Register ic_reg = R5;
    Register func_reg = temp_reg;
    ASSERT(temp_reg == R6);
    __ Comment("Increment function counter");
    // The function is reached through the ICData's owner field.
    __ LoadFieldFromOffset(func_reg, ic_reg, ICData::owner_offset());
    __ LoadFieldFromOffset(R7, func_reg, Function::usage_counter_offset(),
                           kWord);
    __ AddImmediate(R7, 1);
    __ StoreFieldToOffset(R7, func_reg, Function::usage_counter_offset(),
                          kWord);
  }
}
| 1358 | 1334 |
| 1359 | |
| 1360 // Note: R5 must be preserved. | 1335 // Note: R5 must be preserved. |
| 1361 // Attempt a quick Smi operation for known operations ('kind'). The ICData | 1336 // Attempt a quick Smi operation for known operations ('kind'). The ICData |
| 1362 // must have been primed with a Smi/Smi check that will be used for counting | 1337 // must have been primed with a Smi/Smi check that will be used for counting |
| 1363 // the invocations. | 1338 // the invocations. |
| 1364 static void EmitFastSmiOp(Assembler* assembler, | 1339 static void EmitFastSmiOp(Assembler* assembler, |
| 1365 Token::Kind kind, | 1340 Token::Kind kind, |
| 1366 intptr_t num_args, | 1341 intptr_t num_args, |
| 1367 Label* not_smi_or_overflow) { | 1342 Label* not_smi_or_overflow) { |
| 1368 __ Comment("Fast Smi op"); | 1343 __ Comment("Fast Smi op"); |
| 1369 __ ldr(R0, Address(SP, +0 * kWordSize)); // Right. | 1344 __ ldr(R0, Address(SP, +0 * kWordSize)); // Right. |
| (...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1416 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; | 1391 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; |
| 1417 // Update counter, ignore overflow. | 1392 // Update counter, ignore overflow. |
| 1418 __ LoadFromOffset(R1, R6, count_offset); | 1393 __ LoadFromOffset(R1, R6, count_offset); |
| 1419 __ adds(R1, R1, Operand(Smi::RawValue(1))); | 1394 __ adds(R1, R1, Operand(Smi::RawValue(1))); |
| 1420 __ StoreToOffset(R1, R6, count_offset); | 1395 __ StoreToOffset(R1, R6, count_offset); |
| 1421 } | 1396 } |
| 1422 | 1397 |
| 1423 __ ret(); | 1398 __ ret(); |
| 1424 } | 1399 } |
| 1425 | 1400 |
| 1426 | |
| 1427 // Generate inline cache check for 'num_args'. | 1401 // Generate inline cache check for 'num_args'. |
| 1428 // LR: return address. | 1402 // LR: return address. |
| 1429 // R5: inline cache data object. | 1403 // R5: inline cache data object. |
| 1430 // Control flow: | 1404 // Control flow: |
| 1431 // - If receiver is null -> jump to IC miss. | 1405 // - If receiver is null -> jump to IC miss. |
| 1432 // - If receiver is Smi -> load Smi class. | 1406 // - If receiver is Smi -> load Smi class. |
| 1433 // - If receiver is not-Smi -> load receiver's class. | 1407 // - If receiver is not-Smi -> load receiver's class. |
| 1434 // - Check if 'num_args' (including receiver) match any IC data group. | 1408 // - Check if 'num_args' (including receiver) match any IC data group. |
| 1435 // - Match found -> jump to target. | 1409 // - Match found -> jump to target. |
| 1436 // - Match not found -> jump to IC miss. | 1410 // - Match not found -> jump to IC miss. |
| (...skipping 161 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1598 __ EnterStubFrame(); | 1572 __ EnterStubFrame(); |
| 1599 __ Push(R5); // Preserve IC data. | 1573 __ Push(R5); // Preserve IC data. |
| 1600 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1574 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
| 1601 __ Pop(R5); | 1575 __ Pop(R5); |
| 1602 __ RestoreCodePointer(); | 1576 __ RestoreCodePointer(); |
| 1603 __ LeaveStubFrame(); | 1577 __ LeaveStubFrame(); |
| 1604 __ b(&done_stepping); | 1578 __ b(&done_stepping); |
| 1605 } | 1579 } |
| 1606 } | 1580 } |
| 1607 | 1581 |
| 1608 | |
// Use inline cache data array to invoke the target or continue in inline
// cache miss handler. Stub for 1-argument check (receiver class).
// LR: return address.
// R5: inline cache data object.
// Inline cache data object structure:
// 0: function-name
// 1: N, number of arguments checked.
// 2 .. (length - 1): group of checks, each check containing:
//   - N classes.
//   - 1 target function.
void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) {
  // Count the call (loads the callee function into R6 as a side effect).
  GenerateUsageCounterIncrement(assembler, R6);
  // One checked argument; Token::kILLEGAL means no Smi fast-path operation
  // (contrast with the SmiAdd/SmiSub/SmiEqual variants below).
  GenerateNArgsCheckInlineCacheStub(
      assembler, 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL);
}
| 1624 | 1597 |
| 1625 | |
// Same as GenerateOneArgCheckInlineCacheStub, but checks two arguments
// against the IC data (receiver plus one argument). R5: ICData.
void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R6);
  GenerateNArgsCheckInlineCacheStub(assembler, 2,
                                    kInlineCacheMissHandlerTwoArgsRuntimeEntry,
                                    Token::kILLEGAL);
}
| 1632 | 1604 |
| 1633 | |
// Two-argument IC stub specialized with a fast path for Smi '+'
// (Token::kADD). R5: ICData.
void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R6);
  GenerateNArgsCheckInlineCacheStub(
      assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD);
}
| 1639 | 1610 |
| 1640 | |
// Two-argument IC stub specialized with a fast path for Smi '-'
// (Token::kSUB). R5: ICData.
void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R6);
  GenerateNArgsCheckInlineCacheStub(
      assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB);
}
| 1646 | 1616 |
| 1647 | |
// Two-argument IC stub specialized with a fast path for Smi '=='
// (Token::kEQ). R5: ICData.
void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R6);
  GenerateNArgsCheckInlineCacheStub(
      assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ);
}
| 1653 | 1622 |
| 1654 | |
// One-argument IC stub for calls from optimized code: uses the
// optimized-path usage counter increment (function already in R6).
// R6: function, R5: ICData.
void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub(
    Assembler* assembler) {
  GenerateOptimizedUsageCounterIncrement(assembler);
  GenerateNArgsCheckInlineCacheStub(assembler, 1,
                                    kInlineCacheMissHandlerOneArgRuntimeEntry,
                                    Token::kILLEGAL, true /* optimized */);
}
| 1662 | 1630 |
| 1663 | |
// Two-argument IC stub for calls from optimized code.
// R6: function, R5: ICData.
void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub(
    Assembler* assembler) {
  GenerateOptimizedUsageCounterIncrement(assembler);
  GenerateNArgsCheckInlineCacheStub(assembler, 2,
                                    kInlineCacheMissHandlerTwoArgsRuntimeEntry,
                                    Token::kILLEGAL, true /* optimized */);
}
| 1671 | 1638 |
| 1672 | |
| 1673 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { | 1639 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { |
| 1674 GenerateUsageCounterIncrement(assembler, R6); | 1640 GenerateUsageCounterIncrement(assembler, R6); |
| 1675 #if defined(DEBUG) | 1641 #if defined(DEBUG) |
| 1676 { | 1642 { |
| 1677 Label ok; | 1643 Label ok; |
| 1678 // Check that the IC data array has NumArgsTested() == 0. | 1644 // Check that the IC data array has NumArgsTested() == 0. |
| 1679 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. | 1645 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. |
| 1680 __ LoadFromOffset(R6, R5, ICData::state_bits_offset() - kHeapObjectTag, | 1646 __ LoadFromOffset(R6, R5, ICData::state_bits_offset() - kHeapObjectTag, |
| 1681 kUnsignedWord); | 1647 kUnsignedWord); |
| 1682 ASSERT(ICData::NumArgsTestedShift() == 0); // No shift needed. | 1648 ASSERT(ICData::NumArgsTestedShift() == 0); // No shift needed. |
| (...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1727 __ EnterStubFrame(); | 1693 __ EnterStubFrame(); |
| 1728 __ Push(R5); // Preserve IC data. | 1694 __ Push(R5); // Preserve IC data. |
| 1729 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1695 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
| 1730 __ Pop(R5); | 1696 __ Pop(R5); |
| 1731 __ RestoreCodePointer(); | 1697 __ RestoreCodePointer(); |
| 1732 __ LeaveStubFrame(); | 1698 __ LeaveStubFrame(); |
| 1733 __ b(&done_stepping); | 1699 __ b(&done_stepping); |
| 1734 } | 1700 } |
| 1735 } | 1701 } |
| 1736 | 1702 |
| 1737 | |
// Unoptimized static call stub checking one argument; misses go to the
// static-call miss handler rather than the instance-call one. R5: ICData.
void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R6);
  GenerateNArgsCheckInlineCacheStub(
      assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL);
}
| 1743 | 1708 |
| 1744 | |
// Unoptimized static call stub checking two arguments. R5: ICData.
void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) {
  GenerateUsageCounterIncrement(assembler, R6);
  GenerateNArgsCheckInlineCacheStub(
      assembler, 2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL);
}
| 1750 | 1714 |
| 1751 | |
// Stub for compiling a function and jumping to the compiled code.
// R5: IC-Data (for methods).
// R4: Arguments descriptor.
// R0: Function.
void StubCode::GenerateLazyCompileStub(Assembler* assembler) {
  // Preserve arg desc. and IC data object across the compile call.
  __ EnterStubFrame();
  __ Push(R5);  // Save IC Data.
  __ Push(R4);  // Save arg. desc.
  __ Push(R0);  // Pass function.
  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
  __ Pop(R0);  // Restore argument.
  __ Pop(R4);  // Restore arg desc.
  __ Pop(R5);  // Restore IC Data.
  __ LeaveStubFrame();

  // Tail-jump into the (now compiled) function's code.
  __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset());
  __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset());
  __ br(R2);
}
| 1772 | 1735 |
| 1773 | |
// Breakpoint trampoline for instance calls: notifies the debugger runtime,
// then resumes at the code object it returns.
// R5: Contains an ICData.
void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ Push(R5);
  __ Push(ZR);  // Space for result.
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ Pop(CODE_REG);  // Result slot: code object to resume at.
  __ Pop(R5);
  __ LeaveStubFrame();
  // Jump to the returned code's entry point, IC data restored in R5.
  __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset());
  __ br(R0);
}
| 1786 | 1748 |
| 1787 | |
// Breakpoint trampoline for runtime calls: like the IC-call variant above
// in the original file, but with no ICData to preserve.
void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ Push(ZR);  // Space for result.
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ Pop(CODE_REG);  // Result slot: code object to resume at.
  __ LeaveStubFrame();
  __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset());
  __ br(R0);
}
| 1797 | 1758 |
// Called only from unoptimized code. All relevant registers have been saved.
// Returns immediately unless the isolate's single-step flag is set, in which
// case the single-step handler runtime entry is invoked first.
void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) {
  // Check single stepping: read the isolate's single_step byte.
  Label stepping, done_stepping;
  __ LoadIsolate(R1);
  __ LoadFromOffset(R1, R1, Isolate::single_step_offset(), kUnsignedByte);
  __ CompareImmediate(R1, 0);
  __ b(&stepping, NE);
  __ Bind(&done_stepping);

  __ ret();

  // Slow path: call the debugger's single-step handler, then return via
  // done_stepping.
  __ Bind(&stepping);
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ b(&done_stepping);
}
| 1816 | 1777 |
| 1817 | |
| 1818 // Used to check class and type arguments. Arguments passed in registers: | 1778 // Used to check class and type arguments. Arguments passed in registers: |
| 1819 // LR: return address. | 1779 // LR: return address. |
| 1820 // R0: instance (must be preserved). | 1780 // R0: instance (must be preserved). |
| 1821 // R1: instantiator type arguments (only if n == 4, can be raw_null). | 1781 // R1: instantiator type arguments (only if n == 4, can be raw_null). |
| 1822 // R2: function type arguments (only if n == 4, can be raw_null). | 1782 // R2: function type arguments (only if n == 4, can be raw_null). |
| 1823 // R3: SubtypeTestCache. | 1783 // R3: SubtypeTestCache. |
| 1824 // Result in R1: null -> not found, otherwise result (true or false). | 1784 // Result in R1: null -> not found, otherwise result (true or false). |
| 1825 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { | 1785 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { |
| 1826 ASSERT((n == 1) || (n == 2) || (n == 4)); | 1786 ASSERT((n == 1) || (n == 2) || (n == 4)); |
| 1827 if (n > 1) { | 1787 if (n > 1) { |
| (...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1893 // Fall through to not found. | 1853 // Fall through to not found. |
| 1894 __ Bind(¬_found); | 1854 __ Bind(¬_found); |
| 1895 __ LoadObject(R1, Object::null_object()); | 1855 __ LoadObject(R1, Object::null_object()); |
| 1896 __ ret(); | 1856 __ ret(); |
| 1897 | 1857 |
| 1898 __ Bind(&found); | 1858 __ Bind(&found); |
| 1899 __ LoadFromOffset(R1, R3, kWordSize * SubtypeTestCache::kTestResult); | 1859 __ LoadFromOffset(R1, R3, kWordSize * SubtypeTestCache::kTestResult); |
| 1900 __ ret(); | 1860 __ ret(); |
| 1901 } | 1861 } |
| 1902 | 1862 |
| 1903 | |
// Used to check class and type arguments. Arguments passed in registers
// (R0/R3; R1 and R2 are unused for the 1-check variant):
// LR: return address.
// R0: instance (must be preserved).
// R1: unused.
// R2: unused.
// R3: SubtypeTestCache.
// Result in R1: null -> not found, otherwise result (true or false).
void StubCode::GenerateSubtype1TestCacheStub(Assembler* assembler) {
  // Delegates to the shared N-check implementation with n == 1.
  GenerateSubtypeNTestCacheStub(assembler, 1);
}
| 1914 | 1873 |
| 1915 | |
// Used to check class and type arguments. Arguments passed in registers:
// LR: return address.
// R0: instance (must be preserved).
// R1: unused.
// R2: unused.
// R3: SubtypeTestCache.
// Result in R1: null -> not found, otherwise result (true or false).
void StubCode::GenerateSubtype2TestCacheStub(Assembler* assembler) {
  // Delegates to the shared N-check implementation with n == 2.
  GenerateSubtypeNTestCacheStub(assembler, 2);
}
| 1926 | 1884 |
| 1927 | |
// Used to check class and type arguments. Arguments passed in registers:
// LR: return address.
// R0: instance (must be preserved).
// R1: instantiator type arguments (can be raw_null).
// R2: function type arguments (can be raw_null).
// R3: SubtypeTestCache.
// Result in R1: null -> not found, otherwise result (true or false).
void StubCode::GenerateSubtype4TestCacheStub(Assembler* assembler) {
  // Delegates to the shared N-check implementation with n == 4.
  GenerateSubtypeNTestCacheStub(assembler, 4);
}
| 1938 | 1895 |
| 1939 | |
// Returns the current C stack pointer (CSP) in R0.
void StubCode::GenerateGetCStackPointerStub(Assembler* assembler) {
  __ mov(R0, CSP);
  __ ret();
}
| 1944 | 1900 |
| 1945 | |
// Jump to a frame on the call stack.
// LR: return address.
// R0: program_counter.
// R1: stack_pointer.
// R2: frame_pointer.
// R3: thread.
// Does not return.
void StubCode::GenerateJumpToFrameStub(Assembler* assembler) {
  ASSERT(kExceptionObjectReg == R0);
  ASSERT(kStackTraceObjectReg == R1);
  __ mov(LR, R0);  // Program counter.
  __ mov(SP, R1);  // Stack pointer.
  __ mov(FP, R2);  // Frame_pointer.
  __ mov(THR, R3);  // Thread register.
  // Set the tag: we are (re)entering Dart code.
  __ LoadImmediate(R2, VMTag::kDartTagId);
  __ StoreToOffset(R2, THR, Thread::vm_tag_offset());
  // Clear top exit frame.
  __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset());
  // Restore the pool pointer.
  __ RestoreCodePointer();
  __ LoadPoolPointer();
  __ ret();  // Jump to continuation point (ret branches to LR = R0 above).
}
| 1970 | 1925 |
| 1971 | |
// Run an exception handler. Execution comes from JumpToFrame
// stub or from the simulator.
//
// The arguments are stored in the Thread object.
// Does not return.
void StubCode::GenerateRunExceptionHandlerStub(Assembler* assembler) {
  // The handler pc was stashed in the Thread; we return through it below.
  __ LoadFromOffset(LR, THR, Thread::resume_pc_offset());
  __ LoadImmediate(R2, 0);  // Zero used to clear the Thread slots below.

  // Exception object: load into R0 and clear the Thread slot.
  __ LoadFromOffset(R0, THR, Thread::active_exception_offset());
  __ StoreToOffset(R2, THR, Thread::active_exception_offset());

  // StackTrace object: load into R1 and clear the Thread slot.
  __ LoadFromOffset(R1, THR, Thread::active_stacktrace_offset());
  __ StoreToOffset(R2, THR, Thread::active_stacktrace_offset());

  __ ret();  // Jump to the exception handler code.
}
| 1991 | 1945 |
| 1992 | |
// Deoptimize a frame on the call stack before rewinding.
// The arguments are stored in the Thread object.
// No result.
void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) {
  // Push zap value instead of CODE_REG.
  __ LoadImmediate(TMP, kZapCodeReg);
  __ Push(TMP);

  // Load the deopt pc into LR.
  __ LoadFromOffset(LR, THR, Thread::resume_pc_offset());
  GenerateDeoptimizationSequence(assembler, kEagerDeopt);

  // After we have deoptimized, jump to the correct frame.
  __ EnterStubFrame();
  __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ brk(0);  // Unreachable: the rewind runtime entry does not return here.
}
| 2011 | 1964 |
| 2012 | |
// Calls to the runtime to optimize the given function, then tail-calls the
// resulting code.
// R6: function to be re-optimized.
// R4: argument descriptor (preserved).
void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ Push(R4);  // Preserve argument descriptor.
  // Setup space on stack for the return value.
  __ Push(ZR);
  __ Push(R6);  // Argument: function to optimize.
  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
  __ Pop(R0);  // Discard argument.
  __ Pop(R0);  // Get Function object
  __ Pop(R4);  // Restore argument descriptor.
  // Jump into the optimized code.
  __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset());
  __ LoadFieldFromOffset(R1, R0, Function::entry_point_offset());
  __ LeaveStubFrame();
  __ br(R1);
  __ brk(0);  // Unreachable.
}
| 2032 | 1984 |
| 2033 | |
| 2034 // Does identical check (object references are equal or not equal) with special | 1985 // Does identical check (object references are equal or not equal) with special |
| 2035 // checks for boxed numbers. | 1986 // checks for boxed numbers. |
| 2036 // Left and right are pushed on stack. | 1987 // Left and right are pushed on stack. |
| 2037 // Return Zero condition flag set if equal. | 1988 // Return Zero condition flag set if equal. |
| 2038 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint | 1989 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint |
| 2039 // cannot contain a value that fits in Mint or Smi. | 1990 // cannot contain a value that fits in Mint or Smi. |
| 2040 static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler, | 1991 static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler, |
| 2041 const Register left, | 1992 const Register left, |
| 2042 const Register right) { | 1993 const Register right) { |
| 2043 Label reference_compare, done, check_mint, check_bigint; | 1994 Label reference_compare, done, check_mint, check_bigint; |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2081 // Result in R0, 0 means equal. | 2032 // Result in R0, 0 means equal. |
| 2082 __ LeaveStubFrame(); | 2033 __ LeaveStubFrame(); |
| 2083 __ cmp(R0, Operand(0)); | 2034 __ cmp(R0, Operand(0)); |
| 2084 __ b(&done); | 2035 __ b(&done); |
| 2085 | 2036 |
| 2086 __ Bind(&reference_compare); | 2037 __ Bind(&reference_compare); |
| 2087 __ CompareRegisters(left, right); | 2038 __ CompareRegisters(left, right); |
| 2088 __ Bind(&done); | 2039 __ Bind(&done); |
| 2089 } | 2040 } |
| 2090 | 2041 |
| 2091 | |
| 2092 // Called only from unoptimized code. All relevant registers have been saved. | 2042 // Called only from unoptimized code. All relevant registers have been saved. |
| 2093 // LR: return address. | 2043 // LR: return address. |
| 2094 // SP + 4: left operand. | 2044 // SP + 4: left operand. |
| 2095 // SP + 0: right operand. | 2045 // SP + 0: right operand. |
| 2096 // Return Zero condition flag set if equal. | 2046 // Return Zero condition flag set if equal. |
| 2097 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub( | 2047 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub( |
| 2098 Assembler* assembler) { | 2048 Assembler* assembler) { |
| 2099 // Check single stepping. | 2049 // Check single stepping. |
| 2100 Label stepping, done_stepping; | 2050 Label stepping, done_stepping; |
| 2101 if (FLAG_support_debugger) { | 2051 if (FLAG_support_debugger) { |
| (...skipping 14 matching lines...) Expand all Loading... |
| 2116 if (FLAG_support_debugger) { | 2066 if (FLAG_support_debugger) { |
| 2117 __ Bind(&stepping); | 2067 __ Bind(&stepping); |
| 2118 __ EnterStubFrame(); | 2068 __ EnterStubFrame(); |
| 2119 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 2069 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
| 2120 __ RestoreCodePointer(); | 2070 __ RestoreCodePointer(); |
| 2121 __ LeaveStubFrame(); | 2071 __ LeaveStubFrame(); |
| 2122 __ b(&done_stepping); | 2072 __ b(&done_stepping); |
| 2123 } | 2073 } |
| 2124 } | 2074 } |
| 2125 | 2075 |
| 2126 | |
// Called from optimized code only.
// LR: return address.
// SP + kWordSize: left operand.  (NOTE(review): the previous comment said
//     "SP + 4", but the load below uses 1 * kWordSize, which is 8 on ARM64.)
// SP + 0: right operand.
// Return Zero condition flag set if equal.
void StubCode::GenerateOptimizedIdenticalWithNumberCheckStub(
    Assembler* assembler) {
  const Register left = R1;
  const Register right = R0;
  // Operands are left on the stack by the optimized caller.
  __ LoadFromOffset(left, SP, 1 * kWordSize);
  __ LoadFromOffset(right, SP, 0 * kWordSize);
  // Sets the Zero flag; result is communicated via condition flags only.
  GenerateIdenticalWithNumberCheckStub(assembler, left, right);
  __ ret();
}
| 2141 | 2090 |
| 2142 | |
| 2143 // Called from megamorphic calls. | 2091 // Called from megamorphic calls. |
| 2144 // R0: receiver | 2092 // R0: receiver |
| 2145 // R5: MegamorphicCache (preserved) | 2093 // R5: MegamorphicCache (preserved) |
| 2146 // Passed to target: | 2094 // Passed to target: |
| 2147 // CODE_REG: target Code | 2095 // CODE_REG: target Code |
| 2148 // R4: arguments descriptor | 2096 // R4: arguments descriptor |
| 2149 void StubCode::GenerateMegamorphicCallStub(Assembler* assembler) { | 2097 void StubCode::GenerateMegamorphicCallStub(Assembler* assembler) { |
| 2150 // Jump if receiver is a smi. | 2098 // Jump if receiver is a smi. |
| 2151 Label smi_case; | 2099 Label smi_case; |
| 2152 __ TestImmediate(R0, kSmiTagMask); | 2100 __ TestImmediate(R0, kSmiTagMask); |
| (...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2205 // Try next entry in the table. | 2153 // Try next entry in the table. |
| 2206 __ AddImmediate(R3, Smi::RawValue(1)); | 2154 __ AddImmediate(R3, Smi::RawValue(1)); |
| 2207 __ b(&loop); | 2155 __ b(&loop); |
| 2208 | 2156 |
| 2209 // Load cid for the Smi case. | 2157 // Load cid for the Smi case. |
| 2210 __ Bind(&smi_case); | 2158 __ Bind(&smi_case); |
| 2211 __ LoadImmediate(R0, kSmiCid); | 2159 __ LoadImmediate(R0, kSmiCid); |
| 2212 __ b(&cid_loaded); | 2160 __ b(&cid_loaded); |
| 2213 } | 2161 } |
| 2214 | 2162 |
| 2215 | |
| 2216 // Called from switchable IC calls. | 2163 // Called from switchable IC calls. |
| 2217 // R0: receiver | 2164 // R0: receiver |
| 2218 // R5: ICData (preserved) | 2165 // R5: ICData (preserved) |
| 2219 // Passed to target: | 2166 // Passed to target: |
| 2220 // CODE_REG: target Code object | 2167 // CODE_REG: target Code object |
| 2221 // R4: arguments descriptor | 2168 // R4: arguments descriptor |
| 2222 void StubCode::GenerateICCallThroughFunctionStub(Assembler* assembler) { | 2169 void StubCode::GenerateICCallThroughFunctionStub(Assembler* assembler) { |
| 2223 Label loop, found, miss; | 2170 Label loop, found, miss; |
| 2224 __ ldr(R4, FieldAddress(R5, ICData::arguments_descriptor_offset())); | 2171 __ ldr(R4, FieldAddress(R5, ICData::arguments_descriptor_offset())); |
| 2225 __ ldr(R8, FieldAddress(R5, ICData::ic_data_offset())); | 2172 __ ldr(R8, FieldAddress(R5, ICData::ic_data_offset())); |
| (...skipping 20 matching lines...) Expand all Loading... |
| 2246 __ ldr(CODE_REG, FieldAddress(R0, Function::code_offset())); | 2193 __ ldr(CODE_REG, FieldAddress(R0, Function::code_offset())); |
| 2247 __ br(R1); | 2194 __ br(R1); |
| 2248 | 2195 |
| 2249 __ Bind(&miss); | 2196 __ Bind(&miss); |
| 2250 __ LoadIsolate(R2); | 2197 __ LoadIsolate(R2); |
| 2251 __ ldr(CODE_REG, Address(R2, Isolate::ic_miss_code_offset())); | 2198 __ ldr(CODE_REG, Address(R2, Isolate::ic_miss_code_offset())); |
| 2252 __ ldr(R1, FieldAddress(CODE_REG, Code::entry_point_offset())); | 2199 __ ldr(R1, FieldAddress(CODE_REG, Code::entry_point_offset())); |
| 2253 __ br(R1); | 2200 __ br(R1); |
| 2254 } | 2201 } |
| 2255 | 2202 |
| 2256 | |
| 2257 void StubCode::GenerateICCallThroughCodeStub(Assembler* assembler) { | 2203 void StubCode::GenerateICCallThroughCodeStub(Assembler* assembler) { |
| 2258 Label loop, found, miss; | 2204 Label loop, found, miss; |
| 2259 __ ldr(R4, FieldAddress(R5, ICData::arguments_descriptor_offset())); | 2205 __ ldr(R4, FieldAddress(R5, ICData::arguments_descriptor_offset())); |
| 2260 __ ldr(R8, FieldAddress(R5, ICData::ic_data_offset())); | 2206 __ ldr(R8, FieldAddress(R5, ICData::ic_data_offset())); |
| 2261 __ AddImmediate(R8, Array::data_offset() - kHeapObjectTag); | 2207 __ AddImmediate(R8, Array::data_offset() - kHeapObjectTag); |
| 2262 // R8: first IC entry | 2208 // R8: first IC entry |
| 2263 __ LoadTaggedClassIdMayBeSmi(R1, R0); | 2209 __ LoadTaggedClassIdMayBeSmi(R1, R0); |
| 2264 // R1: receiver cid as Smi | 2210 // R1: receiver cid as Smi |
| 2265 | 2211 |
| 2266 __ Bind(&loop); | 2212 __ Bind(&loop); |
| (...skipping 14 matching lines...) Expand all Loading... |
| 2281 __ ldr(CODE_REG, Address(R8, code_offset)); | 2227 __ ldr(CODE_REG, Address(R8, code_offset)); |
| 2282 __ br(R1); | 2228 __ br(R1); |
| 2283 | 2229 |
| 2284 __ Bind(&miss); | 2230 __ Bind(&miss); |
| 2285 __ LoadIsolate(R2); | 2231 __ LoadIsolate(R2); |
| 2286 __ ldr(CODE_REG, Address(R2, Isolate::ic_miss_code_offset())); | 2232 __ ldr(CODE_REG, Address(R2, Isolate::ic_miss_code_offset())); |
| 2287 __ ldr(R1, FieldAddress(CODE_REG, Code::entry_point_offset())); | 2233 __ ldr(R1, FieldAddress(CODE_REG, Code::entry_point_offset())); |
| 2288 __ br(R1); | 2234 __ br(R1); |
| 2289 } | 2235 } |
| 2290 | 2236 |
| 2291 | |
// Called from switchable IC calls.
// R0: receiver
// R5: UnlinkedCall object (NOTE(review): the previous header said
//     "SingleTargetCache", but R5 is pushed below as "Arg1: UnlinkedCall";
//     the header appears to have been copy-pasted from the stub that follows.)
// Resolves the unlinked call through the runtime, then tail-calls the
// IC-lookup-through-code stub with the resulting IC in R5.
void StubCode::GenerateUnlinkedCallStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ Push(R0);  // Preserve receiver.

  __ Push(ZR);  // Result slot.
  __ Push(R0);  // Arg0: Receiver
  __ Push(R5);  // Arg1: UnlinkedCall
  __ CallRuntime(kUnlinkedCallRuntimeEntry, 2);
  __ Drop(2);   // Drop the two runtime arguments; result slot is on top.
  __ Pop(R5);   // result = IC

  __ Pop(R0);   // Restore receiver.
  __ LeaveStubFrame();

  // Tail-call the shared IC lookup stub via its checked entry point.
  __ ldr(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset()));
  __ ldr(R1, FieldAddress(CODE_REG, Code::checked_entry_point_offset()));
  __ br(R1);
}
| 2313 | 2258 |
| 2314 | |
| 2315 // Called from switchable IC calls. | 2259 // Called from switchable IC calls. |
| 2316 // R0: receiver | 2260 // R0: receiver |
| 2317 // R5: SingleTargetCache | 2261 // R5: SingleTargetCache |
| 2318 // Passed to target: | 2262 // Passed to target: |
| 2319 // CODE_REG: target Code object | 2263 // CODE_REG: target Code object |
| 2320 void StubCode::GenerateSingleTargetCallStub(Assembler* assembler) { | 2264 void StubCode::GenerateSingleTargetCallStub(Assembler* assembler) { |
| 2321 Label miss; | 2265 Label miss; |
| 2322 __ LoadClassIdMayBeSmi(R1, R0); | 2266 __ LoadClassIdMayBeSmi(R1, R0); |
| 2323 __ ldr(R2, FieldAddress(R5, SingleTargetCache::lower_limit_offset()), | 2267 __ ldr(R2, FieldAddress(R5, SingleTargetCache::lower_limit_offset()), |
| 2324 kUnsignedHalfword); | 2268 kUnsignedHalfword); |
| (...skipping 20 matching lines...) Expand all Loading... |
| 2345 __ Pop(R5); // result = IC | 2289 __ Pop(R5); // result = IC |
| 2346 | 2290 |
| 2347 __ Pop(R0); // Restore receiver. | 2291 __ Pop(R0); // Restore receiver. |
| 2348 __ LeaveStubFrame(); | 2292 __ LeaveStubFrame(); |
| 2349 | 2293 |
| 2350 __ ldr(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset())); | 2294 __ ldr(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset())); |
| 2351 __ ldr(R1, FieldAddress(CODE_REG, Code::checked_entry_point_offset())); | 2295 __ ldr(R1, FieldAddress(CODE_REG, Code::checked_entry_point_offset())); |
| 2352 __ br(R1); | 2296 __ br(R1); |
| 2353 } | 2297 } |
| 2354 | 2298 |
| 2355 | |
// Called from the monomorphic checked entry.
// R0: receiver
// Reports the monomorphic-cache miss to the runtime (which produces an IC,
// returned in R5), then tail-calls the IC-lookup-through-code stub.
void StubCode::GenerateMonomorphicMissStub(Assembler* assembler) {
  // Materialize this stub's own Code object before entering the frame.
  __ ldr(CODE_REG, Address(THR, Thread::monomorphic_miss_stub_offset()));
  __ EnterStubFrame();
  __ Push(R0);  // Preserve receiver.

  __ Push(ZR);  // Result slot.
  __ Push(R0);  // Arg0: Receiver
  __ CallRuntime(kMonomorphicMissRuntimeEntry, 1);
  __ Drop(1);   // Drop the argument; result slot is on top.
  __ Pop(R5);   // result = IC

  __ Pop(R0);   // Restore receiver.
  __ LeaveStubFrame();

  // Tail-call the shared IC lookup stub via its checked entry point.
  __ ldr(CODE_REG, Address(THR, Thread::ic_lookup_through_code_stub_offset()));
  __ ldr(R1, FieldAddress(CODE_REG, Code::checked_entry_point_offset()));
  __ br(R1);
}
| 2376 | 2319 |
| 2377 | |
// Placeholder stub: per its name, marks frames awaiting materialization
// (e.g. during deoptimization); it must never actually be executed, so it
// traps immediately.
void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) {
  __ brk(0);  // Trap if ever reached.
}
| 2381 | 2323 |
| 2382 | |
// Placeholder stub: per its name, marks a gap between asynchronous stack
// segments; it is a marker only and must never be executed.
void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) {
  __ brk(0);  // Trap if ever reached.
}
| 2386 | 2327 |
| 2387 } // namespace dart | 2328 } // namespace dart |
| 2388 | 2329 |
| 2389 #endif // defined TARGET_ARCH_ARM64 | 2330 #endif // defined TARGET_ARCH_ARM64 |
| OLD | NEW |