OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
(...skipping 23 matching lines...) |
34 // SP : address of last argument in argument array. | 34 // SP : address of last argument in argument array. |
35 // SP + 8*R4 - 8 : address of first argument in argument array. | 35 // SP + 8*R4 - 8 : address of first argument in argument array. |
36 // SP + 8*R4 : address of return value. | 36 // SP + 8*R4 : address of return value. |
37 // R5 : address of the runtime function to call. | 37 // R5 : address of the runtime function to call. |
38 // R4 : number of arguments to the call. | 38 // R4 : number of arguments to the call. |
39 void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) { | 39 void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) { |
40 const intptr_t thread_offset = NativeArguments::thread_offset(); | 40 const intptr_t thread_offset = NativeArguments::thread_offset(); |
41 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); | 41 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); |
42 const intptr_t argv_offset = NativeArguments::argv_offset(); | 42 const intptr_t argv_offset = NativeArguments::argv_offset(); |
43 const intptr_t retval_offset = NativeArguments::retval_offset(); | 43 const intptr_t retval_offset = NativeArguments::retval_offset(); |
44 const intptr_t exitframe_last_param_slot_from_fp = 1; | |
45 | 44 |
46 __ SetPrologueOffset(); | 45 __ SetPrologueOffset(); |
47 __ Comment("CallToRuntimeStub"); | 46 __ Comment("CallToRuntimeStub"); |
48 __ EnterStubFrame(); | 47 __ EnterStubFrame(); |
49 | 48 |
50 COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << R28)) != 0); | 49 COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << R28)) != 0); |
51 __ LoadIsolate(R28); | 50 __ LoadIsolate(R28); |
52 | 51 |
53 // Save exit frame information to enable stack walking as we are about | 52 // Save exit frame information to enable stack walking as we are about |
54 // to transition to Dart VM C++ code. | 53 // to transition to Dart VM C++ code. |
(...skipping 29 matching lines...) |
84 | 83 |
85 // There are no runtime calls to closures, so we do not need to set the tag | 84 // There are no runtime calls to closures, so we do not need to set the tag |
86 // bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_. | 85 // bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_. |
87 ASSERT(argc_tag_offset == 1 * kWordSize); | 86 ASSERT(argc_tag_offset == 1 * kWordSize); |
88 __ mov(R1, R4); // Set argc in NativeArguments. | 87 __ mov(R1, R4); // Set argc in NativeArguments. |
89 | 88 |
90 ASSERT(argv_offset == 2 * kWordSize); | 89 ASSERT(argv_offset == 2 * kWordSize); |
91 __ add(R2, ZR, Operand(R4, LSL, 3)); | 90 __ add(R2, ZR, Operand(R4, LSL, 3)); |
92 __ add(R2, FP, Operand(R2)); // Compute argv. | 91 __ add(R2, FP, Operand(R2)); // Compute argv. |
93 // Set argv in NativeArguments. | 92 // Set argv in NativeArguments. |
94 __ AddImmediate(R2, R2, exitframe_last_param_slot_from_fp * kWordSize); | 93 __ AddImmediate(R2, R2, kParamEndSlotFromFp * kWordSize); |
95 | 94 |
96 ASSERT(retval_offset == 3 * kWordSize); | 95 ASSERT(retval_offset == 3 * kWordSize); |
97 __ AddImmediate(R3, R2, kWordSize); | 96 __ AddImmediate(R3, R2, kWordSize); |
98 | 97 |
99 __ StoreToOffset(R0, SP, thread_offset); | 98 __ StoreToOffset(R0, SP, thread_offset); |
100 __ StoreToOffset(R1, SP, argc_tag_offset); | 99 __ StoreToOffset(R1, SP, argc_tag_offset); |
101 __ StoreToOffset(R2, SP, argv_offset); | 100 __ StoreToOffset(R2, SP, argv_offset); |
102 __ StoreToOffset(R3, SP, retval_offset); | 101 __ StoreToOffset(R3, SP, retval_offset); |
103 __ mov(R0, SP); // Pass the pointer to the NativeArguments. | 102 __ mov(R0, SP); // Pass the pointer to the NativeArguments. |
104 | 103 |
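The four StoreToOffset instructions above fill in a NativeArguments block that the stub builds directly on the Dart stack, and the final mov passes its address to the C++ runtime entry in R0. A rough sketch of the slot layout implied by the ASSERTs (field names are illustrative, and the thread slot at offset 0 is inferred from the surrounding stores rather than shown in this diff):

    #include <cstdint>

    // Word-sized slots written at SP by the stub; kWordSize == 8 on ARM64.
    struct NativeArgumentsSketch {
      uintptr_t thread;    // SP + 0 * kWordSize: current Thread pointer.
      uintptr_t argc_tag;  // SP + 1 * kWordSize: argument count plus tag bits.
      uintptr_t argv;      // SP + 2 * kWordSize: address of the first argument.
      uintptr_t retval;    // SP + 3 * kWordSize: address of the return-value slot.
    };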
(...skipping 234 matching lines...) |
339 // R4: arguments descriptor array. | 338 // R4: arguments descriptor array. |
340 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { | 339 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { |
341 // Create a stub frame as we are pushing some objects on the stack before | 340 // Create a stub frame as we are pushing some objects on the stack before |
342 // calling into the runtime. | 341 // calling into the runtime. |
343 __ EnterStubFrame(); | 342 __ EnterStubFrame(); |
344 // Setup space on stack for return value and preserve arguments descriptor. | 343 // Setup space on stack for return value and preserve arguments descriptor. |
345 __ Push(R4); | 344 __ Push(R4); |
346 __ PushObject(Object::null_object()); | 345 __ PushObject(Object::null_object()); |
347 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); | 346 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); |
348 // Get Code object result and restore arguments descriptor array. | 347 // Get Code object result and restore arguments descriptor array. |
349 __ Pop(R0); | 348 __ Pop(CODE_REG); |
350 __ Pop(R4); | 349 __ Pop(R4); |
351 // Remove the stub frame. | 350 // Remove the stub frame. |
352 __ LeaveStubFrame(); | 351 __ LeaveStubFrame(); |
353 // Jump to the dart function. | 352 // Jump to the dart function. |
354 __ LoadFieldFromOffset(R0, R0, Code::entry_point_offset()); | 353 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
355 __ br(R0); | 354 __ br(R0); |
356 } | 355 } |
357 | 356 |
358 | 357 |
359 // Called from a static call only when an invalid code has been entered | 358 // Called from a static call only when an invalid code has been entered |
360 // (invalid because its function was optimized or deoptimized). | 359 // (invalid because its function was optimized or deoptimized). |
361 // R4: arguments descriptor array. | 360 // R4: arguments descriptor array. |
362 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 361 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { |
| 362 // Load code pointer to this stub from the thread: |
| 363 // The one that is passed in is not correct - it points to the code object |
| 364 // that needs to be replaced. |
| 365 __ ldr(CODE_REG, Address(THR, Thread::fix_callers_target_code_offset())); |
363 // Create a stub frame as we are pushing some objects on the stack before | 366 // Create a stub frame as we are pushing some objects on the stack before |
364 // calling into the runtime. | 367 // calling into the runtime. |
365 __ EnterStubFrame(); | 368 __ EnterStubFrame(); |
366 // Setup space on stack for return value and preserve arguments descriptor. | 369 // Setup space on stack for return value and preserve arguments descriptor. |
367 __ Push(R4); | 370 __ Push(R4); |
368 __ PushObject(Object::null_object()); | 371 __ PushObject(Object::null_object()); |
369 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); | 372 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); |
370 // Get Code object result and restore arguments descriptor array. | 373 // Get Code object result and restore arguments descriptor array. |
371 __ Pop(R0); | 374 __ Pop(CODE_REG); |
372 __ Pop(R4); | 375 __ Pop(R4); |
373 // Remove the stub frame. | 376 // Remove the stub frame. |
374 __ LeaveStubFrame(); | 377 __ LeaveStubFrame(); |
375 // Jump to the dart function. | 378 // Jump to the dart function. |
376 __ LoadFieldFromOffset(R0, R0, Code::entry_point_offset()); | 379 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
377 __ br(R0); | 380 __ br(R0); |
378 } | 381 } |
379 | 382 |
380 | 383 |
381 // Called from object allocate instruction when the allocation stub has been | 384 // Called from object allocate instruction when the allocation stub has been |
382 // disabled. | 385 // disabled. |
383 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { | 386 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { |
| 387 // Load code pointer to this stub from the thread: |
| 388 // The one that is passed in is not correct - it points to the code object |
| 389 // that needs to be replaced. |
| 390 __ ldr(CODE_REG, Address(THR, Thread::fix_allocation_stub_code_offset())); |
384 __ EnterStubFrame(); | 391 __ EnterStubFrame(); |
385 // Setup space on stack for return value. | 392 // Setup space on stack for return value. |
386 __ PushObject(Object::null_object()); | 393 __ PushObject(Object::null_object()); |
387 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); | 394 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); |
388 // Get Code object result. | 395 // Get Code object result. |
389 __ Pop(R0); | 396 __ Pop(CODE_REG); |
390 // Remove the stub frame. | 397 // Remove the stub frame. |
391 __ LeaveStubFrame(); | 398 __ LeaveStubFrame(); |
392 // Jump to the dart function. | 399 // Jump to the dart function. |
393 __ LoadFieldFromOffset(R0, R0, Code::entry_point_offset()); | 400 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
394 __ br(R0); | 401 __ br(R0); |
395 } | 402 } |
396 | 403 |
397 | 404 |
398 // Input parameters: | 405 // Input parameters: |
399 // R2: smi-tagged argument count, may be zero. | 406 // R2: smi-tagged argument count, may be zero. |
400 // FP[kParamEndSlotFromFp + 1]: last argument. | 407 // FP[kParamEndSlotFromFp + 1]: last argument. |
401 static void PushArgumentsArray(Assembler* assembler) { | 408 static void PushArgumentsArray(Assembler* assembler) { |
402 // Allocate array to store arguments of caller. | 409 // Allocate array to store arguments of caller. |
403 __ LoadObject(R1, Object::null_object()); | 410 __ LoadObject(R1, Object::null_object()); |
(...skipping 37 matching lines...) |
441 // Stack after TagAndPushPP() below: | 448 // Stack after TagAndPushPP() below: |
442 // +------------------+ | 449 // +------------------+ |
443 // | Saved PP | <- PP | 450 // | Saved PP | <- PP |
444 // +------------------+ | 451 // +------------------+ |
445 // | PC marker | <- TOS | 452 // | PC marker | <- TOS |
446 // +------------------+ | 453 // +------------------+ |
447 // | Saved FP | <- FP of stub | 454 // | Saved FP | <- FP of stub |
448 // +------------------+ | 455 // +------------------+ |
449 // | return-address | (deoptimization point) | 456 // | return-address | (deoptimization point) |
450 // +------------------+ | 457 // +------------------+ |
| 458 // | Saved CODE_REG | |
| 459 // +------------------+ |
451 // | ... | <- SP of optimized frame | 460 // | ... | <- SP of optimized frame |
452 // | 461 // |
453 // Parts of the code cannot trigger GC; other parts can. | 462 // Parts of the code cannot trigger GC; other parts can. |
454 static void GenerateDeoptimizationSequence(Assembler* assembler, | 463 static void GenerateDeoptimizationSequence(Assembler* assembler, |
455 bool preserve_result) { | 464 DeoptStubKind kind) { |
456 // DeoptimizeCopyFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 465 // DeoptimizeCopyFrame expects a Dart frame, i.e. EnterDartFrame(0), but there |
457 // is no need to set the correct PC marker or load PP, since they get patched. | 466 // is no need to set the correct PC marker or load PP, since they get patched. |
458 __ EnterStubFrame(); | 467 __ EnterStubFrame(); |
459 | 468 |
460 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry | 469 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry |
461 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. | 470 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. |
462 const intptr_t saved_result_slot_from_fp = | 471 const intptr_t saved_result_slot_from_fp = |
463 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R0); | 472 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R0); |
464 // Result in R0 is preserved as part of pushing all registers below. | 473 // Result in R0 is preserved as part of pushing all registers below. |
465 | 474 |
466 // Push registers in their enumeration order: lowest register number at | 475 // Push registers in their enumeration order: lowest register number at |
467 // lowest address. | 476 // lowest address. |
468 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) { | 477 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) { |
469 const Register r = static_cast<Register>(i); | 478 const Register r = static_cast<Register>(i); |
470 __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex)); | 479 if (r == CODE_REG) { |
| 480 // Save the original value of CODE_REG pushed before invoking this stub |
| 481 // instead of the value used to call this stub. |
| 482 COMPILE_ASSERT(R25 > CODE_REG); |
| 483 __ ldr(R25, Address(FP, 2 * kWordSize)); |
| 484 __ str(R25, Address(SP, -1 * kWordSize, Address::PreIndex)); |
| 485 } else { |
| 486 __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex)); |
| 487 } |
471 } | 488 } |
472 | 489 |
473 for (intptr_t reg_idx = kNumberOfVRegisters - 1; reg_idx >= 0; reg_idx--) { | 490 for (intptr_t reg_idx = kNumberOfVRegisters - 1; reg_idx >= 0; reg_idx--) { |
474 VRegister vreg = static_cast<VRegister>(reg_idx); | 491 VRegister vreg = static_cast<VRegister>(reg_idx); |
475 __ PushQuad(vreg); | 492 __ PushQuad(vreg); |
476 } | 493 } |
477 | 494 |
478 __ mov(R0, SP); // Pass address of saved registers block. | 495 __ mov(R0, SP); // Pass address of saved registers block. |
| 496 __ LoadImmediate(R1, kind == kLazyDeopt ? 1 : 0); |
479 __ ReserveAlignedFrameSpace(0); | 497 __ ReserveAlignedFrameSpace(0); |
480 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 1); | 498 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); |
481 // Result (R0) is stack-size (FP - SP) in bytes. | 499 // Result (R0) is stack-size (FP - SP) in bytes. |
482 | 500 |
| 501 const bool preserve_result = (kind == kLazyDeopt); |
483 if (preserve_result) { | 502 if (preserve_result) { |
484 // Restore result into R1 temporarily. | 503 // Restore result into R1 temporarily. |
485 __ LoadFromOffset(R1, FP, saved_result_slot_from_fp * kWordSize); | 504 __ LoadFromOffset(R1, FP, saved_result_slot_from_fp * kWordSize); |
486 } | 505 } |
487 | 506 |
488 // There is a Dart Frame on the stack. We must restore PP and leave frame. | 507 // There is a Dart Frame on the stack. We must restore PP and leave frame. |
| 508 __ RestoreCodePointer(); |
489 __ LeaveStubFrame(); | 509 __ LeaveStubFrame(); |
490 __ sub(SP, FP, Operand(R0)); | 510 __ sub(SP, FP, Operand(R0)); |
491 | 511 |
492 // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 512 // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there |
493 // is no need to set the correct PC marker or load PP, since they get patched. | 513 // is no need to set the correct PC marker or load PP, since they get patched. |
494 __ EnterStubFrame(); | 514 __ EnterStubFrame(); |
495 | 515 |
496 if (preserve_result) { | 516 if (preserve_result) { |
497 __ Push(R1); // Preserve result as first local. | 517 __ Push(R1); // Preserve result as first local. |
498 } | 518 } |
499 __ ReserveAlignedFrameSpace(0); | 519 __ ReserveAlignedFrameSpace(0); |
500 __ mov(R0, FP); // Pass last FP as parameter in R0. | 520 __ mov(R0, FP); // Pass last FP as parameter in R0. |
501 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1); | 521 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1); |
502 if (preserve_result) { | 522 if (preserve_result) { |
503 // Restore result into R1. | 523 // Restore result into R1. |
504 __ LoadFromOffset(R1, FP, kFirstLocalSlotFromFp * kWordSize); | 524 __ LoadFromOffset(R1, FP, kFirstLocalSlotFromFp * kWordSize); |
505 } | 525 } |
506 // Code above cannot cause GC. | 526 // Code above cannot cause GC. |
507 // There is a Dart Frame on the stack. We must restore PP and leave frame. | 527 // There is a Dart Frame on the stack. We must restore PP and leave frame. |
| 528 __ RestoreCodePointer(); |
508 __ LeaveStubFrame(); | 529 __ LeaveStubFrame(); |
509 | 530 |
510 // Frame is fully rewritten at this point and it is safe to perform a GC. | 531 // Frame is fully rewritten at this point and it is safe to perform a GC. |
511 // Materialize any objects that were deferred by FillFrame because they | 532 // Materialize any objects that were deferred by FillFrame because they |
512 // require allocation. | 533 // require allocation. |
513 // Enter stub frame with loading PP. The caller's PP is not materialized yet. | 534 // Enter stub frame with loading PP. The caller's PP is not materialized yet. |
514 __ EnterStubFrame(); | 535 __ EnterStubFrame(); |
515 if (preserve_result) { | 536 if (preserve_result) { |
516 __ Push(R1); // Preserve result, it will be GC-d here. | 537 __ Push(R1); // Preserve result, it will be GC-d here. |
517 } | 538 } |
518 __ Push(ZR); // Space for the result. | 539 __ Push(ZR); // Space for the result. |
519 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0); | 540 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0); |
520 // Result tells stub how many bytes to remove from the expression stack | 541 // Result tells stub how many bytes to remove from the expression stack |
521 // of the bottom-most frame. They were used as materialization arguments. | 542 // of the bottom-most frame. They were used as materialization arguments. |
522 __ Pop(R1); | 543 __ Pop(R1); |
523 __ SmiUntag(R1); | 544 __ SmiUntag(R1); |
524 if (preserve_result) { | 545 if (preserve_result) { |
525 __ Pop(R0); // Restore result. | 546 __ Pop(R0); // Restore result. |
526 } | 547 } |
527 __ LeaveStubFrame(); | 548 __ LeaveStubFrame(); |
528 // Remove materialization arguments. | 549 // Remove materialization arguments. |
529 __ add(SP, SP, Operand(R1)); | 550 __ add(SP, SP, Operand(R1)); |
530 __ ret(); | 551 __ ret(); |
531 } | 552 } |
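For orientation, a sketch of the FP-relative slots the register-saving loop above relies on, read off the stack diagram at the top of this function (names are illustrative):

    #include <cstdint>

    // Word offsets from the stub's FP, per the diagram above. The push loop
    // reads the caller-pushed CODE_REG (or the lazy-deopt zap value) from
    // FP + 2 * kWordSize instead of pushing the live CODE_REG, which holds
    // the value that was used to call this stub.
    enum DeoptFrameSlotSketch : intptr_t {
      kSavedCallerFpSlot = 0,  // FP + 0: saved FP of the optimized frame.
      kReturnAddressSlot = 1,  // FP + 1 word: the deoptimization point.
      kSavedCodeRegSlot = 2,   // FP + 2 words: CODE_REG / zap pushed before entry.
    };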
532 | 553 |
533 | 554 |
534 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { | 555 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { |
535 // Correct return address to point just after the call that is being | 556 // Correct return address to point just after the call that is being |
536 // deoptimized. | 557 // deoptimized. |
537 __ AddImmediate(LR, LR, -CallPattern::kLengthInBytes); | 558 __ AddImmediate(LR, LR, -CallPattern::kDeoptCallLengthInBytes); |
538 GenerateDeoptimizationSequence(assembler, true); // Preserve R0. | 559 // Push zap value instead of CODE_REG for lazy deopt. |
| 560 __ LoadImmediate(TMP, 0xf1f1f1f1); |
| 561 __ Push(TMP); |
| 562 GenerateDeoptimizationSequence(assembler, kLazyDeopt); |
539 } | 563 } |
540 | 564 |
541 | 565 |
542 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 566 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
543 GenerateDeoptimizationSequence(assembler, false); // Don't preserve R0. | 567 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
544 } | 568 } |
545 | 569 |
546 | 570 |
547 static void GenerateDispatcherCode(Assembler* assembler, | 571 static void GenerateDispatcherCode(Assembler* assembler, |
548 Label* call_target_function) { | 572 Label* call_target_function) { |
549 __ Comment("NoSuchMethodDispatch"); | 573 __ Comment("NoSuchMethodDispatch"); |
550 // When lazily generated invocation dispatchers are disabled, the | 574 // When lazily generated invocation dispatchers are disabled, the |
551 // miss-handler may return null. | 575 // miss-handler may return null. |
552 __ CompareObject(R0, Object::null_object()); | 576 __ CompareObject(R0, Object::null_object()); |
553 __ b(call_target_function, NE); | 577 __ b(call_target_function, NE); |
(...skipping 40 matching lines...) |
594 __ Push(R4); | 618 __ Push(R4); |
595 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); | 619 __ CallRuntime(kMegamorphicCacheMissHandlerRuntimeEntry, 3); |
596 // Remove arguments. | 620 // Remove arguments. |
597 __ Drop(3); | 621 __ Drop(3); |
598 __ Pop(R0); // Get result into R0 (target function). | 622 __ Pop(R0); // Get result into R0 (target function). |
599 | 623 |
600 // Restore IC data and arguments descriptor. | 624 // Restore IC data and arguments descriptor. |
601 __ Pop(R4); | 625 __ Pop(R4); |
602 __ Pop(R5); | 626 __ Pop(R5); |
603 | 627 |
| 628 __ RestoreCodePointer(); |
604 __ LeaveStubFrame(); | 629 __ LeaveStubFrame(); |
605 | 630 |
606 if (!FLAG_lazy_dispatchers) { | 631 if (!FLAG_lazy_dispatchers) { |
607 Label call_target_function; | 632 Label call_target_function; |
608 GenerateDispatcherCode(assembler, &call_target_function); | 633 GenerateDispatcherCode(assembler, &call_target_function); |
609 __ Bind(&call_target_function); | 634 __ Bind(&call_target_function); |
610 } | 635 } |
611 | 636 |
612 // Tail-call to target function. | 637 // Tail-call to target function. |
| 638 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
613 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); | 639 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
614 __ br(R2); | 640 __ br(R2); |
615 } | 641 } |
616 | 642 |
617 | 643 |
618 // Called for inline allocation of arrays. | 644 // Called for inline allocation of arrays. |
619 // Input parameters: | 645 // Input parameters: |
620 // LR: return address. | 646 // LR: return address. |
621 // R2: array length as Smi. | 647 // R2: array length as Smi. |
622 // R1: array element type (either NULL or an instantiated type). | 648 // R1: array element type (either NULL or an instantiated type). |
(...skipping 136 matching lines...) |
759 __ Pop(R2); | 785 __ Pop(R2); |
760 __ Pop(R0); | 786 __ Pop(R0); |
761 __ LeaveStubFrame(); | 787 __ LeaveStubFrame(); |
762 __ ret(); | 788 __ ret(); |
763 } | 789 } |
764 | 790 |
765 | 791 |
766 // Called when invoking Dart code from C++ (VM code). | 792 // Called when invoking Dart code from C++ (VM code). |
767 // Input parameters: | 793 // Input parameters: |
768 // LR : points to return address. | 794 // LR : points to return address. |
769 // R0 : entrypoint of the Dart function to call. | 795 // R0 : code object of the Dart function to call. |
770 // R1 : arguments descriptor array. | 796 // R1 : arguments descriptor array. |
771 // R2 : arguments array. | 797 // R2 : arguments array. |
772 // R3 : current thread. | 798 // R3 : current thread. |
773 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { | 799 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { |
774 __ Comment("InvokeDartCodeStub"); | 800 __ Comment("InvokeDartCodeStub"); |
775 | 801 |
776 // Copy the C stack pointer (R31) into the stack pointer we'll actually use | 802 // Copy the C stack pointer (R31) into the stack pointer we'll actually use |
777 // to access the stack, and put the C stack pointer at the stack limit. | 803 // to access the stack, and put the C stack pointer at the stack limit. |
778 __ SetupDartSP(Isolate::GetSpecifiedStackSize()); | 804 __ SetupDartSP(Isolate::GetSpecifiedStackSize()); |
779 __ EnterFrame(0); | 805 __ EnterFrame(0); |
780 | 806 |
| 807 // Push code object to PC marker slot. |
| 808 __ ldr(TMP, Address(R3, Thread::invoke_dart_code_stub_offset())); |
| 809 __ Push(TMP); |
| 810 |
781 // Save the callee-saved registers. | 811 // Save the callee-saved registers. |
782 for (int i = kAbiFirstPreservedCpuReg; i <= kAbiLastPreservedCpuReg; i++) { | 812 for (int i = kAbiFirstPreservedCpuReg; i <= kAbiLastPreservedCpuReg; i++) { |
783 const Register r = static_cast<Register>(i); | 813 const Register r = static_cast<Register>(i); |
784 // We use str instead of the Push macro because we will be pushing the PP | 814 // We use str instead of the Push macro because we will be pushing the PP |
785 // register when it is not holding a pool-pointer since we are coming from | 815 // register when it is not holding a pool-pointer since we are coming from |
786 // C++ code. | 816 // C++ code. |
787 __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex)); | 817 __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex)); |
788 } | 818 } |
789 | 819 |
790 // Save the bottom 64-bits of callee-saved V registers. | 820 // Save the bottom 64-bits of callee-saved V registers. |
791 for (int i = kAbiFirstPreservedFpuReg; i <= kAbiLastPreservedFpuReg; i++) { | 821 for (int i = kAbiFirstPreservedFpuReg; i <= kAbiLastPreservedFpuReg; i++) { |
792 const VRegister r = static_cast<VRegister>(i); | 822 const VRegister r = static_cast<VRegister>(i); |
793 __ PushDouble(r); | 823 __ PushDouble(r); |
794 } | 824 } |
795 | 825 |
796 // We now load the pool pointer(PP) as we are about to invoke dart code and we | |
797 // could potentially invoke some intrinsic functions which need the PP to be | |
798 // set up. | |
799 __ LoadPoolPointer(); | |
800 | |
801 // Set up THR, which caches the current thread in Dart code. | 826 // Set up THR, which caches the current thread in Dart code. |
802 if (THR != R3) { | 827 if (THR != R3) { |
803 __ mov(THR, R3); | 828 __ mov(THR, R3); |
804 } | 829 } |
805 // Load Isolate pointer into temporary register R5. | 830 // Load Isolate pointer into temporary register R5. |
806 __ LoadIsolate(R5); | 831 __ LoadIsolate(R5); |
807 | 832 |
808 // Save the current VMTag on the stack. | 833 // Save the current VMTag on the stack. |
809 __ LoadFromOffset(R4, R5, Isolate::vm_tag_offset()); | 834 __ LoadFromOffset(R4, R5, Isolate::vm_tag_offset()); |
810 __ Push(R4); | 835 __ Push(R4); |
811 | 836 |
812 // Mark that the isolate is executing Dart code. | 837 // Mark that the isolate is executing Dart code. |
813 __ LoadImmediate(R6, VMTag::kDartTagId); | 838 __ LoadImmediate(R6, VMTag::kDartTagId); |
814 __ StoreToOffset(R6, R5, Isolate::vm_tag_offset()); | 839 __ StoreToOffset(R6, R5, Isolate::vm_tag_offset()); |
815 | 840 |
816 // Save top resource and top exit frame info. Use R6 as a temporary register. | 841 // Save top resource and top exit frame info. Use R6 as a temporary register. |
817 // StackFrameIterator reads the top exit frame info saved in this frame. | 842 // StackFrameIterator reads the top exit frame info saved in this frame. |
818 __ LoadFromOffset(R6, THR, Thread::top_resource_offset()); | 843 __ LoadFromOffset(R6, THR, Thread::top_resource_offset()); |
819 __ StoreToOffset(ZR, THR, Thread::top_resource_offset()); | 844 __ StoreToOffset(ZR, THR, Thread::top_resource_offset()); |
820 __ Push(R6); | 845 __ Push(R6); |
821 __ LoadFromOffset(R6, THR, Thread::top_exit_frame_info_offset()); | 846 __ LoadFromOffset(R6, THR, Thread::top_exit_frame_info_offset()); |
822 __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset()); | 847 __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset()); |
823 // kExitLinkSlotFromEntryFp must be kept in sync with the code below. | 848 // kExitLinkSlotFromEntryFp must be kept in sync with the code below. |
824 ASSERT(kExitLinkSlotFromEntryFp == -21); | 849 ASSERT(kExitLinkSlotFromEntryFp == -22); |
825 __ Push(R6); | 850 __ Push(R6); |
826 | 851 |
827 // Load arguments descriptor array into R4, which is passed to Dart code. | 852 // Load arguments descriptor array into R4, which is passed to Dart code. |
828 __ LoadFromOffset(R4, R1, VMHandles::kOffsetOfRawPtrInHandle); | 853 __ LoadFromOffset(R4, R1, VMHandles::kOffsetOfRawPtrInHandle); |
829 | 854 |
830 // Load number of arguments into R5. | 855 // Load number of arguments into R5. |
831 __ LoadFieldFromOffset(R5, R4, ArgumentsDescriptor::count_offset()); | 856 __ LoadFieldFromOffset(R5, R4, ArgumentsDescriptor::count_offset()); |
832 __ SmiUntag(R5); | 857 __ SmiUntag(R5); |
833 | 858 |
834 // Compute address of 'arguments array' data area into R2. | 859 // Compute address of 'arguments array' data area into R2. |
835 __ LoadFromOffset(R2, R2, VMHandles::kOffsetOfRawPtrInHandle); | 860 __ LoadFromOffset(R2, R2, VMHandles::kOffsetOfRawPtrInHandle); |
836 __ AddImmediate(R2, R2, Array::data_offset() - kHeapObjectTag); | 861 __ AddImmediate(R2, R2, Array::data_offset() - kHeapObjectTag); |
837 | 862 |
838 // Set up arguments for the Dart call. | 863 // Set up arguments for the Dart call. |
839 Label push_arguments; | 864 Label push_arguments; |
840 Label done_push_arguments; | 865 Label done_push_arguments; |
841 __ cmp(R5, Operand(0)); | 866 __ cmp(R5, Operand(0)); |
842 __ b(&done_push_arguments, EQ); // check if there are arguments. | 867 __ b(&done_push_arguments, EQ); // check if there are arguments. |
843 __ LoadImmediate(R1, 0); | 868 __ LoadImmediate(R1, 0); |
844 __ Bind(&push_arguments); | 869 __ Bind(&push_arguments); |
845 __ ldr(R3, Address(R2)); | 870 __ ldr(R3, Address(R2)); |
846 __ Push(R3); | 871 __ Push(R3); |
847 __ add(R1, R1, Operand(1)); | 872 __ add(R1, R1, Operand(1)); |
848 __ add(R2, R2, Operand(kWordSize)); | 873 __ add(R2, R2, Operand(kWordSize)); |
849 __ cmp(R1, Operand(R5)); | 874 __ cmp(R1, Operand(R5)); |
850 __ b(&push_arguments, LT); | 875 __ b(&push_arguments, LT); |
851 __ Bind(&done_push_arguments); | 876 __ Bind(&done_push_arguments); |
852 | 877 |
| 878 // We now load the pool pointer (PP) with a GC-safe value as we are about to |
| 879 // invoke Dart code. We don't need a real object pool here. |
| 880 // Smi zero does not work because ARM64 assumes PP to be untagged. |
| 881 __ LoadObject(PP, Object::null_object()); |
| 882 |
853 // Call the Dart code entrypoint. | 883 // Call the Dart code entrypoint. |
| 884 __ ldr(CODE_REG, Address(R0, VMHandles::kOffsetOfRawPtrInHandle)); |
| 885 __ ldr(R0, FieldAddress(CODE_REG, Code::entry_point_offset())); |
854 __ blr(R0); // R4 is the arguments descriptor array. | 886 __ blr(R0); // R4 is the arguments descriptor array. |
855 __ Comment("InvokeDartCodeStub return"); | 887 __ Comment("InvokeDartCodeStub return"); |
856 | 888 |
857 // Restore constant pool pointer after return. | |
858 __ LoadPoolPointer(); | |
859 | |
860 // Get rid of arguments pushed on the stack. | 889 // Get rid of arguments pushed on the stack. |
861 __ AddImmediate(SP, FP, kExitLinkSlotFromEntryFp * kWordSize); | 890 __ AddImmediate(SP, FP, kExitLinkSlotFromEntryFp * kWordSize); |
862 | 891 |
863 __ LoadIsolate(R28); | 892 __ LoadIsolate(R28); |
864 | 893 |
865 // Restore the saved top exit frame info and top resource back into the | 894 // Restore the saved top exit frame info and top resource back into the |
866 // Thread structure. Uses R6 as a temporary register for this. | 895 // Thread structure. Uses R6 as a temporary register for this. |
867 __ Pop(R6); | 896 __ Pop(R6); |
868 __ StoreToOffset(R6, THR, Thread::top_exit_frame_info_offset()); | 897 __ StoreToOffset(R6, THR, Thread::top_exit_frame_info_offset()); |
869 __ Pop(R6); | 898 __ Pop(R6); |
(...skipping 11 matching lines...) |
881 | 910 |
882 // Restore C++ ABI callee-saved registers. | 911 // Restore C++ ABI callee-saved registers. |
883 for (int i = kAbiLastPreservedCpuReg; i >= kAbiFirstPreservedCpuReg; i--) { | 912 for (int i = kAbiLastPreservedCpuReg; i >= kAbiFirstPreservedCpuReg; i--) { |
884 Register r = static_cast<Register>(i); | 913 Register r = static_cast<Register>(i); |
885 // We use ldr instead of the Pop macro because we will be popping the PP | 914 // We use ldr instead of the Pop macro because we will be popping the PP |
886 // register when it is not holding a pool-pointer since we are returning to | 915 // register when it is not holding a pool-pointer since we are returning to |
887 // C++ code. We also skip the dart stack pointer SP, since we are still | 916 // C++ code. We also skip the dart stack pointer SP, since we are still |
888 // using it as the stack pointer. | 917 // using it as the stack pointer. |
889 __ ldr(r, Address(SP, 1 * kWordSize, Address::PostIndex)); | 918 __ ldr(r, Address(SP, 1 * kWordSize, Address::PostIndex)); |
890 } | 919 } |
891 __ set_constant_pool_allowed(false); | |
892 | 920 |
893 // Restore the frame pointer and C stack pointer and return. | 921 // Restore the frame pointer and C stack pointer and return. |
894 __ LeaveFrame(); | 922 __ LeaveFrame(); |
895 __ mov(CSP, SP); | 923 __ mov(CSP, SP); |
896 __ ret(); | 924 __ ret(); |
897 } | 925 } |
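The ASSERT on kExitLinkSlotFromEntryFp moves from -21 to -22 because the stub now pushes the code object as a PC marker right after EnterFrame, shifting every later push one word further from the entry FP. A rough accounting, assuming the usual ARM64 callee-saved sets (ten preserved CPU registers and eight V-register bottom halves, which are not visible in this diff):

    1 (code object / PC marker) + 10 (preserved CPU registers)
    + 8 (preserved FPU registers) + 1 (saved VM tag)
    + 1 (saved top resource) + 1 (top exit frame info)
    = 22 words below the entry FP, hence kExitLinkSlotFromEntryFp == -22.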
898 | 926 |
899 | 927 |
900 // Called for inline allocation of contexts. | 928 // Called for inline allocation of contexts. |
901 // Input: | 929 // Input: |
(...skipping 165 matching lines...) |
1067 // Restore callee-saved registers, tear down frame. | 1095 // Restore callee-saved registers, tear down frame. |
1068 __ LeaveCallRuntimeFrame(); | 1096 __ LeaveCallRuntimeFrame(); |
1069 __ ret(); | 1097 __ ret(); |
1070 } | 1098 } |
1071 | 1099 |
1072 | 1100 |
1073 // Called for inline allocation of objects. | 1101 // Called for inline allocation of objects. |
1074 // Input parameters: | 1102 // Input parameters: |
1075 // LR : return address. | 1103 // LR : return address. |
1076 // SP + 0 : type arguments object (only if class is parameterized). | 1104 // SP + 0 : type arguments object (only if class is parameterized). |
1077 void StubCode::GenerateAllocationStubForClass( | 1105 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, |
1078 Assembler* assembler, const Class& cls, | 1106 const Class& cls) { |
1079 uword* entry_patch_offset, uword* patch_code_pc_offset) { | |
1080 *entry_patch_offset = assembler->CodeSize(); | |
1081 // The generated code is different if the class is parameterized. | 1107 // The generated code is different if the class is parameterized. |
1082 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; | 1108 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; |
1083 ASSERT(!is_cls_parameterized || | 1109 ASSERT(!is_cls_parameterized || |
1084 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); | 1110 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); |
1085 // kInlineInstanceSize is a constant used as a threshold for determining | 1111 // kInlineInstanceSize is a constant used as a threshold for determining |
1086 // when the object initialization should be done as a loop or as | 1112 // when the object initialization should be done as a loop or as |
1087 // straight line code. | 1113 // straight line code. |
1088 const int kInlineInstanceSize = 12; | 1114 const int kInlineInstanceSize = 12; |
1089 const intptr_t instance_size = cls.instance_size(); | 1115 const intptr_t instance_size = cls.instance_size(); |
1090 ASSERT(instance_size > 0); | 1116 ASSERT(instance_size > 0); |
(...skipping 99 matching lines...) |
1190 // Push null type arguments. | 1216 // Push null type arguments. |
1191 __ PushObject(Object::null_object()); | 1217 __ PushObject(Object::null_object()); |
1192 } | 1218 } |
1193 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. | 1219 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. |
1194 __ Drop(2); // Pop arguments. | 1220 __ Drop(2); // Pop arguments. |
1195 __ Pop(R0); // Pop result (newly allocated object). | 1221 __ Pop(R0); // Pop result (newly allocated object). |
1196 // R0: new object | 1222 // R0: new object |
1197 // Restore the frame pointer. | 1223 // Restore the frame pointer. |
1198 __ LeaveStubFrame(); | 1224 __ LeaveStubFrame(); |
1199 __ ret(); | 1225 __ ret(); |
1200 *patch_code_pc_offset = assembler->CodeSize(); | |
1201 __ BranchPatchable(*StubCode::FixAllocationStubTarget_entry()); | |
1202 } | 1226 } |
1203 | 1227 |
1204 | 1228 |
1205 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function | 1229 // Called for invoking "dynamic noSuchMethod(Invocation invocation)" function |
1206 // from the entry code of a dart function after an error in passed argument | 1230 // from the entry code of a dart function after an error in passed argument |
1207 // name or number is detected. | 1231 // name or number is detected. |
1208 // Input parameters: | 1232 // Input parameters: |
1209 // LR : return address. | 1233 // LR : return address. |
1210 // SP : address of last argument. | 1234 // SP : address of last argument. |
1211 // R4: arguments descriptor array. | 1235 // R4: arguments descriptor array. |
(...skipping 296 matching lines...) |
1508 // Pass IC data object. | 1532 // Pass IC data object. |
1509 __ Push(R5); | 1533 __ Push(R5); |
1510 __ CallRuntime(handle_ic_miss, num_args + 1); | 1534 __ CallRuntime(handle_ic_miss, num_args + 1); |
1511 // Remove the call arguments pushed earlier, including the IC data object. | 1535 // Remove the call arguments pushed earlier, including the IC data object. |
1512 __ Drop(num_args + 1); | 1536 __ Drop(num_args + 1); |
1513 // Pop returned function object into R0. | 1537 // Pop returned function object into R0. |
1514 // Restore arguments descriptor array and IC data array. | 1538 // Restore arguments descriptor array and IC data array. |
1515 __ Pop(R0); // Pop returned function object into R0. | 1539 __ Pop(R0); // Pop returned function object into R0. |
1516 __ Pop(R5); // Restore IC Data. | 1540 __ Pop(R5); // Restore IC Data. |
1517 __ Pop(R4); // Restore arguments descriptor array. | 1541 __ Pop(R4); // Restore arguments descriptor array. |
| 1542 if (range_collection_mode == kCollectRanges) { |
| 1543 __ RestoreCodePointer(); |
| 1544 } |
1518 __ LeaveStubFrame(); | 1545 __ LeaveStubFrame(); |
1519 Label call_target_function; | 1546 Label call_target_function; |
1520 if (!FLAG_lazy_dispatchers) { | 1547 if (!FLAG_lazy_dispatchers) { |
1521 GenerateDispatcherCode(assembler, &call_target_function); | 1548 GenerateDispatcherCode(assembler, &call_target_function); |
1522 } else { | 1549 } else { |
1523 __ b(&call_target_function); | 1550 __ b(&call_target_function); |
1524 } | 1551 } |
1525 | 1552 |
1526 __ Bind(&found); | 1553 __ Bind(&found); |
1527 __ Comment("Update caller's counter"); | 1554 __ Comment("Update caller's counter"); |
1528 // R6: pointer to an IC data check group. | 1555 // R6: pointer to an IC data check group. |
1529 const intptr_t target_offset = ICData::TargetIndexFor(num_args) * kWordSize; | 1556 const intptr_t target_offset = ICData::TargetIndexFor(num_args) * kWordSize; |
1530 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; | 1557 const intptr_t count_offset = ICData::CountIndexFor(num_args) * kWordSize; |
1531 __ LoadFromOffset(R0, R6, target_offset); | 1558 __ LoadFromOffset(R0, R6, target_offset); |
1532 | 1559 |
1533 if (FLAG_optimization_counter_threshold >= 0) { | 1560 if (FLAG_optimization_counter_threshold >= 0) { |
1534 // Update counter. | 1561 // Update counter. |
1535 __ LoadFromOffset(R1, R6, count_offset); | 1562 __ LoadFromOffset(R1, R6, count_offset); |
1536 __ adds(R1, R1, Operand(Smi::RawValue(1))); | 1563 __ adds(R1, R1, Operand(Smi::RawValue(1))); |
1537 __ LoadImmediate(R2, Smi::RawValue(Smi::kMaxValue)); | 1564 __ LoadImmediate(R2, Smi::RawValue(Smi::kMaxValue)); |
1538 __ csel(R1, R2, R1, VS); // Overflow. | 1565 __ csel(R1, R2, R1, VS); // Overflow. |
1539 __ StoreToOffset(R1, R6, count_offset); | 1566 __ StoreToOffset(R1, R6, count_offset); |
1540 } | 1567 } |
1541 | 1568 |
1542 __ Comment("Call target"); | 1569 __ Comment("Call target"); |
1543 __ Bind(&call_target_function); | 1570 __ Bind(&call_target_function); |
1544 // R0: target function. | 1571 // R0: target function. |
1545 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); | |
1546 if (range_collection_mode == kCollectRanges) { | 1572 if (range_collection_mode == kCollectRanges) { |
| 1573 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
1547 __ ldr(R1, Address(SP, 0 * kWordSize)); | 1574 __ ldr(R1, Address(SP, 0 * kWordSize)); |
1548 if (num_args == 2) { | 1575 if (num_args == 2) { |
1549 __ ldr(R3, Address(SP, 1 * kWordSize)); | 1576 __ ldr(R3, Address(SP, 1 * kWordSize)); |
1550 } | 1577 } |
1551 __ EnterStubFrame(); | 1578 __ EnterStubFrame(); |
1552 __ Push(R5); | 1579 __ Push(R5); |
1553 if (num_args == 2) { | 1580 if (num_args == 2) { |
1554 __ Push(R3); | 1581 __ Push(R3); |
1555 } | 1582 } |
1556 __ Push(R1); | 1583 __ Push(R1); |
| 1584 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
1557 __ blr(R2); | 1585 __ blr(R2); |
1558 | 1586 |
1559 Label done; | 1587 Label done; |
1560 __ ldr(R5, Address(FP, kFirstLocalSlotFromFp * kWordSize)); | 1588 __ ldr(R5, Address(FP, kFirstLocalSlotFromFp * kWordSize)); |
1561 __ UpdateRangeFeedback(R0, 2, R5, R1, R4, &done); | 1589 __ UpdateRangeFeedback(R0, 2, R5, R1, R4, &done); |
1562 __ Bind(&done); | 1590 __ Bind(&done); |
1563 __ LeaveStubFrame(); | 1591 __ LeaveStubFrame(); |
1564 __ ret(); | 1592 __ ret(); |
1565 } else { | 1593 } else { |
| 1594 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
| 1595 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
1566 __ br(R2); | 1596 __ br(R2); |
1567 } | 1597 } |
1568 | 1598 |
1569 if (FLAG_support_debugger && !optimized) { | 1599 if (FLAG_support_debugger && !optimized) { |
1570 __ Bind(&stepping); | 1600 __ Bind(&stepping); |
1571 __ EnterStubFrame(); | 1601 __ EnterStubFrame(); |
1572 __ Push(R5); // Preserve IC data. | 1602 __ Push(R5); // Preserve IC data. |
1573 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1603 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
1574 __ Pop(R5); | 1604 __ Pop(R5); |
| 1605 __ RestoreCodePointer(); |
1575 __ LeaveStubFrame(); | 1606 __ LeaveStubFrame(); |
1576 __ b(&done_stepping); | 1607 __ b(&done_stepping); |
1577 } | 1608 } |
1578 } | 1609 } |
1579 | 1610 |
1580 | 1611 |
1581 // Use inline cache data array to invoke the target or continue in inline | 1612 // Use inline cache data array to invoke the target or continue in inline |
1582 // cache miss handler. Stub for 1-argument check (receiver class). | 1613 // cache miss handler. Stub for 1-argument check (receiver class). |
1583 // LR: return address. | 1614 // LR: return address. |
1584 // R5: inline cache data object. | 1615 // R5: inline cache data object. |
(...skipping 124 matching lines...) |
1709 __ LoadImmediate(R2, Smi::RawValue(Smi::kMaxValue)); | 1740 __ LoadImmediate(R2, Smi::RawValue(Smi::kMaxValue)); |
1710 __ csel(R1, R2, R1, VS); // Overflow. | 1741 __ csel(R1, R2, R1, VS); // Overflow. |
1711 __ StoreToOffset(R1, R6, count_offset); | 1742 __ StoreToOffset(R1, R6, count_offset); |
1712 } | 1743 } |
1713 | 1744 |
1714 // Load arguments descriptor into R4. | 1745 // Load arguments descriptor into R4. |
1715 __ LoadFieldFromOffset(R4, R5, ICData::arguments_descriptor_offset()); | 1746 __ LoadFieldFromOffset(R4, R5, ICData::arguments_descriptor_offset()); |
1716 | 1747 |
1717 // Get function and call it, if possible. | 1748 // Get function and call it, if possible. |
1718 __ LoadFromOffset(R0, R6, target_offset); | 1749 __ LoadFromOffset(R0, R6, target_offset); |
| 1750 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
1719 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); | 1751 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
1720 __ br(R2); | 1752 __ br(R2); |
1721 | 1753 |
1722 if (FLAG_support_debugger) { | 1754 if (FLAG_support_debugger) { |
1723 __ Bind(&stepping); | 1755 __ Bind(&stepping); |
1724 __ EnterStubFrame(); | 1756 __ EnterStubFrame(); |
1725 __ Push(R5); // Preserve IC data. | 1757 __ Push(R5); // Preserve IC data. |
1726 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 1758 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
1727 __ Pop(R5); | 1759 __ Pop(R5); |
| 1760 __ RestoreCodePointer(); |
1728 __ LeaveStubFrame(); | 1761 __ LeaveStubFrame(); |
1729 __ b(&done_stepping); | 1762 __ b(&done_stepping); |
1730 } | 1763 } |
1731 } | 1764 } |
1732 | 1765 |
1733 | 1766 |
1734 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { | 1767 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) { |
1735 GenerateUsageCounterIncrement(assembler, R6); | 1768 GenerateUsageCounterIncrement(assembler, R6); |
1736 GenerateNArgsCheckInlineCacheStub( | 1769 GenerateNArgsCheckInlineCacheStub( |
1737 assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, | 1770 assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, |
(...skipping 18 matching lines...) |
1756 __ EnterStubFrame(); | 1789 __ EnterStubFrame(); |
1757 __ Push(R5); // Save IC Data. | 1790 __ Push(R5); // Save IC Data. |
1758 __ Push(R4); // Save arg. desc. | 1791 __ Push(R4); // Save arg. desc. |
1759 __ Push(R0); // Pass function. | 1792 __ Push(R0); // Pass function. |
1760 __ CallRuntime(kCompileFunctionRuntimeEntry, 1); | 1793 __ CallRuntime(kCompileFunctionRuntimeEntry, 1); |
1761 __ Pop(R0); // Restore argument. | 1794 __ Pop(R0); // Restore argument. |
1762 __ Pop(R4); // Restore arg desc. | 1795 __ Pop(R4); // Restore arg desc. |
1763 __ Pop(R5); // Restore IC Data. | 1796 __ Pop(R5); // Restore IC Data. |
1764 __ LeaveStubFrame(); | 1797 __ LeaveStubFrame(); |
1765 | 1798 |
| 1799 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
1766 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); | 1800 __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset()); |
1767 __ br(R2); | 1801 __ br(R2); |
1768 } | 1802 } |
1769 | 1803 |
1770 | 1804 |
1771 // R5: Contains an ICData. | 1805 // R5: Contains an ICData. |
1772 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) { | 1806 void StubCode::GenerateICCallBreakpointStub(Assembler* assembler) { |
1773 __ EnterStubFrame(); | 1807 __ EnterStubFrame(); |
1774 __ Push(R5); | 1808 __ Push(R5); |
1775 __ PushObject(Object::null_object()); // Space for result. | 1809 __ PushObject(Object::null_object()); // Space for result. |
1776 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); | 1810 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); |
1777 __ Pop(R0); | 1811 __ Pop(CODE_REG); |
1778 __ Pop(R5); | 1812 __ Pop(R5); |
1779 __ LeaveStubFrame(); | 1813 __ LeaveStubFrame(); |
| 1814 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
1780 __ br(R0); | 1815 __ br(R0); |
1781 } | 1816 } |
1782 | 1817 |
1783 | 1818 |
1784 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) { | 1819 void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) { |
1785 __ EnterStubFrame(); | 1820 __ EnterStubFrame(); |
1786 __ PushObject(Object::null_object()); // Space for result. | 1821 __ PushObject(Object::null_object()); // Space for result. |
1787 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); | 1822 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0); |
1788 __ Pop(R0); | 1823 __ Pop(CODE_REG); |
1789 __ LeaveStubFrame(); | 1824 __ LeaveStubFrame(); |
| 1825 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
1790 __ br(R0); | 1826 __ br(R0); |
1791 } | 1827 } |
1792 | 1828 |
1793 // Called only from unoptimized code. All relevant registers have been saved. | 1829 // Called only from unoptimized code. All relevant registers have been saved. |
1794 void StubCode::GenerateDebugStepCheckStub( | 1830 void StubCode::GenerateDebugStepCheckStub( |
1795 Assembler* assembler) { | 1831 Assembler* assembler) { |
1796 // Check single stepping. | 1832 // Check single stepping. |
1797 Label stepping, done_stepping; | 1833 Label stepping, done_stepping; |
1798 __ LoadIsolate(R1); | 1834 __ LoadIsolate(R1); |
1799 __ LoadFromOffset( | 1835 __ LoadFromOffset( |
(...skipping 157 matching lines...) |
1957 // R6: function to be re-optimized. | 1993 // R6: function to be re-optimized. |
1958 // R4: argument descriptor (preserved). | 1994 // R4: argument descriptor (preserved). |
1959 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 1995 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
1960 __ EnterStubFrame(); | 1996 __ EnterStubFrame(); |
1961 __ Push(R4); | 1997 __ Push(R4); |
1962 // Setup space on stack for the return value. | 1998 // Setup space on stack for the return value. |
1963 __ PushObject(Object::null_object()); | 1999 __ PushObject(Object::null_object()); |
1964 __ Push(R6); | 2000 __ Push(R6); |
1965 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 2001 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); |
1966 __ Pop(R0); // Discard argument. | 2002 __ Pop(R0); // Discard argument. |
1967 __ Pop(R0); // Get Code object | 2003 __ Pop(CODE_REG); // Get Code object |
1968 __ Pop(R4); // Restore argument descriptor. | 2004 __ Pop(R4); // Restore argument descriptor. |
1969 __ LoadFieldFromOffset(R0, R0, Code::entry_point_offset()); | 2005 __ LoadFieldFromOffset(R0, CODE_REG, Code::entry_point_offset()); |
1970 __ LeaveStubFrame(); | 2006 __ LeaveStubFrame(); |
1971 __ br(R0); | 2007 __ br(R0); |
1972 __ brk(0); | 2008 __ brk(0); |
1973 } | 2009 } |
1974 | 2010 |
1975 | 2011 |
1976 // Does identical check (object references are equal or not equal) with special | 2012 // Does identical check (object references are equal or not equal) with special |
1977 // checks for boxed numbers. | 2013 // checks for boxed numbers. |
1978 // Left and right are pushed on stack. | 2014 // Left and right are pushed on stack. |
1979 // Return Zero condition flag set if equal. | 2015 // Return Zero condition flag set if equal. |
(...skipping 72 matching lines...) |
2052 const Register right = R0; | 2088 const Register right = R0; |
2053 __ LoadFromOffset(left, SP, 1 * kWordSize); | 2089 __ LoadFromOffset(left, SP, 1 * kWordSize); |
2054 __ LoadFromOffset(right, SP, 0 * kWordSize); | 2090 __ LoadFromOffset(right, SP, 0 * kWordSize); |
2055 GenerateIdenticalWithNumberCheckStub(assembler, left, right); | 2091 GenerateIdenticalWithNumberCheckStub(assembler, left, right); |
2056 __ ret(); | 2092 __ ret(); |
2057 | 2093 |
2058 if (FLAG_support_debugger) { | 2094 if (FLAG_support_debugger) { |
2059 __ Bind(&stepping); | 2095 __ Bind(&stepping); |
2060 __ EnterStubFrame(); | 2096 __ EnterStubFrame(); |
2061 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); | 2097 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0); |
| 2098 __ RestoreCodePointer(); |
2062 __ LeaveStubFrame(); | 2099 __ LeaveStubFrame(); |
2063 __ b(&done_stepping); | 2100 __ b(&done_stepping); |
2064 } | 2101 } |
2065 } | 2102 } |
2066 | 2103 |
2067 | 2104 |
2068 // Called from optimized code only. | 2105 // Called from optimized code only. |
2069 // LR: return address. | 2106 // LR: return address. |
2070 // SP + 8: left operand. | 2107 // SP + 8: left operand. |
2071 // SP + 0: right operand. | 2108 // SP + 0: right operand. |
(...skipping 38 matching lines...) |
2110 __ CompareRegisters(R4, R0); | 2147 __ CompareRegisters(R4, R0); |
2111 __ b(&update, NE); | 2148 __ b(&update, NE); |
2112 | 2149 |
2113 __ Bind(&call_target_function); | 2150 __ Bind(&call_target_function); |
2114 // Call the target found in the cache. For a class id match, this is a | 2151 // Call the target found in the cache. For a class id match, this is a |
2115 // proper target for the given name and arguments descriptor. If the | 2152 // proper target for the given name and arguments descriptor. If the |
2116 // illegal class id was found, the target is a cache miss handler that can | 2153 // illegal class id was found, the target is a cache miss handler that can |
2117 // be invoked as a normal Dart function. | 2154 // be invoked as a normal Dart function. |
2118 __ add(TMP, R2, Operand(R3, LSL, 3)); | 2155 __ add(TMP, R2, Operand(R3, LSL, 3)); |
2119 __ LoadFieldFromOffset(R0, TMP, base + kWordSize); | 2156 __ LoadFieldFromOffset(R0, TMP, base + kWordSize); |
| 2157 __ LoadFieldFromOffset(CODE_REG, R0, Function::code_offset()); |
2120 __ LoadFieldFromOffset(R1, R0, Function::entry_point_offset()); | 2158 __ LoadFieldFromOffset(R1, R0, Function::entry_point_offset()); |
2121 } | 2159 } |
2122 | 2160 |
2123 | 2161 |
2124 // Called from megamorphic calls. | 2162 // Called from megamorphic calls. |
2125 // R0: receiver. | 2163 // R0: receiver. |
2126 // R1: lookup cache. | 2164 // R1: lookup cache. |
2127 // Result: | 2165 // Result: |
2128 // R1: entry point. | 2166 // R1: entry point. |
2129 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { | 2167 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { |
2130 EmitMegamorphicLookup(assembler, R0, R1, R1); | 2168 EmitMegamorphicLookup(assembler, R0, R1, R1); |
2131 __ ret(); | 2169 __ ret(); |
2132 } | 2170 } |
2133 | 2171 |
2134 } // namespace dart | 2172 } // namespace dart |
2135 | 2173 |
2136 #endif // defined TARGET_ARCH_ARM64 | 2174 #endif // defined TARGET_ARCH_ARM64 |