Chromium Code Reviews

Unified Diff: runtime/vm/assembler_x64.cc

Issue 1343373003: Revert "VM: New calling convention for generated code." (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 5 years, 3 months ago
@@ -1,10 +1,10 @@
 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
 #include "vm/globals.h"  // NOLINT
 #if defined(TARGET_ARCH_X64)
 
 #include "vm/assembler.h"
 #include "vm/cpu.h"
 #include "vm/heap.h"
(...skipping 75 matching lines...)
@@ -86,39 +86,34 @@
     EmitRegisterREX(TMP, REX_W);
     EmitUint8(0xB8 | (TMP & 7));
     EmitInt64(label->address());
   }
   call(TMP);
 }
 
 
 void Assembler::CallPatchable(const StubEntry& stub_entry) {
   ASSERT(constant_pool_allowed());
-  const Code& target = Code::Handle(stub_entry.code());
   intptr_t call_start = buffer_.GetPosition();
   const int32_t offset = ObjectPool::element_offset(
-      object_pool_wrapper_.FindObject(target, kPatchable));
-  LoadWordFromPoolOffset(CODE_REG, offset - kHeapObjectTag);
-  movq(TMP, FieldAddress(CODE_REG, Code::entry_point_offset()));
-  call(TMP);
+      object_pool_wrapper_.FindExternalLabel(&stub_entry.label(), kPatchable));
+  call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag));
   ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
 }
 
 
 void Assembler::Call(const StubEntry& stub_entry) {
   ASSERT(constant_pool_allowed());
-  const Code& target = Code::Handle(stub_entry.code());
   const int32_t offset = ObjectPool::element_offset(
-      object_pool_wrapper_.FindObject(target, kNotPatchable));
-  LoadWordFromPoolOffset(CODE_REG, offset - kHeapObjectTag);
-  movq(TMP, FieldAddress(CODE_REG, Code::entry_point_offset()));
-  call(TMP);
+      object_pool_wrapper_.FindExternalLabel(&stub_entry.label(),
+                                             kNotPatchable));
+  call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag));
 }
 
 
 void Assembler::pushq(Register reg) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
   EmitRegisterREX(reg, REX_NONE);
   EmitUint8(0x50 | (reg & 7));
 }
 
 
(...skipping 2416 matching lines...)
@@ -2541,26 +2536,25 @@
     EmitUint8(0x70 + condition);
     EmitNearLabelLink(label);
   } else {
     EmitUint8(0x0F);
     EmitUint8(0x80 + condition);
     EmitLabelLink(label);
   }
 }
 
 
-void Assembler::J(Condition condition,
-                  const StubEntry& stub_entry,
+void Assembler::J(Condition condition, const StubEntry& stub_entry,
                   Register pp) {
   Label no_jump;
   // Negate condition.
-  j(static_cast<Condition>(condition ^ 1), &no_jump, kNearJump);
+  j(static_cast<Condition>(condition ^ 1), &no_jump, Assembler::kNearJump);
   Jmp(stub_entry, pp);
   Bind(&no_jump);
 }
 
 
 void Assembler::jmp(Register reg) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
   Operand operand(reg);
   EmitOperandREX(4, operand, REX_NONE);
   EmitUint8(0xFF);
(...skipping 40 matching lines...)
@@ -2607,39 +2601,42 @@
   {  // Encode movq(TMP, Immediate(label->address())), but always as imm64.
     AssemblerBuffer::EnsureCapacity ensured(&buffer_);
     EmitRegisterREX(TMP, REX_W);
     EmitUint8(0xB8 | (TMP & 7));
     EmitInt64(label->address());
   }
   jmp(TMP);
 }
 
 
+void Assembler::jmp(const StubEntry& stub_entry) {
+  jmp(&stub_entry.label());
+}
+
+
 void Assembler::JmpPatchable(const StubEntry& stub_entry, Register pp) {
   ASSERT((pp != PP) || constant_pool_allowed());
-  const Code& target = Code::Handle(stub_entry.code());
+  intptr_t call_start = buffer_.GetPosition();
   const int32_t offset = ObjectPool::element_offset(
-      object_pool_wrapper_.FindObject(target, kPatchable));
-  movq(CODE_REG, Address::AddressBaseImm32(pp, offset - kHeapObjectTag));
-  movq(TMP, FieldAddress(CODE_REG, Code::entry_point_offset()));
-  jmp(TMP);
+      object_pool_wrapper_.FindExternalLabel(&stub_entry.label(), kPatchable));
+  // Patchable jumps always use a 32-bit immediate encoding.
+  jmp(Address::AddressBaseImm32(pp, offset - kHeapObjectTag));
+  ASSERT((buffer_.GetPosition() - call_start) == JumpPattern::kLengthInBytes);
 }
 
 
 void Assembler::Jmp(const StubEntry& stub_entry, Register pp) {
   ASSERT((pp != PP) || constant_pool_allowed());
-  const Code& target = Code::Handle(stub_entry.code());
   const int32_t offset = ObjectPool::element_offset(
-      object_pool_wrapper_.FindObject(target, kNotPatchable));
-  movq(CODE_REG, FieldAddress(pp, offset));
-  movq(TMP, FieldAddress(CODE_REG, Code::entry_point_offset()));
-  jmp(TMP);
+      object_pool_wrapper_.FindExternalLabel(&stub_entry.label(),
+                                             kNotPatchable));
+  jmp(Address(pp, offset - kHeapObjectTag));
 }
 
 
 void Assembler::lock() {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
   EmitUint8(0xF0);
 }
 
 
 void Assembler::cmpxchgl(const Address& address, Register reg) {
(...skipping 427 matching lines...)
@@ -3073,26 +3070,22 @@
   if (can_value_be_smi) {
     StoreIntoObjectFilter(object, value, &done);
   } else {
     StoreIntoObjectFilterNoSmi(object, value, &done);
   }
   // A store buffer update is required.
   if (value != RDX) pushq(RDX);
   if (object != RDX) {
     movq(RDX, object);
   }
-  pushq(CODE_REG);
-  movq(CODE_REG, Address(THR, Thread::update_store_buffer_code_offset()));
   movq(TMP, Address(THR, Thread::update_store_buffer_entry_point_offset()));
   call(TMP);
-
-  popq(CODE_REG);
   if (value != RDX) popq(RDX);
   Bind(&done);
 }
 
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
                                          Register value,
                                          FieldContent old_content) {
   VerifiedWrite(dest, value, old_content);
(...skipping 280 matching lines...)
@@ -3379,110 +3372,84 @@
   call(reg);
 }
 
 
 void Assembler::CallRuntime(const RuntimeEntry& entry,
                             intptr_t argument_count) {
   entry.Call(this, argument_count);
 }
 
 
-void Assembler::RestoreCodePointer() {
-  movq(CODE_REG, Address(RBP, kPcMarkerSlotFromFp * kWordSize));
-}
-
-
 void Assembler::LoadPoolPointer(Register pp) {
   // Load new pool pointer.
-  CheckCodePointer();
-  movq(pp, FieldAddress(CODE_REG, Code::object_pool_offset()));
+  const intptr_t kRIPRelativeMovqSize = 7;
+  const intptr_t entry_to_rip_offset = CodeSize() + kRIPRelativeMovqSize;
+  const intptr_t object_pool_pc_dist =
+      Instructions::HeaderSize() - Instructions::object_pool_offset();
+  movq(pp, Address::AddressRIPRelative(
+      -entry_to_rip_offset - object_pool_pc_dist));
+  ASSERT(CodeSize() == entry_to_rip_offset);
   set_constant_pool_allowed(pp == PP);
 }
 
 
-void Assembler::EnterDartFrame(intptr_t frame_size, Register new_pp) {
-  CheckCodePointer();
+void Assembler::EnterDartFrame(intptr_t frame_size,
+                               Register new_pp,
+                               Register pc_marker_override) {
   ASSERT(!constant_pool_allowed());
   EnterFrame(0);
-  pushq(CODE_REG);
+  pushq(pc_marker_override);
   pushq(PP);
-  if (new_pp == kNoRegister) {
-    LoadPoolPointer(PP);
-  } else {
-    movq(PP, new_pp);
-  }
+  movq(PP, new_pp);
   set_constant_pool_allowed(true);
   if (frame_size != 0) {
     subq(RSP, Immediate(frame_size));
   }
 }
 
 
-void Assembler::LeaveDartFrame(RestorePP restore_pp) {
+void Assembler::LeaveDartFrame() {
+  set_constant_pool_allowed(false);
   // Restore caller's PP register that was pushed in EnterDartFrame.
-  if (restore_pp == kRestoreCallerPP) {
-    movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize)));
-    set_constant_pool_allowed(false);
-  }
+  movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize)));
   LeaveFrame();
 }
 
 
-void Assembler::CheckCodePointer() {
-#ifdef DEBUG
-  Label cid_ok, instructions_ok;
-  pushq(RAX);
-  LoadClassId(RAX, CODE_REG);
-  cmpq(RAX, Immediate(kCodeCid));
-  j(EQUAL, &cid_ok);
-  int3();
-  Bind(&cid_ok);
-  {
-    const intptr_t kRIPRelativeLeaqSize = 7;
-    const intptr_t header_to_entry_offset =
-        (Instructions::HeaderSize() - kHeapObjectTag);
-    const intptr_t header_to_rip_offset =
-        CodeSize() + kRIPRelativeLeaqSize + header_to_entry_offset;
-    leaq(RAX, Address::AddressRIPRelative(-header_to_rip_offset));
-    ASSERT(CodeSize() == (header_to_rip_offset - header_to_entry_offset));
-  }
-  cmpq(RAX, FieldAddress(CODE_REG, Code::saved_instructions_offset()));
-  j(EQUAL, &instructions_ok);
-  int3();
-  Bind(&instructions_ok);
-  popq(RAX);
-#endif
-}
-
-
 // On entry to a function compiled for OSR, the caller's frame pointer, the
 // stack locals, and any copied parameters are already in place. The frame
 // pointer is already set up. The PC marker is not correct for the
 // optimized function and there may be extra space for spill slots to
 // allocate.
-void Assembler::EnterOsrFrame(intptr_t extra_size) {
+void Assembler::EnterOsrFrame(intptr_t extra_size,
+                              Register new_pp,
+                              Register pc_marker_override) {
   ASSERT(!constant_pool_allowed());
   if (prologue_offset_ == -1) {
     Comment("PrologueOffset = %" Pd "", CodeSize());
     prologue_offset_ = CodeSize();
   }
-  RestoreCodePointer();
-  LoadPoolPointer();
-
+  movq(Address(RBP, kPcMarkerSlotFromFp * kWordSize), pc_marker_override);
+  movq(PP, new_pp);
+  set_constant_pool_allowed(true);
   if (extra_size != 0) {
     subq(RSP, Immediate(extra_size));
   }
 }
 
 
 void Assembler::EnterStubFrame() {
-  EnterDartFrame(0, kNoRegister);
+  set_constant_pool_allowed(false);
+  EnterFrame(0);
+  pushq(Immediate(0));  // Push 0 in the saved PC area for stub frames.
+  pushq(PP);  // Save caller's pool pointer
+  LoadPoolPointer();
 }
 
 
 void Assembler::LeaveStubFrame() {
   LeaveDartFrame();
 }
 
 
 void Assembler::MaybeTraceAllocation(intptr_t cid,
                                      Label* trace,
(...skipping 488 matching lines...)
@@ -3977,10 +3944,10 @@
 
 
 const char* Assembler::FpuRegisterName(FpuRegister reg) {
   ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters));
   return xmm_reg_names[reg];
 }
 
 }  // namespace dart
 
 #endif  // defined TARGET_ARCH_X64