Index: runtime/vm/intermediate_language_x64.cc
diff --git a/runtime/vm/intermediate_language_x64.cc b/runtime/vm/intermediate_language_x64.cc
index ec0bbc7602be704c3684b4f7aa88580c51a95693..5e2b2a33a6fde7eab16bbe555bc5a2eb0ef385f8 100644
--- a/runtime/vm/intermediate_language_x64.cc
+++ b/runtime/vm/intermediate_language_x64.cc
@@ -783,11 +783,11 @@ void NativeCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
       RBX, Immediate(reinterpret_cast<uword>(native_c_function())), PP);
   __ LoadImmediate(
       R10, Immediate(argc_tag), PP);
-  const ExternalLabel* stub_entry = (is_bootstrap_native() || is_leaf_call) ?
-      &stub_code->CallBootstrapCFunctionLabel() :
-      &stub_code->CallNativeCFunctionLabel();
+  const Code& stub = Code::Handle((is_bootstrap_native() || is_leaf_call) ?
+      stub_code->CallBootstrapCFunctionCode() :
+      stub_code->CallNativeCFunctionCode());
   compiler->GenerateCall(token_pos(),
-                         stub_entry,
+                         stub,
                          RawPcDescriptors::kOther,
                          locs());
   __ popq(result);
@@ -1667,7 +1667,6 @@ class BoxAllocationSlowPath : public SlowPathCode {
     __ Bind(entry_label());
     const Code& stub =
         Code::Handle(isolate, stub_code->GetAllocationStubForClass(cls_));
-    const ExternalLabel label(stub.EntryPoint());
 
     LocationSummary* locs = instruction_->locs();
 
@@ -1675,7 +1674,7 @@ class BoxAllocationSlowPath : public SlowPathCode {
 
     compiler->SaveLiveRegisters(locs);
     compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
-                           &label,
+                           stub,
                            RawPcDescriptors::kOther,
                            locs);
     __ MoveRegister(result_, RAX);
@@ -2128,9 +2127,8 @@ void CreateArrayInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   Isolate* isolate = compiler->isolate();
   const Code& stub = Code::Handle(
       isolate, isolate->stub_code()->GetAllocateArrayStub());
-  const ExternalLabel label(stub.EntryPoint());
   compiler->GenerateCall(token_pos(),
-                         &label,
+                         stub,
                          RawPcDescriptors::kOther,
                          locs());
   compiler->AddStubCallTarget(stub);
@@ -2400,9 +2398,8 @@ class AllocateContextSlowPath : public SlowPathCode {
 
     __ LoadImmediate(R10, Immediate(instruction_->num_context_variables()), PP);
     StubCode* stub_code = compiler->isolate()->stub_code();
-    const ExternalLabel label(stub_code->AllocateContextEntryPoint());
     compiler->GenerateCall(instruction_->token_pos(),
-                           &label,
+                           Code::Handle(stub_code->AllocateContextCode()),
                            RawPcDescriptors::kOther,
                            locs);
     ASSERT(instruction_->locs()->out(0).reg() == RAX);
@@ -2456,9 +2453,8 @@ void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   StubCode* stub_code = compiler->isolate()->stub_code();
 
   __ LoadImmediate(R10, Immediate(num_context_variables()), PP);
-  const ExternalLabel label(stub_code->AllocateContextEntryPoint());
   compiler->GenerateCall(token_pos(),
-                         &label,
+                         Code::Handle(stub_code->AllocateContextCode()),
                          RawPcDescriptors::kOther,
                          locs());
 }
@@ -2546,6 +2542,7 @@ void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
                                 needs_stacktrace());
 
   // Restore the pool pointer.
+  __ RestoreCodePointer();
   __ LoadPoolPointer(PP);
 
   if (HasParallelMove()) {
@@ -6193,7 +6190,7 @@ LocationSummary* IndirectGotoInstr::MakeLocationSummary(Zone* zone,
       zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
 
   summary->set_in(0, Location::RequiresRegister());
-  summary->set_temp(0, Location::RequiresRegister());
+  summary->set_temp(0, Location::RegisterLocation(R13));
 
   return summary;
 }
@@ -6203,8 +6200,16 @@ void IndirectGotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   Register offset_reg = locs()->in(0).reg();
   Register target_address_reg = locs()->temp_slot(0)->reg();
 
+  {
+    const intptr_t kRIPRelativeLeaqSize = 7;
+    const intptr_t entry_to_rip_offset =
+        __ CodeSize() + kRIPRelativeLeaqSize;
+    __ leaq(target_address_reg,
+            Address::AddressRIPRelative(-entry_to_rip_offset));
+    ASSERT(__ CodeSize() == entry_to_rip_offset);
+  }
+
   // Load from [current frame pointer] + kPcMarkerSlotFromFp.
-  __ movq(target_address_reg, Address(RBP, kPcMarkerSlotFromFp * kWordSize));
 
   // Calculate the final absolute address.
   if (offset()->definition()->representation() == kTagged) {
@@ -6323,7 +6328,8 @@ void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 
   // Function in RAX.
   ASSERT(locs()->in(0).reg() == RAX);
-  __ movq(RCX, FieldAddress(RAX, Function::instructions_offset()));
+  __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset()));
+  __ movq(RCX, FieldAddress(CODE_REG, Code::instructions_offset()));
 
   // RAX: Function.
   // R10: Arguments descriptor array.
@@ -6381,9 +6387,8 @@ void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   StubCode* stub_code = isolate->stub_code();
   const Code& stub = Code::Handle(isolate,
                                   stub_code->GetAllocationStubForClass(cls()));
-  const ExternalLabel label(stub.EntryPoint());
   compiler->GenerateCall(token_pos(),
-                         &label,
+                         stub,
                          RawPcDescriptors::kOther,
                          locs());
   compiler->AddStubCallTarget(stub);
@@ -6394,8 +6399,7 @@
 void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(!compiler->is_optimizing());
   StubCode* stub_code = compiler->isolate()->stub_code();
-  const ExternalLabel label(stub_code->DebugStepCheckEntryPoint());
-  __ CallPatchable(&label);
+  __ CallPatchable(Code::Handle(stub_code->DebugStepCheckCode()));
   compiler->AddCurrentDescriptor(stub_kind_, Isolate::kNoDeoptId, token_pos());
   compiler->RecordSafepoint(locs());
 }
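
The subtlest piece of the patch is the RIP-relative leaq added to IndirectGotoInstr::EmitNativeCode: rather than reading the PC marker slot out of the caller's frame, the generated code now recovers the entry point of its own Code object from the instruction pointer. The following is a minimal standalone sketch of that offset arithmetic only; the addresses and the __ CodeSize() value are hypothetical placeholders, and nothing below is Dart VM API.

// Standalone sketch (not VM code): models how the RIP-relative leaq in the
// patch reproduces the entry point of the current code object's instructions.
#include <cassert>
#include <cstdint>
#include <cstdio>

int main() {
  // Hypothetical values for illustration only.
  const uint64_t entry = 0x7f3a00100000;    // start of the instructions
  const uint64_t code_size = 0x42;          // bytes emitted before the leaq,
                                            // i.e. what __ CodeSize() returns
  const uint64_t kRIPRelativeLeaqSize = 7;  // leaq r64, [rip+disp32] is 7 bytes

  // The patch computes the displacement before emitting the leaq:
  const uint64_t entry_to_rip_offset = code_size + kRIPRelativeLeaqSize;

  // On x64, RIP points at the instruction *after* the one being executed, so
  // while the leaq runs, RIP == entry + code_size + kRIPRelativeLeaqSize.
  const uint64_t rip = entry + code_size + kRIPRelativeLeaqSize;

  // leaq target_address_reg, [rip - entry_to_rip_offset]
  const uint64_t target_address_reg = rip - entry_to_rip_offset;

  assert(target_address_reg == entry);  // the leaq recovers the entry point
  std::printf("recovered entry: %#llx\n",
              static_cast<unsigned long long>(target_address_reg));
  return 0;
}

The ASSERT(__ CodeSize() == entry_to_rip_offset) in the patch is the compile-time counterpart of this check: it verifies that the emitted leaq occupied exactly kRIPRelativeLeaqSize bytes, so the precomputed displacement really does point back at the entry.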