Index: runtime/vm/intermediate_language_x64.cc
diff --git a/runtime/vm/intermediate_language_x64.cc b/runtime/vm/intermediate_language_x64.cc
index 7339ef80be229e3a194c248680b682a5663d51a4..19fc8b3fcf4bb8883bde34513525de0706d05776 100644
--- a/runtime/vm/intermediate_language_x64.cc
+++ b/runtime/vm/intermediate_language_x64.cc
@@ -2555,8 +2555,7 @@ void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
                                 needs_stacktrace());
 
   // Restore the pool pointer.
-  __ RestoreCodePointer();
-  __ LoadPoolPointer(PP);
+  __ LoadPoolPointer();
 
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
@@ -6222,18 +6221,10 @@ LocationSummary* IndirectGotoInstr::MakeLocationSummary(Zone* zone,
 
 void IndirectGotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   Register offset_reg = locs()->in(0).reg();
-  Register target_address_reg = locs()->temp(0).reg();
-
-  {
-    const intptr_t kRIPRelativeLeaqSize = 7;
-    const intptr_t entry_to_rip_offset =
-        __ CodeSize() + kRIPRelativeLeaqSize;
-    __ leaq(target_address_reg,
-            Address::AddressRIPRelative(-entry_to_rip_offset));
-    ASSERT(__ CodeSize() == entry_to_rip_offset);
-  }
+  Register target_address_reg = locs()->temp_slot(0)->reg();
 
   // Load from [current frame pointer] + kPcMarkerSlotFromFp.
+  __ movq(target_address_reg, Address(RBP, kPcMarkerSlotFromFp * kWordSize));
 
   // Calculate the final absolute address.
   if (offset()->definition()->representation() == kTagged) {
@@ -6352,7 +6343,6 @@ void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 
   // Function in RAX.
   ASSERT(locs()->in(0).reg() == RAX);
-  __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset()));
   __ movq(RCX, FieldAddress(RAX, Function::entry_point_offset()));
 
   // RAX: Function.
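
Note (not part of the patch above): a minimal standalone sketch of the address computation the patched IndirectGotoInstr::EmitNativeCode emits, assuming the untagged-offset case. The helper name, parameter names, and the local kWordSize definition below are hypothetical stand-ins; the real kPcMarkerSlotFromFp and kWordSize values come from the VM's frame-layout and target headers, and the emitted code additionally untags the offset when it arrives as a tagged Smi.

    #include <cstdint>

    // Hypothetical stand-in for the VM's word size on x64.
    constexpr std::intptr_t kWordSize = 8;

    // Sketch of: movq target_address_reg, [RBP + kPcMarkerSlotFromFp * kWordSize]
    //            addq target_address_reg, offset_reg
    // frame_pointer plays the role of RBP, pc_marker_slot_from_fp stands in for
    // kPcMarkerSlotFromFp (its concrete value is not reproduced here), and
    // untagged_offset stands in for an already-untagged offset_reg.
    std::uintptr_t ComputeIndirectGotoTarget(const std::uint8_t* frame_pointer,
                                             std::intptr_t pc_marker_slot_from_fp,
                                             std::intptr_t untagged_offset) {
      // Load the entry PC marker stored in the current frame.
      std::uintptr_t entry_pc = *reinterpret_cast<const std::uintptr_t*>(
          frame_pointer + pc_marker_slot_from_fp * kWordSize);
      // Add the offset to form the absolute jump target.
      return entry_pc + static_cast<std::uintptr_t>(untagged_offset);
    }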
|