| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
| 6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
| 7 | 7 |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 | 9 |
| 10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
| (...skipping 729 matching lines...) | (...skipping 729 matching lines...) |
| 740 } | 740 } |
| 741 __ LoadImmediate( | 741 __ LoadImmediate( |
| 742 RBX, Immediate(reinterpret_cast<uword>(native_c_function())), PP); | 742 RBX, Immediate(reinterpret_cast<uword>(native_c_function())), PP); |
| 743 __ LoadImmediate( | 743 __ LoadImmediate( |
| 744 R10, Immediate(argc_tag), PP); | 744 R10, Immediate(argc_tag), PP); |
| 745 const ExternalLabel* stub_entry = (is_bootstrap_native() || is_leaf_call) ? | 745 const ExternalLabel* stub_entry = (is_bootstrap_native() || is_leaf_call) ? |
| 746 &StubCode::CallBootstrapCFunctionLabel() : | 746 &StubCode::CallBootstrapCFunctionLabel() : |
| 747 &StubCode::CallNativeCFunctionLabel(); | 747 &StubCode::CallNativeCFunctionLabel(); |
| 748 compiler->GenerateCall(token_pos(), | 748 compiler->GenerateCall(token_pos(), |
| 749 stub_entry, | 749 stub_entry, |
| 750 PcDescriptors::kOther, | 750 RawPcDescriptors::kOther, |
| 751 locs()); | 751 locs()); |
| 752 __ popq(result); | 752 __ popq(result); |
| 753 } | 753 } |
| 754 | 754 |
| 755 | 755 |
| 756 static bool CanBeImmediateIndex(Value* index, intptr_t cid) { | 756 static bool CanBeImmediateIndex(Value* index, intptr_t cid) { |
| 757 if (!index->definition()->IsConstant()) return false; | 757 if (!index->definition()->IsConstant()) return false; |
| 758 const Object& constant = index->definition()->AsConstant()->value(); | 758 const Object& constant = index->definition()->AsConstant()->value(); |
| 759 if (!constant.IsSmi()) return false; | 759 if (!constant.IsSmi()) return false; |
| 760 const Smi& smi_const = Smi::Cast(constant); | 760 const Smi& smi_const = Smi::Cast(constant); |
| (...skipping 792 matching lines...) | (...skipping 792 matching lines...) |
| 1553 const Code& stub = | 1553 const Code& stub = |
| 1554 Code::Handle(StubCode::GetAllocationStubForClass(cls_)); | 1554 Code::Handle(StubCode::GetAllocationStubForClass(cls_)); |
| 1555 const ExternalLabel label(stub.EntryPoint()); | 1555 const ExternalLabel label(stub.EntryPoint()); |
| 1556 | 1556 |
| 1557 LocationSummary* locs = instruction_->locs(); | 1557 LocationSummary* locs = instruction_->locs(); |
| 1558 locs->live_registers()->Remove(locs->out(0)); | 1558 locs->live_registers()->Remove(locs->out(0)); |
| 1559 | 1559 |
| 1560 compiler->SaveLiveRegisters(locs); | 1560 compiler->SaveLiveRegisters(locs); |
| 1561 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 1561 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| 1562 &label, | 1562 &label, |
| 1563 PcDescriptors::kOther, | 1563 RawPcDescriptors::kOther, |
| 1564 locs); | 1564 locs); |
| 1565 __ MoveRegister(locs->temp(0).reg(), RAX); | 1565 __ MoveRegister(locs->temp(0).reg(), RAX); |
| 1566 compiler->RestoreLiveRegisters(locs); | 1566 compiler->RestoreLiveRegisters(locs); |
| 1567 | 1567 |
| 1568 __ jmp(exit_label()); | 1568 __ jmp(exit_label()); |
| 1569 } | 1569 } |
| 1570 | 1570 |
| 1571 private: | 1571 private: |
| 1572 StoreInstanceFieldInstr* instruction_; | 1572 StoreInstanceFieldInstr* instruction_; |
| 1573 const Class& cls_; | 1573 const Class& cls_; |
| (...skipping 438 matching lines...) | (...skipping 438 matching lines...) |
| 2012 __ Drop(2); | 2012 __ Drop(2); |
| 2013 __ popq(kResultReg); | 2013 __ popq(kResultReg); |
| 2014 __ Bind(&done); | 2014 __ Bind(&done); |
| 2015 return; | 2015 return; |
| 2016 } | 2016 } |
| 2017 } | 2017 } |
| 2018 | 2018 |
| 2019 __ Bind(&slow_path); | 2019 __ Bind(&slow_path); |
| 2020 compiler->GenerateCall(token_pos(), | 2020 compiler->GenerateCall(token_pos(), |
| 2021 &StubCode::AllocateArrayLabel(), | 2021 &StubCode::AllocateArrayLabel(), |
| 2022 PcDescriptors::kOther, | 2022 RawPcDescriptors::kOther, |
| 2023 locs()); | 2023 locs()); |
| 2024 __ Bind(&done); | 2024 __ Bind(&done); |
| 2025 ASSERT(locs()->out(0).reg() == kResultReg); | 2025 ASSERT(locs()->out(0).reg() == kResultReg); |
| 2026 } | 2026 } |
| 2027 | 2027 |
| 2028 | 2028 |
| 2029 class BoxDoubleSlowPath : public SlowPathCode { | 2029 class BoxDoubleSlowPath : public SlowPathCode { |
| 2030 public: | 2030 public: |
| 2031 explicit BoxDoubleSlowPath(Instruction* instruction) | 2031 explicit BoxDoubleSlowPath(Instruction* instruction) |
| 2032 : instruction_(instruction) { } | 2032 : instruction_(instruction) { } |
| 2033 | 2033 |
| 2034 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 2034 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2035 __ Comment("BoxDoubleSlowPath"); | 2035 __ Comment("BoxDoubleSlowPath"); |
| 2036 __ Bind(entry_label()); | 2036 __ Bind(entry_label()); |
| 2037 const Class& double_class = compiler->double_class(); | 2037 const Class& double_class = compiler->double_class(); |
| 2038 const Code& stub = | 2038 const Code& stub = |
| 2039 Code::Handle(StubCode::GetAllocationStubForClass(double_class)); | 2039 Code::Handle(StubCode::GetAllocationStubForClass(double_class)); |
| 2040 const ExternalLabel label(stub.EntryPoint()); | 2040 const ExternalLabel label(stub.EntryPoint()); |
| 2041 | 2041 |
| 2042 LocationSummary* locs = instruction_->locs(); | 2042 LocationSummary* locs = instruction_->locs(); |
| 2043 locs->live_registers()->Remove(locs->out(0)); | 2043 locs->live_registers()->Remove(locs->out(0)); |
| 2044 | 2044 |
| 2045 compiler->SaveLiveRegisters(locs); | 2045 compiler->SaveLiveRegisters(locs); |
| 2046 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 2046 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| 2047 &label, | 2047 &label, |
| 2048 PcDescriptors::kOther, | 2048 RawPcDescriptors::kOther, |
| 2049 locs); | 2049 locs); |
| 2050 __ MoveRegister(locs->out(0).reg(), RAX); | 2050 __ MoveRegister(locs->out(0).reg(), RAX); |
| 2051 compiler->RestoreLiveRegisters(locs); | 2051 compiler->RestoreLiveRegisters(locs); |
| 2052 | 2052 |
| 2053 __ jmp(exit_label()); | 2053 __ jmp(exit_label()); |
| 2054 } | 2054 } |
| 2055 | 2055 |
| 2056 private: | 2056 private: |
| 2057 Instruction* instruction_; | 2057 Instruction* instruction_; |
| 2058 }; | 2058 }; |
| (...skipping 11 matching lines...) | (...skipping 11 matching lines...) |
| 2070 const Code& stub = | 2070 const Code& stub = |
| 2071 Code::Handle(StubCode::GetAllocationStubForClass(float32x4_class)); | 2071 Code::Handle(StubCode::GetAllocationStubForClass(float32x4_class)); |
| 2072 const ExternalLabel label(stub.EntryPoint()); | 2072 const ExternalLabel label(stub.EntryPoint()); |
| 2073 | 2073 |
| 2074 LocationSummary* locs = instruction_->locs(); | 2074 LocationSummary* locs = instruction_->locs(); |
| 2075 locs->live_registers()->Remove(locs->out(0)); | 2075 locs->live_registers()->Remove(locs->out(0)); |
| 2076 | 2076 |
| 2077 compiler->SaveLiveRegisters(locs); | 2077 compiler->SaveLiveRegisters(locs); |
| 2078 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 2078 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| 2079 &label, | 2079 &label, |
| 2080 PcDescriptors::kOther, | 2080 RawPcDescriptors::kOther, |
| 2081 locs); | 2081 locs); |
| 2082 __ MoveRegister(locs->out(0).reg(), RAX); | 2082 __ MoveRegister(locs->out(0).reg(), RAX); |
| 2083 compiler->RestoreLiveRegisters(locs); | 2083 compiler->RestoreLiveRegisters(locs); |
| 2084 | 2084 |
| 2085 __ jmp(exit_label()); | 2085 __ jmp(exit_label()); |
| 2086 } | 2086 } |
| 2087 | 2087 |
| 2088 private: | 2088 private: |
| 2089 Instruction* instruction_; | 2089 Instruction* instruction_; |
| 2090 }; | 2090 }; |
| (...skipping 11 matching lines...) | (...skipping 11 matching lines...) |
| 2102 const Code& stub = | 2102 const Code& stub = |
| 2103 Code::Handle(StubCode::GetAllocationStubForClass(float64x2_class)); | 2103 Code::Handle(StubCode::GetAllocationStubForClass(float64x2_class)); |
| 2104 const ExternalLabel label(stub.EntryPoint()); | 2104 const ExternalLabel label(stub.EntryPoint()); |
| 2105 | 2105 |
| 2106 LocationSummary* locs = instruction_->locs(); | 2106 LocationSummary* locs = instruction_->locs(); |
| 2107 locs->live_registers()->Remove(locs->out(0)); | 2107 locs->live_registers()->Remove(locs->out(0)); |
| 2108 | 2108 |
| 2109 compiler->SaveLiveRegisters(locs); | 2109 compiler->SaveLiveRegisters(locs); |
| 2110 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 2110 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| 2111 &label, | 2111 &label, |
| 2112 PcDescriptors::kOther, | 2112 RawPcDescriptors::kOther, |
| 2113 locs); | 2113 locs); |
| 2114 __ MoveRegister(locs->out(0).reg(), RAX); | 2114 __ MoveRegister(locs->out(0).reg(), RAX); |
| 2115 compiler->RestoreLiveRegisters(locs); | 2115 compiler->RestoreLiveRegisters(locs); |
| 2116 | 2116 |
| 2117 __ jmp(exit_label()); | 2117 __ jmp(exit_label()); |
| 2118 } | 2118 } |
| 2119 | 2119 |
| 2120 private: | 2120 private: |
| 2121 Instruction* instruction_; | 2121 Instruction* instruction_; |
| 2122 }; | 2122 }; |
| (...skipping 262 matching lines...) | (...skipping 262 matching lines...) |
| 2385 | 2385 |
| 2386 | 2386 |
| 2387 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2387 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2388 ASSERT(locs()->temp(0).reg() == R10); | 2388 ASSERT(locs()->temp(0).reg() == R10); |
| 2389 ASSERT(locs()->out(0).reg() == RAX); | 2389 ASSERT(locs()->out(0).reg() == RAX); |
| 2390 | 2390 |
| 2391 __ LoadImmediate(R10, Immediate(num_context_variables()), PP); | 2391 __ LoadImmediate(R10, Immediate(num_context_variables()), PP); |
| 2392 const ExternalLabel label(StubCode::AllocateContextEntryPoint()); | 2392 const ExternalLabel label(StubCode::AllocateContextEntryPoint()); |
| 2393 compiler->GenerateCall(token_pos(), | 2393 compiler->GenerateCall(token_pos(), |
| 2394 &label, | 2394 &label, |
| 2395 PcDescriptors::kOther, | 2395 RawPcDescriptors::kOther, |
| 2396 locs()); | 2396 locs()); |
| 2397 } | 2397 } |
| 2398 | 2398 |
| 2399 | 2399 |
| 2400 LocationSummary* CloneContextInstr::MakeLocationSummary(Isolate* isolate, | 2400 LocationSummary* CloneContextInstr::MakeLocationSummary(Isolate* isolate, |
| 2401 bool opt) const { | 2401 bool opt) const { |
| 2402 const intptr_t kNumInputs = 1; | 2402 const intptr_t kNumInputs = 1; |
| 2403 const intptr_t kNumTemps = 0; | 2403 const intptr_t kNumTemps = 0; |
| 2404 LocationSummary* locs = new(isolate) LocationSummary( | 2404 LocationSummary* locs = new(isolate) LocationSummary( |
| 2405 isolate, kNumInputs, kNumTemps, LocationSummary::kCall); | 2405 isolate, kNumInputs, kNumTemps, LocationSummary::kCall); |
| (...skipping 93 matching lines...) | (...skipping 93 matching lines...) |
| 2499 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); | 2499 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); |
| 2500 compiler->pending_deoptimization_env_ = env; | 2500 compiler->pending_deoptimization_env_ = env; |
| 2501 compiler->GenerateRuntimeCall(instruction_->token_pos(), | 2501 compiler->GenerateRuntimeCall(instruction_->token_pos(), |
| 2502 instruction_->deopt_id(), | 2502 instruction_->deopt_id(), |
| 2503 kStackOverflowRuntimeEntry, | 2503 kStackOverflowRuntimeEntry, |
| 2504 0, | 2504 0, |
| 2505 instruction_->locs()); | 2505 instruction_->locs()); |
| 2506 | 2506 |
| 2507 if (FLAG_use_osr && !compiler->is_optimizing() && instruction_->in_loop()) { | 2507 if (FLAG_use_osr && !compiler->is_optimizing() && instruction_->in_loop()) { |
| 2508 // In unoptimized code, record loop stack checks as possible OSR entries. | 2508 // In unoptimized code, record loop stack checks as possible OSR entries. |
| 2509 compiler->AddCurrentDescriptor(PcDescriptors::kOsrEntry, | 2509 compiler->AddCurrentDescriptor(RawPcDescriptors::kOsrEntry, |
| 2510 instruction_->deopt_id(), | 2510 instruction_->deopt_id(), |
| 2511 0); // No token position. | 2511 0); // No token position. |
| 2512 } | 2512 } |
| 2513 compiler->pending_deoptimization_env_ = NULL; | 2513 compiler->pending_deoptimization_env_ = NULL; |
| 2514 compiler->RestoreLiveRegisters(instruction_->locs()); | 2514 compiler->RestoreLiveRegisters(instruction_->locs()); |
| 2515 __ jmp(exit_label()); | 2515 __ jmp(exit_label()); |
| 2516 } | 2516 } |
| 2517 | 2517 |
| 2518 | 2518 |
| 2519 Label* osr_entry_label() { | 2519 Label* osr_entry_label() { |
| (...skipping 912 matching lines...) | (...skipping 912 matching lines...) |
| 3432 const Code& stub = | 3432 const Code& stub = |
| 3433 Code::Handle(StubCode::GetAllocationStubForClass(int32x4_class)); | 3433 Code::Handle(StubCode::GetAllocationStubForClass(int32x4_class)); |
| 3434 const ExternalLabel label(stub.EntryPoint()); | 3434 const ExternalLabel label(stub.EntryPoint()); |
| 3435 | 3435 |
| 3436 LocationSummary* locs = instruction_->locs(); | 3436 LocationSummary* locs = instruction_->locs(); |
| 3437 locs->live_registers()->Remove(locs->out(0)); | 3437 locs->live_registers()->Remove(locs->out(0)); |
| 3438 | 3438 |
| 3439 compiler->SaveLiveRegisters(locs); | 3439 compiler->SaveLiveRegisters(locs); |
| 3440 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 3440 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| 3441 &label, | 3441 &label, |
| 3442 PcDescriptors::kOther, | 3442 RawPcDescriptors::kOther, |
| 3443 locs); | 3443 locs); |
| 3444 __ MoveRegister(locs->out(0).reg(), RAX); | 3444 __ MoveRegister(locs->out(0).reg(), RAX); |
| 3445 compiler->RestoreLiveRegisters(locs); | 3445 compiler->RestoreLiveRegisters(locs); |
| 3446 | 3446 |
| 3447 __ jmp(exit_label()); | 3447 __ jmp(exit_label()); |
| 3448 } | 3448 } |
| 3449 | 3449 |
| 3450 private: | 3450 private: |
| 3451 BoxInt32x4Instr* instruction_; | 3451 BoxInt32x4Instr* instruction_; |
| 3452 }; | 3452 }; |
| (...skipping 2144 matching lines...) | (...skipping 2144 matching lines...) |
| 5597 | 5597 |
| 5598 void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5598 void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 5599 __ Bind(compiler->GetJumpLabel(this)); | 5599 __ Bind(compiler->GetJumpLabel(this)); |
| 5600 if (!compiler->is_optimizing()) { | 5600 if (!compiler->is_optimizing()) { |
| 5601 if (compiler->NeedsEdgeCounter(this)) { | 5601 if (compiler->NeedsEdgeCounter(this)) { |
| 5602 compiler->EmitEdgeCounter(); | 5602 compiler->EmitEdgeCounter(); |
| 5603 } | 5603 } |
| 5604 // The deoptimization descriptor points after the edge counter code for | 5604 // The deoptimization descriptor points after the edge counter code for |
| 5605 // uniformity with ARM and MIPS, where we can reuse pattern matching | 5605 // uniformity with ARM and MIPS, where we can reuse pattern matching |
| 5606 // code that matches backwards from the end of the pattern. | 5606 // code that matches backwards from the end of the pattern. |
| 5607 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 5607 compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, |
| 5608 deopt_id_, | 5608 deopt_id_, |
| 5609 Scanner::kNoSourcePos); | 5609 Scanner::kNoSourcePos); |
| 5610 } | 5610 } |
| 5611 if (HasParallelMove()) { | 5611 if (HasParallelMove()) { |
| 5612 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 5612 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
| 5613 } | 5613 } |
| 5614 } | 5614 } |
| 5615 | 5615 |
| 5616 | 5616 |
| 5617 LocationSummary* GotoInstr::MakeLocationSummary(Isolate* isolate, | 5617 LocationSummary* GotoInstr::MakeLocationSummary(Isolate* isolate, |
| 5618 bool opt) const { | 5618 bool opt) const { |
| 5619 return new(isolate) LocationSummary(isolate, 0, 0, LocationSummary::kNoCall); | 5619 return new(isolate) LocationSummary(isolate, 0, 0, LocationSummary::kNoCall); |
| 5620 } | 5620 } |
| 5621 | 5621 |
| 5622 | 5622 |
| 5623 void GotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5623 void GotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 5624 if (!compiler->is_optimizing()) { | 5624 if (!compiler->is_optimizing()) { |
| 5625 if (FLAG_emit_edge_counters) { | 5625 if (FLAG_emit_edge_counters) { |
| 5626 compiler->EmitEdgeCounter(); | 5626 compiler->EmitEdgeCounter(); |
| 5627 } | 5627 } |
| 5628 // Add a deoptimization descriptor for deoptimizing instructions that | 5628 // Add a deoptimization descriptor for deoptimizing instructions that |
| 5629 // may be inserted before this instruction. This descriptor points | 5629 // may be inserted before this instruction. This descriptor points |
| 5630 // after the edge counter for uniformity with ARM and MIPS, where we can | 5630 // after the edge counter for uniformity with ARM and MIPS, where we can |
| 5631 // reuse pattern matching that matches backwards from the end of the | 5631 // reuse pattern matching that matches backwards from the end of the |
| 5632 // pattern. | 5632 // pattern. |
| 5633 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 5633 compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, |
| 5634 GetDeoptId(), | 5634 GetDeoptId(), |
| 5635 Scanner::kNoSourcePos); | 5635 Scanner::kNoSourcePos); |
| 5636 } | 5636 } |
| 5637 if (HasParallelMove()) { | 5637 if (HasParallelMove()) { |
| 5638 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 5638 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
| 5639 } | 5639 } |
| 5640 | 5640 |
| 5641 // We can fall through if the successor is the next block in the list. | 5641 // We can fall through if the successor is the next block in the list. |
| 5642 // Otherwise, we need a jump. | 5642 // Otherwise, we need a jump. |
| 5643 if (!compiler->CanFallThroughTo(successor())) { | 5643 if (!compiler->CanFallThroughTo(successor())) { |
| (...skipping 121 matching lines...) | (...skipping 121 matching lines...) |
| 5765 // Function in RAX. | 5765 // Function in RAX. |
| 5766 ASSERT(locs()->in(0).reg() == RAX); | 5766 ASSERT(locs()->in(0).reg() == RAX); |
| 5767 __ movq(RCX, FieldAddress(RAX, Function::instructions_offset())); | 5767 __ movq(RCX, FieldAddress(RAX, Function::instructions_offset())); |
| 5768 | 5768 |
| 5769 // RAX: Function. | 5769 // RAX: Function. |
| 5770 // R10: Arguments descriptor array. | 5770 // R10: Arguments descriptor array. |
| 5771 // RBX: Smi 0 (no IC data; the lazy-compile stub expects a GC-safe value). | 5771 // RBX: Smi 0 (no IC data; the lazy-compile stub expects a GC-safe value). |
| 5772 __ xorq(RBX, RBX); | 5772 __ xorq(RBX, RBX); |
| 5773 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 5773 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
| 5774 __ call(RCX); | 5774 __ call(RCX); |
| 5775 compiler->AddCurrentDescriptor(PcDescriptors::kClosureCall, | 5775 compiler->AddCurrentDescriptor(RawPcDescriptors::kClosureCall, |
| 5776 deopt_id(), | 5776 deopt_id(), |
| 5777 token_pos()); | 5777 token_pos()); |
| 5778 compiler->RecordSafepoint(locs()); | 5778 compiler->RecordSafepoint(locs()); |
| 5779 // Marks either the continuation point in unoptimized code or the | 5779 // Marks either the continuation point in unoptimized code or the |
| 5780 // deoptimization point in optimized code, after call. | 5780 // deoptimization point in optimized code, after call. |
| 5781 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id()); | 5781 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id()); |
| 5782 if (compiler->is_optimizing()) { | 5782 if (compiler->is_optimizing()) { |
| 5783 compiler->AddDeoptIndexAtCall(deopt_id_after, token_pos()); | 5783 compiler->AddDeoptIndexAtCall(deopt_id_after, token_pos()); |
| 5784 } else { | 5784 } else { |
| 5785 // Add deoptimization continuation point after the call and before the | 5785 // Add deoptimization continuation point after the call and before the |
| 5786 // arguments are removed. | 5786 // arguments are removed. |
| 5787 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 5787 compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, |
| 5788 deopt_id_after, | 5788 deopt_id_after, |
| 5789 token_pos()); | 5789 token_pos()); |
| 5790 } | 5790 } |
| 5791 __ Drop(argument_count); | 5791 __ Drop(argument_count); |
| 5792 } | 5792 } |
| 5793 | 5793 |
| 5794 | 5794 |
| 5795 LocationSummary* BooleanNegateInstr::MakeLocationSummary(Isolate* isolate, | 5795 LocationSummary* BooleanNegateInstr::MakeLocationSummary(Isolate* isolate, |
| 5796 bool opt) const { | 5796 bool opt) const { |
| 5797 return LocationSummary::Make(isolate, | 5797 return LocationSummary::Make(isolate, |
| (...skipping 20 matching lines...) | (...skipping 20 matching lines...) |
| 5818 bool opt) const { | 5818 bool opt) const { |
| 5819 return MakeCallSummary(); | 5819 return MakeCallSummary(); |
| 5820 } | 5820 } |
| 5821 | 5821 |
| 5822 | 5822 |
| 5823 void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5823 void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 5824 const Code& stub = Code::Handle(StubCode::GetAllocationStubForClass(cls())); | 5824 const Code& stub = Code::Handle(StubCode::GetAllocationStubForClass(cls())); |
| 5825 const ExternalLabel label(stub.EntryPoint()); | 5825 const ExternalLabel label(stub.EntryPoint()); |
| 5826 compiler->GenerateCall(token_pos(), | 5826 compiler->GenerateCall(token_pos(), |
| 5827 &label, | 5827 &label, |
| 5828 PcDescriptors::kOther, | 5828 RawPcDescriptors::kOther, |
| 5829 locs()); | 5829 locs()); |
| 5830 __ Drop(ArgumentCount()); // Discard arguments. | 5830 __ Drop(ArgumentCount()); // Discard arguments. |
| 5831 } | 5831 } |
| 5832 | 5832 |
| 5833 | 5833 |
| 5834 void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5834 void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 5835 ASSERT(!compiler->is_optimizing()); | 5835 ASSERT(!compiler->is_optimizing()); |
| 5836 const ExternalLabel label(StubCode::DebugStepCheckEntryPoint()); | 5836 const ExternalLabel label(StubCode::DebugStepCheckEntryPoint()); |
| 5837 __ movq(R10, Immediate(0)); | 5837 __ movq(R10, Immediate(0)); |
| 5838 __ movq(RBX, Immediate(0)); | 5838 __ movq(RBX, Immediate(0)); |
| 5839 compiler->GenerateCall(token_pos(), &label, stub_kind_, locs()); | 5839 compiler->GenerateCall(token_pos(), &label, stub_kind_, locs()); |
| 5840 #if defined(DEBUG) | 5840 #if defined(DEBUG) |
| 5841 __ movq(R10, Immediate(kInvalidObjectPointer)); | 5841 __ movq(R10, Immediate(kInvalidObjectPointer)); |
| 5842 __ movq(RBX, Immediate(kInvalidObjectPointer)); | 5842 __ movq(RBX, Immediate(kInvalidObjectPointer)); |
| 5843 #endif | 5843 #endif |
| 5844 } | 5844 } |
| 5845 | 5845 |
| 5846 } // namespace dart | 5846 } // namespace dart |
| 5847 | 5847 |
| 5848 #undef __ | 5848 #undef __ |
| 5849 | 5849 |
| 5850 #endif // defined TARGET_ARCH_X64 | 5850 #endif // defined TARGET_ARCH_X64 |