OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32. |
6 #if defined(TARGET_ARCH_IA32) | 6 #if defined(TARGET_ARCH_IA32) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 4762 matching lines...)
4773 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4773 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4774 if (!compiler->CanFallThroughTo(normal_entry())) { | 4774 if (!compiler->CanFallThroughTo(normal_entry())) { |
4775 __ jmp(compiler->GetJumpLabel(normal_entry())); | 4775 __ jmp(compiler->GetJumpLabel(normal_entry())); |
4776 } | 4776 } |
4777 } | 4777 } |
4778 | 4778 |
4779 | 4779 |
4780 void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4780 void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4781 __ Bind(compiler->GetJumpLabel(this)); | 4781 __ Bind(compiler->GetJumpLabel(this)); |
4782 if (!compiler->is_optimizing()) { | 4782 if (!compiler->is_optimizing()) { |
| 4783 compiler->EmitEdgeCounter(); |
| 4784 // The deoptimization descriptor points after the edge counter code for |
| 4785 // uniformity with ARM and MIPS, where we can reuse pattern matching |
| 4786 // code that matches backwards from the end of the pattern. |
4783 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 4787 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
4784 deopt_id_, | 4788 deopt_id_, |
4785 Scanner::kDummyTokenIndex); | 4789 Scanner::kDummyTokenIndex); |
4786 // Add an edge counter. | |
4787 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); | |
4788 counter.SetAt(0, Smi::Handle(Smi::New(0))); | |
4789 Label done; | |
4790 __ Comment("Edge counter"); | |
4791 __ LoadObject(EAX, counter); | |
4792 __ addl(FieldAddress(EAX, Array::element_offset(0)), | |
4793 Immediate(Smi::RawValue(1))); | |
4794 __ j(NO_OVERFLOW, &done); | |
4795 __ movl(FieldAddress(EAX, Array::element_offset(0)), | |
4796 Immediate(Smi::RawValue(Smi::kMaxValue))); | |
4797 __ Bind(&done); | |
4798 } | 4790 } |
4799 if (HasParallelMove()) { | 4791 if (HasParallelMove()) { |
4800 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 4792 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
4801 } | 4793 } |
4802 } | 4794 } |
4803 | 4795 |
4804 | 4796 |
4805 LocationSummary* GotoInstr::MakeLocationSummary() const { | 4797 LocationSummary* GotoInstr::MakeLocationSummary() const { |
4806 return new LocationSummary(0, 0, LocationSummary::kNoCall); | 4798 return new LocationSummary(0, 0, LocationSummary::kNoCall); |
4807 } | 4799 } |
4808 | 4800 |
4809 | 4801 |
4810 void GotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4802 void GotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4811 if (!compiler->is_optimizing()) { | 4803 if (!compiler->is_optimizing()) { |
4812 // Add deoptimization descriptor for deoptimizing instructions that may | 4804 compiler->EmitEdgeCounter(); |
4813 // be inserted before this instruction. | 4805 // Add a deoptimization descriptor for deoptimizing instructions that |
| 4806 // may be inserted before this instruction. This descriptor points |
| 4807 // after the edge counter for uniformity with ARM and MIPS, where we can |
| 4808 // reuse pattern matching that matches backwards from the end of the |
| 4809 // pattern. |
4814 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 4810 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
4815 GetDeoptId(), | 4811 GetDeoptId(), |
4816 0); // No token position. | 4812 0); // No token position. |
4817 // Add an edge counter. | |
4818 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); | |
4819 counter.SetAt(0, Smi::Handle(Smi::New(0))); | |
4820 Label done; | |
4821 __ Comment("Edge counter"); | |
4822 __ LoadObject(EAX, counter); | |
4823 __ addl(FieldAddress(EAX, Array::element_offset(0)), | |
4824 Immediate(Smi::RawValue(1))); | |
4825 __ j(NO_OVERFLOW, &done); | |
4826 __ movl(FieldAddress(EAX, Array::element_offset(0)), | |
4827 Immediate(Smi::RawValue(Smi::kMaxValue))); | |
4828 __ Bind(&done); | |
4829 } | 4813 } |
4830 if (HasParallelMove()) { | 4814 if (HasParallelMove()) { |
4831 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 4815 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
4832 } | 4816 } |
4833 | 4817 |
4834 // We can fall through if the successor is the next block in the list. | 4818 // We can fall through if the successor is the next block in the list. |
4835 // Otherwise, we need a jump. | 4819 // Otherwise, we need a jump. |
4836 if (!compiler->CanFallThroughTo(successor())) { | 4820 if (!compiler->CanFallThroughTo(successor())) { |
4837 __ jmp(compiler->GetJumpLabel(successor())); | 4821 __ jmp(compiler->GetJumpLabel(successor())); |
4838 } | 4822 } |
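Note: the inlined edge-counter block removed from both call sites above appears to have been hoisted into a shared FlowGraphCompiler::EmitEdgeCounter() helper. Its definition is not part of this diff; based purely on the removed lines and the new call sites, the helper would presumably look roughly like the sketch below (the method name comes from the call sites, and its placement in the IA32 flow graph compiler is an assumption).

// Hypothetical sketch only: assumes EmitEdgeCounter() simply hoists the
// edge-counter block removed from TargetEntryInstr and GotoInstr above.
void FlowGraphCompiler::EmitEdgeCounter() {
  // Allocate a one-element old-space array holding a Smi edge count.
  const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld));
  counter.SetAt(0, Smi::Handle(Smi::New(0)));
  Label done;
  __ Comment("Edge counter");
  __ LoadObject(EAX, counter);
  // Increment the counter, saturating at Smi::kMaxValue on overflow.
  __ addl(FieldAddress(EAX, Array::element_offset(0)),
          Immediate(Smi::RawValue(1)));
  __ j(NO_OVERFLOW, &done);
  __ movl(FieldAddress(EAX, Array::element_offset(0)),
          Immediate(Smi::RawValue(Smi::kMaxValue)));
  __ Bind(&done);
}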
(...skipping 394 matching lines...)
5233 PcDescriptors::kOther, | 5217 PcDescriptors::kOther, |
5234 locs()); | 5218 locs()); |
5235 __ Drop(2); // Discard type arguments and receiver. | 5219 __ Drop(2); // Discard type arguments and receiver. |
5236 } | 5220 } |
5237 | 5221 |
5238 } // namespace dart | 5222 } // namespace dart |
5239 | 5223 |
5240 #undef __ | 5224 #undef __ |
5241 | 5225 |
5242 #endif // defined TARGET_ARCH_IA32 | 5226 #endif // defined TARGET_ARCH_IA32 |