OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 4528 matching lines...)
4539 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4539 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4540 if (!compiler->CanFallThroughTo(normal_entry())) { | 4540 if (!compiler->CanFallThroughTo(normal_entry())) { |
4541 __ jmp(compiler->GetJumpLabel(normal_entry())); | 4541 __ jmp(compiler->GetJumpLabel(normal_entry())); |
4542 } | 4542 } |
4543 } | 4543 } |
4544 | 4544 |
4545 | 4545 |
4546 void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4546 void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4547 __ Bind(compiler->GetJumpLabel(this)); | 4547 __ Bind(compiler->GetJumpLabel(this)); |
4548 if (!compiler->is_optimizing()) { | 4548 if (!compiler->is_optimizing()) { |
| 4549 compiler->EmitEdgeCounter(); |
| 4550 // The deoptimization descriptor points after the edge counter code for |
| 4551 // uniformity with ARM and MIPS, where we can reuse pattern matching |
| 4552 // code that matches backwards from the end of the pattern. |
4549 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 4553 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
4550 deopt_id_, | 4554 deopt_id_, |
4551 Scanner::kDummyTokenIndex); | 4555 Scanner::kDummyTokenIndex); |
4552 // Add an edge counter. | |
4553 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); | |
4554 counter.SetAt(0, Smi::Handle(Smi::New(0))); | |
4555 Label done; | |
4556 __ Comment("Edge counter"); | |
4557 __ LoadObject(RAX, counter, PP); | |
4558 __ addq(FieldAddress(RAX, Array::element_offset(0)), | |
4559 Immediate(Smi::RawValue(1))); | |
4560 __ j(NO_OVERFLOW, &done); | |
4561 __ movq(FieldAddress(RAX, Array::element_offset(0)), | |
4562 Immediate(Smi::RawValue(Smi::kMaxValue))); | |
4563 __ Bind(&done); | |
4564 } | 4556 } |
4565 if (HasParallelMove()) { | 4557 if (HasParallelMove()) { |
4566 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 4558 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
4567 } | 4559 } |
4568 } | 4560 } |
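
Note: both hunks in this change replace the inline counter emission with a single call to compiler->EmitEdgeCounter(). The helper itself is not part of this file; the sketch below is only a reconstruction from the inline code deleted above, assuming the helper lives on FlowGraphCompiler in the x64 backend and that the __ macro there expands to assembler()->. The signature, placement, and register choice are assumptions, not the patch's actual implementation.

    // Hypothetical sketch of the factored-out helper, reconstructed from the
    // deleted inline edge-counter code above.
    void FlowGraphCompiler::EmitEdgeCounter() {
      // Allocate a one-element array in old space holding a Smi zero; this
      // slot is bumped each time the block is entered in unoptimized code.
      const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld));
      counter.SetAt(0, Smi::Handle(Smi::New(0)));
      Label done;
      __ Comment("Edge counter");
      __ LoadObject(RAX, counter, PP);
      // Saturating increment: if the Smi addition overflows, clamp the slot
      // to Smi::kMaxValue instead of wrapping around.
      __ addq(FieldAddress(RAX, Array::element_offset(0)),
              Immediate(Smi::RawValue(1)));
      __ j(NO_OVERFLOW, &done);
      __ movq(FieldAddress(RAX, Array::element_offset(0)),
              Immediate(Smi::RawValue(Smi::kMaxValue)));
      __ Bind(&done);
    }
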
4569 | 4561 |
4570 | 4562 |
4571 LocationSummary* GotoInstr::MakeLocationSummary() const { | 4563 LocationSummary* GotoInstr::MakeLocationSummary() const { |
4572 return new LocationSummary(0, 0, LocationSummary::kNoCall); | 4564 return new LocationSummary(0, 0, LocationSummary::kNoCall); |
4573 } | 4565 } |
4574 | 4566 |
4575 | 4567 |
4576 void GotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4568 void GotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4577 if (!compiler->is_optimizing()) { | 4569 if (!compiler->is_optimizing()) { |
4578 // Add deoptimization descriptor for deoptimizing instructions that may | 4570 compiler->EmitEdgeCounter(); |
4579 // be inserted before this instruction. | 4571 // Add a deoptimization descriptor for deoptimizing instructions that |
| 4572 // may be inserted before this instruction. This descriptor points |
| 4573 // after the edge counter for uniformity with ARM and MIPS, where we can |
| 4574 // reuse pattern matching that matches backwards from the end of the |
| 4575 // pattern. |
4580 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 4576 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
4581 GetDeoptId(), | 4577 GetDeoptId(), |
4582 0); // No token position. | 4578 0); // No token position. |
4583 // Add an edge counter. | |
4584 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); | |
4585 counter.SetAt(0, Smi::Handle(Smi::New(0))); | |
4586 Label done; | |
4587 __ Comment("Edge counter"); | |
4588 __ LoadObject(RAX, counter, PP); | |
4589 __ addq(FieldAddress(RAX, Array::element_offset(0)), | |
4590 Immediate(Smi::RawValue(1))); | |
4591 __ j(NO_OVERFLOW, &done); | |
4592 __ movq(FieldAddress(RAX, Array::element_offset(0)), | |
4593 Immediate(Smi::RawValue(Smi::kMaxValue))); | |
4594 __ Bind(&done); | |
4595 } | 4579 } |
4596 if (HasParallelMove()) { | 4580 if (HasParallelMove()) { |
4597 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 4581 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
4598 } | 4582 } |
4599 | 4583 |
4600 // We can fall through if the successor is the next block in the list. | 4584 // We can fall through if the successor is the next block in the list. |
4601 // Otherwise, we need a jump. | 4585 // Otherwise, we need a jump. |
4602 if (!compiler->CanFallThroughTo(successor())) { | 4586 if (!compiler->CanFallThroughTo(successor())) { |
4603 __ jmp(compiler->GetJumpLabel(successor())); | 4587 __ jmp(compiler->GetJumpLabel(successor())); |
4604 } | 4588 } |
(...skipping 237 matching lines...)
4842 PcDescriptors::kOther, | 4826 PcDescriptors::kOther, |
4843 locs()); | 4827 locs()); |
4844 __ Drop(2); // Discard type arguments and receiver. | 4828 __ Drop(2); // Discard type arguments and receiver. |
4845 } | 4829 } |
4846 | 4830 |
4847 } // namespace dart | 4831 } // namespace dart |
4848 | 4832 |
4849 #undef __ | 4833 #undef __ |
4850 | 4834 |
4851 #endif // defined TARGET_ARCH_X64 | 4835 #endif // defined TARGET_ARCH_X64 |