| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. |
| 6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
| 7 | 7 |
| 8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
| 9 | 9 |
| 10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
| (...skipping 702 matching lines...) | |
| 713 // type check runtime call is the checked value. | 713 // type check runtime call is the checked value. |
| 714 __ Drop(6); | 714 __ Drop(6); |
| 715 __ Pop(R0); | 715 __ Pop(R0); |
| 716 | 716 |
| 717 __ Bind(&is_assignable); | 717 __ Bind(&is_assignable); |
| 718 // Restore instantiator (R2) and its type arguments (R1). | 718 // Restore instantiator (R2) and its type arguments (R1). |
| 719 __ PopList((1 << R1) | (1 << R2)); | 719 __ PopList((1 << R1) | (1 << R2)); |
| 720 } | 720 } |
| 721 | 721 |
| 722 | 722 |
| 723 void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) { | |
| 724 if (!is_optimizing()) { | |
| 725 if (FLAG_enable_type_checks && instr->IsAssertAssignable()) { | |
| 726 AssertAssignableInstr* assert = instr->AsAssertAssignable(); | |
| 727 AddCurrentDescriptor(PcDescriptors::kDeopt, | |
| 728 assert->deopt_id(), | |
| 729 assert->token_pos()); | |
| 730 } else if (instr->IsGuardField() || | |
| 731 instr->CanBecomeDeoptimizationTarget()) { | |
| 732 AddCurrentDescriptor(PcDescriptors::kDeopt, | |
| 733 instr->deopt_id(), | |
| 734 Scanner::kDummyTokenIndex); | |
| 735 } | |
| 736 AllocateRegistersLocally(instr); | |
| 737 } else if (instr->MayThrow() && | |
| 738 (CurrentTryIndex() != CatchClauseNode::kInvalidTryIndex)) { | |
| 739 // Optimized try-block: Sync locals to fixed stack locations. | |
| 740 EmitTrySync(instr, CurrentTryIndex()); | |
| 741 } | |
| 742 } | |
| 743 | |
| 744 | |
| 745 void FlowGraphCompiler::EmitTrySyncMove(intptr_t dest_offset, | 723 void FlowGraphCompiler::EmitTrySyncMove(intptr_t dest_offset, |
| 746 Location loc, | 724 Location loc, |
| 747 bool* push_emitted) { | 725 bool* push_emitted) { |
| 748 if (loc.IsConstant()) { | 726 if (loc.IsConstant()) { |
| 749 if (!*push_emitted) { | 727 if (!*push_emitted) { |
| 750 __ Push(R0); | 728 __ Push(R0); |
| 751 *push_emitted = true; | 729 *push_emitted = true; |
| 752 } | 730 } |
| 753 __ LoadObject(R0, loc.constant()); | 731 __ LoadObject(R0, loc.constant()); |
| 754 __ StoreToOffset(kWord, R0, FP, dest_offset); | 732 __ StoreToOffset(kWord, R0, FP, dest_offset); |
| (...skipping 513 matching lines...) | |
| 1268 // Add deoptimization continuation point after the call and before the | 1246 // Add deoptimization continuation point after the call and before the |
| 1269 // arguments are removed. | 1247 // arguments are removed. |
| 1270 AddCurrentDescriptor(PcDescriptors::kDeopt, | 1248 AddCurrentDescriptor(PcDescriptors::kDeopt, |
| 1271 deopt_id_after, | 1249 deopt_id_after, |
| 1272 token_pos); | 1250 token_pos); |
| 1273 } | 1251 } |
| 1274 } | 1252 } |
| 1275 } | 1253 } |
| 1276 | 1254 |
| 1277 | 1255 |
| | 1256 void FlowGraphCompiler::EmitEdgeCounter() { |
| | 1257 // We do not check for overflow when incrementing the edge counter. The |
| | 1258 // function should normally be optimized long before the counter can |
| | 1259 // overflow; and though we do not reset the counters when we optimize or |
| | 1260 // deoptimize, there is a bound on the number of |
| | 1261 // optimization/deoptimization cycles we will attempt. |
| | 1262 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); |
| | 1263 counter.SetAt(0, Smi::Handle(Smi::New(0))); |
| | 1264 __ Comment("Edge counter"); |
| | 1265 __ LoadObject(R0, counter); |
| | 1266 __ ldr(IP, FieldAddress(R0, Array::element_offset(0))); |
| | 1267 __ adds(IP, IP, ShifterOperand(Smi::RawValue(1))); |
| | 1268 __ str(IP, FieldAddress(R0, Array::element_offset(0))); |
| | 1269 } |
| | 1270 |
| | 1271 |
| 1278 void FlowGraphCompiler::EmitOptimizedInstanceCall( | 1272 void FlowGraphCompiler::EmitOptimizedInstanceCall( |
| 1279 ExternalLabel* target_label, | 1273 ExternalLabel* target_label, |
| 1280 const ICData& ic_data, | 1274 const ICData& ic_data, |
| 1281 intptr_t argument_count, | 1275 intptr_t argument_count, |
| 1282 intptr_t deopt_id, | 1276 intptr_t deopt_id, |
| 1283 intptr_t token_pos, | 1277 intptr_t token_pos, |
| 1284 LocationSummary* locs) { | 1278 LocationSummary* locs) { |
| 1285 // Each ICData propagated from unoptimized to optimized code contains the | 1279 // Each ICData propagated from unoptimized to optimized code contains the |
| 1286 // function that corresponds to the Dart function of that IC call. Due | 1280 // function that corresponds to the Dart function of that IC call. Due |
| 1287 // to inlining in optimized code, that function may not correspond to the | 1281 // to inlining in optimized code, that function may not correspond to the |
| (...skipping 629 matching lines...) | |
| 1917 DRegister dreg = EvenDRegisterOf(reg); | 1911 DRegister dreg = EvenDRegisterOf(reg); |
| 1918 __ vldrd(dreg, Address(SP, kDoubleSize, Address::PostIndex)); | 1912 __ vldrd(dreg, Address(SP, kDoubleSize, Address::PostIndex)); |
| 1919 } | 1913 } |
| 1920 | 1914 |
| 1921 | 1915 |
| 1922 #undef __ | 1916 #undef __ |
| 1923 | 1917 |
| 1924 } // namespace dart | 1918 } // namespace dart |
| 1925 | 1919 |
| 1926 #endif // defined TARGET_ARCH_ARM | 1920 #endif // defined TARGET_ARCH_ARM |
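The `EmitEdgeCounter` hunk added in this diff keeps the counter as a tagged Smi in slot 0 of a one-element array and bumps it with `adds(IP, IP, ShifterOperand(Smi::RawValue(1)))`, i.e. it adds the raw (tag-shifted) representation of 1 rather than 1 itself. The snippet below is only a plain C++ illustration of that tagged-integer arithmetic, assuming the Dart VM's 1-bit Smi tag in the least-significant bit; the `SmiRawValue`/`SmiValue` helpers and the `kSmiTagSize` constant are named here purely for the sketch and are not the VM's API.

```cpp
// Illustration only: models the tagged-Smi increment that EmitEdgeCounter
// emits as ARM code (ldr / adds Smi::RawValue(1) / str). Assumes a 1-bit
// Smi tag in the least-significant bit, as the Dart VM uses.
#include <cstdint>
#include <iostream>

namespace {

constexpr intptr_t kSmiTagSize = 1;  // low bit is the tag; 0 marks a Smi

// Raw (in-memory) representation of the small integer n.
constexpr intptr_t SmiRawValue(intptr_t n) { return n << kSmiTagSize; }

// Untag a raw Smi word back to a plain integer.
constexpr intptr_t SmiValue(intptr_t raw) { return raw >> kSmiTagSize; }

}  // namespace

int main() {
  // One counter slot, initialized to Smi 0, mirroring
  // Array::New(1, Heap::kOld) + SetAt(0, Smi::New(0)) in the patch.
  intptr_t counter_slot = SmiRawValue(0);

  // Each traversal of the edge adds Smi::RawValue(1) to the raw word;
  // no overflow check, matching the comment in the added code.
  for (int i = 0; i < 3; ++i) {
    counter_slot += SmiRawValue(1);
  }

  std::cout << "edge executed " << SmiValue(counter_slot) << " times\n";
  return 0;
}
```

Because the raw word is just the value shifted left by the tag size, adding `Smi::RawValue(1)` increments the logical count by one in a single machine add, with no untag/retag step and, as the patch comment explains, no overflow check.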