OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. |
6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
7 | 7 |
8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
9 | 9 |
10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
(...skipping 13 matching lines...) Expand all Loading... |
24 | 24 |
25 namespace dart { | 25 namespace dart { |
26 | 26 |
27 DEFINE_FLAG(bool, trap_on_deoptimization, false, "Trap on deoptimization."); | 27 DEFINE_FLAG(bool, trap_on_deoptimization, false, "Trap on deoptimization."); |
28 DEFINE_FLAG(bool, unbox_mints, true, "Optimize 64-bit integer arithmetic."); | 28 DEFINE_FLAG(bool, unbox_mints, true, "Optimize 64-bit integer arithmetic."); |
29 DEFINE_FLAG(bool, unbox_doubles, true, "Optimize double arithmetic."); | 29 DEFINE_FLAG(bool, unbox_doubles, true, "Optimize double arithmetic."); |
30 DECLARE_FLAG(bool, enable_simd_inline); | 30 DECLARE_FLAG(bool, enable_simd_inline); |
31 DECLARE_FLAG(bool, use_megamorphic_stub); | 31 DECLARE_FLAG(bool, use_megamorphic_stub); |
32 | 32 |
33 | 33 |
// Emits the out-of-line slow path for a megamorphic instance call: binds the
// slow-path entry label, performs the megamorphic dispatch, then branches back
// to the inline fast path via the exit label.
void MegamorphicSlowPath::EmitNativeCode(FlowGraphCompiler* compiler) {
  Assembler* assembler = compiler->assembler();
#define __ assembler->
  __ Bind(entry_label());
  __ Comment("MegamorphicSlowPath");
  // try_index_ is forwarded so the call's PC descriptor can be associated
  // with the enclosing try/catch handler (see EmitMegamorphicInstanceCall).
  compiler->EmitMegamorphicInstanceCall(ic_data_, argument_count_, deopt_id_,
                                        token_pos_, locs_, try_index_);
  __ b(exit_label());
#undef __
}
| 44 |
| 45 |
34 FlowGraphCompiler::~FlowGraphCompiler() { | 46 FlowGraphCompiler::~FlowGraphCompiler() { |
35 // BlockInfos are zone-allocated, so their destructors are not called. | 47 // BlockInfos are zone-allocated, so their destructors are not called. |
36 // Verify the labels explicitly here. | 48 // Verify the labels explicitly here. |
37 for (int i = 0; i < block_info_.length(); ++i) { | 49 for (int i = 0; i < block_info_.length(); ++i) { |
38 ASSERT(!block_info_[i]->jump_label()->IsLinked()); | 50 ASSERT(!block_info_[i]->jump_label()->IsLinked()); |
39 } | 51 } |
40 } | 52 } |
41 | 53 |
42 | 54 |
43 bool FlowGraphCompiler::SupportsUnboxedDoubles() { | 55 bool FlowGraphCompiler::SupportsUnboxedDoubles() { |
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
168 } | 180 } |
169 | 181 |
170 return builder->CreateDeoptInfo(deopt_table); | 182 return builder->CreateDeoptInfo(deopt_table); |
171 } | 183 } |
172 | 184 |
173 | 185 |
// Emits the per-deopt-point stub that funnels eager deoptimizations into the
// shared Deoptimize runtime stub. stub_ix identifies this stub within the
// compiler's stub list; it is not otherwise used in the emitted code.
void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
                                             intptr_t stub_ix) {
  // Calls do not need stubs, they share a deoptimization trampoline.
  ASSERT(reason() != ICData::kDeoptAtCall);
  Assembler* assembler = compiler->assembler();
#define __ assembler->
  __ Comment("%s", Name());
  __ Bind(entry_label());
  if (FLAG_trap_on_deoptimization) {
    // Debug aid: stop in the debugger at every deoptimization.
    __ bkpt(0);
  }

  // A deoptimization environment must exist to rebuild the unoptimized frame.
  ASSERT(deopt_env() != NULL);

  // LR may be live. It will be clobbered by BranchLink, so cache it in IP.
  // It will be restored at the top of the deoptimization stub, specifically in
  // GenerateDeoptimizationSequence in stub_code_arm.cc.
  __ Push(CODE_REG);
  __ mov(IP, Operand(LR));
  __ BranchLink(*StubCode::Deoptimize_entry());
  // Record where this stub's code ends so deopt info can be mapped to it.
  set_pc_offset(assembler->CodeSize());
#undef __
}
197 | 209 |
198 | 210 |
199 #define __ assembler()-> | 211 #define __ assembler()-> |
200 | 212 |
201 | 213 |
202 // Fall through if bool_register contains null. | 214 // Fall through if bool_register contains null. |
203 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, | 215 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, |
204 Label* is_true, | 216 Label* is_true, |
(...skipping 1046 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1251 locs); | 1263 locs); |
1252 __ Drop(argument_count); | 1264 __ Drop(argument_count); |
1253 } | 1265 } |
1254 | 1266 |
1255 | 1267 |
// Emits a megamorphic instance call through the MegamorphicCache for
// ic_data's target name / arguments descriptor, then records the safepoint
// and the PC descriptors appropriate for the current compilation mode.
// try_index associates the call with an exception handler when the call is
// emitted from a slow-path stub (pass CatchClauseNode::kInvalidTryIndex to
// fall back to the compiler's current try index).
void FlowGraphCompiler::EmitMegamorphicInstanceCall(
    const ICData& ic_data,
    intptr_t argument_count,
    intptr_t deopt_id,
    intptr_t token_pos,
    LocationSummary* locs,
    intptr_t try_index) {
  const String& name = String::Handle(zone(), ic_data.target_name());
  const Array& arguments_descriptor =
      Array::ZoneHandle(zone(), ic_data.arguments_descriptor());
  ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0));
  // Look up (or create) the cache keyed by selector name + descriptor.
  const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(zone(),
      MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor));

  __ Comment("MegamorphicCall");
  // Load the receiver (last pushed argument group's first entry) into R0 and
  // the cache into R9 — presumably the lookup stub's expected input registers;
  // confirm against MegamorphicLookup in stub_code_arm.cc.
  __ LoadFromOffset(kWord, R0, SP, (argument_count - 1) * kWordSize);
  __ LoadObject(R9, cache);
  if (FLAG_use_megamorphic_stub) {
    __ BranchLink(*StubCode::MegamorphicLookup_entry());
  } else {
    StubCode::EmitMegamorphicLookup(assembler());
  }
  // The lookup leaves the target entry point in R1; call it.
  __ blx(R1);

  RecordSafepoint(locs);
  const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
  if (Compiler::always_optimize()) {
    // Megamorphic calls may occur in slow path stubs.
    // If valid use try_index argument.
    if (try_index == CatchClauseNode::kInvalidTryIndex) {
      try_index = CurrentTryIndex();
    }
    // Emit the descriptor directly so the explicit try_index is recorded.
    pc_descriptors_list()->AddDescriptor(RawPcDescriptors::kOther,
                                         assembler()->CodeSize(),
                                         Thread::kNoDeoptId,
                                         token_pos,
                                         try_index);
  } else if (is_optimizing()) {
    AddCurrentDescriptor(RawPcDescriptors::kOther,
                         Thread::kNoDeoptId, token_pos);
    // Optimized code records a deopt index instead of a kDeopt descriptor.
    AddDeoptIndexAtCall(deopt_id_after, token_pos);
  } else {
    AddCurrentDescriptor(RawPcDescriptors::kOther,
                         Thread::kNoDeoptId, token_pos);
    // Add deoptimization continuation point after the call and before the
    // arguments are removed.
    AddCurrentDescriptor(RawPcDescriptors::kDeopt,
                         deopt_id_after, token_pos);
  }
  // Pop the pushed arguments.
  __ Drop(argument_count);
}
1292 | 1319 |
1293 | 1320 |
1294 void FlowGraphCompiler::EmitSwitchableInstanceCall( | 1321 void FlowGraphCompiler::EmitSwitchableInstanceCall( |
(...skipping 615 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1910 DRegister dreg = EvenDRegisterOf(reg); | 1937 DRegister dreg = EvenDRegisterOf(reg); |
1911 __ vldrd(dreg, Address(SP, kDoubleSize, Address::PostIndex)); | 1938 __ vldrd(dreg, Address(SP, kDoubleSize, Address::PostIndex)); |
1912 } | 1939 } |
1913 | 1940 |
1914 | 1941 |
1915 #undef __ | 1942 #undef __ |
1916 | 1943 |
1917 } // namespace dart | 1944 } // namespace dart |
1918 | 1945 |
1919 #endif // defined TARGET_ARCH_ARM | 1946 #endif // defined TARGET_ARCH_ARM |
OLD | NEW |