// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/globals.h"  // Needed here to get TARGET_ARCH_DBC.
#if defined(TARGET_ARCH_DBC)

#include "vm/intermediate_language.h"

#include "vm/compiler.h"
#include "vm/cpu.h"
#include "vm/dart_entry.h"
#include "vm/flow_graph.h"
#include "vm/flow_graph_compiler.h"
#include "vm/flow_graph_range_analysis.h"
#include "vm/locations.h"
#include "vm/object_store.h"
#include "vm/parser.h"
#include "vm/simulator.h"
#include "vm/stack_frame.h"
#include "vm/stub_code.h"
#include "vm/symbols.h"

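// Shorthand used throughout this file: `__ Op(...)` emits through the
// current compiler's assembler.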
#define __ compiler->assembler()->

namespace dart {

DECLARE_FLAG(bool, allow_absolute_addresses);
DECLARE_FLAG(bool, emit_edge_counters);
DECLARE_FLAG(int, optimization_counter_threshold);

// List of instructions that are still unimplemented by the DBC backend.
#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  M(Stop) \
  M(IndirectGoto) \
  M(LoadCodeUnits) \
  M(InstanceOf) \
  M(LoadUntagged) \
  M(AllocateUninitializedContext) \
  M(BinaryInt32Op) \
  M(UnarySmiOp) \
  M(UnaryDoubleOp) \
  M(SmiToDouble) \
  M(Int32ToDouble) \
  M(MintToDouble) \
  M(DoubleToInteger) \
  M(DoubleToSmi) \
  M(DoubleToDouble) \
  M(DoubleToFloat) \
  M(FloatToDouble) \
  M(UnboxedConstant) \
  M(CheckEitherNonSmi) \
  M(BinaryDoubleOp) \
  M(MathUnary) \
  M(MathMinMax) \
  M(Box) \
  M(Unbox) \
  M(BoxInt64) \
  M(CaseInsensitiveCompareUC16) \
  M(BinaryMintOp) \
  M(ShiftMintOp) \
  M(UnaryMintOp) \
  M(StringToCharCode) \
  M(StringFromCharCode) \
  M(InvokeMathCFunction) \
  M(MergedMath) \
  M(GuardFieldClass) \
  M(GuardFieldLength) \
  M(IfThenElse) \
  M(BinaryFloat32x4Op) \
  M(Simd32x4Shuffle) \
  M(Simd32x4ShuffleMix) \
  M(Simd32x4GetSignMask) \
  M(Float32x4Constructor) \
  M(Float32x4Zero) \
  M(Float32x4Splat) \
  M(Float32x4Comparison) \
  M(Float32x4MinMax) \
  M(Float32x4Scale) \
  M(Float32x4Sqrt) \
  M(Float32x4ZeroArg) \
  M(Float32x4Clamp) \
  M(Float32x4With) \
  M(Float32x4ToInt32x4) \
  M(Int32x4Constructor) \
  M(Int32x4BoolConstructor) \
  M(Int32x4GetFlag) \
  M(Int32x4Select) \
  M(Int32x4SetFlag) \
  M(Int32x4ToFloat32x4) \
  M(BinaryInt32x4Op) \
  M(TestCids) \
  M(BinaryFloat64x2Op) \
  M(Float64x2Zero) \
  M(Float64x2Constructor) \
  M(Float64x2Splat) \
  M(Float32x4ToFloat64x2) \
  M(Float64x2ToFloat32x4) \
  M(Simd64x2Shuffle) \
  M(Float64x2ZeroArg) \
  M(Float64x2OneArg) \
  M(ExtractNthOutput) \
  M(BinaryUint32Op) \
  M(ShiftUint32Op) \
  M(UnaryUint32Op) \
  M(UnboxedIntConverter) \
  M(GrowRegExpStack) \
  M(BoxInteger32) \
  M(UnboxInteger32) \
  M(CheckedSmiOp) \
  M(CheckArrayBound) \
  M(CheckSmi) \
  M(LoadClassId) \
  M(CheckClassId) \
  M(CheckClass) \
  M(BinarySmiOp) \
  M(TestSmi) \
  M(RelationalOp) \
  M(EqualityCompare) \
  M(LoadIndexed)

// Location summaries are actually not used by the unoptimizing DBC compiler
// because we don't allocate any registers.
static LocationSummary* CreateLocationSummary(Zone* zone,
                                              intptr_t num_inputs,
                                              bool has_result) {
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new(zone) LocationSummary(
      zone, num_inputs, kNumTemps, LocationSummary::kNoCall);
  for (intptr_t i = 0; i < num_inputs; i++) {
    locs->set_in(i, Location::RequiresRegister());
  }
  if (has_result) {
    locs->set_out(0, Location::RequiresRegister());
  }
  return locs;
}


#define DEFINE_MAKE_LOCATION_SUMMARY(Name, In, Out) \
  LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
      const { \
    return CreateLocationSummary(zone, In, Out); \
  }

#define EMIT_NATIVE_CODE(Name, In, Out) \
  DEFINE_MAKE_LOCATION_SUMMARY(Name, In, Out); \
  void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler)

#define DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \
  LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
      const { \
    UNIMPLEMENTED(); \
    return NULL; \
  }

#define DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name) \
  void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) { \
    UNIMPLEMENTED(); \
  }

#define DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(Name) \
  void Name##Instr::EmitBranchCode(FlowGraphCompiler*, BranchInstr*) { \
    UNIMPLEMENTED(); \
  } \
  Condition Name##Instr::EmitComparisonCode(FlowGraphCompiler*, \
                                            BranchLabels) { \
    UNIMPLEMENTED(); \
    return EQ; \
  }

#define DEFINE_UNIMPLEMENTED(Name) \
  DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \
  DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name)

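// For illustration, DEFINE_UNIMPLEMENTED(Stop) expands to roughly:
//
//   LocationSummary* StopInstr::MakeLocationSummary(Zone* zone, bool opt)
//       const {
//     UNIMPLEMENTED();
//     return NULL;
//   }
//   void StopInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
//     UNIMPLEMENTED();
//   }
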
FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED)

#undef DEFINE_UNIMPLEMENTED

DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestCids)
DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestSmi)
DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(RelationalOp)
DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(EqualityCompare)


DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, 2, true);


EMIT_NATIVE_CODE(AssertBoolean, 1, true) {
  __ AssertBoolean(Isolate::Current()->type_checks() ? 1 : 0);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 deopt_id(),
                                 token_pos());
}


LocationSummary* PolymorphicInstanceCallInstr::MakeLocationSummary(
    Zone* zone, bool optimizing) const {
  return MakeCallSummary(zone);
}


void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  UNIMPLEMENTED();
}


EMIT_NATIVE_CODE(CheckStackOverflow, 0, false) {
  __ CheckStack();
  compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                                 Thread::kNoDeoptId,
                                 token_pos());
}

EMIT_NATIVE_CODE(PushArgument, 1, false) {
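  // In unoptimized code the argument is presumably already on the interpreter
  // stack, so an explicit push is only needed when registers are allocated.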
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
  }
}


EMIT_NATIVE_CODE(LoadLocal, 0, false) {
  ASSERT(!compiler->is_optimizing());
  ASSERT(local().index() != 0);
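  // local().index() uses the VM's frame-index convention (positive for
  // parameters, non-positive for locals); map it to a DBC frame-relative
  // slot, mirroring StoreLocal/PopLocal below.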
  __ Push((local().index() > 0) ? (-local().index()) : (-local().index() - 1));
}


EMIT_NATIVE_CODE(StoreLocal, 0, false) {
  ASSERT(!compiler->is_optimizing());
  ASSERT(local().index() != 0);
  if (HasTemp()) {
    __ StoreLocal(
        (local().index() > 0) ? (-local().index()) : (-local().index() - 1));
  } else {
    __ PopLocal(
        (local().index() > 0) ? (-local().index()) : (-local().index() - 1));
  }
}


EMIT_NATIVE_CODE(Constant, 0, true) {
  const intptr_t kidx = __ AddConstant(value());
  if (compiler->is_optimizing()) {
    __ LoadConstant(locs()->out(0).reg(), kidx);
  } else {
    __ PushConstant(kidx);
  }
}


EMIT_NATIVE_CODE(Return, 1, false) {
  __ ReturnTOS();
}


EMIT_NATIVE_CODE(StoreStaticField, 1, false) {
  const intptr_t kidx = __ AddConstant(field());
  __ StoreStaticTOS(kidx);
}


EMIT_NATIVE_CODE(LoadStaticField, 1, true) {
  const intptr_t kidx = __ AddConstant(StaticField());
  __ PushStatic(kidx);
}


EMIT_NATIVE_CODE(InitStaticField, 0, false) {
  ASSERT(!compiler->is_optimizing());
  __ InitStaticTOS();
}


EMIT_NATIVE_CODE(ClosureCall, 0, false) {
  intptr_t argument_count = ArgumentCount();
  const Array& arguments_descriptor =
      Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
                                                 argument_names()));
  const intptr_t argdesc_kidx =
      compiler->assembler()->AddConstant(arguments_descriptor);
  __ StaticCall(argument_count, argdesc_kidx);

  compiler->RecordSafepoint(locs());
  // Marks either the continuation point in unoptimized code or the
  // deoptimization point in optimized code, after the call.
  const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id());
  if (compiler->is_optimizing()) {
    compiler->AddDeoptIndexAtCall(deopt_id_after, token_pos());
  }
  // Add a deoptimization continuation point after the call and before the
  // arguments are removed. In optimized code this descriptor is needed for
  // exception handling.
  compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
                                 deopt_id_after,
                                 token_pos());
}


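// Note: the If* comparison bytecodes emitted by EmitComparisonCode below are
// assumed to skip the immediately following instruction when the condition
// fails, so the unconditional-looking Jumps here act as conditional branches.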
static void EmitBranchOnCondition(FlowGraphCompiler* compiler,
                                  Condition true_condition,
                                  BranchLabels labels) {
  if (labels.fall_through == labels.false_label) {
    // If the next block is the false successor, fall through to it.
    __ Jump(labels.true_label);
  } else {
    // If the next block is not the false successor, branch to it.
    __ Jump(labels.false_label);

    // Fall through or jump to the true successor.
    if (labels.fall_through != labels.true_label) {
      __ Jump(labels.true_label);
    }
  }
}


Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
                                                 BranchLabels labels) {
  ASSERT((kind() == Token::kNE_STRICT) ||
         (kind() == Token::kEQ_STRICT));
  const Bytecode::Opcode eq_op = needs_number_check() ?
      Bytecode::kIfEqStrictNumTOS : Bytecode::kIfEqStrictTOS;
  const Bytecode::Opcode ne_op = needs_number_check() ?
      Bytecode::kIfNeStrictNumTOS : Bytecode::kIfNeStrictTOS;

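  // The comparison bytecode conditionally skips the next instruction, i.e.
  // the first Jump emitted by EmitBranchOnCondition, so pick eq vs. ne based
  // on which label is the fall-through.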
  if (kind() == Token::kEQ_STRICT) {
    __ Emit((labels.fall_through == labels.false_label) ? eq_op : ne_op);
  } else {
    __ Emit((labels.fall_through == labels.false_label) ? ne_op : eq_op);
  }

  if (needs_number_check() && token_pos().IsReal()) {
    compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                                   Thread::kNoDeoptId,
                                   token_pos());
  }
  return EQ;
}


void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
                                        BranchInstr* branch) {
  ASSERT((kind() == Token::kEQ_STRICT) ||
         (kind() == Token::kNE_STRICT));

  BranchLabels labels = compiler->CreateBranchLabels(branch);
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);
}


EMIT_NATIVE_CODE(StrictCompare, 2, true) {
  ASSERT((kind() == Token::kEQ_STRICT) ||
         (kind() == Token::kNE_STRICT));

  Label is_true, is_false;
  BranchLabels labels = { &is_true, &is_false, &is_false };
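  // Using is_false as the fall-through makes EmitBranchOnCondition emit a
  // single conditional jump to is_true, falling through to the false case.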
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);
  Label done;
  __ Bind(&is_false);
  __ PushConstant(Bool::False());
  __ Jump(&done);
  __ Bind(&is_true);
  __ PushConstant(Bool::True());
  __ Bind(&done);
}


LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone,
                                                  bool opt) const {
  comparison()->InitializeLocationSummary(zone, opt);
  // Branches don't produce a result.
  comparison()->locs()->set_out(0, Location::NoLocation());
  return comparison()->locs();
}


void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  comparison()->EmitBranchCode(compiler, this);
}


EMIT_NATIVE_CODE(Goto, 0, false) {
  if (HasParallelMove()) {
    compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
  }
  // We can fall through if the successor is the next block in the list.
  // Otherwise, we need a jump.
  if (!compiler->CanFallThroughTo(successor())) {
    __ Jump(compiler->GetJumpLabel(successor()));
  }
}


EMIT_NATIVE_CODE(CreateArray, 2, true) {
  __ CreateArrayTOS();
}


EMIT_NATIVE_CODE(StoreIndexed, 3, false) {
  ASSERT(class_id() == kArrayCid);
  __ StoreIndexedTOS();
}


EMIT_NATIVE_CODE(StringInterpolate, 0, false) {
  const intptr_t kArgumentCount = 1;
  const Array& arguments_descriptor = Array::Handle(
      ArgumentsDescriptor::New(kArgumentCount, Object::null_array()));
  __ PushConstant(CallFunction());
  const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor);
  __ StaticCall(kArgumentCount, argdesc_kidx);
}


EMIT_NATIVE_CODE(NativeCall, 0, false) {
  SetupNative();

  const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function());

  ASSERT(!link_lazily());
  const ExternalLabel label(reinterpret_cast<uword>(native_c_function()));
  const intptr_t target_kidx =
      __ object_pool_wrapper().FindImmediate(label.address());
  const intptr_t argc_tag_kidx =
      __ object_pool_wrapper().FindImmediate(static_cast<uword>(argc_tag));
  __ PushConstant(target_kidx);
  __ PushConstant(argc_tag_kidx);
  if (is_bootstrap_native()) {
    __ NativeBootstrapCall();
  } else {
    __ NativeCall();
  }
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 Thread::kNoDeoptId,
                                 token_pos());
}


EMIT_NATIVE_CODE(AllocateObject, 0, true) {
  if (ArgumentCount() == 1) {
    __ PushConstant(cls());
    __ AllocateT();
  } else {
    const intptr_t kidx = __ AddConstant(cls());
    __ Allocate(kidx);
  }
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 Thread::kNoDeoptId,
                                 token_pos());
}


EMIT_NATIVE_CODE(StoreInstanceField, 2, false) {
  ASSERT(!HasTemp());
  ASSERT(offset_in_bytes() % kWordSize == 0);
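  // DBC addresses fields by word index rather than byte offset, hence the
  // offset_in_bytes() / kWordSize conversions below.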
  if (compiler->is_optimizing()) {
    const Register value = locs()->in(1).reg();
    const Register instance = locs()->in(0).reg();
    __ StoreField(instance, offset_in_bytes() / kWordSize, value);
  } else {
    __ StoreFieldTOS(offset_in_bytes() / kWordSize);
  }
}


EMIT_NATIVE_CODE(LoadField, 1, true) {
  ASSERT(offset_in_bytes() % kWordSize == 0);
  __ LoadFieldTOS(offset_in_bytes() / kWordSize);
}


EMIT_NATIVE_CODE(BooleanNegate, 1, true) {
  __ BooleanNegateTOS();
}


EMIT_NATIVE_CODE(AllocateContext, 0, false) {
  __ AllocateContext(num_context_variables());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 Thread::kNoDeoptId,
                                 token_pos());
}


EMIT_NATIVE_CODE(CloneContext, 0, false) {
  __ CloneContext();
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 Thread::kNoDeoptId,
                                 token_pos());
}


EMIT_NATIVE_CODE(CatchBlockEntry, 0, false) {
  __ Bind(compiler->GetJumpLabel(this));
  compiler->AddExceptionHandler(catch_try_index(),
                                try_index(),
                                compiler->assembler()->CodeSize(),
                                catch_handler_types_,
                                needs_stacktrace());
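  // Move the pending exception and stack trace from the Simulator's special
  // values into the frame slots of the catch handler's local variables.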
  __ MoveSpecial(-exception_var().index() - 1,
                 Simulator::kExceptionSpecialIndex);
  __ MoveSpecial(-stacktrace_var().index() - 1,
                 Simulator::kStacktraceSpecialIndex);
  __ SetFrame(compiler->StackSize());
}


EMIT_NATIVE_CODE(Throw, 0, false) {
  __ Throw(0);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 deopt_id(),
                                 token_pos());
  __ Trap();
}


EMIT_NATIVE_CODE(ReThrow, 0, false) {
  compiler->SetNeedsStacktrace(catch_try_index());
  __ Throw(1);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 deopt_id(),
                                 token_pos());
  __ Trap();
}


EMIT_NATIVE_CODE(InstantiateType, 1, true) {
  __ InstantiateType(__ AddConstant(type()));
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 deopt_id(),
                                 token_pos());
}


EMIT_NATIVE_CODE(InstantiateTypeArguments, 1, true) {
  __ InstantiateTypeArgumentsTOS(
      type_arguments().IsRawInstantiatedRaw(type_arguments().Length()),
      __ AddConstant(type_arguments()));
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 deopt_id(),
                                 token_pos());
}


void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  __ DebugStep();
  compiler->AddCurrentDescriptor(stub_kind_, Thread::kNoDeoptId, token_pos());
}


void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  if (!compiler->CanFallThroughTo(normal_entry())) {
    __ Jump(compiler->GetJumpLabel(normal_entry()));
  }
}


LocationSummary* Instruction::MakeCallSummary(Zone* zone) {
  LocationSummary* result = new(zone) LocationSummary(
      zone, 0, 0, LocationSummary::kCall);
  result->set_out(0, Location::RequiresRegister());
  return result;
}


CompileType BinaryUint32OpInstr::ComputeType() const {
  return CompileType::Int();
}


CompileType ShiftUint32OpInstr::ComputeType() const {
  return CompileType::Int();
}


CompileType UnaryUint32OpInstr::ComputeType() const {
  return CompileType::Int();
}


static const intptr_t kMintShiftCountLimit = 63;


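// A Mint shift count must be checked unless range analysis has already proven
// it to lie within [0, kMintShiftCountLimit] (63, since Mints are 64-bit).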
bool ShiftMintOpInstr::has_shift_count_check() const {
  return !RangeUtils::IsWithin(
      right()->definition()->range(), 0, kMintShiftCountLimit);
}


CompileType LoadIndexedInstr::ComputeType() const {
  switch (class_id_) {
    case kArrayCid:
    case kImmutableArrayCid:
      return CompileType::Dynamic();

    case kTypedDataFloat32ArrayCid:
    case kTypedDataFloat64ArrayCid:
      return CompileType::FromCid(kDoubleCid);
    case kTypedDataFloat32x4ArrayCid:
      return CompileType::FromCid(kFloat32x4Cid);
    case kTypedDataInt32x4ArrayCid:
      return CompileType::FromCid(kInt32x4Cid);
    case kTypedDataFloat64x2ArrayCid:
      return CompileType::FromCid(kFloat64x2Cid);

    case kTypedDataInt8ArrayCid:
    case kTypedDataUint8ArrayCid:
    case kTypedDataUint8ClampedArrayCid:
    case kExternalTypedDataUint8ArrayCid:
    case kExternalTypedDataUint8ClampedArrayCid:
    case kTypedDataInt16ArrayCid:
    case kTypedDataUint16ArrayCid:
    case kOneByteStringCid:
    case kTwoByteStringCid:
      return CompileType::FromCid(kSmiCid);

    case kTypedDataInt32ArrayCid:
    case kTypedDataUint32ArrayCid:
      return CompileType::Int();

    default:
      UNREACHABLE();
      return CompileType::Dynamic();
  }
}


Representation LoadIndexedInstr::representation() const {
  switch (class_id_) {
    case kArrayCid:
    case kImmutableArrayCid:
    case kTypedDataInt8ArrayCid:
    case kTypedDataUint8ArrayCid:
    case kTypedDataUint8ClampedArrayCid:
    case kExternalTypedDataUint8ArrayCid:
    case kExternalTypedDataUint8ClampedArrayCid:
    case kTypedDataInt16ArrayCid:
    case kTypedDataUint16ArrayCid:
    case kOneByteStringCid:
    case kTwoByteStringCid:
      return kTagged;
    case kTypedDataInt32ArrayCid:
      return kUnboxedInt32;
    case kTypedDataUint32ArrayCid:
      return kUnboxedUint32;
    case kTypedDataFloat32ArrayCid:
    case kTypedDataFloat64ArrayCid:
      return kUnboxedDouble;
    case kTypedDataInt32x4ArrayCid:
      return kUnboxedInt32x4;
    case kTypedDataFloat32x4ArrayCid:
      return kUnboxedFloat32x4;
    case kTypedDataFloat64x2ArrayCid:
      return kUnboxedFloat64x2;
    default:
      UNREACHABLE();
      return kTagged;
  }
}


Representation StoreIndexedInstr::RequiredInputRepresentation(
    intptr_t idx) const {
  // Array can be a Dart object or a pointer to external data.
  if (idx == 0) return kNoRepresentation;  // Flexible input representation.
  if (idx == 1) return kTagged;  // Index is a smi.
  ASSERT(idx == 2);
  switch (class_id_) {
    case kArrayCid:
    case kOneByteStringCid:
    case kTypedDataInt8ArrayCid:
    case kTypedDataUint8ArrayCid:
    case kExternalTypedDataUint8ArrayCid:
    case kTypedDataUint8ClampedArrayCid:
    case kExternalTypedDataUint8ClampedArrayCid:
    case kTypedDataInt16ArrayCid:
    case kTypedDataUint16ArrayCid:
      return kTagged;
    case kTypedDataInt32ArrayCid:
      return kUnboxedInt32;
    case kTypedDataUint32ArrayCid:
      return kUnboxedUint32;
    case kTypedDataFloat32ArrayCid:
    case kTypedDataFloat64ArrayCid:
      return kUnboxedDouble;
    case kTypedDataFloat32x4ArrayCid:
      return kUnboxedFloat32x4;
    case kTypedDataInt32x4ArrayCid:
      return kUnboxedInt32x4;
    case kTypedDataFloat64x2ArrayCid:
      return kUnboxedFloat64x2;
    default:
      UNREACHABLE();
      return kTagged;
  }
}

}  // namespace dart

#endif  // defined(TARGET_ARCH_DBC)