Chromium Code Reviews

Side by Side Diff: runtime/vm/intermediate_language_arm.cc

Issue 504143003: Support Int32 representation for selected binary operations. (Closed) Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Created 6 years, 3 months ago
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM.
6 #if defined(TARGET_ARCH_ARM) 6 #if defined(TARGET_ARCH_ARM)
7 7
8 #include "vm/intermediate_language.h" 8 #include "vm/intermediate_language.h"
9 9
10 #include "vm/cpu.h" 10 #include "vm/cpu.h"
(...skipping 287 matching lines...)
298 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); 298 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
299 locs->set_out(0, Location::RequiresFpuRegister()); 299 locs->set_out(0, Location::RequiresFpuRegister());
300 locs->set_temp(0, Location::RequiresRegister()); 300 locs->set_temp(0, Location::RequiresRegister());
301 return locs; 301 return locs;
302 } 302 }
303 303
304 304
305 void UnboxedConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 305 void UnboxedConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
306 // The register allocator drops constant definitions that have no uses. 306 // The register allocator drops constant definitions that have no uses.
307 if (!locs()->out(0).IsInvalid()) { 307 if (!locs()->out(0).IsInvalid()) {
308 if (Utils::DoublesBitEqual(Double::Cast(value()).value(), 0.0) && 308 switch (representation_) {
309 TargetCPUFeatures::neon_supported()) { 309 case kUnboxedDouble:
310 const QRegister dst = locs()->out(0).fpu_reg(); 310 if (Utils::DoublesBitEqual(Double::Cast(value()).value(), 0.0) &&
311 __ veorq(dst, dst, dst); 311 TargetCPUFeatures::neon_supported()) {
312 } else { 312 const QRegister dst = locs()->out(0).fpu_reg();
313 const DRegister dst = EvenDRegisterOf(locs()->out(0).fpu_reg()); 313 __ veorq(dst, dst, dst);
314 const Register temp = locs()->temp(0).reg(); 314 } else {
315 __ LoadDImmediate(dst, Double::Cast(value()).value(), temp); 315 const DRegister dst = EvenDRegisterOf(locs()->out(0).fpu_reg());
316 const Register temp = locs()->temp(0).reg();
317 __ LoadDImmediate(dst, Double::Cast(value()).value(), temp);
318 }
319 break;
320 case kUnboxedInt32:
321 __ LoadImmediate(locs()->out(0).reg(), Smi::Cast(value()).Value());
322 break;
323 default:
324 UNREACHABLE();
325 break;
316 } 326 }
317 } 327 }
318 } 328 }
319 329
320 330
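A note on the kUnboxedDouble fast path retained above: veorq of a register with itself only produces the all-zero bit pattern, so the check compares bit patterns rather than numeric values; -0.0 == 0.0 numerically but has a different bit pattern and must go through LoadDImmediate. The new kUnboxedInt32 case simply materializes the untagged integer value of the Smi constant with LoadImmediate. A minimal sketch of the bit-level comparison, with a hypothetical helper name (not the VM's Utils::DoublesBitEqual implementation):

    #include <cstdint>
    #include <cstring>

    // Hypothetical stand-in for the bit-level comparison used above.
    static bool DoublesBitEqualSketch(double a, double b) {
      uint64_t raw_a;
      uint64_t raw_b;
      std::memcpy(&raw_a, &a, sizeof(raw_a));
      std::memcpy(&raw_b, &b, sizeof(raw_b));
      return raw_a == raw_b;  // 0.0 vs 0.0 -> true; -0.0 vs 0.0 -> false.
    }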
321 LocationSummary* AssertAssignableInstr::MakeLocationSummary(Isolate* isolate, 331 LocationSummary* AssertAssignableInstr::MakeLocationSummary(Isolate* isolate,
322 bool opt) const { 332 bool opt) const {
323 const intptr_t kNumInputs = 3; 333 const intptr_t kNumInputs = 3;
324 const intptr_t kNumTemps = 0; 334 const intptr_t kNumTemps = 0;
325 LocationSummary* summary = new(isolate) LocationSummary( 335 LocationSummary* summary = new(isolate) LocationSummary(
(...skipping 853 matching lines...)
1179 const intptr_t kNumInputs = 2; 1189 const intptr_t kNumInputs = 2;
1180 const intptr_t kNumTemps = 0; 1190 const intptr_t kNumTemps = 0;
1181 LocationSummary* locs = new(isolate) LocationSummary( 1191 LocationSummary* locs = new(isolate) LocationSummary(
1182 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); 1192 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
1183 locs->set_in(0, Location::RequiresRegister()); 1193 locs->set_in(0, Location::RequiresRegister());
1184 bool needs_base = false; 1194 bool needs_base = false;
1185 if (CanBeImmediateIndex(index(), class_id(), IsExternal(), 1195 if (CanBeImmediateIndex(index(), class_id(), IsExternal(),
1186 true, // Load. 1196 true, // Load.
1187 &needs_base)) { 1197 &needs_base)) {
1188 // CanBeImmediateIndex must return false for unsafe smis. 1198 // CanBeImmediateIndex must return false for unsafe smis.
1189 locs->set_in(1, Location::Constant(index()->BoundConstant())); 1199 locs->set_in(1, Location::Constant(index()->definition()->AsConstant()));
1190 } else { 1200 } else {
1191 locs->set_in(1, Location::RequiresRegister()); 1201 locs->set_in(1, Location::RequiresRegister());
1192 } 1202 }
1193 if ((representation() == kUnboxedDouble) || 1203 if ((representation() == kUnboxedDouble) ||
1194 (representation() == kUnboxedFloat32x4) || 1204 (representation() == kUnboxedFloat32x4) ||
1195 (representation() == kUnboxedInt32x4) || 1205 (representation() == kUnboxedInt32x4) ||
1196 (representation() == kUnboxedFloat64x2)) { 1206 (representation() == kUnboxedFloat64x2)) {
1197 if (class_id() == kTypedDataFloat32ArrayCid) { 1207 if (class_id() == kTypedDataFloat32ArrayCid) {
1198 // Need register <= Q7 for float operations. 1208 // Need register <= Q7 for float operations.
1199 // TODO(fschneider): Add a register policy to specify a subset of 1209 // TODO(fschneider): Add a register policy to specify a subset of
(...skipping 180 matching lines...)
1380 1390
1381 bool needs_base = false; 1391 bool needs_base = false;
1382 if (CanBeImmediateIndex(index(), class_id(), IsExternal(), 1392 if (CanBeImmediateIndex(index(), class_id(), IsExternal(),
1383 false, // Store. 1393 false, // Store.
1384 &needs_base)) { 1394 &needs_base)) {
1385 const intptr_t kNumTemps = needs_base ? 1 : 0; 1395 const intptr_t kNumTemps = needs_base ? 1 : 0;
1386 locs = new(isolate) LocationSummary( 1396 locs = new(isolate) LocationSummary(
1387 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); 1397 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
1388 1398
1389 // CanBeImmediateIndex must return false for unsafe smis. 1399 // CanBeImmediateIndex must return false for unsafe smis.
1390 locs->set_in(1, Location::Constant(index()->BoundConstant())); 1400 locs->set_in(1, Location::Constant(index()->definition()->AsConstant()));
1391 if (needs_base) { 1401 if (needs_base) {
1392 locs->set_temp(0, Location::RequiresRegister()); 1402 locs->set_temp(0, Location::RequiresRegister());
1393 } 1403 }
1394 } else { 1404 } else {
1395 const intptr_t kNumTemps = 0; 1405 const intptr_t kNumTemps = 0;
1396 locs = new(isolate) LocationSummary( 1406 locs = new(isolate) LocationSummary(
1397 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); 1407 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
1398 1408
1399 locs->set_in(1, Location::WritableRegister()); 1409 locs->set_in(1, Location::WritableRegister());
1400 } 1410 }
(...skipping 1552 matching lines...)
2953 } else if ((op_kind() == Token::kMUL) && 2963 } else if ((op_kind() == Token::kMUL) &&
2954 (TargetCPUFeatures::arm_version() != ARMv7)) { 2964 (TargetCPUFeatures::arm_version() != ARMv7)) {
2955 num_temps = 1; 2965 num_temps = 1;
2956 } 2966 }
2957 LocationSummary* summary = new(isolate) LocationSummary( 2967 LocationSummary* summary = new(isolate) LocationSummary(
2958 isolate, kNumInputs, num_temps, LocationSummary::kNoCall); 2968 isolate, kNumInputs, num_temps, LocationSummary::kNoCall);
2959 if (op_kind() == Token::kTRUNCDIV) { 2969 if (op_kind() == Token::kTRUNCDIV) {
2960 summary->set_in(0, Location::RequiresRegister()); 2970 summary->set_in(0, Location::RequiresRegister());
2961 if (RightIsPowerOfTwoConstant()) { 2971 if (RightIsPowerOfTwoConstant()) {
2962 ConstantInstr* right_constant = right()->definition()->AsConstant(); 2972 ConstantInstr* right_constant = right()->definition()->AsConstant();
2963 summary->set_in(1, Location::Constant(right_constant->value())); 2973 summary->set_in(1, Location::Constant(right_constant));
2964 summary->set_temp(0, Location::RequiresRegister()); 2974 summary->set_temp(0, Location::RequiresRegister());
2965 } else { 2975 } else {
2966 summary->set_in(1, Location::RequiresRegister()); 2976 summary->set_in(1, Location::RequiresRegister());
2967 summary->set_temp(0, Location::RequiresRegister()); 2977 summary->set_temp(0, Location::RequiresRegister());
2968 summary->set_temp(1, Location::RequiresFpuRegister()); 2978 summary->set_temp(1, Location::RequiresFpuRegister());
2969 } 2979 }
2970 summary->set_out(0, Location::RequiresRegister()); 2980 summary->set_out(0, Location::RequiresRegister());
2971 return summary; 2981 return summary;
2972 } 2982 }
2973 if (op_kind() == Token::kMOD) { 2983 if (op_kind() == Token::kMOD) {
(...skipping 343 matching lines...)
3317 UNREACHABLE(); 3327 UNREACHABLE();
3318 break; 3328 break;
3319 } 3329 }
3320 default: 3330 default:
3321 UNREACHABLE(); 3331 UNREACHABLE();
3322 break; 3332 break;
3323 } 3333 }
3324 } 3334 }
3325 3335
3326 3336
3337 static void EmitInt32ShiftLeft(FlowGraphCompiler* compiler,
3338 BinaryInt32OpInstr* shift_left) {
3339 const bool is_truncating = shift_left->IsTruncating();
3340 const LocationSummary& locs = *shift_left->locs();
3341 const Register left = locs.in(0).reg();
3342 const Register result = locs.out(0).reg();
3343 Label* deopt = shift_left->CanDeoptimize() ?
3344 compiler->AddDeoptStub(shift_left->deopt_id(), ICData::kDeoptBinarySmiOp)
3345 : NULL;
3346 ASSERT(locs.in(1).IsConstant());
3347 const Object& constant = locs.in(1).constant();
3348 ASSERT(constant.IsSmi());
3349 // Immediate shift operation takes 5 bits for the count.
3350 const intptr_t kCountLimit = 0x1F;
3351 const intptr_t value = Smi::Cast(constant).Value();
3352 if (value == 0) {
Florian Schneider 2014/08/27 09:36:51 TODO: Shift by 0 constant should be folded away. I
Vyacheslav Egorov (Google) 2014/08/27 11:45:37 Yes, I have this as part of my BinarySmiOpInstr re
3353 __ MoveRegister(result, left);
3354 } else if ((value < 0) || (value >= kCountLimit)) {
3355 // This condition may not be known earlier in some cases because
3356 // of constant propagation, inlining, etc.
3357 if ((value >= kCountLimit) && is_truncating) {
3358 __ mov(result, Operand(0));
3359 } else {
3360 // Result is Mint or exception.
3361 __ b(deopt);
3362 }
3363 } else {
3364 if (!is_truncating) {
3365 // Check for overflow (preserve left).
3366 __ Lsl(IP, left, value);
3367 __ cmp(left, Operand(IP, ASR, value));
3368 __ b(deopt, NE); // Overflow.
3369 }
3370 // Shift for result now we know there is no overflow.
3371 __ Lsl(result, left, value);
3372 }
3373 }
3374
3375
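The overflow check in the non-truncating path above relies on a left shift being lossless exactly when shifting the result arithmetically back to the right reproduces the original value. A rough C++ equivalent of that path (a sketch with made-up names, assuming arithmetic right shift of signed values as on ARM):

    #include <cstdint>

    // Returns false in the case where the generated code would branch to deopt.
    static bool ShiftLeftChecked(int32_t left, intptr_t count, int32_t* result) {
      const int32_t shifted = static_cast<int32_t>(
          static_cast<uint32_t>(left) << count);    // __ Lsl(IP, left, value)
      if ((shifted >> count) != left) {             // __ cmp(left, Operand(IP, ASR, value))
        return false;                               // __ b(deopt, NE)  -- overflow
      }
      *result = shifted;                            // __ Lsl(result, left, value)
      return true;
    }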
3376 LocationSummary* BinaryInt32OpInstr::MakeLocationSummary(Isolate* isolate,
3377 bool opt) const {
3378 const intptr_t kNumInputs = 2;
3379 // Calculate number of temporaries.
3380 intptr_t num_temps = 0;
3381 if (((op_kind() == Token::kSHL) && !IsTruncating()) ||
3382 (op_kind() == Token::kSHR)) {
3383 num_temps = 1;
3384 } else if ((op_kind() == Token::kMUL) &&
3385 (TargetCPUFeatures::arm_version() != ARMv7)) {
3386 num_temps = 1;
3387 }
3388 LocationSummary* summary = new(isolate) LocationSummary(
3389 isolate, kNumInputs, num_temps, LocationSummary::kNoCall);
3390 summary->set_in(0, Location::RequiresRegister());
3391 summary->set_in(1, Location::RegisterOrSmiConstant(right()));
3392 if (((op_kind() == Token::kSHL) && !IsTruncating()) ||
3393 (op_kind() == Token::kSHR)) {
3394 summary->set_temp(0, Location::RequiresRegister());
3395 }
3396 if (op_kind() == Token::kMUL) {
3397 if (TargetCPUFeatures::arm_version() != ARMv7) {
3398 summary->set_temp(0, Location::RequiresFpuRegister());
3399 }
3400 }
3401 // We make use of 3-operand instructions by not requiring result register
3402 // to be identical to first input register as on Intel.
3403 summary->set_out(0, Location::RequiresRegister());
3404 return summary;
3405 }
3406
3407
3408 void BinaryInt32OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
3409 if (op_kind() == Token::kSHL) {
3410 EmitInt32ShiftLeft(compiler, this);
3411 return;
3412 }
3413
3414 const Register left = locs()->in(0).reg();
3415 const Register result = locs()->out(0).reg();
3416 Label* deopt = NULL;
3417 if (CanDeoptimize()) {
3418 deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp);
3419 }
3420
3421 if (locs()->in(1).IsConstant()) {
3422 const Object& constant = locs()->in(1).constant();
3423 ASSERT(constant.IsSmi());
3424 const int32_t value = Smi::Cast(constant).Value();
3425 switch (op_kind()) {
3426 case Token::kADD: {
3427 if (deopt == NULL) {
3428 __ AddImmediate(result, left, value);
3429 } else {
3430 __ AddImmediateSetFlags(result, left, value);
3431 __ b(deopt, VS);
3432 }
3433 break;
3434 }
3435 case Token::kSUB: {
3436 if (deopt == NULL) {
3437 __ AddImmediate(result, left, -value);
3438 } else {
3439 // Negating value and using AddImmediateSetFlags would not detect the
3440 // overflow when value == kMinInt32.
3441 __ SubImmediateSetFlags(result, left, value);
3442 __ b(deopt, VS);
3443 }
3444 break;
3445 }
3446 case Token::kMUL: {
3447 if (deopt == NULL) {
3448 if (value == 2) {
3449 __ mov(result, Operand(left, LSL, 1));
3450 } else {
3451 __ LoadImmediate(IP, value);
3452 __ mul(result, left, IP);
3453 }
3454 } else {
3455 if (value == 2) {
3456 __ CompareImmediate(left, 0xC0000000);
3457 __ b(deopt, MI);
3458 __ mov(result, Operand(left, LSL, 1));
3459 } else {
3460 if (TargetCPUFeatures::arm_version() == ARMv7) {
3461 __ LoadImmediate(IP, value);
3462 __ smull(result, IP, left, IP);
3463 // IP: result bits 32..63.
3464 __ cmp(IP, Operand(result, ASR, 31));
3465 __ b(deopt, NE);
3466 } else if (TargetCPUFeatures::can_divide()) {
3467 const QRegister qtmp = locs()->temp(0).fpu_reg();
3468 const DRegister dtmp0 = EvenDRegisterOf(qtmp);
3469 const DRegister dtmp1 = OddDRegisterOf(qtmp);
3470 __ LoadImmediate(IP, value);
3471 __ CheckMultSignedOverflow(left, IP, result, dtmp0, dtmp1, deopt);
3472 __ mul(result, left, IP);
3473 } else {
3474 __ b(deopt);
Florian Schneider 2014/08/27 09:36:51 It seems wrong to always deoptimize in this case.
Vyacheslav Egorov (Google) 2014/08/27 11:45:37 Yes, I was also surprised we do it this way and as
3475 }
3476 }
3477 }
3478 break;
3479 }
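The smull path above detects overflow by computing the full 64-bit product and checking that the high 32 bits are just the sign extension of the low 32 bits; the non-ARMv7 path delegates an equivalent check to CheckMultSignedOverflow using FPU temporaries. A C++ sketch of the underlying test (illustrative only, not VM code):

    #include <cstdint>

    static bool MulChecked(int32_t left, int32_t right, int32_t* result) {
      const int64_t product = static_cast<int64_t>(left) * right;  // __ smull(result, IP, left, IP)
      const int32_t lo = static_cast<int32_t>(product);
      const int32_t hi = static_cast<int32_t>(product >> 32);      // IP: result bits 32..63.
      if (hi != (lo >> 31)) {   // __ cmp(IP, Operand(result, ASR, 31)); __ b(deopt, NE)
        return false;           // Overflow: deoptimize.
      }
      *result = lo;
      return true;
    }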
3480 case Token::kBIT_AND: {
3481 // No overflow check.
3482 Operand o;
3483 if (Operand::CanHold(value, &o)) {
3484 __ and_(result, left, o);
3485 } else if (Operand::CanHold(~value, &o)) {
3486 __ bic(result, left, o);
3487 } else {
3488 __ LoadImmediate(IP, value);
3489 __ and_(result, left, Operand(IP));
3490 }
3491 break;
3492 }
3493 case Token::kBIT_OR: {
3494 // No overflow check.
3495 Operand o;
3496 if (Operand::CanHold(value, &o)) {
3497 __ orr(result, left, o);
3498 } else {
3499 __ LoadImmediate(IP, value);
3500 __ orr(result, left, Operand(IP));
3501 }
3502 break;
3503 }
3504 case Token::kBIT_XOR: {
3505 // No overflow check.
3506 Operand o;
3507 if (Operand::CanHold(value, &o)) {
3508 __ eor(result, left, o);
3509 } else {
3510 __ LoadImmediate(IP, value);
3511 __ eor(result, left, Operand(IP));
3512 }
3513 break;
3514 }
3515 case Token::kSHR: {
3516 // sarl operation masks the count to 5 bits.
3517 const intptr_t kCountLimit = 0x1F;
3518
3519 if (value == 0) {
3520 // TODO(vegorov): should be handled outside.
3521 __ MoveRegister(result, left);
3522 break;
3523 } else if (value < 0) {
3524 // TODO(vegorov): should be handled outside.
3525 __ b(deopt);
3526 break;
3527 }
3528
3529 if (value >= kCountLimit) {
3530 __ Asr(result, left, kCountLimit);
3531 } else {
3532 __ Asr(result, left, value);
3533 }
3534 break;
3535 }
3536
3537 default:
3538 UNREACHABLE();
3539 break;
3540 }
3541 return;
3542 }
3543
3544 const Register right = locs()->in(1).reg();
3545 switch (op_kind()) {
3546 case Token::kADD: {
3547 if (deopt == NULL) {
3548 __ add(result, left, Operand(right));
3549 } else {
3550 __ adds(result, left, Operand(right));
3551 __ b(deopt, VS);
3552 }
3553 break;
3554 }
3555 case Token::kSUB: {
3556 if (deopt == NULL) {
3557 __ sub(result, left, Operand(right));
3558 } else {
3559 __ subs(result, left, Operand(right));
3560 __ b(deopt, VS);
3561 }
3562 break;
3563 }
3564 case Token::kMUL: {
3565 if (deopt == NULL) {
3566 __ mul(result, left, right);
3567 } else {
3568 if (TargetCPUFeatures::arm_version() == ARMv7) {
3569 __ smull(result, IP, left, right);
3570 // IP: result bits 32..63.
3571 __ cmp(IP, Operand(result, ASR, 31));
3572 __ b(deopt, NE);
3573 } else if (TargetCPUFeatures::can_divide()) {
3574 const QRegister qtmp = locs()->temp(0).fpu_reg();
3575 const DRegister dtmp0 = EvenDRegisterOf(qtmp);
3576 const DRegister dtmp1 = OddDRegisterOf(qtmp);
3577 __ CheckMultSignedOverflow(left, right, result, dtmp0, dtmp1, deopt);
3578 __ mul(result, left, right);
3579 } else {
3580 __ b(deopt);
Florian Schneider 2014/08/27 09:36:51 Add TODO here as well.
3581 }
3582 }
3583 break;
3584 }
3585 case Token::kBIT_AND: {
3586 // No overflow check.
3587 __ and_(result, left, Operand(right));
3588 break;
3589 }
3590 case Token::kBIT_OR: {
3591 // No overflow check.
3592 __ orr(result, left, Operand(right));
3593 break;
3594 }
3595 case Token::kBIT_XOR: {
3596 // No overflow check.
3597 __ eor(result, left, Operand(right));
3598 break;
3599 }
3600 default:
3601 UNREACHABLE();
3602 break;
3603 }
3604 }
3605
3606
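For kADD and kSUB the deoptimizing variants simply use the flag-setting instructions and branch on the V (signed overflow) flag; this is also why the constant kSUB case uses SubImmediateSetFlags rather than negating the constant and adding, since negating kMinInt32 itself overflows. A portable sketch of the same checks using the GCC/Clang overflow builtins (illustrative, not VM code):

    #include <cstdint>

    static bool AddChecked(int32_t left, int32_t right, int32_t* result) {
      // __ adds(result, left, Operand(right)); __ b(deopt, VS);
      return !__builtin_add_overflow(left, right, result);
    }

    static bool SubChecked(int32_t left, int32_t right, int32_t* result) {
      // __ subs(result, left, Operand(right)); __ b(deopt, VS);
      return !__builtin_sub_overflow(left, right, result);
    }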
3327 LocationSummary* CheckEitherNonSmiInstr::MakeLocationSummary(Isolate* isolate, 3607 LocationSummary* CheckEitherNonSmiInstr::MakeLocationSummary(Isolate* isolate,
3328 bool opt) const { 3608 bool opt) const {
3329 intptr_t left_cid = left()->Type()->ToCid(); 3609 intptr_t left_cid = left()->Type()->ToCid();
3330 intptr_t right_cid = right()->Type()->ToCid(); 3610 intptr_t right_cid = right()->Type()->ToCid();
3331 ASSERT((left_cid != kDoubleCid) && (right_cid != kDoubleCid)); 3611 ASSERT((left_cid != kDoubleCid) && (right_cid != kDoubleCid));
3332 const intptr_t kNumInputs = 2; 3612 const intptr_t kNumInputs = 2;
3333 const intptr_t kNumTemps = 0; 3613 const intptr_t kNumTemps = 0;
3334 LocationSummary* summary = new(isolate) LocationSummary( 3614 LocationSummary* summary = new(isolate) LocationSummary(
3335 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); 3615 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
3336 summary->set_in(0, Location::RequiresRegister()); 3616 summary->set_in(0, Location::RequiresRegister());
(...skipping 1695 matching lines...)
5032 } 5312 }
5033 5313
5034 5314
5035 void UnaryDoubleOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 5315 void UnaryDoubleOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
5036 const DRegister result = EvenDRegisterOf(locs()->out(0).fpu_reg()); 5316 const DRegister result = EvenDRegisterOf(locs()->out(0).fpu_reg());
5037 const DRegister value = EvenDRegisterOf(locs()->in(0).fpu_reg()); 5317 const DRegister value = EvenDRegisterOf(locs()->in(0).fpu_reg());
5038 __ vnegd(result, value); 5318 __ vnegd(result, value);
5039 } 5319 }
5040 5320
5041 5321
5322 LocationSummary* Int32ToDoubleInstr::MakeLocationSummary(Isolate* isolate,
5323 bool opt) const {
5324 const intptr_t kNumInputs = 1;
5325 const intptr_t kNumTemps = 0;
5326 LocationSummary* result = new(isolate) LocationSummary(
5327 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
5328 result->set_in(0, Location::RequiresRegister());
5329 result->set_out(0, Location::RequiresFpuRegister());
5330 return result;
5331 }
5332
5333
5334 void Int32ToDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
5335 const Register value = locs()->in(0).reg();
5336 const DRegister result = EvenDRegisterOf(locs()->out(0).fpu_reg());
5337 __ vmovsr(STMP, value);
5338 __ vcvtdi(result, STMP);
5339 }
5340
5341
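Int32ToDouble moves the integer into an FPU register (vmovsr) and converts it with vcvtdi, a signed integer to double conversion. No deoptimization is needed because every 32-bit integer is exactly representable as a double; the semantics are just a cast (shown only to make that explicit):

    #include <cstdint>

    static double Int32ToDoubleSketch(int32_t value) {
      // __ vmovsr(STMP, value); __ vcvtdi(result, STMP);
      return static_cast<double>(value);
    }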
5042 LocationSummary* SmiToDoubleInstr::MakeLocationSummary(Isolate* isolate, 5342 LocationSummary* SmiToDoubleInstr::MakeLocationSummary(Isolate* isolate,
5043 bool opt) const { 5343 bool opt) const {
5044 const intptr_t kNumInputs = 1; 5344 const intptr_t kNumInputs = 1;
5045 const intptr_t kNumTemps = 0; 5345 const intptr_t kNumTemps = 0;
5046 LocationSummary* result = new(isolate) LocationSummary( 5346 LocationSummary* result = new(isolate) LocationSummary(
5047 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); 5347 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
5048 result->set_in(0, Location::RequiresRegister()); 5348 result->set_in(0, Location::RequiresRegister());
5049 result->set_out(0, Location::RequiresFpuRegister()); 5349 result->set_out(0, Location::RequiresFpuRegister());
5050 return result; 5350 return result;
5051 } 5351 }
(...skipping 1159 matching lines...)
6211 CompileType ShiftUint32OpInstr::ComputeType() const { 6511 CompileType ShiftUint32OpInstr::ComputeType() const {
6212 return CompileType::Int(); 6512 return CompileType::Int();
6213 } 6513 }
6214 6514
6215 6515
6216 CompileType UnaryUint32OpInstr::ComputeType() const { 6516 CompileType UnaryUint32OpInstr::ComputeType() const {
6217 return CompileType::Int(); 6517 return CompileType::Int();
6218 } 6518 }
6219 6519
6220 6520
6221 CompileType BoxUint32Instr::ComputeType() const {
6222 return CompileType::Int();
6223 }
6224
6225
6226 CompileType UnboxUint32Instr::ComputeType() const {
6227 return CompileType::Int();
6228 }
6229
6230
6231 LocationSummary* BinaryUint32OpInstr::MakeLocationSummary(Isolate* isolate, 6521 LocationSummary* BinaryUint32OpInstr::MakeLocationSummary(Isolate* isolate,
6232 bool opt) const { 6522 bool opt) const {
6233 const intptr_t kNumInputs = 2; 6523 const intptr_t kNumInputs = 2;
6234 const intptr_t kNumTemps = 0; 6524 const intptr_t kNumTemps = 0;
6235 LocationSummary* summary = new(isolate) LocationSummary( 6525 LocationSummary* summary = new(isolate) LocationSummary(
6236 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); 6526 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
6237 summary->set_in(0, Location::RequiresRegister()); 6527 summary->set_in(0, Location::RequiresRegister());
6238 summary->set_in(1, Location::RequiresRegister()); 6528 summary->set_in(1, Location::RequiresRegister());
6239 summary->set_out(0, Location::RequiresRegister()); 6529 summary->set_out(0, Location::RequiresRegister());
6240 return summary; 6530 return summary;
(...skipping 232 matching lines...)
6473 __ b(&done, EQ); 6763 __ b(&done, EQ);
6474 // Mint case. 6764 // Mint case.
6475 __ CompareClassId(value, kMintCid, temp); 6765 __ CompareClassId(value, kMintCid, temp);
6476 __ b(deopt, NE); 6766 __ b(deopt, NE);
6477 __ LoadFromOffset(kWord, out, value, Mint::value_offset() - kHeapObjectTag); 6767 __ LoadFromOffset(kWord, out, value, Mint::value_offset() - kHeapObjectTag);
6478 __ Bind(&done); 6768 __ Bind(&done);
6479 } 6769 }
6480 } 6770 }
6481 6771
6482 6772
6773 LocationSummary* BoxInt32Instr::MakeLocationSummary(Isolate* isolate,
6774 bool opt) const {
6775 const intptr_t kNumInputs = 1;
6776 const intptr_t kNumTemps = ValueFitsSmi() ? 0 : 1;
6777 LocationSummary* summary = new(isolate) LocationSummary(
6778 isolate,
6779 kNumInputs,
6780 kNumTemps,
6781 ValueFitsSmi() ? LocationSummary::kNoCall
6782 : LocationSummary::kCallOnSlowPath);
6783 summary->set_in(0, Location::RequiresRegister());
6784 if (!ValueFitsSmi()) {
6785 summary->set_temp(0, Location::RequiresRegister());
6786 }
6787 summary->set_out(0, Location::RequiresRegister());
6788 return summary;
6789 }
6790
6791
6792 void BoxInt32Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
6793 Register value = locs()->in(0).reg();
6794 Register out = locs()->out(0).reg();
6795 ASSERT(value != out);
6796
6797 __ Lsl(out, value, 1);
6798 if (!ValueFitsSmi()) {
6799 Register temp = locs()->temp(0).reg();
6800 Label done;
6801 __ cmp(value, Operand(out, ASR, 1));
6802 __ b(&done, EQ);
6803 BoxAllocationSlowPath::Allocate(
6804 compiler,
6805 this,
6806 compiler->mint_class(),
6807 out,
6808 temp);
6809 __ Asr(temp, value, kBitsPerWord - 1);
6810 __ StoreToOffset(kWord,
6811 value,
6812 out,
6813 Mint::value_offset() - kHeapObjectTag);
6814 __ StoreToOffset(kWord,
6815 temp,
6816 out,
6817 Mint::value_offset() - kHeapObjectTag + kWordSize);
6818 __ Bind(&done);
6819 }
6820 }
6821
6822
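BoxInt32 tags the value as a Smi with a left shift by one and takes the allocation slow path only when that loses information: the value fits in a (31-bit) Smi exactly when shifting the tagged result back right by one reproduces the input. Otherwise a Mint is allocated and the sign-extended 64-bit value is stored into it. A sketch of the fits-in-Smi test (hypothetical helper, not the VM's code):

    #include <cstdint>

    static bool TrySmiTag(int32_t value, int32_t* tagged) {
      const int32_t candidate = static_cast<int32_t>(
          static_cast<uint32_t>(value) << 1);      // __ Lsl(out, value, 1)
      if ((candidate >> 1) != value) {             // __ cmp(value, Operand(out, ASR, 1))
        return false;                              // Needs a Mint box.
      }
      *tagged = candidate;
      return true;
    }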
6823
6824 LocationSummary* UnboxInt32Instr::MakeLocationSummary(Isolate* isolate,
6825 bool opt) const {
6826 const intptr_t value_cid = value()->Type()->ToCid();
6827 const intptr_t kNumInputs = 1;
6828 const intptr_t kNumTemps =
6829 ((value_cid == kMintCid) || (value_cid == kSmiCid)) ? 0 : 1;
6830 LocationSummary* summary = new(isolate) LocationSummary(
6831 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
6832 summary->set_in(0, Location::RequiresRegister());
6833 if (kNumTemps > 0) {
6834 summary->set_temp(0, Location::RequiresRegister());
6835 }
6836 summary->set_out(0, Location::RequiresRegister());
6837 return summary;
6838 }
6839
6840
6841 static void LoadInt32FromMint(FlowGraphCompiler* compiler,
6842 Register mint,
6843 Register result,
6844 Register temp,
6845 Label* deopt) {
6846 __ LoadFromOffset(kWord,
6847 result,
6848 mint,
6849 Mint::value_offset() - kHeapObjectTag);
6850 if (deopt != NULL) {
6851 __ LoadFromOffset(kWord,
6852 temp,
6853 mint,
6854 Mint::value_offset() - kHeapObjectTag + kWordSize);
6855 __ cmp(temp, Operand(result, ASR, kBitsPerWord - 1));
6856 __ b(deopt, NE);
6857 }
6858 }
6859
6860
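When a deopt label is supplied, LoadInt32FromMint also loads the high word and verifies that it equals the sign extension of the low word, i.e. that the Mint's 64-bit value actually fits in an int32. In C++ terms (sketch only):

    #include <cstdint>

    static bool MintFitsInt32(int64_t mint_value, int32_t* out) {
      const int32_t lo = static_cast<int32_t>(mint_value);
      const int32_t hi = static_cast<int32_t>(mint_value >> 32);
      if (hi != (lo >> 31)) {   // __ cmp(temp, Operand(result, ASR, kBitsPerWord - 1))
        return false;           // __ b(deopt, NE)
      }
      *out = lo;
      return true;
    }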
6861 void UnboxInt32Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
6862 const intptr_t value_cid = value()->Type()->ToCid();
6863 const Register value = locs()->in(0).reg();
6864 const Register out = locs()->out(0).reg();
6865 ASSERT(value != out);
6866
6867 if (value_cid == kMintCid) {
6868 Register temp = CanDeoptimize() ? locs()->temp(0).reg() : kNoRegister;
6869 Label* deopt = CanDeoptimize() ?
6870 compiler->AddDeoptStub(deopt_id_, ICData::kDeoptUnboxInteger) : NULL;
6871 LoadInt32FromMint(compiler,
6872 value,
6873 out,
6874 temp,
6875 deopt);
6876 } else if (value_cid == kSmiCid) {
6877 __ SmiUntag(out, value);
6878 } else {
6879 Register temp = locs()->temp(0).reg();
6880 Label* deopt = compiler->AddDeoptStub(deopt_id_,
6881 ICData::kDeoptUnboxInteger);
6882 Label done;
6883 __ tst(value, Operand(kSmiTagMask));
6884 // Smi case.
6885 __ mov(out, Operand(value), EQ);
6886 __ SmiUntag(out, EQ);
6887 __ b(&done, EQ);
6888 // Mint case.
6889 __ CompareClassId(value, kMintCid, temp);
6890 __ b(deopt, NE);
6891 LoadInt32FromMint(compiler,
6892 value,
6893 out,
6894 temp,
6895 deopt);
6896 __ Bind(&done);
6897 }
6898 }
6899
6900
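When the class id is not known statically, the code above dispatches at run time: the low tag bit distinguishes Smis (untagged with a conditional arithmetic shift) from heap objects, and a heap object must be a Mint whose value passes the fits-in-int32 check, otherwise the code deoptimizes. A schematic model, abstracting away the object layout (all names hypothetical):

    #include <cstdint>

    // tagged is the raw register value; if it is not a Smi, is_mint and
    // mint_value describe the heap object it refers to.
    static bool UnboxInt32Sketch(int32_t tagged, bool is_mint,
                                 int64_t mint_value, int32_t* out) {
      if ((tagged & 1) == 0) {               // __ tst(value, Operand(kSmiTagMask))
        *out = tagged >> 1;                  // __ SmiUntag(out, EQ)
        return true;
      }
      if (!is_mint) {                        // __ CompareClassId(value, kMintCid, temp)
        return false;                        // __ b(deopt, NE)
      }
      const int32_t lo = static_cast<int32_t>(mint_value);
      if (static_cast<int32_t>(mint_value >> 32) != (lo >> 31)) {
        return false;                        // Mint does not fit in int32: deopt.
      }
      *out = lo;
      return true;
    }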
6483 LocationSummary* UnboxedIntConverterInstr::MakeLocationSummary(Isolate* isolate, 6901 LocationSummary* UnboxedIntConverterInstr::MakeLocationSummary(Isolate* isolate,
6484 bool opt) const { 6902 bool opt) const {
6485 const intptr_t kNumInputs = 1; 6903 const intptr_t kNumInputs = 1;
6486 const intptr_t kNumTemps = 0; 6904 const intptr_t kNumTemps = 0;
6487 LocationSummary* summary = new(isolate) LocationSummary( 6905 LocationSummary* summary = new(isolate) LocationSummary(
6488 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); 6906 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
6489 if (from() == kUnboxedMint) { 6907 if (from() == kUnboxedMint) {
6908 ASSERT((to() == kUnboxedUint32) || (to() == kUnboxedInt32));
6490 summary->set_in(0, Location::Pair(Location::RequiresRegister(), 6909 summary->set_in(0, Location::Pair(Location::RequiresRegister(),
6491 Location::RequiresRegister())); 6910 Location::RequiresRegister()));
6492 summary->set_out(0, Location::RequiresRegister()); 6911 summary->set_out(0, Location::RequiresRegister());
6493 } else { 6912 } else if (to() == kUnboxedMint) {
6494 ASSERT(from() == kUnboxedUint32); 6913 ASSERT((from() == kUnboxedUint32) || (from() == kUnboxedInt32));
6495 summary->set_in(0, Location::RequiresRegister()); 6914 summary->set_in(0, Location::RequiresRegister());
6496 summary->set_out(0, Location::Pair(Location::RequiresRegister(), 6915 summary->set_out(0, Location::Pair(Location::RequiresRegister(),
6497 Location::RequiresRegister())); 6916 Location::RequiresRegister()));
6917 } else {
6918 ASSERT((to() == kUnboxedUint32) || (to() == kUnboxedInt32));
6919 ASSERT((from() == kUnboxedUint32) || (from() == kUnboxedInt32));
6920 summary->set_in(0, Location::RequiresRegister());
6921 summary->set_out(0, Location::SameAsFirstInput());
6498 } 6922 }
6499 return summary; 6923 return summary;
6500 } 6924 }
6501 6925
6502 6926
6503 void UnboxedIntConverterInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 6927 void UnboxedIntConverterInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
6504 if (from() == kUnboxedMint) { 6928 if (from() == kUnboxedInt32 && to() == kUnboxedUint32) {
6929 const Register out = locs()->out(0).reg();
6930 // Representations are bitwise equivalent.
6931 ASSERT(out == locs()->in(0).reg());
6932 } else if (from() == kUnboxedUint32 && to() == kUnboxedInt32) {
6933 const Register out = locs()->out(0).reg();
6934 // Representations are bitwise equivalent.
6935 ASSERT(out == locs()->in(0).reg());
6936 if (CanDeoptimize()) {
6937 Label* deopt =
6938 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptUnboxInteger);
6939 __ tst(out, Operand(out));
6940 __ b(deopt, MI);
6941 }
6942 } else if (from() == kUnboxedMint) {
6943 ASSERT(to() == kUnboxedUint32 || to() == kUnboxedInt32);
6505 PairLocation* in_pair = locs()->in(0).AsPairLocation(); 6944 PairLocation* in_pair = locs()->in(0).AsPairLocation();
6506 Register in_lo = in_pair->At(0).reg(); 6945 Register in_lo = in_pair->At(0).reg();
6946 Register in_hi = in_pair->At(1).reg();
6507 Register out = locs()->out(0).reg(); 6947 Register out = locs()->out(0).reg();
6508 // Copy low word. 6948 // Copy low word.
6509 __ mov(out, Operand(in_lo)); 6949 __ mov(out, Operand(in_lo));
6510 } else { 6950 if (CanDeoptimize()) {
6511 ASSERT(from() == kUnboxedUint32); 6951 Label* deopt =
6952 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptUnboxInteger);
6953 ASSERT(to() == kUnboxedInt32);
6954 __ cmp(in_hi, Operand(in_lo, ASR, kBitsPerWord - 1));
6955 __ b(deopt, NE);
6956 }
6957 } else if (from() == kUnboxedUint32 || from() == kUnboxedInt32) {
6958 ASSERT(to() == kUnboxedMint);
6512 Register in = locs()->in(0).reg(); 6959 Register in = locs()->in(0).reg();
6513 PairLocation* out_pair = locs()->out(0).AsPairLocation(); 6960 PairLocation* out_pair = locs()->out(0).AsPairLocation();
6514 Register out_lo = out_pair->At(0).reg(); 6961 Register out_lo = out_pair->At(0).reg();
6515 Register out_hi = out_pair->At(1).reg(); 6962 Register out_hi = out_pair->At(1).reg();
6516 // Copy low word. 6963 // Copy low word.
6517 __ mov(out_lo, Operand(in)); 6964 __ mov(out_lo, Operand(in));
6518 // Zero upper word. 6965 if (from() == kUnboxedUint32) {
6519 __ eor(out_hi, out_hi, Operand(out_hi)); 6966 __ eor(out_hi, out_hi, Operand(out_hi));
6967 } else {
6968 ASSERT(from() == kUnboxedInt32);
6969 __ mov(out_hi, Operand(in, ASR, kBitsPerWord - 1));
6970 }
6971 } else {
6972 UNREACHABLE();
6520 } 6973 }
6521 } 6974 }
6522 6975
6523 6976
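UnboxedIntConverter now handles all pairings among kUnboxedInt32, kUnboxedUint32 and kUnboxedMint: int32 <-> uint32 is a bitwise reinterpretation (with a deopt when converting a uint32 with the sign bit set to int32 and the instruction can deoptimize), mint -> int32/uint32 copies the low word (checking that the mint fits when narrowing to int32), and int32/uint32 -> mint widens into the register pair by zero or sign extension. A sketch of the widening cases (illustrative only):

    #include <cstdint>

    static int64_t Uint32ToMintSketch(uint32_t in) {
      // __ mov(out_lo, Operand(in)); __ eor(out_hi, out_hi, Operand(out_hi));
      return static_cast<int64_t>(in);   // Zero-extend the upper word.
    }

    static int64_t Int32ToMintSketch(int32_t in) {
      // __ mov(out_lo, Operand(in));
      // __ mov(out_hi, Operand(in, ASR, kBitsPerWord - 1));
      return static_cast<int64_t>(in);   // Sign-extend the upper word.
    }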
6524 LocationSummary* ThrowInstr::MakeLocationSummary(Isolate* isolate, 6977 LocationSummary* ThrowInstr::MakeLocationSummary(Isolate* isolate,
6525 bool opt) const { 6978 bool opt) const {
6526 return new(isolate) LocationSummary(isolate, 0, 0, LocationSummary::kCall); 6979 return new(isolate) LocationSummary(isolate, 0, 0, LocationSummary::kCall);
6527 } 6980 }
6528 6981
6529 6982
(...skipping 217 matching lines...)
6747 compiler->GenerateCall(token_pos(), &label, stub_kind_, locs()); 7200 compiler->GenerateCall(token_pos(), &label, stub_kind_, locs());
6748 #if defined(DEBUG) 7201 #if defined(DEBUG)
6749 __ LoadImmediate(R4, kInvalidObjectPointer); 7202 __ LoadImmediate(R4, kInvalidObjectPointer);
6750 __ LoadImmediate(R5, kInvalidObjectPointer); 7203 __ LoadImmediate(R5, kInvalidObjectPointer);
6751 #endif 7204 #endif
6752 } 7205 }
6753 7206
6754 } // namespace dart 7207 } // namespace dart
6755 7208
6756 #endif // defined TARGET_ARCH_ARM 7209 #endif // defined TARGET_ARCH_ARM