| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
| 6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
| 7 | 7 |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 | 9 |
| 10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 49 | 49 |
| 50 | 50 |
| 51 void PushArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 51 void PushArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 52 // In SSA mode, we need an explicit push. Nothing to do in non-SSA mode | 52 // In SSA mode, we need an explicit push. Nothing to do in non-SSA mode |
| 53 // where PushArgument is handled by BindInstr::EmitNativeCode. | 53 // where PushArgument is handled by BindInstr::EmitNativeCode. |
| 54 if (compiler->is_optimizing()) { | 54 if (compiler->is_optimizing()) { |
| 55 Location value = locs()->in(0); | 55 Location value = locs()->in(0); |
| 56 if (value.IsRegister()) { | 56 if (value.IsRegister()) { |
| 57 __ pushq(value.reg()); | 57 __ pushq(value.reg()); |
| 58 } else if (value.IsConstant()) { | 58 } else if (value.IsConstant()) { |
| 59 __ PushObject(value.constant(), PP); | 59 __ PushObject(value.constant()); |
| 60 } else { | 60 } else { |
| 61 ASSERT(value.IsStackSlot()); | 61 ASSERT(value.IsStackSlot()); |
| 62 __ pushq(value.ToStackSlotAddress()); | 62 __ pushq(value.ToStackSlotAddress()); |
| 63 } | 63 } |
| 64 } | 64 } |
| 65 } | 65 } |
| 66 | 66 |
| 67 | 67 |
| 68 LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, | 68 LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, |
| 69 bool opt) const { | 69 bool opt) const { |
| (...skipping 20 matching lines...) Expand all Loading... |
| 90 } | 90 } |
| 91 | 91 |
| 92 #if defined(DEBUG) | 92 #if defined(DEBUG) |
| 93 __ Comment("Stack Check"); | 93 __ Comment("Stack Check"); |
| 94 Label done; | 94 Label done; |
| 95 const intptr_t fp_sp_dist = | 95 const intptr_t fp_sp_dist = |
| 96 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; | 96 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
| 97 ASSERT(fp_sp_dist <= 0); | 97 ASSERT(fp_sp_dist <= 0); |
| 98 __ movq(RDI, RSP); | 98 __ movq(RDI, RSP); |
| 99 __ subq(RDI, RBP); | 99 __ subq(RDI, RBP); |
| 100 __ CompareImmediate(RDI, Immediate(fp_sp_dist), PP); | 100 __ CompareImmediate(RDI, Immediate(fp_sp_dist)); |
| 101 __ j(EQUAL, &done, Assembler::kNearJump); | 101 __ j(EQUAL, &done, Assembler::kNearJump); |
| 102 __ int3(); | 102 __ int3(); |
| 103 __ Bind(&done); | 103 __ Bind(&done); |
| 104 #endif | 104 #endif |
| 105 __ LeaveDartFrame(); | 105 __ LeaveDartFrame(); // Disallows constant pool use. |
| 106 __ ret(); | 106 __ ret(); |
| 107 // This ReturnInstr may be emitted out of order by the optimizer. The next |
| 108 // block may be a target expecting a properly set constant pool pointer. |
| 109 __ set_constant_pool_allowed(true); |
| 107 } | 110 } |
| 108 | 111 |
| 109 | 112 |
| 110 static Condition NegateCondition(Condition condition) { | 113 static Condition NegateCondition(Condition condition) { |
| 111 switch (condition) { | 114 switch (condition) { |
| 112 case EQUAL: return NOT_EQUAL; | 115 case EQUAL: return NOT_EQUAL; |
| 113 case NOT_EQUAL: return EQUAL; | 116 case NOT_EQUAL: return EQUAL; |
| 114 case LESS: return GREATER_EQUAL; | 117 case LESS: return GREATER_EQUAL; |
| 115 case LESS_EQUAL: return GREATER; | 118 case LESS_EQUAL: return GREATER; |
| 116 case GREATER: return LESS_EQUAL; | 119 case GREATER: return LESS_EQUAL; |
| (...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 176 | 179 |
| 177 __ setcc(true_condition, DL); | 180 __ setcc(true_condition, DL); |
| 178 | 181 |
| 179 if (is_power_of_two_kind) { | 182 if (is_power_of_two_kind) { |
| 180 const intptr_t shift = | 183 const intptr_t shift = |
| 181 Utils::ShiftForPowerOfTwo(Utils::Maximum(true_value, false_value)); | 184 Utils::ShiftForPowerOfTwo(Utils::Maximum(true_value, false_value)); |
| 182 __ shlq(RDX, Immediate(shift + kSmiTagSize)); | 185 __ shlq(RDX, Immediate(shift + kSmiTagSize)); |
| 183 } else { | 186 } else { |
| 184 __ decq(RDX); | 187 __ decq(RDX); |
| 185 __ AndImmediate(RDX, | 188 __ AndImmediate(RDX, |
| 186 Immediate(Smi::RawValue(true_value) - Smi::RawValue(false_value)), PP); | 189 Immediate(Smi::RawValue(true_value) - Smi::RawValue(false_value))); |
| 187 if (false_value != 0) { | 190 if (false_value != 0) { |
| 188 __ AddImmediate(RDX, Immediate(Smi::RawValue(false_value)), PP); | 191 __ AddImmediate(RDX, Immediate(Smi::RawValue(false_value))); |
| 189 } | 192 } |
| 190 } | 193 } |
| 191 } | 194 } |
| 192 | 195 |
| 193 | 196 |
| 194 LocationSummary* LoadLocalInstr::MakeLocationSummary(Zone* zone, | 197 LocationSummary* LoadLocalInstr::MakeLocationSummary(Zone* zone, |
| 195 bool opt) const { | 198 bool opt) const { |
| 196 const intptr_t kNumInputs = 0; | 199 const intptr_t kNumInputs = 0; |
| 197 const intptr_t stack_index = (local().index() < 0) | 200 const intptr_t stack_index = (local().index() < 0) |
| 198 ? kFirstLocalSlotFromFp - local().index() | 201 ? kFirstLocalSlotFromFp - local().index() |
| (...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 235 kNumInputs, | 238 kNumInputs, |
| 236 Location::RequiresRegister(), | 239 Location::RequiresRegister(), |
| 237 LocationSummary::kNoCall); | 240 LocationSummary::kNoCall); |
| 238 } | 241 } |
| 239 | 242 |
| 240 | 243 |
| 241 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 244 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 242 // The register allocator drops constant definitions that have no uses. | 245 // The register allocator drops constant definitions that have no uses. |
| 243 if (!locs()->out(0).IsInvalid()) { | 246 if (!locs()->out(0).IsInvalid()) { |
| 244 Register result = locs()->out(0).reg(); | 247 Register result = locs()->out(0).reg(); |
| 245 __ LoadObject(result, value(), PP); | 248 __ LoadObject(result, value()); |
| 246 } | 249 } |
| 247 } | 250 } |
| 248 | 251 |
| 249 | 252 |
| 250 LocationSummary* UnboxedConstantInstr::MakeLocationSummary(Zone* zone, | 253 LocationSummary* UnboxedConstantInstr::MakeLocationSummary(Zone* zone, |
| 251 bool opt) const { | 254 bool opt) const { |
| 252 const intptr_t kNumInputs = 0; | 255 const intptr_t kNumInputs = 0; |
| 253 const intptr_t kNumTemps = 0; | 256 const intptr_t kNumTemps = 0; |
| 254 LocationSummary* locs = new(zone) LocationSummary( | 257 LocationSummary* locs = new(zone) LocationSummary( |
| 255 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 258 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| (...skipping 14 matching lines...) Expand all Loading... |
| 270 | 273 |
| 271 void UnboxedConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 274 void UnboxedConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 272 // The register allocator drops constant definitions that have no uses. | 275 // The register allocator drops constant definitions that have no uses. |
| 273 if (!locs()->out(0).IsInvalid()) { | 276 if (!locs()->out(0).IsInvalid()) { |
| 274 switch (representation()) { | 277 switch (representation()) { |
| 275 case kUnboxedDouble: { | 278 case kUnboxedDouble: { |
| 276 XmmRegister result = locs()->out(0).fpu_reg(); | 279 XmmRegister result = locs()->out(0).fpu_reg(); |
| 277 if (Utils::DoublesBitEqual(Double::Cast(value()).value(), 0.0)) { | 280 if (Utils::DoublesBitEqual(Double::Cast(value()).value(), 0.0)) { |
| 278 __ xorps(result, result); | 281 __ xorps(result, result); |
| 279 } else { | 282 } else { |
| 280 __ LoadObject(TMP, value(), PP); | 283 __ LoadObject(TMP, value()); |
| 281 __ movsd(result, FieldAddress(TMP, Double::value_offset())); | 284 __ movsd(result, FieldAddress(TMP, Double::value_offset())); |
| 282 } | 285 } |
| 283 break; | 286 break; |
| 284 } | 287 } |
| 285 case kUnboxedInt32: | 288 case kUnboxedInt32: |
| 286 __ movl(locs()->out(0).reg(), | 289 __ movl(locs()->out(0).reg(), |
| 287 Immediate(static_cast<int32_t>(Smi::Cast(value()).Value()))); | 290 Immediate(static_cast<int32_t>(Smi::Cast(value()).Value()))); |
| 288 break; | 291 break; |
| 289 default: | 292 default: |
| 290 UNREACHABLE(); | 293 UNREACHABLE(); |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 323 intptr_t token_pos, | 326 intptr_t token_pos, |
| 324 intptr_t deopt_id, | 327 intptr_t deopt_id, |
| 325 LocationSummary* locs, | 328 LocationSummary* locs, |
| 326 FlowGraphCompiler* compiler) { | 329 FlowGraphCompiler* compiler) { |
| 327 // Check that the type of the value is allowed in conditional context. | 330 // Check that the type of the value is allowed in conditional context. |
| 328 // Call the runtime if the object is not bool::true or bool::false. | 331 // Call the runtime if the object is not bool::true or bool::false. |
| 329 ASSERT(locs->always_calls()); | 332 ASSERT(locs->always_calls()); |
| 330 Label done; | 333 Label done; |
| 331 | 334 |
| 332 if (Isolate::Current()->flags().type_checks()) { | 335 if (Isolate::Current()->flags().type_checks()) { |
| 333 __ CompareObject(reg, Bool::True(), PP); | 336 __ CompareObject(reg, Bool::True()); |
| 334 __ j(EQUAL, &done, Assembler::kNearJump); | 337 __ j(EQUAL, &done, Assembler::kNearJump); |
| 335 __ CompareObject(reg, Bool::False(), PP); | 338 __ CompareObject(reg, Bool::False()); |
| 336 __ j(EQUAL, &done, Assembler::kNearJump); | 339 __ j(EQUAL, &done, Assembler::kNearJump); |
| 337 } else { | 340 } else { |
| 338 ASSERT(Isolate::Current()->flags().asserts()); | 341 ASSERT(Isolate::Current()->flags().asserts()); |
| 339 __ CompareObject(reg, Object::null_instance(), PP); | 342 __ CompareObject(reg, Object::null_instance()); |
| 340 __ j(NOT_EQUAL, &done, Assembler::kNearJump); | 343 __ j(NOT_EQUAL, &done, Assembler::kNearJump); |
| 341 } | 344 } |
| 342 | 345 |
| 343 __ pushq(reg); // Push the source object. | 346 __ pushq(reg); // Push the source object. |
| 344 compiler->GenerateRuntimeCall(token_pos, | 347 compiler->GenerateRuntimeCall(token_pos, |
| 345 deopt_id, | 348 deopt_id, |
| 346 kNonBoolTypeErrorRuntimeEntry, | 349 kNonBoolTypeErrorRuntimeEntry, |
| 347 1, | 350 1, |
| 348 locs); | 351 locs); |
| 349 // We should never return here. | 352 // We should never return here. |
| (...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 415 return NULL; | 418 return NULL; |
| 416 } | 419 } |
| 417 | 420 |
| 418 | 421 |
| 419 static void LoadValueCid(FlowGraphCompiler* compiler, | 422 static void LoadValueCid(FlowGraphCompiler* compiler, |
| 420 Register value_cid_reg, | 423 Register value_cid_reg, |
| 421 Register value_reg, | 424 Register value_reg, |
| 422 Label* value_is_smi = NULL) { | 425 Label* value_is_smi = NULL) { |
| 423 Label done; | 426 Label done; |
| 424 if (value_is_smi == NULL) { | 427 if (value_is_smi == NULL) { |
| 425 __ LoadImmediate(value_cid_reg, Immediate(kSmiCid), PP); | 428 __ LoadImmediate(value_cid_reg, Immediate(kSmiCid)); |
| 426 } | 429 } |
| 427 __ testq(value_reg, Immediate(kSmiTagMask)); | 430 __ testq(value_reg, Immediate(kSmiTagMask)); |
| 428 if (value_is_smi == NULL) { | 431 if (value_is_smi == NULL) { |
| 429 __ j(ZERO, &done, Assembler::kNearJump); | 432 __ j(ZERO, &done, Assembler::kNearJump); |
| 430 } else { | 433 } else { |
| 431 __ j(ZERO, value_is_smi); | 434 __ j(ZERO, value_is_smi); |
| 432 } | 435 } |
| 433 __ LoadClassId(value_cid_reg, value_reg); | 436 __ LoadClassId(value_cid_reg, value_reg); |
| 434 __ Bind(&done); | 437 __ Bind(&done); |
| 435 } | 438 } |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 477 const LocationSummary& locs, | 480 const LocationSummary& locs, |
| 478 Token::Kind kind, | 481 Token::Kind kind, |
| 479 BranchLabels labels) { | 482 BranchLabels labels) { |
| 480 Location left = locs.in(0); | 483 Location left = locs.in(0); |
| 481 Location right = locs.in(1); | 484 Location right = locs.in(1); |
| 482 ASSERT(!left.IsConstant() || !right.IsConstant()); | 485 ASSERT(!left.IsConstant() || !right.IsConstant()); |
| 483 | 486 |
| 484 Condition true_condition = TokenKindToIntCondition(kind); | 487 Condition true_condition = TokenKindToIntCondition(kind); |
| 485 | 488 |
| 486 if (left.IsConstant()) { | 489 if (left.IsConstant()) { |
| 487 __ CompareObject(right.reg(), left.constant(), PP); | 490 __ CompareObject(right.reg(), left.constant()); |
| 488 true_condition = FlipCondition(true_condition); | 491 true_condition = FlipCondition(true_condition); |
| 489 } else if (right.IsConstant()) { | 492 } else if (right.IsConstant()) { |
| 490 __ CompareObject(left.reg(), right.constant(), PP); | 493 __ CompareObject(left.reg(), right.constant()); |
| 491 } else if (right.IsStackSlot()) { | 494 } else if (right.IsStackSlot()) { |
| 492 __ cmpq(left.reg(), right.ToStackSlotAddress()); | 495 __ cmpq(left.reg(), right.ToStackSlotAddress()); |
| 493 } else { | 496 } else { |
| 494 __ cmpq(left.reg(), right.reg()); | 497 __ cmpq(left.reg(), right.reg()); |
| 495 } | 498 } |
| 496 return true_condition; | 499 return true_condition; |
| 497 } | 500 } |
| 498 | 501 |
| 499 | 502 |
| 500 static Condition TokenKindToDoubleCondition(Token::Kind kind) { | 503 static Condition TokenKindToDoubleCondition(Token::Kind kind) { |
| (...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 544 ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE)); | 547 ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE)); |
| 545 | 548 |
| 546 Label is_true, is_false; | 549 Label is_true, is_false; |
| 547 BranchLabels labels = { &is_true, &is_false, &is_false }; | 550 BranchLabels labels = { &is_true, &is_false, &is_false }; |
| 548 Condition true_condition = EmitComparisonCode(compiler, labels); | 551 Condition true_condition = EmitComparisonCode(compiler, labels); |
| 549 EmitBranchOnCondition(compiler, true_condition, labels); | 552 EmitBranchOnCondition(compiler, true_condition, labels); |
| 550 | 553 |
| 551 Register result = locs()->out(0).reg(); | 554 Register result = locs()->out(0).reg(); |
| 552 Label done; | 555 Label done; |
| 553 __ Bind(&is_false); | 556 __ Bind(&is_false); |
| 554 __ LoadObject(result, Bool::False(), PP); | 557 __ LoadObject(result, Bool::False()); |
| 555 __ jmp(&done); | 558 __ jmp(&done); |
| 556 __ Bind(&is_true); | 559 __ Bind(&is_true); |
| 557 __ LoadObject(result, Bool::True(), PP); | 560 __ LoadObject(result, Bool::True()); |
| 558 __ Bind(&done); | 561 __ Bind(&done); |
| 559 } | 562 } |
| 560 | 563 |
| 561 | 564 |
| 562 void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 565 void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
| 563 BranchInstr* branch) { | 566 BranchInstr* branch) { |
| 564 ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ)); | 567 ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ)); |
| 565 | 568 |
| 566 BranchLabels labels = compiler->CreateBranchLabels(branch); | 569 BranchLabels labels = compiler->CreateBranchLabels(branch); |
| 567 Condition true_condition = EmitComparisonCode(compiler, labels); | 570 Condition true_condition = EmitComparisonCode(compiler, labels); |
| (...skipping 16 matching lines...) Expand all Loading... |
| 584 | 587 |
| 585 | 588 |
| 586 Condition TestSmiInstr::EmitComparisonCode(FlowGraphCompiler* compiler, | 589 Condition TestSmiInstr::EmitComparisonCode(FlowGraphCompiler* compiler, |
| 587 BranchLabels labels) { | 590 BranchLabels labels) { |
| 588 Register left_reg = locs()->in(0).reg(); | 591 Register left_reg = locs()->in(0).reg(); |
| 589 Location right = locs()->in(1); | 592 Location right = locs()->in(1); |
| 590 if (right.IsConstant()) { | 593 if (right.IsConstant()) { |
| 591 ASSERT(right.constant().IsSmi()); | 594 ASSERT(right.constant().IsSmi()); |
| 592 const int64_t imm = | 595 const int64_t imm = |
| 593 reinterpret_cast<int64_t>(right.constant().raw()); | 596 reinterpret_cast<int64_t>(right.constant().raw()); |
| 594 __ TestImmediate(left_reg, Immediate(imm), PP); | 597 __ TestImmediate(left_reg, Immediate(imm)); |
| 595 } else { | 598 } else { |
| 596 __ testq(left_reg, right.reg()); | 599 __ testq(left_reg, right.reg()); |
| 597 } | 600 } |
| 598 Condition true_condition = (kind() == Token::kNE) ? NOT_ZERO : ZERO; | 601 Condition true_condition = (kind() == Token::kNE) ? NOT_ZERO : ZERO; |
| 599 return true_condition; | 602 return true_condition; |
| 600 } | 603 } |
| 601 | 604 |
| 602 | 605 |
| 603 void TestSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 606 void TestSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 604 // Never emitted outside of the BranchInstr. | 607 // Never emitted outside of the BranchInstr. |
| (...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 672 EmitComparisonCode(compiler, labels); | 675 EmitComparisonCode(compiler, labels); |
| 673 } | 676 } |
| 674 | 677 |
| 675 | 678 |
| 676 void TestCidsInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 679 void TestCidsInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 677 Register result_reg = locs()->out(0).reg(); | 680 Register result_reg = locs()->out(0).reg(); |
| 678 Label is_true, is_false, done; | 681 Label is_true, is_false, done; |
| 679 BranchLabels labels = { &is_true, &is_false, &is_false }; | 682 BranchLabels labels = { &is_true, &is_false, &is_false }; |
| 680 EmitComparisonCode(compiler, labels); | 683 EmitComparisonCode(compiler, labels); |
| 681 __ Bind(&is_false); | 684 __ Bind(&is_false); |
| 682 __ LoadObject(result_reg, Bool::False(), PP); | 685 __ LoadObject(result_reg, Bool::False()); |
| 683 __ jmp(&done, Assembler::kNearJump); | 686 __ jmp(&done, Assembler::kNearJump); |
| 684 __ Bind(&is_true); | 687 __ Bind(&is_true); |
| 685 __ LoadObject(result_reg, Bool::True(), PP); | 688 __ LoadObject(result_reg, Bool::True()); |
| 686 __ Bind(&done); | 689 __ Bind(&done); |
| 687 } | 690 } |
| 688 | 691 |
| 689 | 692 |
| 690 LocationSummary* RelationalOpInstr::MakeLocationSummary(Zone* zone, | 693 LocationSummary* RelationalOpInstr::MakeLocationSummary(Zone* zone, |
| 691 bool opt) const { | 694 bool opt) const { |
| 692 const intptr_t kNumInputs = 2; | 695 const intptr_t kNumInputs = 2; |
| 693 const intptr_t kNumTemps = 0; | 696 const intptr_t kNumTemps = 0; |
| 694 if (operation_cid() == kDoubleCid) { | 697 if (operation_cid() == kDoubleCid) { |
| 695 LocationSummary* summary = new(zone) LocationSummary( | 698 LocationSummary* summary = new(zone) LocationSummary( |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 733 | 736 |
| 734 void RelationalOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 737 void RelationalOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 735 Label is_true, is_false; | 738 Label is_true, is_false; |
| 736 BranchLabels labels = { &is_true, &is_false, &is_false }; | 739 BranchLabels labels = { &is_true, &is_false, &is_false }; |
| 737 Condition true_condition = EmitComparisonCode(compiler, labels); | 740 Condition true_condition = EmitComparisonCode(compiler, labels); |
| 738 EmitBranchOnCondition(compiler, true_condition, labels); | 741 EmitBranchOnCondition(compiler, true_condition, labels); |
| 739 | 742 |
| 740 Register result = locs()->out(0).reg(); | 743 Register result = locs()->out(0).reg(); |
| 741 Label done; | 744 Label done; |
| 742 __ Bind(&is_false); | 745 __ Bind(&is_false); |
| 743 __ LoadObject(result, Bool::False(), PP); | 746 __ LoadObject(result, Bool::False()); |
| 744 __ jmp(&done); | 747 __ jmp(&done); |
| 745 __ Bind(&is_true); | 748 __ Bind(&is_true); |
| 746 __ LoadObject(result, Bool::True(), PP); | 749 __ LoadObject(result, Bool::True()); |
| 747 __ Bind(&done); | 750 __ Bind(&done); |
| 748 } | 751 } |
| 749 | 752 |
| 750 | 753 |
| 751 void RelationalOpInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 754 void RelationalOpInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
| 752 BranchInstr* branch) { | 755 BranchInstr* branch) { |
| 753 BranchLabels labels = compiler->CreateBranchLabels(branch); | 756 BranchLabels labels = compiler->CreateBranchLabels(branch); |
| 754 Condition true_condition = EmitComparisonCode(compiler, labels); | 757 Condition true_condition = EmitComparisonCode(compiler, labels); |
| 755 EmitBranchOnCondition(compiler, true_condition, labels); | 758 EmitBranchOnCondition(compiler, true_condition, labels); |
| 756 } | 759 } |
| 757 | 760 |
| 758 | 761 |
| 759 LocationSummary* NativeCallInstr::MakeLocationSummary(Zone* zone, | 762 LocationSummary* NativeCallInstr::MakeLocationSummary(Zone* zone, |
| 760 bool opt) const { | 763 bool opt) const { |
| 761 return MakeCallSummary(zone); | 764 return MakeCallSummary(zone); |
| 762 } | 765 } |
| 763 | 766 |
| 764 | 767 |
| 765 void NativeCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 768 void NativeCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 766 Register result = locs()->out(0).reg(); | 769 Register result = locs()->out(0).reg(); |
| 767 const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function()); | 770 const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function()); |
| 768 const bool is_leaf_call = | 771 const bool is_leaf_call = |
| 769 (argc_tag & NativeArguments::AutoSetupScopeMask()) == 0; | 772 (argc_tag & NativeArguments::AutoSetupScopeMask()) == 0; |
| 770 | 773 |
| 771 // Push the result place holder initialized to NULL. | 774 // Push the result place holder initialized to NULL. |
| 772 __ PushObject(Object::null_object(), PP); | 775 __ PushObject(Object::null_object()); |
| 773 // Pass a pointer to the first argument in RAX. | 776 // Pass a pointer to the first argument in RAX. |
| 774 if (!function().HasOptionalParameters()) { | 777 if (!function().HasOptionalParameters()) { |
| 775 __ leaq(RAX, Address(RBP, (kParamEndSlotFromFp + | 778 __ leaq(RAX, Address(RBP, (kParamEndSlotFromFp + |
| 776 function().NumParameters()) * kWordSize)); | 779 function().NumParameters()) * kWordSize)); |
| 777 } else { | 780 } else { |
| 778 __ leaq(RAX, | 781 __ leaq(RAX, Address(RBP, kFirstLocalSlotFromFp * kWordSize)); |
| 779 Address(RBP, kFirstLocalSlotFromFp * kWordSize)); | |
| 780 } | 782 } |
| 781 __ LoadImmediate( | 783 __ LoadImmediate( |
| 782 RBX, Immediate(reinterpret_cast<uword>(native_c_function())), PP); | 784 RBX, Immediate(reinterpret_cast<uword>(native_c_function()))); |
| 783 __ LoadImmediate( | 785 __ LoadImmediate(R10, Immediate(argc_tag)); |
| 784 R10, Immediate(argc_tag), PP); | |
| 785 const ExternalLabel* stub_entry = (is_bootstrap_native() || is_leaf_call) ? | 786 const ExternalLabel* stub_entry = (is_bootstrap_native() || is_leaf_call) ? |
| 786 &StubCode::CallBootstrapCFunctionLabel() : | 787 &StubCode::CallBootstrapCFunctionLabel() : |
| 787 &StubCode::CallNativeCFunctionLabel(); | 788 &StubCode::CallNativeCFunctionLabel(); |
| 788 compiler->GenerateCall(token_pos(), | 789 compiler->GenerateCall(token_pos(), |
| 789 stub_entry, | 790 stub_entry, |
| 790 RawPcDescriptors::kOther, | 791 RawPcDescriptors::kOther, |
| 791 locs()); | 792 locs()); |
| 792 __ popq(result); | 793 __ popq(result); |
| 793 } | 794 } |
| 794 | 795 |
| (...skipping 19 matching lines...) Expand all Loading... |
| 814 Location::RequiresRegister(), | 815 Location::RequiresRegister(), |
| 815 LocationSummary::kNoCall); | 816 LocationSummary::kNoCall); |
| 816 } | 817 } |
| 817 | 818 |
| 818 | 819 |
| 819 void StringFromCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 820 void StringFromCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 820 ASSERT(compiler->is_optimizing()); | 821 ASSERT(compiler->is_optimizing()); |
| 821 Register char_code = locs()->in(0).reg(); | 822 Register char_code = locs()->in(0).reg(); |
| 822 Register result = locs()->out(0).reg(); | 823 Register result = locs()->out(0).reg(); |
| 823 __ LoadImmediate(result, | 824 __ LoadImmediate(result, |
| 824 Immediate(reinterpret_cast<uword>(Symbols::PredefinedAddress())), PP); | 825 Immediate(reinterpret_cast<uword>(Symbols::PredefinedAddress()))); |
| 825 __ movq(result, Address(result, | 826 __ movq(result, Address(result, |
| 826 char_code, | 827 char_code, |
| 827 TIMES_HALF_WORD_SIZE, // Char code is a smi. | 828 TIMES_HALF_WORD_SIZE, // Char code is a smi. |
| 828 Symbols::kNullCharCodeSymbolOffset * kWordSize)); | 829 Symbols::kNullCharCodeSymbolOffset * kWordSize)); |
| 829 } | 830 } |
| 830 | 831 |
| 831 | 832 |
| 832 LocationSummary* StringToCharCodeInstr::MakeLocationSummary(Zone* zone, | 833 LocationSummary* StringToCharCodeInstr::MakeLocationSummary(Zone* zone, |
| 833 bool opt) const { | 834 bool opt) const { |
| 834 const intptr_t kNumInputs = 1; | 835 const intptr_t kNumInputs = 1; |
| (...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 919 void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 920 void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 920 const Register object = locs()->in(0).reg(); | 921 const Register object = locs()->in(0).reg(); |
| 921 const Register result = locs()->out(0).reg(); | 922 const Register result = locs()->out(0).reg(); |
| 922 Label load, done; | 923 Label load, done; |
| 923 | 924 |
| 924 // We don't use Assembler::LoadTaggedClassIdMayBeSmi() here---which uses | 925 // We don't use Assembler::LoadTaggedClassIdMayBeSmi() here---which uses |
| 925 // a conditional move instead---because it is slower, probably due to | 926 // a conditional move instead---because it is slower, probably due to |
| 926 // branch prediction usually working just fine in this case. | 927 // branch prediction usually working just fine in this case. |
| 927 __ testq(object, Immediate(kSmiTagMask)); | 928 __ testq(object, Immediate(kSmiTagMask)); |
| 928 __ j(NOT_ZERO, &load, Assembler::kNearJump); | 929 __ j(NOT_ZERO, &load, Assembler::kNearJump); |
| 929 __ LoadImmediate(result, Immediate(Smi::RawValue(kSmiCid)), PP); | 930 __ LoadImmediate(result, Immediate(Smi::RawValue(kSmiCid))); |
| 930 __ jmp(&done); | 931 __ jmp(&done); |
| 931 __ Bind(&load); | 932 __ Bind(&load); |
| 932 __ LoadClassId(result, object); | 933 __ LoadClassId(result, object); |
| 933 __ SmiTag(result); | 934 __ SmiTag(result); |
| 934 __ Bind(&done); | 935 __ Bind(&done); |
| 935 } | 936 } |
| 936 | 937 |
| 937 | 938 |
| 938 CompileType LoadIndexedInstr::ComputeType() const { | 939 CompileType LoadIndexedInstr::ComputeType() const { |
| 939 switch (class_id_) { | 940 switch (class_id_) { |
| (...skipping 372 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1312 if ((index_scale() == 1) && index.IsRegister()) { | 1313 if ((index_scale() == 1) && index.IsRegister()) { |
| 1313 __ SmiUntag(index.reg()); | 1314 __ SmiUntag(index.reg()); |
| 1314 } | 1315 } |
| 1315 switch (class_id()) { | 1316 switch (class_id()) { |
| 1316 case kArrayCid: | 1317 case kArrayCid: |
| 1317 if (ShouldEmitStoreBarrier()) { | 1318 if (ShouldEmitStoreBarrier()) { |
| 1318 Register value = locs()->in(2).reg(); | 1319 Register value = locs()->in(2).reg(); |
| 1319 __ StoreIntoObject(array, element_address, value); | 1320 __ StoreIntoObject(array, element_address, value); |
| 1320 } else if (locs()->in(2).IsConstant()) { | 1321 } else if (locs()->in(2).IsConstant()) { |
| 1321 const Object& constant = locs()->in(2).constant(); | 1322 const Object& constant = locs()->in(2).constant(); |
| 1322 __ StoreIntoObjectNoBarrier(array, element_address, constant, PP); | 1323 __ StoreIntoObjectNoBarrier(array, element_address, constant); |
| 1323 } else { | 1324 } else { |
| 1324 Register value = locs()->in(2).reg(); | 1325 Register value = locs()->in(2).reg(); |
| 1325 __ StoreIntoObjectNoBarrier(array, element_address, value); | 1326 __ StoreIntoObjectNoBarrier(array, element_address, value); |
| 1326 } | 1327 } |
| 1327 break; | 1328 break; |
| 1328 case kTypedDataInt8ArrayCid: | 1329 case kTypedDataInt8ArrayCid: |
| 1329 case kTypedDataUint8ArrayCid: | 1330 case kTypedDataUint8ArrayCid: |
| 1330 case kExternalTypedDataUint8ArrayCid: | 1331 case kExternalTypedDataUint8ArrayCid: |
| 1331 case kOneByteStringCid: | 1332 case kOneByteStringCid: |
| 1332 if (locs()->in(2).IsConstant()) { | 1333 if (locs()->in(2).IsConstant()) { |
| (...skipping 16 matching lines...) Expand all Loading... |
| 1349 value = 0xFF; | 1350 value = 0xFF; |
| 1350 } else if (value < 0) { | 1351 } else if (value < 0) { |
| 1351 value = 0; | 1352 value = 0; |
| 1352 } | 1353 } |
| 1353 __ movb(element_address, | 1354 __ movb(element_address, |
| 1354 Immediate(static_cast<int8_t>(value))); | 1355 Immediate(static_cast<int8_t>(value))); |
| 1355 } else { | 1356 } else { |
| 1356 ASSERT(locs()->in(2).reg() == RAX); | 1357 ASSERT(locs()->in(2).reg() == RAX); |
| 1357 Label store_value, store_0xff; | 1358 Label store_value, store_0xff; |
| 1358 __ SmiUntag(RAX); | 1359 __ SmiUntag(RAX); |
| 1359 __ CompareImmediate(RAX, Immediate(0xFF), PP); | 1360 __ CompareImmediate(RAX, Immediate(0xFF)); |
| 1360 __ j(BELOW_EQUAL, &store_value, Assembler::kNearJump); | 1361 __ j(BELOW_EQUAL, &store_value, Assembler::kNearJump); |
| 1361 // Clamp to 0x0 or 0xFF respectively. | 1362 // Clamp to 0x0 or 0xFF respectively. |
| 1362 __ j(GREATER, &store_0xff); | 1363 __ j(GREATER, &store_0xff); |
| 1363 __ xorq(RAX, RAX); | 1364 __ xorq(RAX, RAX); |
| 1364 __ jmp(&store_value, Assembler::kNearJump); | 1365 __ jmp(&store_value, Assembler::kNearJump); |
| 1365 __ Bind(&store_0xff); | 1366 __ Bind(&store_0xff); |
| 1366 __ LoadImmediate(RAX, Immediate(0xFF), PP); | 1367 __ LoadImmediate(RAX, Immediate(0xFF)); |
| 1367 __ Bind(&store_value); | 1368 __ Bind(&store_value); |
| 1368 __ movb(element_address, RAX); | 1369 __ movb(element_address, RAX); |
| 1369 } | 1370 } |
| 1370 break; | 1371 break; |
| 1371 } | 1372 } |
| 1372 case kTypedDataInt16ArrayCid: | 1373 case kTypedDataInt16ArrayCid: |
| 1373 case kTypedDataUint16ArrayCid: { | 1374 case kTypedDataUint16ArrayCid: { |
| 1374 Register value = locs()->in(2).reg(); | 1375 Register value = locs()->in(2).reg(); |
| 1375 __ SmiUntag(value); | 1376 __ SmiUntag(value); |
| 1376 __ movw(element_address, value); | 1377 __ movw(element_address, value); |
| (...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1465 locs()->temp(locs()->temp_count() - 1).reg() : kNoRegister; | 1466 locs()->temp(locs()->temp_count() - 1).reg() : kNoRegister; |
| 1466 | 1467 |
| 1467 Label ok, fail_label; | 1468 Label ok, fail_label; |
| 1468 | 1469 |
| 1469 Label* deopt = compiler->is_optimizing() ? | 1470 Label* deopt = compiler->is_optimizing() ? |
| 1470 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField) : NULL; | 1471 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField) : NULL; |
| 1471 | 1472 |
| 1472 Label* fail = (deopt != NULL) ? deopt : &fail_label; | 1473 Label* fail = (deopt != NULL) ? deopt : &fail_label; |
| 1473 | 1474 |
| 1474 if (emit_full_guard) { | 1475 if (emit_full_guard) { |
| 1475 __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), PP); | 1476 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); |
| 1476 | 1477 |
| 1477 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); | 1478 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); |
| 1478 FieldAddress field_nullability_operand( | 1479 FieldAddress field_nullability_operand( |
| 1479 field_reg, Field::is_nullable_offset()); | 1480 field_reg, Field::is_nullable_offset()); |
| 1480 | 1481 |
| 1481 if (value_cid == kDynamicCid) { | 1482 if (value_cid == kDynamicCid) { |
| 1482 LoadValueCid(compiler, value_cid_reg, value_reg); | 1483 LoadValueCid(compiler, value_cid_reg, value_reg); |
| 1483 | 1484 |
| 1484 __ cmpl(value_cid_reg, field_cid_operand); | 1485 __ cmpl(value_cid_reg, field_cid_operand); |
| 1485 __ j(EQUAL, &ok); | 1486 __ j(EQUAL, &ok); |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1534 ASSERT(deopt != NULL); | 1535 ASSERT(deopt != NULL); |
| 1535 | 1536 |
| 1536 // Field guard class has been initialized and is known. | 1537 // Field guard class has been initialized and is known. |
| 1537 if (value_cid == kDynamicCid) { | 1538 if (value_cid == kDynamicCid) { |
| 1538 // Value's class id is not known. | 1539 // Value's class id is not known. |
| 1539 __ testq(value_reg, Immediate(kSmiTagMask)); | 1540 __ testq(value_reg, Immediate(kSmiTagMask)); |
| 1540 | 1541 |
| 1541 if (field_cid != kSmiCid) { | 1542 if (field_cid != kSmiCid) { |
| 1542 __ j(ZERO, fail); | 1543 __ j(ZERO, fail); |
| 1543 __ LoadClassId(value_cid_reg, value_reg); | 1544 __ LoadClassId(value_cid_reg, value_reg); |
| 1544 __ CompareImmediate(value_cid_reg, Immediate(field_cid), PP); | 1545 __ CompareImmediate(value_cid_reg, Immediate(field_cid)); |
| 1545 } | 1546 } |
| 1546 | 1547 |
| 1547 if (field().is_nullable() && (field_cid != kNullCid)) { | 1548 if (field().is_nullable() && (field_cid != kNullCid)) { |
| 1548 __ j(EQUAL, &ok); | 1549 __ j(EQUAL, &ok); |
| 1549 __ CompareObject(value_reg, Object::null_object(), PP); | 1550 __ CompareObject(value_reg, Object::null_object()); |
| 1550 } | 1551 } |
| 1551 | 1552 |
| 1552 __ j(NOT_EQUAL, fail); | 1553 __ j(NOT_EQUAL, fail); |
| 1553 } else { | 1554 } else { |
| 1554 // Both value's and field's class id is known. | 1555 // Both value's and field's class id is known. |
| 1555 ASSERT((value_cid != field_cid) && (value_cid != nullability)); | 1556 ASSERT((value_cid != field_cid) && (value_cid != nullability)); |
| 1556 __ jmp(fail); | 1557 __ jmp(fail); |
| 1557 } | 1558 } |
| 1558 } | 1559 } |
| 1559 __ Bind(&ok); | 1560 __ Bind(&ok); |
| (...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1595 const Register value_reg = locs()->in(0).reg(); | 1596 const Register value_reg = locs()->in(0).reg(); |
| 1596 | 1597 |
| 1597 if (!compiler->is_optimizing() || | 1598 if (!compiler->is_optimizing() || |
| 1598 (field().guarded_list_length() == Field::kUnknownFixedLength)) { | 1599 (field().guarded_list_length() == Field::kUnknownFixedLength)) { |
| 1599 const Register field_reg = locs()->temp(0).reg(); | 1600 const Register field_reg = locs()->temp(0).reg(); |
| 1600 const Register offset_reg = locs()->temp(1).reg(); | 1601 const Register offset_reg = locs()->temp(1).reg(); |
| 1601 const Register length_reg = locs()->temp(2).reg(); | 1602 const Register length_reg = locs()->temp(2).reg(); |
| 1602 | 1603 |
| 1603 Label ok; | 1604 Label ok; |
| 1604 | 1605 |
| 1605 __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), PP); | 1606 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); |
| 1606 | 1607 |
| 1607 __ movsxb(offset_reg, FieldAddress(field_reg, | 1608 __ movsxb(offset_reg, FieldAddress(field_reg, |
| 1608 Field::guarded_list_length_in_object_offset_offset())); | 1609 Field::guarded_list_length_in_object_offset_offset())); |
| 1609 __ movq(length_reg, FieldAddress(field_reg, | 1610 __ movq(length_reg, FieldAddress(field_reg, |
| 1610 Field::guarded_list_length_offset())); | 1611 Field::guarded_list_length_offset())); |
| 1611 | 1612 |
| 1612 __ cmpq(offset_reg, Immediate(0)); | 1613 __ cmpq(offset_reg, Immediate(0)); |
| 1613 __ j(NEGATIVE, &ok); | 1614 __ j(NEGATIVE, &ok); |
| 1614 | 1615 |
| 1615 // Load the length from the value. GuardFieldClass already verified that | 1616 // Load the length from the value. GuardFieldClass already verified that |
| (...skipping 16 matching lines...) Expand all Loading... |
| 1632 __ Bind(&ok); | 1633 __ Bind(&ok); |
| 1633 } else { | 1634 } else { |
| 1634 ASSERT(compiler->is_optimizing()); | 1635 ASSERT(compiler->is_optimizing()); |
| 1635 ASSERT(field().guarded_list_length() >= 0); | 1636 ASSERT(field().guarded_list_length() >= 0); |
| 1636 ASSERT(field().guarded_list_length_in_object_offset() != | 1637 ASSERT(field().guarded_list_length_in_object_offset() != |
| 1637 Field::kUnknownLengthOffset); | 1638 Field::kUnknownLengthOffset); |
| 1638 | 1639 |
| 1639 __ CompareImmediate( | 1640 __ CompareImmediate( |
| 1640 FieldAddress(value_reg, | 1641 FieldAddress(value_reg, |
| 1641 field().guarded_list_length_in_object_offset()), | 1642 field().guarded_list_length_in_object_offset()), |
| 1642 Immediate(Smi::RawValue(field().guarded_list_length())), | 1643 Immediate(Smi::RawValue(field().guarded_list_length()))); |
| 1643 PP); | |
| 1644 __ j(NOT_EQUAL, deopt); | 1644 __ j(NOT_EQUAL, deopt); |
| 1645 } | 1645 } |
| 1646 } | 1646 } |
| 1647 | 1647 |
| 1648 | 1648 |
| 1649 class BoxAllocationSlowPath : public SlowPathCode { | 1649 class BoxAllocationSlowPath : public SlowPathCode { |
| 1650 public: | 1650 public: |
| 1651 BoxAllocationSlowPath(Instruction* instruction, | 1651 BoxAllocationSlowPath(Instruction* instruction, |
| 1652 const Class& cls, | 1652 const Class& cls, |
| 1653 Register result) | 1653 Register result) |
| (...skipping 29 matching lines...) Expand all Loading... |
| 1683 } | 1683 } |
| 1684 | 1684 |
| 1685 static void Allocate(FlowGraphCompiler* compiler, | 1685 static void Allocate(FlowGraphCompiler* compiler, |
| 1686 Instruction* instruction, | 1686 Instruction* instruction, |
| 1687 const Class& cls, | 1687 const Class& cls, |
| 1688 Register result) { | 1688 Register result) { |
| 1689 if (compiler->intrinsic_mode()) { | 1689 if (compiler->intrinsic_mode()) { |
| 1690 __ TryAllocate(cls, | 1690 __ TryAllocate(cls, |
| 1691 compiler->intrinsic_slow_path_label(), | 1691 compiler->intrinsic_slow_path_label(), |
| 1692 Assembler::kFarJump, | 1692 Assembler::kFarJump, |
| 1693 result, | 1693 result); |
| 1694 PP); | |
| 1695 } else { | 1694 } else { |
| 1696 BoxAllocationSlowPath* slow_path = | 1695 BoxAllocationSlowPath* slow_path = |
| 1697 new BoxAllocationSlowPath(instruction, cls, result); | 1696 new BoxAllocationSlowPath(instruction, cls, result); |
| 1698 compiler->AddSlowPathCode(slow_path); | 1697 compiler->AddSlowPathCode(slow_path); |
| 1699 | 1698 |
| 1700 __ TryAllocate(cls, | 1699 __ TryAllocate(cls, |
| 1701 slow_path->entry_label(), | 1700 slow_path->entry_label(), |
| 1702 Assembler::kFarJump, | 1701 Assembler::kFarJump, |
| 1703 result, | 1702 result); |
| 1704 PP); | |
| 1705 __ Bind(slow_path->exit_label()); | 1703 __ Bind(slow_path->exit_label()); |
| 1706 } | 1704 } |
| 1707 } | 1705 } |
| 1708 | 1706 |
| 1709 private: | 1707 private: |
| 1710 Instruction* instruction_; | 1708 Instruction* instruction_; |
| 1711 const Class& cls_; | 1709 const Class& cls_; |
| 1712 const Register result_; | 1710 const Register result_; |
| 1713 }; | 1711 }; |
| 1714 | 1712 |
| (...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1750 | 1748 |
| 1751 static void EnsureMutableBox(FlowGraphCompiler* compiler, | 1749 static void EnsureMutableBox(FlowGraphCompiler* compiler, |
| 1752 StoreInstanceFieldInstr* instruction, | 1750 StoreInstanceFieldInstr* instruction, |
| 1753 Register box_reg, | 1751 Register box_reg, |
| 1754 const Class& cls, | 1752 const Class& cls, |
| 1755 Register instance_reg, | 1753 Register instance_reg, |
| 1756 intptr_t offset, | 1754 intptr_t offset, |
| 1757 Register temp) { | 1755 Register temp) { |
| 1758 Label done; | 1756 Label done; |
| 1759 __ movq(box_reg, FieldAddress(instance_reg, offset)); | 1757 __ movq(box_reg, FieldAddress(instance_reg, offset)); |
| 1760 __ CompareObject(box_reg, Object::null_object(), PP); | 1758 __ CompareObject(box_reg, Object::null_object()); |
| 1761 __ j(NOT_EQUAL, &done); | 1759 __ j(NOT_EQUAL, &done); |
| 1762 BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg); | 1760 BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg); |
| 1763 __ movq(temp, box_reg); | 1761 __ movq(temp, box_reg); |
| 1764 __ StoreIntoObject(instance_reg, | 1762 __ StoreIntoObject(instance_reg, |
| 1765 FieldAddress(instance_reg, offset), | 1763 FieldAddress(instance_reg, offset), |
| 1766 temp); | 1764 temp); |
| 1767 | 1765 |
| 1768 __ Bind(&done); | 1766 __ Bind(&done); |
| 1769 } | 1767 } |
| 1770 | 1768 |
| (...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1834 // Value input is a writable register and should be manually preserved | 1832 // Value input is a writable register and should be manually preserved |
| 1835 // across allocation slow-path. | 1833 // across allocation slow-path. |
| 1836 locs()->live_registers()->Add(locs()->in(1), kTagged); | 1834 locs()->live_registers()->Add(locs()->in(1), kTagged); |
| 1837 } | 1835 } |
| 1838 | 1836 |
| 1839 Label store_pointer; | 1837 Label store_pointer; |
| 1840 Label store_double; | 1838 Label store_double; |
| 1841 Label store_float32x4; | 1839 Label store_float32x4; |
| 1842 Label store_float64x2; | 1840 Label store_float64x2; |
| 1843 | 1841 |
| 1844 __ LoadObject(temp, Field::ZoneHandle(field().raw()), PP); | 1842 __ LoadObject(temp, Field::ZoneHandle(field().raw())); |
| 1845 | 1843 |
| 1846 __ cmpl(FieldAddress(temp, Field::is_nullable_offset()), | 1844 __ cmpl(FieldAddress(temp, Field::is_nullable_offset()), |
| 1847 Immediate(kNullCid)); | 1845 Immediate(kNullCid)); |
| 1848 __ j(EQUAL, &store_pointer); | 1846 __ j(EQUAL, &store_pointer); |
| 1849 | 1847 |
| 1850 __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset())); | 1848 __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset())); |
| 1851 __ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); | 1849 __ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); |
| 1852 __ j(ZERO, &store_pointer); | 1850 __ j(ZERO, &store_pointer); |
| 1853 | 1851 |
| 1854 __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()), | 1852 __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()), |
| (...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1920 Register value_reg = locs()->in(1).reg(); | 1918 Register value_reg = locs()->in(1).reg(); |
| 1921 __ StoreIntoObject(instance_reg, | 1919 __ StoreIntoObject(instance_reg, |
| 1922 FieldAddress(instance_reg, offset_in_bytes_), | 1920 FieldAddress(instance_reg, offset_in_bytes_), |
| 1923 value_reg, | 1921 value_reg, |
| 1924 CanValueBeSmi()); | 1922 CanValueBeSmi()); |
| 1925 } else { | 1923 } else { |
| 1926 if (locs()->in(1).IsConstant()) { | 1924 if (locs()->in(1).IsConstant()) { |
| 1927 __ StoreIntoObjectNoBarrier(instance_reg, | 1925 __ StoreIntoObjectNoBarrier(instance_reg, |
| 1928 FieldAddress(instance_reg, offset_in_bytes_), | 1926 FieldAddress(instance_reg, offset_in_bytes_), |
| 1929 locs()->in(1).constant(), | 1927 locs()->in(1).constant(), |
| 1930 PP, | |
| 1931 is_object_reference_initialization_ ? | 1928 is_object_reference_initialization_ ? |
| 1932 Assembler::kEmptyOrSmiOrNull : | 1929 Assembler::kEmptyOrSmiOrNull : |
| 1933 Assembler::kHeapObjectOrSmi); | 1930 Assembler::kHeapObjectOrSmi); |
| 1934 } else { | 1931 } else { |
| 1935 Register value_reg = locs()->in(1).reg(); | 1932 Register value_reg = locs()->in(1).reg(); |
| 1936 __ StoreIntoObjectNoBarrier(instance_reg, | 1933 __ StoreIntoObjectNoBarrier(instance_reg, |
| 1937 FieldAddress(instance_reg, offset_in_bytes_), | 1934 FieldAddress(instance_reg, offset_in_bytes_), |
| 1938 value_reg, | 1935 value_reg, |
| 1939 is_object_reference_initialization_ ? | 1936 is_object_reference_initialization_ ? |
| 1940 Assembler::kEmptyOrSmiOrNull : | 1937 Assembler::kEmptyOrSmiOrNull : |
| (...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1977 : Location::RequiresRegister()); | 1974 : Location::RequiresRegister()); |
| 1978 locs->set_temp(0, Location::RequiresRegister()); | 1975 locs->set_temp(0, Location::RequiresRegister()); |
| 1979 return locs; | 1976 return locs; |
| 1980 } | 1977 } |
| 1981 | 1978 |
| 1982 | 1979 |
| 1983 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1980 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1984 Register value = locs()->in(0).reg(); | 1981 Register value = locs()->in(0).reg(); |
| 1985 Register temp = locs()->temp(0).reg(); | 1982 Register temp = locs()->temp(0).reg(); |
| 1986 | 1983 |
| 1987 __ LoadObject(temp, field(), PP); | 1984 __ LoadObject(temp, field()); |
| 1988 if (this->value()->NeedsStoreBuffer()) { | 1985 if (this->value()->NeedsStoreBuffer()) { |
| 1989 __ StoreIntoObject(temp, | 1986 __ StoreIntoObject(temp, |
| 1990 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); | 1987 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); |
| 1991 } else { | 1988 } else { |
| 1992 __ StoreIntoObjectNoBarrier( | 1989 __ StoreIntoObjectNoBarrier( |
| 1993 temp, FieldAddress(temp, Field::value_offset()), value); | 1990 temp, FieldAddress(temp, Field::value_offset()), value); |
| 1994 } | 1991 } |
| 1995 } | 1992 } |
| 1996 | 1993 |
| 1997 | 1994 |
| (...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2063 FieldAddress(RAX, Array::length_offset()), | 2060 FieldAddress(RAX, Array::length_offset()), |
| 2064 kLengthReg); | 2061 kLengthReg); |
| 2065 | 2062 |
| 2066 // Initialize all array elements to raw_null. | 2063 // Initialize all array elements to raw_null. |
| 2067 // RAX: new object start as a tagged pointer. | 2064 // RAX: new object start as a tagged pointer. |
| 2068 // RCX: new object end address. | 2065 // RCX: new object end address. |
| 2069 // RDI: iterator which initially points to the start of the variable | 2066 // RDI: iterator which initially points to the start of the variable |
| 2070 // data area to be initialized. | 2067 // data area to be initialized. |
| 2071 if (num_elements > 0) { | 2068 if (num_elements > 0) { |
| 2072 const intptr_t array_size = instance_size - sizeof(RawArray); | 2069 const intptr_t array_size = instance_size - sizeof(RawArray); |
| 2073 __ LoadObject(R12, Object::null_object(), PP); | 2070 __ LoadObject(R12, Object::null_object()); |
| 2074 __ leaq(RDI, FieldAddress(RAX, sizeof(RawArray))); | 2071 __ leaq(RDI, FieldAddress(RAX, sizeof(RawArray))); |
| 2075 if (array_size < (kInlineArraySize * kWordSize)) { | 2072 if (array_size < (kInlineArraySize * kWordSize)) { |
| 2076 intptr_t current_offset = 0; | 2073 intptr_t current_offset = 0; |
| 2077 while (current_offset < array_size) { | 2074 while (current_offset < array_size) { |
| 2078 __ InitializeFieldNoBarrier(RAX, Address(RDI, current_offset), R12); | 2075 __ InitializeFieldNoBarrier(RAX, Address(RDI, current_offset), R12); |
| 2079 current_offset += kWordSize; | 2076 current_offset += kWordSize; |
| 2080 } | 2077 } |
| 2081 } else { | 2078 } else { |
| 2082 Label init_loop; | 2079 Label init_loop; |
| 2083 __ Bind(&init_loop); | 2080 __ Bind(&init_loop); |
| (...skipping 17 matching lines...) Expand all Loading... |
| 2101 | 2098 |
| 2102 Label slow_path, done; | 2099 Label slow_path, done; |
| 2103 if (compiler->is_optimizing() && | 2100 if (compiler->is_optimizing() && |
| 2104 num_elements()->BindsToConstant() && | 2101 num_elements()->BindsToConstant() && |
| 2105 num_elements()->BoundConstant().IsSmi()) { | 2102 num_elements()->BoundConstant().IsSmi()) { |
| 2106 const intptr_t length = Smi::Cast(num_elements()->BoundConstant()).Value(); | 2103 const intptr_t length = Smi::Cast(num_elements()->BoundConstant()).Value(); |
| 2107 if ((length >= 0) && (length <= Array::kMaxElements)) { | 2104 if ((length >= 0) && (length <= Array::kMaxElements)) { |
| 2108 Label slow_path, done; | 2105 Label slow_path, done; |
| 2109 InlineArrayAllocation(compiler, length, &slow_path, &done); | 2106 InlineArrayAllocation(compiler, length, &slow_path, &done); |
| 2110 __ Bind(&slow_path); | 2107 __ Bind(&slow_path); |
| 2111 __ PushObject(Object::null_object(), PP); // Make room for the result. | 2108 __ PushObject(Object::null_object()); // Make room for the result. |
| 2112 __ pushq(kLengthReg); | 2109 __ pushq(kLengthReg); |
| 2113 __ pushq(kElemTypeReg); | 2110 __ pushq(kElemTypeReg); |
| 2114 compiler->GenerateRuntimeCall(token_pos(), | 2111 compiler->GenerateRuntimeCall(token_pos(), |
| 2115 deopt_id(), | 2112 deopt_id(), |
| 2116 kAllocateArrayRuntimeEntry, | 2113 kAllocateArrayRuntimeEntry, |
| 2117 2, | 2114 2, |
| 2118 locs()); | 2115 locs()); |
| 2119 __ Drop(2); | 2116 __ Drop(2); |
| 2120 __ popq(kResultReg); | 2117 __ popq(kResultReg); |
| 2121 __ Bind(&done); | 2118 __ Bind(&done); |
| (...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2191 Register result = locs()->out(0).reg(); | 2188 Register result = locs()->out(0).reg(); |
| 2192 if (IsPotentialUnboxedLoad()) { | 2189 if (IsPotentialUnboxedLoad()) { |
| 2193 Register temp = locs()->temp(1).reg(); | 2190 Register temp = locs()->temp(1).reg(); |
| 2194 XmmRegister value = locs()->temp(0).fpu_reg(); | 2191 XmmRegister value = locs()->temp(0).fpu_reg(); |
| 2195 | 2192 |
| 2196 Label load_pointer; | 2193 Label load_pointer; |
| 2197 Label load_double; | 2194 Label load_double; |
| 2198 Label load_float32x4; | 2195 Label load_float32x4; |
| 2199 Label load_float64x2; | 2196 Label load_float64x2; |
| 2200 | 2197 |
| 2201 __ LoadObject(result, Field::ZoneHandle(field()->raw()), PP); | 2198 __ LoadObject(result, Field::ZoneHandle(field()->raw())); |
| 2202 | 2199 |
| 2203 __ cmpl(FieldAddress(result, Field::is_nullable_offset()), | 2200 __ cmpl(FieldAddress(result, Field::is_nullable_offset()), |
| 2204 Immediate(kNullCid)); | 2201 Immediate(kNullCid)); |
| 2205 __ j(EQUAL, &load_pointer); | 2202 __ j(EQUAL, &load_pointer); |
| 2206 | 2203 |
| 2207 __ cmpl(FieldAddress(result, Field::guarded_cid_offset()), | 2204 __ cmpl(FieldAddress(result, Field::guarded_cid_offset()), |
| 2208 Immediate(kDoubleCid)); | 2205 Immediate(kDoubleCid)); |
| 2209 __ j(EQUAL, &load_double); | 2206 __ j(EQUAL, &load_double); |
| 2210 | 2207 |
| 2211 __ cmpl(FieldAddress(result, Field::guarded_cid_offset()), | 2208 __ cmpl(FieldAddress(result, Field::guarded_cid_offset()), |
| (...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2271 return locs; | 2268 return locs; |
| 2272 } | 2269 } |
| 2273 | 2270 |
| 2274 | 2271 |
| 2275 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2272 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2276 Register instantiator_reg = locs()->in(0).reg(); | 2273 Register instantiator_reg = locs()->in(0).reg(); |
| 2277 Register result_reg = locs()->out(0).reg(); | 2274 Register result_reg = locs()->out(0).reg(); |
| 2278 | 2275 |
| 2279 // 'instantiator_reg' is the instantiator TypeArguments object (or null). | 2276 // 'instantiator_reg' is the instantiator TypeArguments object (or null). |
| 2280 // A runtime call to instantiate the type is required. | 2277 // A runtime call to instantiate the type is required. |
| 2281 __ PushObject(Object::null_object(), PP); // Make room for the result. | 2278 __ PushObject(Object::null_object()); // Make room for the result. |
| 2282 __ PushObject(type(), PP); | 2279 __ PushObject(type()); |
| 2283 __ pushq(instantiator_reg); // Push instantiator type arguments. | 2280 __ pushq(instantiator_reg); // Push instantiator type arguments. |
| 2284 compiler->GenerateRuntimeCall(token_pos(), | 2281 compiler->GenerateRuntimeCall(token_pos(), |
| 2285 deopt_id(), | 2282 deopt_id(), |
| 2286 kInstantiateTypeRuntimeEntry, | 2283 kInstantiateTypeRuntimeEntry, |
| 2287 2, | 2284 2, |
| 2288 locs()); | 2285 locs()); |
| 2289 __ Drop(2); // Drop instantiator and uninstantiated type. | 2286 __ Drop(2); // Drop instantiator and uninstantiated type. |
| 2290 __ popq(result_reg); // Pop instantiated type. | 2287 __ popq(result_reg); // Pop instantiated type. |
| 2291 ASSERT(instantiator_reg == result_reg); | 2288 ASSERT(instantiator_reg == result_reg); |
| 2292 } | 2289 } |
| (...skipping 21 matching lines...) Expand all Loading... |
| 2314 // 'instantiator_reg' is the instantiator TypeArguments object (or null). | 2311 // 'instantiator_reg' is the instantiator TypeArguments object (or null). |
| 2315 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 2312 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
| 2316 !type_arguments().CanShareInstantiatorTypeArguments( | 2313 !type_arguments().CanShareInstantiatorTypeArguments( |
| 2317 instantiator_class())); | 2314 instantiator_class())); |
| 2318 // If the instantiator is null and if the type argument vector | 2315 // If the instantiator is null and if the type argument vector |
| 2319 // instantiated from null becomes a vector of dynamic, then use null as | 2316 // instantiated from null becomes a vector of dynamic, then use null as |
| 2320 // the type arguments. | 2317 // the type arguments. |
| 2321 Label type_arguments_instantiated; | 2318 Label type_arguments_instantiated; |
| 2322 const intptr_t len = type_arguments().Length(); | 2319 const intptr_t len = type_arguments().Length(); |
| 2323 if (type_arguments().IsRawInstantiatedRaw(len)) { | 2320 if (type_arguments().IsRawInstantiatedRaw(len)) { |
| 2324 __ CompareObject(instantiator_reg, Object::null_object(), PP); | 2321 __ CompareObject(instantiator_reg, Object::null_object()); |
| 2325 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); | 2322 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); |
| 2326 } | 2323 } |
| 2327 | 2324 |
| 2328 // Lookup cache before calling runtime. | 2325 // Lookup cache before calling runtime. |
| 2329 // TODO(fschneider): Consider moving this into a shared stub to reduce | 2326 // TODO(fschneider): Consider moving this into a shared stub to reduce |
| 2330 // generated code size. | 2327 // generated code size. |
| 2331 __ LoadObject(RDI, type_arguments(), PP); | 2328 __ LoadObject(RDI, type_arguments()); |
| 2332 __ movq(RDI, FieldAddress(RDI, TypeArguments::instantiations_offset())); | 2329 __ movq(RDI, FieldAddress(RDI, TypeArguments::instantiations_offset())); |
| 2333 __ leaq(RDI, FieldAddress(RDI, Array::data_offset())); | 2330 __ leaq(RDI, FieldAddress(RDI, Array::data_offset())); |
| 2334 // The instantiations cache is initialized with Object::zero_array() and is | 2331 // The instantiations cache is initialized with Object::zero_array() and is |
| 2335 // therefore guaranteed to contain kNoInstantiator. No length check needed. | 2332 // therefore guaranteed to contain kNoInstantiator. No length check needed. |
| 2336 Label loop, found, slow_case; | 2333 Label loop, found, slow_case; |
| 2337 __ Bind(&loop); | 2334 __ Bind(&loop); |
| 2338 __ movq(RDX, Address(RDI, 0 * kWordSize)); // Cached instantiator. | 2335 __ movq(RDX, Address(RDI, 0 * kWordSize)); // Cached instantiator. |
| 2339 __ cmpq(RDX, RAX); | 2336 __ cmpq(RDX, RAX); |
| 2340 __ j(EQUAL, &found, Assembler::kNearJump); | 2337 __ j(EQUAL, &found, Assembler::kNearJump); |
| 2341 __ addq(RDI, Immediate(2 * kWordSize)); | 2338 __ addq(RDI, Immediate(2 * kWordSize)); |
| 2342 __ cmpq(RDX, Immediate(Smi::RawValue(StubCode::kNoInstantiator))); | 2339 __ cmpq(RDX, Immediate(Smi::RawValue(StubCode::kNoInstantiator))); |
| 2343 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); | 2340 __ j(NOT_EQUAL, &loop, Assembler::kNearJump); |
| 2344 __ jmp(&slow_case, Assembler::kNearJump); | 2341 __ jmp(&slow_case, Assembler::kNearJump); |
| 2345 __ Bind(&found); | 2342 __ Bind(&found); |
| 2346 __ movq(RAX, Address(RDI, 1 * kWordSize)); // Cached instantiated args. | 2343 __ movq(RAX, Address(RDI, 1 * kWordSize)); // Cached instantiated args. |
| 2347 __ jmp(&type_arguments_instantiated, Assembler::kNearJump); | 2344 __ jmp(&type_arguments_instantiated, Assembler::kNearJump); |
| 2348 | 2345 |
| 2349 __ Bind(&slow_case); | 2346 __ Bind(&slow_case); |
| 2350 // Instantiate non-null type arguments. | 2347 // Instantiate non-null type arguments. |
| 2351 // A runtime call to instantiate the type arguments is required. | 2348 // A runtime call to instantiate the type arguments is required. |
| 2352 __ PushObject(Object::null_object(), PP); // Make room for the result. | 2349 __ PushObject(Object::null_object()); // Make room for the result. |
| 2353 __ PushObject(type_arguments(), PP); | 2350 __ PushObject(type_arguments()); |
| 2354 __ pushq(instantiator_reg); // Push instantiator type arguments. | 2351 __ pushq(instantiator_reg); // Push instantiator type arguments. |
| 2355 compiler->GenerateRuntimeCall(token_pos(), | 2352 compiler->GenerateRuntimeCall(token_pos(), |
| 2356 deopt_id(), | 2353 deopt_id(), |
| 2357 kInstantiateTypeArgumentsRuntimeEntry, | 2354 kInstantiateTypeArgumentsRuntimeEntry, |
| 2358 2, | 2355 2, |
| 2359 locs()); | 2356 locs()); |
| 2360 __ Drop(2); // Drop instantiator and uninstantiated type arguments. | 2357 __ Drop(2); // Drop instantiator and uninstantiated type arguments. |
| 2361 __ popq(result_reg); // Pop instantiated type arguments. | 2358 __ popq(result_reg); // Pop instantiated type arguments. |
| 2362 __ Bind(&type_arguments_instantiated); | 2359 __ Bind(&type_arguments_instantiated); |
| 2363 ASSERT(instantiator_reg == result_reg); | 2360 ASSERT(instantiator_reg == result_reg); |
| (...skipping 22 matching lines...) Expand all Loading... |
| 2386 | 2383 |
| 2387 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 2384 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2388 __ Comment("AllocateContextSlowPath"); | 2385 __ Comment("AllocateContextSlowPath"); |
| 2389 __ Bind(entry_label()); | 2386 __ Bind(entry_label()); |
| 2390 | 2387 |
| 2391 LocationSummary* locs = instruction_->locs(); | 2388 LocationSummary* locs = instruction_->locs(); |
| 2392 locs->live_registers()->Remove(locs->out(0)); | 2389 locs->live_registers()->Remove(locs->out(0)); |
| 2393 | 2390 |
| 2394 compiler->SaveLiveRegisters(locs); | 2391 compiler->SaveLiveRegisters(locs); |
| 2395 | 2392 |
| 2396 __ LoadImmediate(R10, Immediate(instruction_->num_context_variables()), PP); | 2393 __ LoadImmediate(R10, Immediate(instruction_->num_context_variables())); |
| 2397 const ExternalLabel label(StubCode::AllocateContextEntryPoint()); | 2394 const ExternalLabel label(StubCode::AllocateContextEntryPoint()); |
| 2398 compiler->GenerateCall(instruction_->token_pos(), | 2395 compiler->GenerateCall(instruction_->token_pos(), |
| 2399 &label, | 2396 &label, |
| 2400 RawPcDescriptors::kOther, | 2397 RawPcDescriptors::kOther, |
| 2401 locs); | 2398 locs); |
| 2402 ASSERT(instruction_->locs()->out(0).reg() == RAX); | 2399 ASSERT(instruction_->locs()->out(0).reg() == RAX); |
| 2403 compiler->RestoreLiveRegisters(instruction_->locs()); | 2400 compiler->RestoreLiveRegisters(instruction_->locs()); |
| 2404 __ jmp(exit_label()); | 2401 __ jmp(exit_label()); |
| 2405 } | 2402 } |
| 2406 | 2403 |
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2441 locs->set_temp(0, Location::RegisterLocation(R10)); | 2438 locs->set_temp(0, Location::RegisterLocation(R10)); |
| 2442 locs->set_out(0, Location::RegisterLocation(RAX)); | 2439 locs->set_out(0, Location::RegisterLocation(RAX)); |
| 2443 return locs; | 2440 return locs; |
| 2444 } | 2441 } |
| 2445 | 2442 |
| 2446 | 2443 |
| 2447 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2444 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2448 ASSERT(locs()->temp(0).reg() == R10); | 2445 ASSERT(locs()->temp(0).reg() == R10); |
| 2449 ASSERT(locs()->out(0).reg() == RAX); | 2446 ASSERT(locs()->out(0).reg() == RAX); |
| 2450 | 2447 |
| 2451 __ LoadImmediate(R10, Immediate(num_context_variables()), PP); | 2448 __ LoadImmediate(R10, Immediate(num_context_variables())); |
| 2452 const ExternalLabel label(StubCode::AllocateContextEntryPoint()); | 2449 const ExternalLabel label(StubCode::AllocateContextEntryPoint()); |
| 2453 compiler->GenerateCall(token_pos(), | 2450 compiler->GenerateCall(token_pos(), |
| 2454 &label, | 2451 &label, |
| 2455 RawPcDescriptors::kOther, | 2452 RawPcDescriptors::kOther, |
| 2456 locs()); | 2453 locs()); |
| 2457 } | 2454 } |
| 2458 | 2455 |
| 2459 | 2456 |
| 2460 LocationSummary* InitStaticFieldInstr::MakeLocationSummary(Zone* zone, | 2457 LocationSummary* InitStaticFieldInstr::MakeLocationSummary(Zone* zone, |
| 2461 bool opt) const { | 2458 bool opt) const { |
| 2462 const intptr_t kNumInputs = 1; | 2459 const intptr_t kNumInputs = 1; |
| 2463 const intptr_t kNumTemps = 1; | 2460 const intptr_t kNumTemps = 1; |
| 2464 LocationSummary* locs = new(zone) LocationSummary( | 2461 LocationSummary* locs = new(zone) LocationSummary( |
| 2465 zone, kNumInputs, kNumTemps, LocationSummary::kCall); | 2462 zone, kNumInputs, kNumTemps, LocationSummary::kCall); |
| 2466 locs->set_in(0, Location::RegisterLocation(RAX)); | 2463 locs->set_in(0, Location::RegisterLocation(RAX)); |
| 2467 locs->set_temp(0, Location::RegisterLocation(RCX)); | 2464 locs->set_temp(0, Location::RegisterLocation(RCX)); |
| 2468 return locs; | 2465 return locs; |
| 2469 } | 2466 } |
| 2470 | 2467 |
| 2471 | 2468 |
| 2472 void InitStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2469 void InitStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2473 Register field = locs()->in(0).reg(); | 2470 Register field = locs()->in(0).reg(); |
| 2474 Register temp = locs()->temp(0).reg(); | 2471 Register temp = locs()->temp(0).reg(); |
| 2475 | 2472 |
| 2476 Label call_runtime, no_call; | 2473 Label call_runtime, no_call; |
| 2477 | 2474 |
| 2478 __ movq(temp, FieldAddress(field, Field::value_offset())); | 2475 __ movq(temp, FieldAddress(field, Field::value_offset())); |
| 2479 __ CompareObject(temp, Object::sentinel(), PP); | 2476 __ CompareObject(temp, Object::sentinel()); |
| 2480 __ j(EQUAL, &call_runtime); | 2477 __ j(EQUAL, &call_runtime); |
| 2481 | 2478 |
| 2482 __ CompareObject(temp, Object::transition_sentinel(), PP); | 2479 __ CompareObject(temp, Object::transition_sentinel()); |
| 2483 __ j(NOT_EQUAL, &no_call); | 2480 __ j(NOT_EQUAL, &no_call); |
| 2484 | 2481 |
| 2485 __ Bind(&call_runtime); | 2482 __ Bind(&call_runtime); |
| 2486 __ PushObject(Object::null_object(), PP); // Make room for (unused) result. | 2483 __ PushObject(Object::null_object()); // Make room for (unused) result. |
| 2487 __ pushq(field); | 2484 __ pushq(field); |
| 2488 compiler->GenerateRuntimeCall(token_pos(), | 2485 compiler->GenerateRuntimeCall(token_pos(), |
| 2489 deopt_id(), | 2486 deopt_id(), |
| 2490 kInitStaticFieldRuntimeEntry, | 2487 kInitStaticFieldRuntimeEntry, |
| 2491 1, | 2488 1, |
| 2492 locs()); | 2489 locs()); |
| 2493 __ Drop(2); // Remove argument and unused result. | 2490 __ Drop(2); // Remove argument and unused result. |
| 2494 __ Bind(&no_call); | 2491 __ Bind(&no_call); |
| 2495 } | 2492 } |
| 2496 | 2493 |
| 2497 | 2494 |
| 2498 LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone, | 2495 LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone, |
| 2499 bool opt) const { | 2496 bool opt) const { |
| 2500 const intptr_t kNumInputs = 1; | 2497 const intptr_t kNumInputs = 1; |
| 2501 const intptr_t kNumTemps = 0; | 2498 const intptr_t kNumTemps = 0; |
| 2502 LocationSummary* locs = new(zone) LocationSummary( | 2499 LocationSummary* locs = new(zone) LocationSummary( |
| 2503 zone, kNumInputs, kNumTemps, LocationSummary::kCall); | 2500 zone, kNumInputs, kNumTemps, LocationSummary::kCall); |
| 2504 locs->set_in(0, Location::RegisterLocation(RAX)); | 2501 locs->set_in(0, Location::RegisterLocation(RAX)); |
| 2505 locs->set_out(0, Location::RegisterLocation(RAX)); | 2502 locs->set_out(0, Location::RegisterLocation(RAX)); |
| 2506 return locs; | 2503 return locs; |
| 2507 } | 2504 } |
| 2508 | 2505 |
| 2509 | 2506 |
| 2510 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2507 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2511 Register context_value = locs()->in(0).reg(); | 2508 Register context_value = locs()->in(0).reg(); |
| 2512 Register result = locs()->out(0).reg(); | 2509 Register result = locs()->out(0).reg(); |
| 2513 | 2510 |
| 2514 __ PushObject(Object::null_object(), PP); // Make room for the result. | 2511 __ PushObject(Object::null_object()); // Make room for the result. |
| 2515 __ pushq(context_value); | 2512 __ pushq(context_value); |
| 2516 compiler->GenerateRuntimeCall(token_pos(), | 2513 compiler->GenerateRuntimeCall(token_pos(), |
| 2517 deopt_id(), | 2514 deopt_id(), |
| 2518 kCloneContextRuntimeEntry, | 2515 kCloneContextRuntimeEntry, |
| 2519 1, | 2516 1, |
| 2520 locs()); | 2517 locs()); |
| 2521 __ popq(result); // Remove argument. | 2518 __ popq(result); // Remove argument. |
| 2522 __ popq(result); // Get result (cloned context). | 2519 __ popq(result); // Get result (cloned context). |
| 2523 } | 2520 } |
| 2524 | 2521 |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2578 public: | 2575 public: |
| 2579 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction) | 2576 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction) |
| 2580 : instruction_(instruction) { } | 2577 : instruction_(instruction) { } |
| 2581 | 2578 |
| 2582 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 2579 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2583 if (FLAG_use_osr && osr_entry_label()->IsLinked()) { | 2580 if (FLAG_use_osr && osr_entry_label()->IsLinked()) { |
| 2584 uword flags_address = Isolate::Current()->stack_overflow_flags_address(); | 2581 uword flags_address = Isolate::Current()->stack_overflow_flags_address(); |
| 2585 Register temp = instruction_->locs()->temp(0).reg(); | 2582 Register temp = instruction_->locs()->temp(0).reg(); |
| 2586 __ Comment("CheckStackOverflowSlowPathOsr"); | 2583 __ Comment("CheckStackOverflowSlowPathOsr"); |
| 2587 __ Bind(osr_entry_label()); | 2584 __ Bind(osr_entry_label()); |
| 2588 __ LoadImmediate(temp, Immediate(flags_address), PP); | 2585 __ LoadImmediate(temp, Immediate(flags_address)); |
| 2589 __ movq(Address(temp, 0), Immediate(Isolate::kOsrRequest)); | 2586 __ movq(Address(temp, 0), Immediate(Isolate::kOsrRequest)); |
| 2590 } | 2587 } |
| 2591 __ Comment("CheckStackOverflowSlowPath"); | 2588 __ Comment("CheckStackOverflowSlowPath"); |
| 2592 __ Bind(entry_label()); | 2589 __ Bind(entry_label()); |
| 2593 compiler->SaveLiveRegisters(instruction_->locs()); | 2590 compiler->SaveLiveRegisters(instruction_->locs()); |
| 2594 // pending_deoptimization_env_ is needed to generate a runtime call that | 2591 // pending_deoptimization_env_ is needed to generate a runtime call that |
| 2595 // may throw an exception. | 2592 // may throw an exception. |
| 2596 ASSERT(compiler->pending_deoptimization_env_ == NULL); | 2593 ASSERT(compiler->pending_deoptimization_env_ == NULL); |
| 2597 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); | 2594 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); |
| 2598 compiler->pending_deoptimization_env_ = env; | 2595 compiler->pending_deoptimization_env_ = env; |
| (...skipping 27 matching lines...) Expand all Loading... |
| 2626 | 2623 |
| 2627 | 2624 |
| 2628 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2625 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2629 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); | 2626 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); |
| 2630 compiler->AddSlowPathCode(slow_path); | 2627 compiler->AddSlowPathCode(slow_path); |
| 2631 | 2628 |
| 2632 Register temp = locs()->temp(0).reg(); | 2629 Register temp = locs()->temp(0).reg(); |
| 2633 // Generate stack overflow check. | 2630 // Generate stack overflow check. |
| 2634 if (compiler->is_optimizing()) { | 2631 if (compiler->is_optimizing()) { |
| 2635 __ LoadImmediate( | 2632 __ LoadImmediate( |
| 2636 temp, Immediate(Isolate::Current()->stack_limit_address()), PP); | 2633 temp, Immediate(Isolate::Current()->stack_limit_address())); |
| 2637 __ cmpq(RSP, Address(temp, 0)); | 2634 __ cmpq(RSP, Address(temp, 0)); |
| 2638 } else { | 2635 } else { |
| 2639 __ LoadIsolate(temp); | 2636 __ LoadIsolate(temp); |
| 2640 __ cmpq(RSP, Address(temp, Isolate::stack_limit_offset())); | 2637 __ cmpq(RSP, Address(temp, Isolate::stack_limit_offset())); |
| 2641 } | 2638 } |
| 2642 __ j(BELOW_EQUAL, slow_path->entry_label()); | 2639 __ j(BELOW_EQUAL, slow_path->entry_label()); |
| 2643 if (compiler->CanOSRFunction() && in_loop()) { | 2640 if (compiler->CanOSRFunction() && in_loop()) { |
| 2644 // In unoptimized code check the usage counter to trigger OSR at loop | 2641 // In unoptimized code check the usage counter to trigger OSR at loop |
| 2645 // stack checks. Use progressively higher thresholds for more deeply | 2642 // stack checks. Use progressively higher thresholds for more deeply |
| 2646 // nested loops to attempt to hit outer loops with OSR when possible. | 2643 // nested loops to attempt to hit outer loops with OSR when possible. |
| 2647 __ LoadObject(temp, compiler->parsed_function().function(), PP); | 2644 __ LoadObject(temp, compiler->parsed_function().function()); |
| 2648 int32_t threshold = | 2645 int32_t threshold = |
| 2649 FLAG_optimization_counter_threshold * (loop_depth() + 1); | 2646 FLAG_optimization_counter_threshold * (loop_depth() + 1); |
| 2650 __ cmpl(FieldAddress(temp, Function::usage_counter_offset()), | 2647 __ cmpl(FieldAddress(temp, Function::usage_counter_offset()), |
| 2651 Immediate(threshold)); | 2648 Immediate(threshold)); |
| 2652 __ j(GREATER_EQUAL, slow_path->osr_entry_label()); | 2649 __ j(GREATER_EQUAL, slow_path->osr_entry_label()); |
| 2653 } | 2650 } |
| 2654 if (compiler->ForceSlowPathForStackOverflow()) { | 2651 if (compiler->ForceSlowPathForStackOverflow()) { |
| 2655 __ jmp(slow_path->entry_label()); | 2652 __ jmp(slow_path->entry_label()); |
| 2656 } | 2653 } |
| 2657 __ Bind(slow_path->exit_label()); | 2654 __ Bind(slow_path->exit_label()); |
| 2658 } | 2655 } |
| 2659 | 2656 |
| 2660 | 2657 |
| 2661 static void EmitJavascriptOverflowCheck(FlowGraphCompiler* compiler, | 2658 static void EmitJavascriptOverflowCheck(FlowGraphCompiler* compiler, |
| 2662 Range* range, | 2659 Range* range, |
| 2663 Label* overflow, | 2660 Label* overflow, |
| 2664 Register result) { | 2661 Register result) { |
| 2665 if (!RangeUtils::IsWithin(range, -0x20000000000000LL, 0x20000000000000LL)) { | 2662 if (!RangeUtils::IsWithin(range, -0x20000000000000LL, 0x20000000000000LL)) { |
| 2666 ASSERT(overflow != NULL); | 2663 ASSERT(overflow != NULL); |
| 2667 // TODO(zra): This can be tightened to one compare/branch using: | 2664 // TODO(zra): This can be tightened to one compare/branch using: |
| 2668 // overflow = (result + 2^52) > 2^53 with an unsigned comparison. | 2665 // overflow = (result + 2^52) > 2^53 with an unsigned comparison. |
| 2669 __ CompareImmediate(result, Immediate(-0x20000000000000LL), PP); | 2666 __ CompareImmediate(result, Immediate(-0x20000000000000LL)); |
| 2670 __ j(LESS, overflow); | 2667 __ j(LESS, overflow); |
| 2671 __ CompareImmediate(result, Immediate(0x20000000000000LL), PP); | 2668 __ CompareImmediate(result, Immediate(0x20000000000000LL)); |
| 2672 __ j(GREATER, overflow); | 2669 __ j(GREATER, overflow); |
| 2673 } | 2670 } |
| 2674 } | 2671 } |
| 2675 | 2672 |
| 2676 | 2673 |
| 2677 static void EmitSmiShiftLeft(FlowGraphCompiler* compiler, | 2674 static void EmitSmiShiftLeft(FlowGraphCompiler* compiler, |
| 2678 BinarySmiOpInstr* shift_left) { | 2675 BinarySmiOpInstr* shift_left) { |
| 2679 const LocationSummary& locs = *shift_left->locs(); | 2676 const LocationSummary& locs = *shift_left->locs(); |
| 2680 Register left = locs.in(0).reg(); | 2677 Register left = locs.in(0).reg(); |
| 2681 Register result = locs.out(0).reg(); | 2678 Register result = locs.out(0).reg(); |
| (...skipping 28 matching lines...) Expand all Loading... |
| 2710 // Right (locs.in(1)) is not constant. | 2707 // Right (locs.in(1)) is not constant. |
| 2711 Register right = locs.in(1).reg(); | 2708 Register right = locs.in(1).reg(); |
| 2712 Range* right_range = shift_left->right()->definition()->range(); | 2709 Range* right_range = shift_left->right()->definition()->range(); |
| 2713 if (shift_left->left()->BindsToConstant() && shift_left->can_overflow()) { | 2710 if (shift_left->left()->BindsToConstant() && shift_left->can_overflow()) { |
| 2714 // TODO(srdjan): Implement code below for is_truncating(). | 2711 // TODO(srdjan): Implement code below for is_truncating(). |
| 2715 // If left is constant, we know the maximal allowed size for right. | 2712 // If left is constant, we know the maximal allowed size for right. |
| 2716 const Object& obj = shift_left->left()->BoundConstant(); | 2713 const Object& obj = shift_left->left()->BoundConstant(); |
| 2717 if (obj.IsSmi()) { | 2714 if (obj.IsSmi()) { |
| 2718 const intptr_t left_int = Smi::Cast(obj).Value(); | 2715 const intptr_t left_int = Smi::Cast(obj).Value(); |
| 2719 if (left_int == 0) { | 2716 if (left_int == 0) { |
| 2720 __ CompareImmediate(right, Immediate(0), PP); | 2717 __ CompareImmediate(right, Immediate(0)); |
| 2721 __ j(NEGATIVE, deopt); | 2718 __ j(NEGATIVE, deopt); |
| 2722 return; | 2719 return; |
| 2723 } | 2720 } |
| 2724 const intptr_t max_right = kSmiBits - Utils::HighestBit(left_int); | 2721 const intptr_t max_right = kSmiBits - Utils::HighestBit(left_int); |
| 2725 const bool right_needs_check = | 2722 const bool right_needs_check = |
| 2726 !RangeUtils::IsWithin(right_range, 0, max_right - 1); | 2723 !RangeUtils::IsWithin(right_range, 0, max_right - 1); |
| 2727 if (right_needs_check) { | 2724 if (right_needs_check) { |
| 2728 __ CompareImmediate(right, | 2725 __ CompareImmediate(right, |
| 2729 Immediate(reinterpret_cast<int64_t>(Smi::New(max_right))), PP); | 2726 Immediate(reinterpret_cast<int64_t>(Smi::New(max_right)))); |
| 2730 __ j(ABOVE_EQUAL, deopt); | 2727 __ j(ABOVE_EQUAL, deopt); |
| 2731 } | 2728 } |
| 2732 __ SmiUntag(right); | 2729 __ SmiUntag(right); |
| 2733 __ shlq(left, right); | 2730 __ shlq(left, right); |
| 2734 } | 2731 } |
| 2735 if (FLAG_throw_on_javascript_int_overflow) { | 2732 if (FLAG_throw_on_javascript_int_overflow) { |
| 2736 EmitJavascriptOverflowCheck(compiler, shift_left->range(), deopt, result); | 2733 EmitJavascriptOverflowCheck(compiler, shift_left->range(), deopt, result); |
| 2737 } | 2734 } |
| 2738 return; | 2735 return; |
| 2739 } | 2736 } |
| 2740 | 2737 |
| 2741 const bool right_needs_check = | 2738 const bool right_needs_check = |
| 2742 !RangeUtils::IsWithin(right_range, 0, (Smi::kBits - 1)); | 2739 !RangeUtils::IsWithin(right_range, 0, (Smi::kBits - 1)); |
| 2743 ASSERT(right == RCX); // Count must be in RCX | 2740 ASSERT(right == RCX); // Count must be in RCX |
| 2744 if (!shift_left->can_overflow()) { | 2741 if (!shift_left->can_overflow()) { |
| 2745 if (right_needs_check) { | 2742 if (right_needs_check) { |
| 2746 const bool right_may_be_negative = | 2743 const bool right_may_be_negative = |
| 2747 (right_range == NULL) || !right_range->IsPositive(); | 2744 (right_range == NULL) || !right_range->IsPositive(); |
| 2748 if (right_may_be_negative) { | 2745 if (right_may_be_negative) { |
| 2749 ASSERT(shift_left->CanDeoptimize()); | 2746 ASSERT(shift_left->CanDeoptimize()); |
| 2750 __ CompareImmediate(right, Immediate(0), PP); | 2747 __ CompareImmediate(right, Immediate(0)); |
| 2751 __ j(NEGATIVE, deopt); | 2748 __ j(NEGATIVE, deopt); |
| 2752 } | 2749 } |
| 2753 Label done, is_not_zero; | 2750 Label done, is_not_zero; |
| 2754 __ CompareImmediate(right, | 2751 __ CompareImmediate(right, |
| 2755 Immediate(reinterpret_cast<int64_t>(Smi::New(Smi::kBits))), PP); | 2752 Immediate(reinterpret_cast<int64_t>(Smi::New(Smi::kBits)))); |
| 2756 __ j(BELOW, &is_not_zero, Assembler::kNearJump); | 2753 __ j(BELOW, &is_not_zero, Assembler::kNearJump); |
| 2757 __ xorq(left, left); | 2754 __ xorq(left, left); |
| 2758 __ jmp(&done, Assembler::kNearJump); | 2755 __ jmp(&done, Assembler::kNearJump); |
| 2759 __ Bind(&is_not_zero); | 2756 __ Bind(&is_not_zero); |
| 2760 __ SmiUntag(right); | 2757 __ SmiUntag(right); |
| 2761 __ shlq(left, right); | 2758 __ shlq(left, right); |
| 2762 __ Bind(&done); | 2759 __ Bind(&done); |
| 2763 } else { | 2760 } else { |
| 2764 __ SmiUntag(right); | 2761 __ SmiUntag(right); |
| 2765 __ shlq(left, right); | 2762 __ shlq(left, right); |
| 2766 } | 2763 } |
| 2767 } else { | 2764 } else { |
| 2768 if (right_needs_check) { | 2765 if (right_needs_check) { |
| 2769 ASSERT(shift_left->CanDeoptimize()); | 2766 ASSERT(shift_left->CanDeoptimize()); |
| 2770 __ CompareImmediate(right, | 2767 __ CompareImmediate(right, |
| 2771 Immediate(reinterpret_cast<int64_t>(Smi::New(Smi::kBits))), PP); | 2768 Immediate(reinterpret_cast<int64_t>(Smi::New(Smi::kBits)))); |
| 2772 __ j(ABOVE_EQUAL, deopt); | 2769 __ j(ABOVE_EQUAL, deopt); |
| 2773 } | 2770 } |
| 2774 // Left is not a constant. | 2771 // Left is not a constant. |
| 2775 Register temp = locs.temp(0).reg(); | 2772 Register temp = locs.temp(0).reg(); |
| 2776 // Check if count too large for handling it inlined. | 2773 // Check if count too large for handling it inlined. |
| 2777 __ movq(temp, left); | 2774 __ movq(temp, left); |
| 2778 __ SmiUntag(right); | 2775 __ SmiUntag(right); |
| 2779 // Overflow test (preserve temp and right); | 2776 // Overflow test (preserve temp and right); |
| 2780 __ shlq(left, right); | 2777 __ shlq(left, right); |
| 2781 __ sarq(left, right); | 2778 __ sarq(left, right); |
| (...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2894 if (CanDeoptimize()) { | 2891 if (CanDeoptimize()) { |
| 2895 deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp); | 2892 deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp); |
| 2896 } | 2893 } |
| 2897 | 2894 |
| 2898 if (locs()->in(1).IsConstant()) { | 2895 if (locs()->in(1).IsConstant()) { |
| 2899 const Object& constant = locs()->in(1).constant(); | 2896 const Object& constant = locs()->in(1).constant(); |
| 2900 ASSERT(constant.IsSmi()); | 2897 ASSERT(constant.IsSmi()); |
| 2901 const int64_t imm = reinterpret_cast<int64_t>(constant.raw()); | 2898 const int64_t imm = reinterpret_cast<int64_t>(constant.raw()); |
| 2902 switch (op_kind()) { | 2899 switch (op_kind()) { |
| 2903 case Token::kADD: { | 2900 case Token::kADD: { |
| 2904 __ AddImmediate(left, Immediate(imm), PP); | 2901 __ AddImmediate(left, Immediate(imm)); |
| 2905 if (deopt != NULL) __ j(OVERFLOW, deopt); | 2902 if (deopt != NULL) __ j(OVERFLOW, deopt); |
| 2906 break; | 2903 break; |
| 2907 } | 2904 } |
| 2908 case Token::kSUB: { | 2905 case Token::kSUB: { |
| 2909 __ SubImmediate(left, Immediate(imm), PP); | 2906 __ SubImmediate(left, Immediate(imm)); |
| 2910 if (deopt != NULL) __ j(OVERFLOW, deopt); | 2907 if (deopt != NULL) __ j(OVERFLOW, deopt); |
| 2911 break; | 2908 break; |
| 2912 } | 2909 } |
| 2913 case Token::kMUL: { | 2910 case Token::kMUL: { |
| 2914 // Keep left value tagged and untag right value. | 2911 // Keep left value tagged and untag right value. |
| 2915 const intptr_t value = Smi::Cast(constant).Value(); | 2912 const intptr_t value = Smi::Cast(constant).Value(); |
| 2916 __ MulImmediate(left, Immediate(value), PP); | 2913 __ MulImmediate(left, Immediate(value)); |
| 2917 if (deopt != NULL) __ j(OVERFLOW, deopt); | 2914 if (deopt != NULL) __ j(OVERFLOW, deopt); |
| 2918 break; | 2915 break; |
| 2919 } | 2916 } |
| 2920 case Token::kTRUNCDIV: { | 2917 case Token::kTRUNCDIV: { |
| 2921 const intptr_t value = Smi::Cast(constant).Value(); | 2918 const intptr_t value = Smi::Cast(constant).Value(); |
| 2922 ASSERT(Utils::IsPowerOfTwo(Utils::Abs(value))); | 2919 ASSERT(Utils::IsPowerOfTwo(Utils::Abs(value))); |
| 2923 const intptr_t shift_count = | 2920 const intptr_t shift_count = |
| 2924 Utils::ShiftForPowerOfTwo(Utils::Abs(value)) + kSmiTagSize; | 2921 Utils::ShiftForPowerOfTwo(Utils::Abs(value)) + kSmiTagSize; |
| 2925 ASSERT(kSmiTagSize == 1); | 2922 ASSERT(kSmiTagSize == 1); |
| 2926 Register temp = locs()->temp(0).reg(); | 2923 Register temp = locs()->temp(0).reg(); |
| 2927 __ movq(temp, left); | 2924 __ movq(temp, left); |
| 2928 __ sarq(temp, Immediate(63)); | 2925 __ sarq(temp, Immediate(63)); |
| 2929 ASSERT(shift_count > 1); // 1, -1 case handled above. | 2926 ASSERT(shift_count > 1); // 1, -1 case handled above. |
| 2930 __ shrq(temp, Immediate(64 - shift_count)); | 2927 __ shrq(temp, Immediate(64 - shift_count)); |
| 2931 __ addq(left, temp); | 2928 __ addq(left, temp); |
| 2932 ASSERT(shift_count > 0); | 2929 ASSERT(shift_count > 0); |
| 2933 __ sarq(left, Immediate(shift_count)); | 2930 __ sarq(left, Immediate(shift_count)); |
| 2934 if (value < 0) { | 2931 if (value < 0) { |
| 2935 __ negq(left); | 2932 __ negq(left); |
| 2936 } | 2933 } |
| 2937 __ SmiTag(left); | 2934 __ SmiTag(left); |
| 2938 break; | 2935 break; |
| 2939 } | 2936 } |
| 2940 case Token::kBIT_AND: { | 2937 case Token::kBIT_AND: { |
| 2941 // No overflow check. | 2938 // No overflow check. |
| 2942 __ AndImmediate(left, Immediate(imm), PP); | 2939 __ AndImmediate(left, Immediate(imm)); |
| 2943 break; | 2940 break; |
| 2944 } | 2941 } |
| 2945 case Token::kBIT_OR: { | 2942 case Token::kBIT_OR: { |
| 2946 // No overflow check. | 2943 // No overflow check. |
| 2947 __ OrImmediate(left, Immediate(imm), PP); | 2944 __ OrImmediate(left, Immediate(imm)); |
| 2948 break; | 2945 break; |
| 2949 } | 2946 } |
| 2950 case Token::kBIT_XOR: { | 2947 case Token::kBIT_XOR: { |
| 2951 // No overflow check. | 2948 // No overflow check. |
| 2952 __ XorImmediate(left, Immediate(imm), PP); | 2949 __ XorImmediate(left, Immediate(imm)); |
| 2953 break; | 2950 break; |
| 2954 } | 2951 } |
| 2955 | 2952 |
| 2956 case Token::kSHR: { | 2953 case Token::kSHR: { |
| 2957 // sarq operation masks the count to 6 bits. | 2954 // sarq operation masks the count to 6 bits. |
| 2958 const intptr_t kCountLimit = 0x3F; | 2955 const intptr_t kCountLimit = 0x3F; |
| 2959 const intptr_t value = Smi::Cast(constant).Value(); | 2956 const intptr_t value = Smi::Cast(constant).Value(); |
| 2960 __ sarq(left, Immediate( | 2957 __ sarq(left, Immediate( |
| 2961 Utils::Minimum(value + kSmiTagSize, kCountLimit))); | 2958 Utils::Minimum(value + kSmiTagSize, kCountLimit))); |
| 2962 __ SmiTag(left); | 2959 __ SmiTag(left); |
| (...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3087 __ jmp(&done); | 3084 __ jmp(&done); |
| 3088 | 3085 |
| 3089 // Divide using 64bit idiv. | 3086 // Divide using 64bit idiv. |
| 3090 __ Bind(&not_32bit); | 3087 __ Bind(&not_32bit); |
| 3091 __ SmiUntag(left); | 3088 __ SmiUntag(left); |
| 3092 __ SmiUntag(right); | 3089 __ SmiUntag(right); |
| 3093 __ cqo(); // Sign extend RAX -> RDX:RAX. | 3090 __ cqo(); // Sign extend RAX -> RDX:RAX. |
| 3094 __ idivq(right); // RAX: quotient, RDX: remainder. | 3091 __ idivq(right); // RAX: quotient, RDX: remainder. |
| 3095 // Check the corner case of dividing the 'MIN_SMI' with -1, in which | 3092 // Check the corner case of dividing the 'MIN_SMI' with -1, in which |
| 3096 // case we cannot tag the result. | 3093 // case we cannot tag the result. |
| 3097 __ CompareImmediate(result, Immediate(0x4000000000000000), PP); | 3094 __ CompareImmediate(result, Immediate(0x4000000000000000)); |
| 3098 __ j(EQUAL, deopt); | 3095 __ j(EQUAL, deopt); |
| 3099 __ Bind(&done); | 3096 __ Bind(&done); |
| 3100 __ SmiTag(result); | 3097 __ SmiTag(result); |
| 3101 break; | 3098 break; |
| 3102 } | 3099 } |
| 3103 case Token::kMOD: { | 3100 case Token::kMOD: { |
| 3104 Label not_32bit, div_done; | 3101 Label not_32bit, div_done; |
| 3105 | 3102 |
| 3106 Register temp = locs()->temp(0).reg(); | 3103 Register temp = locs()->temp(0).reg(); |
| 3107 ASSERT(left == RDX); | 3104 ASSERT(left == RDX); |
| (...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3167 } else { | 3164 } else { |
| 3168 // Right is negative. | 3165 // Right is negative. |
| 3169 __ subq(result, right); | 3166 __ subq(result, right); |
| 3170 } | 3167 } |
| 3171 __ Bind(&all_done); | 3168 __ Bind(&all_done); |
| 3172 __ SmiTag(result); | 3169 __ SmiTag(result); |
| 3173 break; | 3170 break; |
| 3174 } | 3171 } |
| 3175 case Token::kSHR: { | 3172 case Token::kSHR: { |
| 3176 if (CanDeoptimize()) { | 3173 if (CanDeoptimize()) { |
| 3177 __ CompareImmediate(right, Immediate(0), PP); | 3174 __ CompareImmediate(right, Immediate(0)); |
| 3178 __ j(LESS, deopt); | 3175 __ j(LESS, deopt); |
| 3179 } | 3176 } |
| 3180 __ SmiUntag(right); | 3177 __ SmiUntag(right); |
| 3181 // sarq operation masks the count to 6 bits. | 3178 // sarq operation masks the count to 6 bits. |
| 3182 const intptr_t kCountLimit = 0x3F; | 3179 const intptr_t kCountLimit = 0x3F; |
| 3183 if ((right_range == NULL) || | 3180 if ((right_range == NULL) || |
| 3184 !right_range->OnlyLessThanOrEqualTo(kCountLimit)) { | 3181 !right_range->OnlyLessThanOrEqualTo(kCountLimit)) { |
| 3185 __ CompareImmediate(right, Immediate(kCountLimit), PP); | 3182 __ CompareImmediate(right, Immediate(kCountLimit)); |
| 3186 Label count_ok; | 3183 Label count_ok; |
| 3187 __ j(LESS, &count_ok, Assembler::kNearJump); | 3184 __ j(LESS, &count_ok, Assembler::kNearJump); |
| 3188 __ LoadImmediate(right, Immediate(kCountLimit), PP); | 3185 __ LoadImmediate(right, Immediate(kCountLimit)); |
| 3189 __ Bind(&count_ok); | 3186 __ Bind(&count_ok); |
| 3190 } | 3187 } |
| 3191 ASSERT(right == RCX); // Count must be in RCX | 3188 ASSERT(right == RCX); // Count must be in RCX |
| 3192 __ SmiUntag(left); | 3189 __ SmiUntag(left); |
| 3193 __ sarq(left, right); | 3190 __ sarq(left, right); |
| 3194 __ SmiTag(left); | 3191 __ SmiTag(left); |
| 3195 break; | 3192 break; |
| 3196 } | 3193 } |
| 3197 case Token::kDIV: { | 3194 case Token::kDIV: { |
| 3198 // Dispatches to 'Double./'. | 3195 // Dispatches to 'Double./'. |
| (...skipping 529 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3728 return summary; | 3725 return summary; |
| 3729 } | 3726 } |
| 3730 | 3727 |
| 3731 | 3728 |
| 3732 void Float32x4ConstructorInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3729 void Float32x4ConstructorInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 3733 XmmRegister v0 = locs()->in(0).fpu_reg(); | 3730 XmmRegister v0 = locs()->in(0).fpu_reg(); |
| 3734 XmmRegister v1 = locs()->in(1).fpu_reg(); | 3731 XmmRegister v1 = locs()->in(1).fpu_reg(); |
| 3735 XmmRegister v2 = locs()->in(2).fpu_reg(); | 3732 XmmRegister v2 = locs()->in(2).fpu_reg(); |
| 3736 XmmRegister v3 = locs()->in(3).fpu_reg(); | 3733 XmmRegister v3 = locs()->in(3).fpu_reg(); |
| 3737 ASSERT(v0 == locs()->out(0).fpu_reg()); | 3734 ASSERT(v0 == locs()->out(0).fpu_reg()); |
| 3738 __ AddImmediate(RSP, Immediate(-16), PP); | 3735 __ AddImmediate(RSP, Immediate(-16)); |
| 3739 __ cvtsd2ss(v0, v0); | 3736 __ cvtsd2ss(v0, v0); |
| 3740 __ movss(Address(RSP, 0), v0); | 3737 __ movss(Address(RSP, 0), v0); |
| 3741 __ movsd(v0, v1); | 3738 __ movsd(v0, v1); |
| 3742 __ cvtsd2ss(v0, v0); | 3739 __ cvtsd2ss(v0, v0); |
| 3743 __ movss(Address(RSP, 4), v0); | 3740 __ movss(Address(RSP, 4), v0); |
| 3744 __ movsd(v0, v2); | 3741 __ movsd(v0, v2); |
| 3745 __ cvtsd2ss(v0, v0); | 3742 __ cvtsd2ss(v0, v0); |
| 3746 __ movss(Address(RSP, 8), v0); | 3743 __ movss(Address(RSP, 8), v0); |
| 3747 __ movsd(v0, v3); | 3744 __ movsd(v0, v3); |
| 3748 __ cvtsd2ss(v0, v0); | 3745 __ cvtsd2ss(v0, v0); |
| 3749 __ movss(Address(RSP, 12), v0); | 3746 __ movss(Address(RSP, 12), v0); |
| 3750 __ movups(v0, Address(RSP, 0)); | 3747 __ movups(v0, Address(RSP, 0)); |
| 3751 __ AddImmediate(RSP, Immediate(16), PP); | 3748 __ AddImmediate(RSP, Immediate(16)); |
| 3752 } | 3749 } |
| 3753 | 3750 |
| 3754 | 3751 |
| 3755 LocationSummary* Float32x4ZeroInstr::MakeLocationSummary(Zone* zone, | 3752 LocationSummary* Float32x4ZeroInstr::MakeLocationSummary(Zone* zone, |
| 3756 bool opt) const { | 3753 bool opt) const { |
| 3757 const intptr_t kNumInputs = 0; | 3754 const intptr_t kNumInputs = 0; |
| 3758 const intptr_t kNumTemps = 0; | 3755 const intptr_t kNumTemps = 0; |
| 3759 LocationSummary* summary = new(zone) LocationSummary( | 3756 LocationSummary* summary = new(zone) LocationSummary( |
| 3760 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 3757 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 3761 summary->set_out(0, Location::RequiresFpuRegister()); | 3758 summary->set_out(0, Location::RequiresFpuRegister()); |
| (...skipping 233 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3995 | 3992 |
| 3996 void Float32x4WithInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3993 void Float32x4WithInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 3997 XmmRegister replacement = locs()->in(0).fpu_reg(); | 3994 XmmRegister replacement = locs()->in(0).fpu_reg(); |
| 3998 XmmRegister value = locs()->in(1).fpu_reg(); | 3995 XmmRegister value = locs()->in(1).fpu_reg(); |
| 3999 | 3996 |
| 4000 ASSERT(locs()->out(0).fpu_reg() == replacement); | 3997 ASSERT(locs()->out(0).fpu_reg() == replacement); |
| 4001 | 3998 |
| 4002 switch (op_kind()) { | 3999 switch (op_kind()) { |
| 4003 case MethodRecognizer::kFloat32x4WithX: | 4000 case MethodRecognizer::kFloat32x4WithX: |
| 4004 __ cvtsd2ss(replacement, replacement); | 4001 __ cvtsd2ss(replacement, replacement); |
| 4005 __ AddImmediate(RSP, Immediate(-16), PP); | 4002 __ AddImmediate(RSP, Immediate(-16)); |
| 4006 // Move value to stack. | 4003 // Move value to stack. |
| 4007 __ movups(Address(RSP, 0), value); | 4004 __ movups(Address(RSP, 0), value); |
| 4008 // Write over X value. | 4005 // Write over X value. |
| 4009 __ movss(Address(RSP, 0), replacement); | 4006 __ movss(Address(RSP, 0), replacement); |
| 4010 // Move updated value into output register. | 4007 // Move updated value into output register. |
| 4011 __ movups(replacement, Address(RSP, 0)); | 4008 __ movups(replacement, Address(RSP, 0)); |
| 4012 __ AddImmediate(RSP, Immediate(16), PP); | 4009 __ AddImmediate(RSP, Immediate(16)); |
| 4013 break; | 4010 break; |
| 4014 case MethodRecognizer::kFloat32x4WithY: | 4011 case MethodRecognizer::kFloat32x4WithY: |
| 4015 __ cvtsd2ss(replacement, replacement); | 4012 __ cvtsd2ss(replacement, replacement); |
| 4016 __ AddImmediate(RSP, Immediate(-16), PP); | 4013 __ AddImmediate(RSP, Immediate(-16)); |
| 4017 // Move value to stack. | 4014 // Move value to stack. |
| 4018 __ movups(Address(RSP, 0), value); | 4015 __ movups(Address(RSP, 0), value); |
| 4019 // Write over Y value. | 4016 // Write over Y value. |
| 4020 __ movss(Address(RSP, 4), replacement); | 4017 __ movss(Address(RSP, 4), replacement); |
| 4021 // Move updated value into output register. | 4018 // Move updated value into output register. |
| 4022 __ movups(replacement, Address(RSP, 0)); | 4019 __ movups(replacement, Address(RSP, 0)); |
| 4023 __ AddImmediate(RSP, Immediate(16), PP); | 4020 __ AddImmediate(RSP, Immediate(16)); |
| 4024 break; | 4021 break; |
| 4025 case MethodRecognizer::kFloat32x4WithZ: | 4022 case MethodRecognizer::kFloat32x4WithZ: |
| 4026 __ cvtsd2ss(replacement, replacement); | 4023 __ cvtsd2ss(replacement, replacement); |
| 4027 __ AddImmediate(RSP, Immediate(-16), PP); | 4024 __ AddImmediate(RSP, Immediate(-16)); |
| 4028 // Move value to stack. | 4025 // Move value to stack. |
| 4029 __ movups(Address(RSP, 0), value); | 4026 __ movups(Address(RSP, 0), value); |
| 4030 // Write over Z value. | 4027 // Write over Z value. |
| 4031 __ movss(Address(RSP, 8), replacement); | 4028 __ movss(Address(RSP, 8), replacement); |
| 4032 // Move updated value into output register. | 4029 // Move updated value into output register. |
| 4033 __ movups(replacement, Address(RSP, 0)); | 4030 __ movups(replacement, Address(RSP, 0)); |
| 4034 __ AddImmediate(RSP, Immediate(16), PP); | 4031 __ AddImmediate(RSP, Immediate(16)); |
| 4035 break; | 4032 break; |
| 4036 case MethodRecognizer::kFloat32x4WithW: | 4033 case MethodRecognizer::kFloat32x4WithW: |
| 4037 __ cvtsd2ss(replacement, replacement); | 4034 __ cvtsd2ss(replacement, replacement); |
| 4038 __ AddImmediate(RSP, Immediate(-16), PP); | 4035 __ AddImmediate(RSP, Immediate(-16)); |
| 4039 // Move value to stack. | 4036 // Move value to stack. |
| 4040 __ movups(Address(RSP, 0), value); | 4037 __ movups(Address(RSP, 0), value); |
| 4041 // Write over W value. | 4038 // Write over W value. |
| 4042 __ movss(Address(RSP, 12), replacement); | 4039 __ movss(Address(RSP, 12), replacement); |
| 4043 // Move updated value into output register. | 4040 // Move updated value into output register. |
| 4044 __ movups(replacement, Address(RSP, 0)); | 4041 __ movups(replacement, Address(RSP, 0)); |
| 4045 __ AddImmediate(RSP, Immediate(16), PP); | 4042 __ AddImmediate(RSP, Immediate(16)); |
| 4046 break; | 4043 break; |
| 4047 default: UNREACHABLE(); | 4044 default: UNREACHABLE(); |
| 4048 } | 4045 } |
| 4049 } | 4046 } |
| 4050 | 4047 |
| 4051 | 4048 |
| 4052 LocationSummary* Float32x4ToInt32x4Instr::MakeLocationSummary(Zone* zone, | 4049 LocationSummary* Float32x4ToInt32x4Instr::MakeLocationSummary(Zone* zone, |
| 4053 bool opt) const { | 4050 bool opt) const { |
| 4054 const intptr_t kNumInputs = 1; | 4051 const intptr_t kNumInputs = 1; |
| 4055 const intptr_t kNumTemps = 0; | 4052 const intptr_t kNumTemps = 0; |
| (...skipping 244 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4300 return summary; | 4297 return summary; |
| 4301 } | 4298 } |
| 4302 | 4299 |
| 4303 | 4300 |
| 4304 void Int32x4ConstructorInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4301 void Int32x4ConstructorInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 4305 Register v0 = locs()->in(0).reg(); | 4302 Register v0 = locs()->in(0).reg(); |
| 4306 Register v1 = locs()->in(1).reg(); | 4303 Register v1 = locs()->in(1).reg(); |
| 4307 Register v2 = locs()->in(2).reg(); | 4304 Register v2 = locs()->in(2).reg(); |
| 4308 Register v3 = locs()->in(3).reg(); | 4305 Register v3 = locs()->in(3).reg(); |
| 4309 XmmRegister result = locs()->out(0).fpu_reg(); | 4306 XmmRegister result = locs()->out(0).fpu_reg(); |
| 4310 __ AddImmediate(RSP, Immediate(-4 * kInt32Size), PP); | 4307 __ AddImmediate(RSP, Immediate(-4 * kInt32Size)); |
| 4311 __ movl(Address(RSP, 0 * kInt32Size), v0); | 4308 __ movl(Address(RSP, 0 * kInt32Size), v0); |
| 4312 __ movl(Address(RSP, 1 * kInt32Size), v1); | 4309 __ movl(Address(RSP, 1 * kInt32Size), v1); |
| 4313 __ movl(Address(RSP, 2 * kInt32Size), v2); | 4310 __ movl(Address(RSP, 2 * kInt32Size), v2); |
| 4314 __ movl(Address(RSP, 3 * kInt32Size), v3); | 4311 __ movl(Address(RSP, 3 * kInt32Size), v3); |
| 4315 __ movups(result, Address(RSP, 0)); | 4312 __ movups(result, Address(RSP, 0)); |
| 4316 __ AddImmediate(RSP, Immediate(4 * kInt32Size), PP); | 4313 __ AddImmediate(RSP, Immediate(4 * kInt32Size)); |
| 4317 } | 4314 } |
| 4318 | 4315 |
| 4319 | 4316 |
| 4320 LocationSummary* Int32x4BoolConstructorInstr::MakeLocationSummary( | 4317 LocationSummary* Int32x4BoolConstructorInstr::MakeLocationSummary( |
| 4321 Zone* zone, bool opt) const { | 4318 Zone* zone, bool opt) const { |
| 4322 const intptr_t kNumInputs = 4; | 4319 const intptr_t kNumInputs = 4; |
| 4323 const intptr_t kNumTemps = 1; | 4320 const intptr_t kNumTemps = 1; |
| 4324 LocationSummary* summary = new(zone) LocationSummary( | 4321 LocationSummary* summary = new(zone) LocationSummary( |
| 4325 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 4322 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 4326 summary->set_in(0, Location::RequiresRegister()); | 4323 summary->set_in(0, Location::RequiresRegister()); |
| (...skipping 10 matching lines...) Expand all Loading... |
| 4337 Register v0 = locs()->in(0).reg(); | 4334 Register v0 = locs()->in(0).reg(); |
| 4338 Register v1 = locs()->in(1).reg(); | 4335 Register v1 = locs()->in(1).reg(); |
| 4339 Register v2 = locs()->in(2).reg(); | 4336 Register v2 = locs()->in(2).reg(); |
| 4340 Register v3 = locs()->in(3).reg(); | 4337 Register v3 = locs()->in(3).reg(); |
| 4341 Register temp = locs()->temp(0).reg(); | 4338 Register temp = locs()->temp(0).reg(); |
| 4342 XmmRegister result = locs()->out(0).fpu_reg(); | 4339 XmmRegister result = locs()->out(0).fpu_reg(); |
| 4343 Label x_false, x_done; | 4340 Label x_false, x_done; |
| 4344 Label y_false, y_done; | 4341 Label y_false, y_done; |
| 4345 Label z_false, z_done; | 4342 Label z_false, z_done; |
| 4346 Label w_false, w_done; | 4343 Label w_false, w_done; |
| 4347 __ AddImmediate(RSP, Immediate(-16), PP); | 4344 __ AddImmediate(RSP, Immediate(-16)); |
| 4348 | 4345 |
| 4349 __ CompareObject(v0, Bool::True(), PP); | 4346 __ CompareObject(v0, Bool::True()); |
| 4350 __ j(NOT_EQUAL, &x_false); | 4347 __ j(NOT_EQUAL, &x_false); |
| 4351 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); | 4348 __ LoadImmediate(temp, Immediate(0xFFFFFFFF)); |
| 4352 __ jmp(&x_done); | 4349 __ jmp(&x_done); |
| 4353 __ Bind(&x_false); | 4350 __ Bind(&x_false); |
| 4354 __ LoadImmediate(temp, Immediate(0x0), PP); | 4351 __ LoadImmediate(temp, Immediate(0x0)); |
| 4355 __ Bind(&x_done); | 4352 __ Bind(&x_done); |
| 4356 __ movl(Address(RSP, 0), temp); | 4353 __ movl(Address(RSP, 0), temp); |
| 4357 | 4354 |
| 4358 __ CompareObject(v1, Bool::True(), PP); | 4355 __ CompareObject(v1, Bool::True()); |
| 4359 __ j(NOT_EQUAL, &y_false); | 4356 __ j(NOT_EQUAL, &y_false); |
| 4360 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); | 4357 __ LoadImmediate(temp, Immediate(0xFFFFFFFF)); |
| 4361 __ jmp(&y_done); | 4358 __ jmp(&y_done); |
| 4362 __ Bind(&y_false); | 4359 __ Bind(&y_false); |
| 4363 __ LoadImmediate(temp, Immediate(0x0), PP); | 4360 __ LoadImmediate(temp, Immediate(0x0)); |
| 4364 __ Bind(&y_done); | 4361 __ Bind(&y_done); |
| 4365 __ movl(Address(RSP, 4), temp); | 4362 __ movl(Address(RSP, 4), temp); |
| 4366 | 4363 |
| 4367 __ CompareObject(v2, Bool::True(), PP); | 4364 __ CompareObject(v2, Bool::True()); |
| 4368 __ j(NOT_EQUAL, &z_false); | 4365 __ j(NOT_EQUAL, &z_false); |
| 4369 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); | 4366 __ LoadImmediate(temp, Immediate(0xFFFFFFFF)); |
| 4370 __ jmp(&z_done); | 4367 __ jmp(&z_done); |
| 4371 __ Bind(&z_false); | 4368 __ Bind(&z_false); |
| 4372 __ LoadImmediate(temp, Immediate(0x0), PP); | 4369 __ LoadImmediate(temp, Immediate(0x0)); |
| 4373 __ Bind(&z_done); | 4370 __ Bind(&z_done); |
| 4374 __ movl(Address(RSP, 8), temp); | 4371 __ movl(Address(RSP, 8), temp); |
| 4375 | 4372 |
| 4376 __ CompareObject(v3, Bool::True(), PP); | 4373 __ CompareObject(v3, Bool::True()); |
| 4377 __ j(NOT_EQUAL, &w_false); | 4374 __ j(NOT_EQUAL, &w_false); |
| 4378 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); | 4375 __ LoadImmediate(temp, Immediate(0xFFFFFFFF)); |
| 4379 __ jmp(&w_done); | 4376 __ jmp(&w_done); |
| 4380 __ Bind(&w_false); | 4377 __ Bind(&w_false); |
| 4381 __ LoadImmediate(temp, Immediate(0x0), PP); | 4378 __ LoadImmediate(temp, Immediate(0x0)); |
| 4382 __ Bind(&w_done); | 4379 __ Bind(&w_done); |
| 4383 __ movl(Address(RSP, 12), temp); | 4380 __ movl(Address(RSP, 12), temp); |
| 4384 | 4381 |
| 4385 __ movups(result, Address(RSP, 0)); | 4382 __ movups(result, Address(RSP, 0)); |
| 4386 __ AddImmediate(RSP, Immediate(16), PP); | 4383 __ AddImmediate(RSP, Immediate(16)); |
| 4387 } | 4384 } |
| 4388 | 4385 |
| 4389 | 4386 |
| 4390 LocationSummary* Int32x4GetFlagInstr::MakeLocationSummary(Zone* zone, | 4387 LocationSummary* Int32x4GetFlagInstr::MakeLocationSummary(Zone* zone, |
| 4391 bool opt) const { | 4388 bool opt) const { |
| 4392 const intptr_t kNumInputs = 1; | 4389 const intptr_t kNumInputs = 1; |
| 4393 const intptr_t kNumTemps = 0; | 4390 const intptr_t kNumTemps = 0; |
| 4394 LocationSummary* summary = new(zone) LocationSummary( | 4391 LocationSummary* summary = new(zone) LocationSummary( |
| 4395 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 4392 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 4396 summary->set_in(0, Location::RequiresFpuRegister()); | 4393 summary->set_in(0, Location::RequiresFpuRegister()); |
| 4397 summary->set_out(0, Location::RequiresRegister()); | 4394 summary->set_out(0, Location::RequiresRegister()); |
| 4398 return summary; | 4395 return summary; |
| 4399 } | 4396 } |
| 4400 | 4397 |
| 4401 | 4398 |
| 4402 void Int32x4GetFlagInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4399 void Int32x4GetFlagInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 4403 XmmRegister value = locs()->in(0).fpu_reg(); | 4400 XmmRegister value = locs()->in(0).fpu_reg(); |
| 4404 Register result = locs()->out(0).reg(); | 4401 Register result = locs()->out(0).reg(); |
| 4405 Label done; | 4402 Label done; |
| 4406 Label non_zero; | 4403 Label non_zero; |
| 4407 __ AddImmediate(RSP, Immediate(-16), PP); | 4404 __ AddImmediate(RSP, Immediate(-16)); |
| 4408 // Move value to stack. | 4405 // Move value to stack. |
| 4409 __ movups(Address(RSP, 0), value); | 4406 __ movups(Address(RSP, 0), value); |
| 4410 switch (op_kind()) { | 4407 switch (op_kind()) { |
| 4411 case MethodRecognizer::kInt32x4GetFlagX: | 4408 case MethodRecognizer::kInt32x4GetFlagX: |
| 4412 __ movl(result, Address(RSP, 0)); | 4409 __ movl(result, Address(RSP, 0)); |
| 4413 break; | 4410 break; |
| 4414 case MethodRecognizer::kInt32x4GetFlagY: | 4411 case MethodRecognizer::kInt32x4GetFlagY: |
| 4415 __ movl(result, Address(RSP, 4)); | 4412 __ movl(result, Address(RSP, 4)); |
| 4416 break; | 4413 break; |
| 4417 case MethodRecognizer::kInt32x4GetFlagZ: | 4414 case MethodRecognizer::kInt32x4GetFlagZ: |
| 4418 __ movl(result, Address(RSP, 8)); | 4415 __ movl(result, Address(RSP, 8)); |
| 4419 break; | 4416 break; |
| 4420 case MethodRecognizer::kInt32x4GetFlagW: | 4417 case MethodRecognizer::kInt32x4GetFlagW: |
| 4421 __ movl(result, Address(RSP, 12)); | 4418 __ movl(result, Address(RSP, 12)); |
| 4422 break; | 4419 break; |
| 4423 default: UNREACHABLE(); | 4420 default: UNREACHABLE(); |
| 4424 } | 4421 } |
| 4425 __ AddImmediate(RSP, Immediate(16), PP); | 4422 __ AddImmediate(RSP, Immediate(16)); |
| 4426 __ testl(result, result); | 4423 __ testl(result, result); |
| 4427 __ j(NOT_ZERO, &non_zero, Assembler::kNearJump); | 4424 __ j(NOT_ZERO, &non_zero, Assembler::kNearJump); |
| 4428 __ LoadObject(result, Bool::False(), PP); | 4425 __ LoadObject(result, Bool::False()); |
| 4429 __ jmp(&done); | 4426 __ jmp(&done); |
| 4430 __ Bind(&non_zero); | 4427 __ Bind(&non_zero); |
| 4431 __ LoadObject(result, Bool::True(), PP); | 4428 __ LoadObject(result, Bool::True()); |
| 4432 __ Bind(&done); | 4429 __ Bind(&done); |
| 4433 } | 4430 } |
| 4434 | 4431 |
| 4435 | 4432 |
| 4436 LocationSummary* Int32x4SelectInstr::MakeLocationSummary(Zone* zone, | 4433 LocationSummary* Int32x4SelectInstr::MakeLocationSummary(Zone* zone, |
| 4437 bool opt) const { | 4434 bool opt) const { |
| 4438 const intptr_t kNumInputs = 3; | 4435 const intptr_t kNumInputs = 3; |
| 4439 const intptr_t kNumTemps = 1; | 4436 const intptr_t kNumTemps = 1; |
| 4440 LocationSummary* summary = new(zone) LocationSummary( | 4437 LocationSummary* summary = new(zone) LocationSummary( |
| 4441 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 4438 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4480 summary->set_out(0, Location::SameAsFirstInput()); | 4477 summary->set_out(0, Location::SameAsFirstInput()); |
| 4481 return summary; | 4478 return summary; |
| 4482 } | 4479 } |
| 4483 | 4480 |
| 4484 | 4481 |
| 4485 void Int32x4SetFlagInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4482 void Int32x4SetFlagInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 4486 XmmRegister mask = locs()->in(0).fpu_reg(); | 4483 XmmRegister mask = locs()->in(0).fpu_reg(); |
| 4487 Register flag = locs()->in(1).reg(); | 4484 Register flag = locs()->in(1).reg(); |
| 4488 Register temp = locs()->temp(0).reg(); | 4485 Register temp = locs()->temp(0).reg(); |
| 4489 ASSERT(mask == locs()->out(0).fpu_reg()); | 4486 ASSERT(mask == locs()->out(0).fpu_reg()); |
| 4490 __ AddImmediate(RSP, Immediate(-16), PP); | 4487 __ AddImmediate(RSP, Immediate(-16)); |
| 4491 // Copy mask to stack. | 4488 // Copy mask to stack. |
| 4492 __ movups(Address(RSP, 0), mask); | 4489 __ movups(Address(RSP, 0), mask); |
| 4493 Label falsePath, exitPath; | 4490 Label falsePath, exitPath; |
| 4494 __ CompareObject(flag, Bool::True(), PP); | 4491 __ CompareObject(flag, Bool::True()); |
| 4495 __ j(NOT_EQUAL, &falsePath); | 4492 __ j(NOT_EQUAL, &falsePath); |
| 4496 switch (op_kind()) { | 4493 switch (op_kind()) { |
| 4497 case MethodRecognizer::kInt32x4WithFlagX: | 4494 case MethodRecognizer::kInt32x4WithFlagX: |
| 4498 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); | 4495 __ LoadImmediate(temp, Immediate(0xFFFFFFFF)); |
| 4499 __ movl(Address(RSP, 0), temp); | 4496 __ movl(Address(RSP, 0), temp); |
| 4500 __ jmp(&exitPath); | 4497 __ jmp(&exitPath); |
| 4501 __ Bind(&falsePath); | 4498 __ Bind(&falsePath); |
| 4502 __ LoadImmediate(temp, Immediate(0x0), PP); | 4499 __ LoadImmediate(temp, Immediate(0x0)); |
| 4503 __ movl(Address(RSP, 0), temp); | 4500 __ movl(Address(RSP, 0), temp); |
| 4504 break; | 4501 break; |
| 4505 case MethodRecognizer::kInt32x4WithFlagY: | 4502 case MethodRecognizer::kInt32x4WithFlagY: |
| 4506 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); | 4503 __ LoadImmediate(temp, Immediate(0xFFFFFFFF)); |
| 4507 __ movl(Address(RSP, 4), temp); | 4504 __ movl(Address(RSP, 4), temp); |
| 4508 __ jmp(&exitPath); | 4505 __ jmp(&exitPath); |
| 4509 __ Bind(&falsePath); | 4506 __ Bind(&falsePath); |
| 4510 __ LoadImmediate(temp, Immediate(0x0), PP); | 4507 __ LoadImmediate(temp, Immediate(0x0)); |
| 4511 __ movl(Address(RSP, 4), temp); | 4508 __ movl(Address(RSP, 4), temp); |
| 4512 break; | 4509 break; |
| 4513 case MethodRecognizer::kInt32x4WithFlagZ: | 4510 case MethodRecognizer::kInt32x4WithFlagZ: |
| 4514 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); | 4511 __ LoadImmediate(temp, Immediate(0xFFFFFFFF)); |
| 4515 __ movl(Address(RSP, 8), temp); | 4512 __ movl(Address(RSP, 8), temp); |
| 4516 __ jmp(&exitPath); | 4513 __ jmp(&exitPath); |
| 4517 __ Bind(&falsePath); | 4514 __ Bind(&falsePath); |
| 4518 __ LoadImmediate(temp, Immediate(0x0), PP); | 4515 __ LoadImmediate(temp, Immediate(0x0)); |
| 4519 __ movl(Address(RSP, 8), temp); | 4516 __ movl(Address(RSP, 8), temp); |
| 4520 break; | 4517 break; |
| 4521 case MethodRecognizer::kInt32x4WithFlagW: | 4518 case MethodRecognizer::kInt32x4WithFlagW: |
| 4522 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); | 4519 __ LoadImmediate(temp, Immediate(0xFFFFFFFF)); |
| 4523 __ movl(Address(RSP, 12), temp); | 4520 __ movl(Address(RSP, 12), temp); |
| 4524 __ jmp(&exitPath); | 4521 __ jmp(&exitPath); |
| 4525 __ Bind(&falsePath); | 4522 __ Bind(&falsePath); |
| 4526 __ LoadImmediate(temp, Immediate(0x0), PP); | 4523 __ LoadImmediate(temp, Immediate(0x0)); |
| 4527 __ movl(Address(RSP, 12), temp); | 4524 __ movl(Address(RSP, 12), temp); |
| 4528 break; | 4525 break; |
| 4529 default: UNREACHABLE(); | 4526 default: UNREACHABLE(); |
| 4530 } | 4527 } |
| 4531 __ Bind(&exitPath); | 4528 __ Bind(&exitPath); |
| 4532 // Copy mask back to register. | 4529 // Copy mask back to register. |
| 4533 __ movups(mask, Address(RSP, 0)); | 4530 __ movups(mask, Address(RSP, 0)); |
| 4534 __ AddImmediate(RSP, Immediate(16), PP); | 4531 __ AddImmediate(RSP, Immediate(16)); |
| 4535 } | 4532 } |
| 4536 | 4533 |
| 4537 | 4534 |
| 4538 LocationSummary* Int32x4ToFloat32x4Instr::MakeLocationSummary(Zone* zone, | 4535 LocationSummary* Int32x4ToFloat32x4Instr::MakeLocationSummary(Zone* zone, |
| 4539 bool opt) const { | 4536 bool opt) const { |
| 4540 const intptr_t kNumInputs = 1; | 4537 const intptr_t kNumInputs = 1; |
| 4541 const intptr_t kNumTemps = 0; | 4538 const intptr_t kNumTemps = 0; |
| 4542 LocationSummary* summary = new(zone) LocationSummary( | 4539 LocationSummary* summary = new(zone) LocationSummary( |
| 4543 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 4540 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 4544 summary->set_in(0, Location::RequiresFpuRegister()); | 4541 summary->set_in(0, Location::RequiresFpuRegister()); |
| (...skipping 154 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4699 __ negq(value); | 4696 __ negq(value); |
| 4700 __ j(OVERFLOW, deopt); | 4697 __ j(OVERFLOW, deopt); |
| 4701 if (FLAG_throw_on_javascript_int_overflow) { | 4698 if (FLAG_throw_on_javascript_int_overflow) { |
| 4702 EmitJavascriptOverflowCheck(compiler, range(), deopt, value); | 4699 EmitJavascriptOverflowCheck(compiler, range(), deopt, value); |
| 4703 } | 4700 } |
| 4704 break; | 4701 break; |
| 4705 } | 4702 } |
| 4706 case Token::kBIT_NOT: | 4703 case Token::kBIT_NOT: |
| 4707 __ notq(value); | 4704 __ notq(value); |
| 4708 // Remove inverted smi-tag. | 4705 // Remove inverted smi-tag. |
| 4709 __ AndImmediate(value, Immediate(~kSmiTagMask), PP); | 4706 __ AndImmediate(value, Immediate(~kSmiTagMask)); |
| 4710 break; | 4707 break; |
| 4711 default: | 4708 default: |
| 4712 UNREACHABLE(); | 4709 UNREACHABLE(); |
| 4713 } | 4710 } |
| 4714 } | 4711 } |
| 4715 | 4712 |
| 4716 | 4713 |
| 4717 LocationSummary* UnaryDoubleOpInstr::MakeLocationSummary(Zone* zone, | 4714 LocationSummary* UnaryDoubleOpInstr::MakeLocationSummary(Zone* zone, |
| 4718 bool opt) const { | 4715 bool opt) const { |
| 4719 const intptr_t kNumInputs = 1; | 4716 const intptr_t kNumInputs = 1; |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4776 const Condition double_condition = | 4773 const Condition double_condition = |
| 4777 is_min ? TokenKindToDoubleCondition(Token::kLT) | 4774 is_min ? TokenKindToDoubleCondition(Token::kLT) |
| 4778 : TokenKindToDoubleCondition(Token::kGT); | 4775 : TokenKindToDoubleCondition(Token::kGT); |
| 4779 ASSERT(left == result); | 4776 ASSERT(left == result); |
| 4780 __ j(double_condition, &done, Assembler::kNearJump); | 4777 __ j(double_condition, &done, Assembler::kNearJump); |
| 4781 __ movsd(result, right); | 4778 __ movsd(result, right); |
| 4782 __ jmp(&done, Assembler::kNearJump); | 4779 __ jmp(&done, Assembler::kNearJump); |
| 4783 | 4780 |
| 4784 __ Bind(&returns_nan); | 4781 __ Bind(&returns_nan); |
| 4785 static double kNaN = NAN; | 4782 static double kNaN = NAN; |
| 4786 __ LoadImmediate(temp, Immediate(reinterpret_cast<intptr_t>(&kNaN)), PP); | 4783 __ LoadImmediate(temp, Immediate(reinterpret_cast<intptr_t>(&kNaN))); |
| 4787 __ movsd(result, Address(temp, 0)); | 4784 __ movsd(result, Address(temp, 0)); |
| 4788 __ jmp(&done, Assembler::kNearJump); | 4785 __ jmp(&done, Assembler::kNearJump); |
| 4789 | 4786 |
| 4790 __ Bind(&are_equal); | 4787 __ Bind(&are_equal); |
| 4791 Label left_is_negative; | 4788 Label left_is_negative; |
| 4792 // Check for negative zero: -0.0 is equal 0.0 but min or max must return | 4789 // Check for negative zero: -0.0 is equal 0.0 but min or max must return |
| 4793 // -0.0 or 0.0 respectively. | 4790 // -0.0 or 0.0 respectively. |
| 4794 // Check for negative left value (get the sign bit): | 4791 // Check for negative left value (get the sign bit): |
| 4795 // - min -> left is negative ? left : right. | 4792 // - min -> left is negative ? left : right. |
| 4796 // - max -> left is negative ? right : left | 4793 // - max -> left is negative ? right : left |
| (...skipping 280 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5077 | 5074 |
| 5078 XmmRegister base = locs->in(0).fpu_reg(); | 5075 XmmRegister base = locs->in(0).fpu_reg(); |
| 5079 XmmRegister exp = locs->in(1).fpu_reg(); | 5076 XmmRegister exp = locs->in(1).fpu_reg(); |
| 5080 XmmRegister result = locs->out(0).fpu_reg(); | 5077 XmmRegister result = locs->out(0).fpu_reg(); |
| 5081 Register temp = | 5078 Register temp = |
| 5082 locs->temp(InvokeMathCFunctionInstr::kObjectTempIndex).reg(); | 5079 locs->temp(InvokeMathCFunctionInstr::kObjectTempIndex).reg(); |
| 5083 XmmRegister zero_temp = | 5080 XmmRegister zero_temp = |
| 5084 locs->temp(InvokeMathCFunctionInstr::kDoubleTempIndex).fpu_reg(); | 5081 locs->temp(InvokeMathCFunctionInstr::kDoubleTempIndex).fpu_reg(); |
| 5085 | 5082 |
| 5086 __ xorps(zero_temp, zero_temp); | 5083 __ xorps(zero_temp, zero_temp); |
| 5087 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(1)), PP); | 5084 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(1))); |
| 5088 __ movsd(result, FieldAddress(temp, Double::value_offset())); | 5085 __ movsd(result, FieldAddress(temp, Double::value_offset())); |
| 5089 | 5086 |
| 5090 Label check_base, skip_call; | 5087 Label check_base, skip_call; |
| 5091 // exponent == 0.0 -> return 1.0; | 5088 // exponent == 0.0 -> return 1.0; |
| 5092 __ comisd(exp, zero_temp); | 5089 __ comisd(exp, zero_temp); |
| 5093 __ j(PARITY_EVEN, &check_base, Assembler::kNearJump); | 5090 __ j(PARITY_EVEN, &check_base, Assembler::kNearJump); |
| 5094 __ j(EQUAL, &skip_call); // 'result' is 1.0. | 5091 __ j(EQUAL, &skip_call); // 'result' is 1.0. |
| 5095 | 5092 |
| 5096 // exponent == 1.0 ? | 5093 // exponent == 1.0 ? |
| 5097 __ comisd(exp, result); | 5094 __ comisd(exp, result); |
| 5098 Label return_base; | 5095 Label return_base; |
| 5099 __ j(EQUAL, &return_base, Assembler::kNearJump); | 5096 __ j(EQUAL, &return_base, Assembler::kNearJump); |
| 5100 | 5097 |
| 5101 // exponent == 2.0 ? | 5098 // exponent == 2.0 ? |
| 5102 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(2.0)), PP); | 5099 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(2.0))); |
| 5103 __ movsd(XMM0, FieldAddress(temp, Double::value_offset())); | 5100 __ movsd(XMM0, FieldAddress(temp, Double::value_offset())); |
| 5104 __ comisd(exp, XMM0); | 5101 __ comisd(exp, XMM0); |
| 5105 Label return_base_times_2; | 5102 Label return_base_times_2; |
| 5106 __ j(EQUAL, &return_base_times_2, Assembler::kNearJump); | 5103 __ j(EQUAL, &return_base_times_2, Assembler::kNearJump); |
| 5107 | 5104 |
| 5108 // exponent == 3.0 ? | 5105 // exponent == 3.0 ? |
| 5109 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(3.0)), PP); | 5106 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(3.0))); |
| 5110 __ movsd(XMM0, FieldAddress(temp, Double::value_offset())); | 5107 __ movsd(XMM0, FieldAddress(temp, Double::value_offset())); |
| 5111 __ comisd(exp, XMM0); | 5108 __ comisd(exp, XMM0); |
| 5112 __ j(NOT_EQUAL, &check_base); | 5109 __ j(NOT_EQUAL, &check_base); |
| 5113 | 5110 |
| 5114 // Base times 3. | 5111 // Base times 3. |
| 5115 __ movsd(result, base); | 5112 __ movsd(result, base); |
| 5116 __ mulsd(result, base); | 5113 __ mulsd(result, base); |
| 5117 __ mulsd(result, base); | 5114 __ mulsd(result, base); |
| 5118 __ jmp(&skip_call); | 5115 __ jmp(&skip_call); |
| 5119 | 5116 |
| (...skipping 14 matching lines...) Expand all Loading... |
| 5134 __ comisd(base, result); | 5131 __ comisd(base, result); |
| 5135 __ j(PARITY_EVEN, &return_nan, Assembler::kNearJump); | 5132 __ j(PARITY_EVEN, &return_nan, Assembler::kNearJump); |
| 5136 __ j(EQUAL, &skip_call, Assembler::kNearJump); | 5133 __ j(EQUAL, &skip_call, Assembler::kNearJump); |
| 5137 // Note: 'base' could be NaN. | 5134 // Note: 'base' could be NaN. |
| 5138 __ comisd(exp, base); | 5135 __ comisd(exp, base); |
| 5139 // Neither 'exp' nor 'base' is NaN. | 5136 // Neither 'exp' nor 'base' is NaN. |
| 5140 Label try_sqrt; | 5137 Label try_sqrt; |
| 5141 __ j(PARITY_ODD, &try_sqrt, Assembler::kNearJump); | 5138 __ j(PARITY_ODD, &try_sqrt, Assembler::kNearJump); |
| 5142 // Return NaN. | 5139 // Return NaN. |
| 5143 __ Bind(&return_nan); | 5140 __ Bind(&return_nan); |
| 5144 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(NAN)), PP); | 5141 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(NAN))); |
| 5145 __ movsd(result, FieldAddress(temp, Double::value_offset())); | 5142 __ movsd(result, FieldAddress(temp, Double::value_offset())); |
| 5146 __ jmp(&skip_call); | 5143 __ jmp(&skip_call); |
| 5147 | 5144 |
| 5148 Label do_pow, return_zero; | 5145 Label do_pow, return_zero; |
| 5149 __ Bind(&try_sqrt); | 5146 __ Bind(&try_sqrt); |
| 5150 // Before calling pow, check if we could use sqrt instead of pow. | 5147 // Before calling pow, check if we could use sqrt instead of pow. |
| 5151 __ LoadObject(temp, | 5148 __ LoadObject(temp, |
| 5152 Double::ZoneHandle(Double::NewCanonical(kNegInfinity)), PP); | 5149 Double::ZoneHandle(Double::NewCanonical(kNegInfinity))); |
| 5153 __ movsd(result, FieldAddress(temp, Double::value_offset())); | 5150 __ movsd(result, FieldAddress(temp, Double::value_offset())); |
| 5154 // base == -Infinity -> call pow; | 5151 // base == -Infinity -> call pow; |
| 5155 __ comisd(base, result); | 5152 __ comisd(base, result); |
| 5156 __ j(EQUAL, &do_pow, Assembler::kNearJump); | 5153 __ j(EQUAL, &do_pow, Assembler::kNearJump); |
| 5157 | 5154 |
| 5158 // exponent == 0.5 ? | 5155 // exponent == 0.5 ? |
| 5159 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(0.5)), PP); | 5156 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(0.5))); |
| 5160 __ movsd(result, FieldAddress(temp, Double::value_offset())); | 5157 __ movsd(result, FieldAddress(temp, Double::value_offset())); |
| 5161 __ comisd(exp, result); | 5158 __ comisd(exp, result); |
| 5162 __ j(NOT_EQUAL, &do_pow, Assembler::kNearJump); | 5159 __ j(NOT_EQUAL, &do_pow, Assembler::kNearJump); |
| 5163 | 5160 |
| 5164 // base == 0 -> return 0; | 5161 // base == 0 -> return 0; |
| 5165 __ comisd(base, zero_temp); | 5162 __ comisd(base, zero_temp); |
| 5166 __ j(EQUAL, &return_zero, Assembler::kNearJump); | 5163 __ j(EQUAL, &return_zero, Assembler::kNearJump); |
| 5167 | 5164 |
| 5168 __ sqrtsd(result, base); | 5165 __ sqrtsd(result, base); |
| 5169 __ jmp(&skip_call, Assembler::kNearJump); | 5166 __ jmp(&skip_call, Assembler::kNearJump); |
| (...skipping 176 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5346 __ jmp(&done); | 5343 __ jmp(&done); |
| 5347 | 5344 |
| 5348 // Divide using 64bit idiv. | 5345 // Divide using 64bit idiv. |
| 5349 __ Bind(¬_32bit); | 5346 __ Bind(¬_32bit); |
| 5350 __ SmiUntag(left); | 5347 __ SmiUntag(left); |
| 5351 __ SmiUntag(right); | 5348 __ SmiUntag(right); |
| 5352 __ cqo(); // Sign extend RAX -> RDX:RAX. | 5349 __ cqo(); // Sign extend RAX -> RDX:RAX. |
| 5353 __ idivq(right); // RAX: quotient, RDX: remainder. | 5350 __ idivq(right); // RAX: quotient, RDX: remainder. |
| 5354 // Check the corner case of dividing the 'MIN_SMI' with -1, in which | 5351 // Check the corner case of dividing the 'MIN_SMI' with -1, in which |
| 5355 // case we cannot tag the result. | 5352 // case we cannot tag the result. |
| 5356 __ CompareImmediate(RAX, Immediate(0x4000000000000000), PP); | 5353 __ CompareImmediate(RAX, Immediate(0x4000000000000000)); |
| 5357 __ j(EQUAL, deopt); | 5354 __ j(EQUAL, deopt); |
| 5358 __ Bind(&done); | 5355 __ Bind(&done); |
| 5359 | 5356 |
| 5360 // Modulo correction (RDX). | 5357 // Modulo correction (RDX). |
| 5361 // res = left % right; | 5358 // res = left % right; |
| 5362 // if (res < 0) { | 5359 // if (res < 0) { |
| 5363 // if (right < 0) { | 5360 // if (right < 0) { |
| 5364 // res = res - right; | 5361 // res = res - right; |
| 5365 // } else { | 5362 // } else { |
| 5366 // res = res + right; | 5363 // res = res + right; |
| (...skipping 103 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5470 return summary; | 5467 return summary; |
| 5471 } | 5468 } |
| 5472 | 5469 |
| 5473 | 5470 |
| 5474 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5471 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 5475 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 5472 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
| 5476 ICData::kDeoptCheckClass, | 5473 ICData::kDeoptCheckClass, |
| 5477 licm_hoisted_ ? ICData::kHoisted : 0); | 5474 licm_hoisted_ ? ICData::kHoisted : 0); |
| 5478 if (IsNullCheck()) { | 5475 if (IsNullCheck()) { |
| 5479 __ CompareObject(locs()->in(0).reg(), | 5476 __ CompareObject(locs()->in(0).reg(), |
| 5480 Object::null_object(), PP); | 5477 Object::null_object()); |
| 5481 Condition cond = DeoptIfNull() ? EQUAL : NOT_EQUAL; | 5478 Condition cond = DeoptIfNull() ? EQUAL : NOT_EQUAL; |
| 5482 __ j(cond, deopt); | 5479 __ j(cond, deopt); |
| 5483 return; | 5480 return; |
| 5484 } | 5481 } |
| 5485 | 5482 |
| 5486 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || | 5483 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || |
| 5487 (unary_checks().NumberOfChecks() > 1)); | 5484 (unary_checks().NumberOfChecks() > 1)); |
| 5488 Register value = locs()->in(0).reg(); | 5485 Register value = locs()->in(0).reg(); |
| 5489 Register temp = locs()->temp(0).reg(); | 5486 Register temp = locs()->temp(0).reg(); |
| 5490 Label is_ok; | 5487 Label is_ok; |
| (...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5565 LocationSummary* summary = new(zone) LocationSummary( | 5562 LocationSummary* summary = new(zone) LocationSummary( |
| 5566 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 5563 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 5567 summary->set_in(0, Location::RequiresRegister()); | 5564 summary->set_in(0, Location::RequiresRegister()); |
| 5568 return summary; | 5565 return summary; |
| 5569 } | 5566 } |
| 5570 | 5567 |
| 5571 | 5568 |
| 5572 void CheckClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5569 void CheckClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 5573 Register value = locs()->in(0).reg(); | 5570 Register value = locs()->in(0).reg(); |
| 5574 Label* deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckClass); | 5571 Label* deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckClass); |
| 5575 __ CompareImmediate(value, Immediate(Smi::RawValue(cid_)), PP); | 5572 __ CompareImmediate(value, Immediate(Smi::RawValue(cid_))); |
| 5576 __ j(NOT_ZERO, deopt); | 5573 __ j(NOT_ZERO, deopt); |
| 5577 } | 5574 } |
| 5578 | 5575 |
| 5579 | 5576 |
| 5580 LocationSummary* CheckArrayBoundInstr::MakeLocationSummary(Zone* zone, | 5577 LocationSummary* CheckArrayBoundInstr::MakeLocationSummary(Zone* zone, |
| 5581 bool opt) const { | 5578 bool opt) const { |
| 5582 const intptr_t kNumInputs = 2; | 5579 const intptr_t kNumInputs = 2; |
| 5583 const intptr_t kNumTemps = 0; | 5580 const intptr_t kNumTemps = 0; |
| 5584 LocationSummary* locs = new(zone) LocationSummary( | 5581 LocationSummary* locs = new(zone) LocationSummary( |
| 5585 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 5582 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| (...skipping 21 matching lines...) Expand all Loading... |
| 5607 // Unconditionally deoptimize for constant bounds checks because they | 5604 // Unconditionally deoptimize for constant bounds checks because they |
| 5608 // only occur only when index is out-of-bounds. | 5605 // only occur only when index is out-of-bounds. |
| 5609 __ jmp(deopt); | 5606 __ jmp(deopt); |
| 5610 return; | 5607 return; |
| 5611 } | 5608 } |
| 5612 | 5609 |
| 5613 if (index_loc.IsConstant()) { | 5610 if (index_loc.IsConstant()) { |
| 5614 Register length = length_loc.reg(); | 5611 Register length = length_loc.reg(); |
| 5615 const Smi& index = Smi::Cast(index_loc.constant()); | 5612 const Smi& index = Smi::Cast(index_loc.constant()); |
| 5616 __ CompareImmediate( | 5613 __ CompareImmediate( |
| 5617 length, Immediate(reinterpret_cast<int64_t>(index.raw())), PP); | 5614 length, Immediate(reinterpret_cast<int64_t>(index.raw()))); |
| 5618 __ j(BELOW_EQUAL, deopt); | 5615 __ j(BELOW_EQUAL, deopt); |
| 5619 } else if (length_loc.IsConstant()) { | 5616 } else if (length_loc.IsConstant()) { |
| 5620 const Smi& length = Smi::Cast(length_loc.constant()); | 5617 const Smi& length = Smi::Cast(length_loc.constant()); |
| 5621 Register index = index_loc.reg(); | 5618 Register index = index_loc.reg(); |
| 5622 if (length.Value() == Smi::kMaxValue) { | 5619 if (length.Value() == Smi::kMaxValue) { |
| 5623 __ testq(index, index); | 5620 __ testq(index, index); |
| 5624 __ j(NEGATIVE, deopt); | 5621 __ j(NEGATIVE, deopt); |
| 5625 } else { | 5622 } else { |
| 5626 __ CompareImmediate( | 5623 __ CompareImmediate( |
| 5627 index, Immediate(reinterpret_cast<int64_t>(length.raw())), PP); | 5624 index, Immediate(reinterpret_cast<int64_t>(length.raw()))); |
| 5628 __ j(ABOVE_EQUAL, deopt); | 5625 __ j(ABOVE_EQUAL, deopt); |
| 5629 } | 5626 } |
| 5630 } else { | 5627 } else { |
| 5631 Register length = length_loc.reg(); | 5628 Register length = length_loc.reg(); |
| 5632 Register index = index_loc.reg(); | 5629 Register index = index_loc.reg(); |
| 5633 __ cmpq(index, length); | 5630 __ cmpq(index, length); |
| 5634 __ j(ABOVE_EQUAL, deopt); | 5631 __ j(ABOVE_EQUAL, deopt); |
| 5635 } | 5632 } |
| 5636 } | 5633 } |
| 5637 | 5634 |
| (...skipping 636 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6274 | 6271 |
| 6275 Label is_true, is_false; | 6272 Label is_true, is_false; |
| 6276 BranchLabels labels = { &is_true, &is_false, &is_false }; | 6273 BranchLabels labels = { &is_true, &is_false, &is_false }; |
| 6277 | 6274 |
| 6278 Condition true_condition = EmitComparisonCode(compiler, labels); | 6275 Condition true_condition = EmitComparisonCode(compiler, labels); |
| 6279 EmitBranchOnCondition(compiler, true_condition, labels); | 6276 EmitBranchOnCondition(compiler, true_condition, labels); |
| 6280 | 6277 |
| 6281 Register result = locs()->out(0).reg(); | 6278 Register result = locs()->out(0).reg(); |
| 6282 Label done; | 6279 Label done; |
| 6283 __ Bind(&is_false); | 6280 __ Bind(&is_false); |
| 6284 __ LoadObject(result, Bool::False(), PP); | 6281 __ LoadObject(result, Bool::False()); |
| 6285 __ jmp(&done); | 6282 __ jmp(&done); |
| 6286 __ Bind(&is_true); | 6283 __ Bind(&is_true); |
| 6287 __ LoadObject(result, Bool::True(), PP); | 6284 __ LoadObject(result, Bool::True()); |
| 6288 __ Bind(&done); | 6285 __ Bind(&done); |
| 6289 } | 6286 } |
| 6290 | 6287 |
| 6291 | 6288 |
| 6292 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 6289 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
| 6293 BranchInstr* branch) { | 6290 BranchInstr* branch) { |
| 6294 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); | 6291 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); |
| 6295 | 6292 |
| 6296 BranchLabels labels = compiler->CreateBranchLabels(branch); | 6293 BranchLabels labels = compiler->CreateBranchLabels(branch); |
| 6297 Condition true_condition = EmitComparisonCode(compiler, labels); | 6294 Condition true_condition = EmitComparisonCode(compiler, labels); |
| (...skipping 12 matching lines...) Expand all Loading... |
| 6310 return summary; | 6307 return summary; |
| 6311 } | 6308 } |
| 6312 | 6309 |
| 6313 | 6310 |
| 6314 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 6311 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 6315 // Arguments descriptor is expected in R10. | 6312 // Arguments descriptor is expected in R10. |
| 6316 intptr_t argument_count = ArgumentCount(); | 6313 intptr_t argument_count = ArgumentCount(); |
| 6317 const Array& arguments_descriptor = | 6314 const Array& arguments_descriptor = |
| 6318 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, | 6315 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, |
| 6319 argument_names())); | 6316 argument_names())); |
| 6320 __ LoadObject(R10, arguments_descriptor, PP); | 6317 __ LoadObject(R10, arguments_descriptor); |
| 6321 | 6318 |
| 6322 // Function in RAX. | 6319 // Function in RAX. |
| 6323 ASSERT(locs()->in(0).reg() == RAX); | 6320 ASSERT(locs()->in(0).reg() == RAX); |
| 6324 __ movq(RCX, FieldAddress(RAX, Function::instructions_offset())); | 6321 __ movq(RCX, FieldAddress(RAX, Function::instructions_offset())); |
| 6325 | 6322 |
| 6326 // RAX: Function. | 6323 // RAX: Function. |
| 6327 // R10: Arguments descriptor array. | 6324 // R10: Arguments descriptor array. |
| 6328 // RBX: Smi 0 (no IC data; the lazy-compile stub expects a GC-safe value). | 6325 // RBX: Smi 0 (no IC data; the lazy-compile stub expects a GC-safe value). |
| 6329 __ xorq(RBX, RBX); | 6326 __ xorq(RBX, RBX); |
| 6330 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 6327 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
| (...skipping 22 matching lines...) Expand all Loading... |
| 6353 Location::RequiresRegister(), | 6350 Location::RequiresRegister(), |
| 6354 LocationSummary::kNoCall); | 6351 LocationSummary::kNoCall); |
| 6355 } | 6352 } |
| 6356 | 6353 |
| 6357 | 6354 |
| 6358 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 6355 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 6359 Register value = locs()->in(0).reg(); | 6356 Register value = locs()->in(0).reg(); |
| 6360 Register result = locs()->out(0).reg(); | 6357 Register result = locs()->out(0).reg(); |
| 6361 | 6358 |
| 6362 Label done; | 6359 Label done; |
| 6363 __ LoadObject(result, Bool::True(), PP); | 6360 __ LoadObject(result, Bool::True()); |
| 6364 __ CompareRegisters(result, value); | 6361 __ CompareRegisters(result, value); |
| 6365 __ j(NOT_EQUAL, &done, Assembler::kNearJump); | 6362 __ j(NOT_EQUAL, &done, Assembler::kNearJump); |
| 6366 __ LoadObject(result, Bool::False(), PP); | 6363 __ LoadObject(result, Bool::False()); |
| 6367 __ Bind(&done); | 6364 __ Bind(&done); |
| 6368 } | 6365 } |
| 6369 | 6366 |
| 6370 | 6367 |
| 6371 LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone, | 6368 LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone, |
| 6372 bool opt) const { | 6369 bool opt) const { |
| 6373 return MakeCallSummary(zone); | 6370 return MakeCallSummary(zone); |
| 6374 } | 6371 } |
| 6375 | 6372 |
| 6376 | 6373 |
| (...skipping 28 matching lines...) Expand all Loading... |
| 6405 zone, kNumInputs, kNumTemps, LocationSummary::kCall); | 6402 zone, kNumInputs, kNumTemps, LocationSummary::kCall); |
| 6406 locs->set_in(0, Location::RegisterLocation(RAX)); | 6403 locs->set_in(0, Location::RegisterLocation(RAX)); |
| 6407 locs->set_out(0, Location::RegisterLocation(RAX)); | 6404 locs->set_out(0, Location::RegisterLocation(RAX)); |
| 6408 return locs; | 6405 return locs; |
| 6409 } | 6406 } |
| 6410 | 6407 |
| 6411 | 6408 |
| 6412 void GrowRegExpStackInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 6409 void GrowRegExpStackInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 6413 const Register typed_data = locs()->in(0).reg(); | 6410 const Register typed_data = locs()->in(0).reg(); |
| 6414 const Register result = locs()->out(0).reg(); | 6411 const Register result = locs()->out(0).reg(); |
| 6415 __ PushObject(Object::null_object(), PP); | 6412 __ PushObject(Object::null_object()); |
| 6416 __ pushq(typed_data); | 6413 __ pushq(typed_data); |
| 6417 compiler->GenerateRuntimeCall(Scanner::kNoSourcePos, // No token position. | 6414 compiler->GenerateRuntimeCall(Scanner::kNoSourcePos, // No token position. |
| 6418 deopt_id(), | 6415 deopt_id(), |
| 6419 kGrowRegExpStackRuntimeEntry, | 6416 kGrowRegExpStackRuntimeEntry, |
| 6420 1, | 6417 1, |
| 6421 locs()); | 6418 locs()); |
| 6422 __ Drop(1); | 6419 __ Drop(1); |
| 6423 __ popq(result); | 6420 __ popq(result); |
| 6424 } | 6421 } |
| 6425 | 6422 |
| 6426 | 6423 |
| 6427 } // namespace dart | 6424 } // namespace dart |
| 6428 | 6425 |
| 6429 #undef __ | 6426 #undef __ |
| 6430 | 6427 |
| 6431 #endif // defined TARGET_ARCH_X64 | 6428 #endif // defined TARGET_ARCH_X64 |
| OLD | NEW |