Chromium Code Reviews

Diff: runtime/vm/intermediate_language_x64.cc

Issue 1268783003: Simplify constant pool usage in x64 code generator (by removing extra argument) (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: address comments. Created 5 years, 4 months ago.
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/globals.h"  // Needed here to get TARGET_ARCH_X64.
#if defined(TARGET_ARCH_X64)

#include "vm/intermediate_language.h"

#include "vm/dart_entry.h"
(...skipping 38 matching lines...)


void PushArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  // In SSA mode, we need an explicit push. Nothing to do in non-SSA mode
  // where PushArgument is handled by BindInstr::EmitNativeCode.
  if (compiler->is_optimizing()) {
    Location value = locs()->in(0);
    if (value.IsRegister()) {
      __ pushq(value.reg());
    } else if (value.IsConstant()) {
-     __ PushObject(value.constant(), PP);
+     __ PushObject(value.constant());
    } else {
      ASSERT(value.IsStackSlot());
      __ pushq(value.ToStackSlotAddress());
    }
  }
}


LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone,
                                                  bool opt) const {
(...skipping 20 matching lines...)
}

#if defined(DEBUG)
  __ Comment("Stack Check");
  Label done;
  const intptr_t fp_sp_dist =
      (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize;
  ASSERT(fp_sp_dist <= 0);
  __ movq(RDI, RSP);
  __ subq(RDI, RBP);
- __ CompareImmediate(RDI, Immediate(fp_sp_dist), PP);
+ __ CompareImmediate(RDI, Immediate(fp_sp_dist));
  __ j(EQUAL, &done, Assembler::kNearJump);
  __ int3();
  __ Bind(&done);
#endif
- __ LeaveDartFrame();
+ ASSERT(__ constant_pool_allowed());
+ __ LeaveDartFrame();  // Disallows constant pool use.
  __ ret();
+ // This ReturnInstr may be emitted out of order by the optimizer. The next
+ // block may be a target expecting a properly set constant pool pointer.
+ __ set_constant_pool_allowed(true);
}
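
Note on the hunk above: the CL moves pool-pointer bookkeeping out of the call sites and into the assembler, which is why the explicit PP argument disappears from every call below. Only constant_pool_allowed() and set_constant_pool_allowed() are visible in this diff; a minimal sketch of the state the assembler presumably keeps:

    // Sketch only (assumed shape, not shown in this diff): object loads may
    // go through the object pool only while the flag is set, so
    // LeaveDartFrame() can clear it once PP is no longer valid.
    class Assembler {
     public:
      bool constant_pool_allowed() const { return constant_pool_allowed_; }
      void set_constant_pool_allowed(bool allowed) {
        constant_pool_allowed_ = allowed;
      }
     private:
      bool constant_pool_allowed_ = true;  // assumed default
    };
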


static Condition NegateCondition(Condition condition) {
  switch (condition) {
    case EQUAL: return NOT_EQUAL;
    case NOT_EQUAL: return EQUAL;
    case LESS: return GREATER_EQUAL;
    case LESS_EQUAL: return GREATER;
    case GREATER: return LESS_EQUAL;
(...skipping 59 matching lines...)

  __ setcc(true_condition, DL);

  if (is_power_of_two_kind) {
    const intptr_t shift =
        Utils::ShiftForPowerOfTwo(Utils::Maximum(true_value, false_value));
    __ shlq(RDX, Immediate(shift + kSmiTagSize));
  } else {
    __ decq(RDX);
    __ AndImmediate(RDX,
-       Immediate(Smi::RawValue(true_value) - Smi::RawValue(false_value)), PP);
+       Immediate(Smi::RawValue(true_value) - Smi::RawValue(false_value)));
    if (false_value != 0) {
-     __ AddImmediate(RDX, Immediate(Smi::RawValue(false_value)), PP);
+     __ AddImmediate(RDX, Immediate(Smi::RawValue(false_value)));
    }
  }
}
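
The decq/AndImmediate/AddImmediate sequence above is a branchless select between two Smi constants. A C-level model of the non-power-of-two path (a sketch, assuming the lines elided above have negated the condition so that setcc leaves 0 in DL when the true value should be selected):

    #include <cassert>
    #include <cstdint>

    // setcc_result is DL after setcc: 0 selects the true value, 1 the false
    // one. Values are Smi-tagged (shifted left by one).
    int64_t SelectSmi(int64_t setcc_result, int64_t true_smi, int64_t false_smi) {
      int64_t r = setcc_result - 1;   // decq: 0 -> all ones, 1 -> 0
      r &= true_smi - false_smi;      // AndImmediate: delta or 0
      return r + false_smi;           // AddImmediate: true_smi or false_smi
    }

    int main() {
      assert(SelectSmi(0, 10 << 1, 3 << 1) == (10 << 1));
      assert(SelectSmi(1, 10 << 1, 3 << 1) == (3 << 1));
    }
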


LocationSummary* LoadLocalInstr::MakeLocationSummary(Zone* zone,
                                                     bool opt) const {
  const intptr_t kNumInputs = 0;
  const intptr_t stack_index = (local().index() < 0)
      ? kFirstLocalSlotFromFp - local().index()
(...skipping 36 matching lines...)
                               kNumInputs,
                               Location::RequiresRegister(),
                               LocationSummary::kNoCall);
}


void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  // The register allocator drops constant definitions that have no uses.
  if (!locs()->out(0).IsInvalid()) {
    Register result = locs()->out(0).reg();
-   __ LoadObject(result, value(), PP);
+   __ LoadObject(result, value());
  }
}


LocationSummary* UnboxedConstantInstr::MakeLocationSummary(Zone* zone,
                                                           bool opt) const {
  const intptr_t kNumInputs = 0;
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new(zone) LocationSummary(
      zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
(...skipping 14 matching lines...)

void UnboxedConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  // The register allocator drops constant definitions that have no uses.
  if (!locs()->out(0).IsInvalid()) {
    switch (representation()) {
      case kUnboxedDouble: {
        XmmRegister result = locs()->out(0).fpu_reg();
        if (Utils::DoublesBitEqual(Double::Cast(value()).value(), 0.0)) {
          __ xorps(result, result);
        } else {
-         __ LoadObject(TMP, value(), PP);
+         __ LoadObject(TMP, value());
          __ movsd(result, FieldAddress(TMP, Double::value_offset()));
        }
        break;
      }
      case kUnboxedInt32:
        __ movl(locs()->out(0).reg(),
                Immediate(static_cast<int32_t>(Smi::Cast(value()).Value())));
        break;
      default:
        UNREACHABLE();
(...skipping 32 matching lines...)
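
The xorps special case above only fires for a double that is bit-identical to +0.0; an ordinary == compare would also match -0.0, which must not be materialized as a zeroed register. A sketch of what Utils::DoublesBitEqual presumably checks:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Bit-level equality for doubles: unlike ==, it distinguishes 0.0 from
    // -0.0 (the sign bit differs) and compares NaNs by payload.
    bool DoublesBitEqual(double a, double b) {
      uint64_t ba, bb;
      std::memcpy(&ba, &a, sizeof(ba));
      std::memcpy(&bb, &b, sizeof(bb));
      return ba == bb;
    }

    int main() {
      assert(DoublesBitEqual(0.0, 0.0));
      assert(!DoublesBitEqual(-0.0, 0.0));  // why == would be wrong here
    }
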
                               intptr_t token_pos,
                               intptr_t deopt_id,
                               LocationSummary* locs,
                               FlowGraphCompiler* compiler) {
  // Check that the type of the value is allowed in conditional context.
  // Call the runtime if the object is not bool::true or bool::false.
  ASSERT(locs->always_calls());
  Label done;

  if (Isolate::Current()->flags().type_checks()) {
-   __ CompareObject(reg, Bool::True(), PP);
+   __ CompareObject(reg, Bool::True());
    __ j(EQUAL, &done, Assembler::kNearJump);
-   __ CompareObject(reg, Bool::False(), PP);
+   __ CompareObject(reg, Bool::False());
    __ j(EQUAL, &done, Assembler::kNearJump);
  } else {
    ASSERT(Isolate::Current()->flags().asserts());
-   __ CompareObject(reg, Object::null_instance(), PP);
+   __ CompareObject(reg, Object::null_instance());
    __ j(NOT_EQUAL, &done, Assembler::kNearJump);
  }

  __ pushq(reg);  // Push the source object.
  compiler->GenerateRuntimeCall(token_pos,
                                deopt_id,
                                kNonBoolTypeErrorRuntimeEntry,
                                1,
                                locs);
  // We should never return here.
(...skipping 65 matching lines...)
  return NULL;
}


static void LoadValueCid(FlowGraphCompiler* compiler,
                         Register value_cid_reg,
                         Register value_reg,
                         Label* value_is_smi = NULL) {
  Label done;
  if (value_is_smi == NULL) {
-   __ LoadImmediate(value_cid_reg, Immediate(kSmiCid), PP);
+   __ LoadImmediate(value_cid_reg, Immediate(kSmiCid));
  }
  __ testq(value_reg, Immediate(kSmiTagMask));
  if (value_is_smi == NULL) {
    __ j(ZERO, &done, Assembler::kNearJump);
  } else {
    __ j(ZERO, value_is_smi);
  }
  __ LoadClassId(value_cid_reg, value_reg);
  __ Bind(&done);
}
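
LoadValueCid above branches on the Smi tag bit before touching the object header, since a Smi has no header to read. A C model of the control flow (sketch; the concrete kSmiCid value is the VM's):

    #include <cstdint>

    constexpr uintptr_t kSmiTagMask = 1;  // low bit clear marks a Smi on x64

    // Mirrors the testq / j(ZERO, ...) / LoadClassId sequence: Smis get the
    // fixed Smi class id, heap objects get the id stored in their header.
    intptr_t ValueCid(uintptr_t value, intptr_t smi_cid,
                      intptr_t (*load_class_id)(uintptr_t heap_object)) {
      if ((value & kSmiTagMask) == 0) return smi_cid;
      return load_class_id(value);
    }
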
(...skipping 41 matching lines...)
                              const LocationSummary& locs,
                              Token::Kind kind,
                              BranchLabels labels) {
  Location left = locs.in(0);
  Location right = locs.in(1);
  ASSERT(!left.IsConstant() || !right.IsConstant());

  Condition true_condition = TokenKindToIntCondition(kind);

  if (left.IsConstant()) {
-   __ CompareObject(right.reg(), left.constant(), PP);
+   __ CompareObject(right.reg(), left.constant());
    true_condition = FlipCondition(true_condition);
  } else if (right.IsConstant()) {
-   __ CompareObject(left.reg(), right.constant(), PP);
+   __ CompareObject(left.reg(), right.constant());
  } else if (right.IsStackSlot()) {
    __ cmpq(left.reg(), right.ToStackSlotAddress());
  } else {
    __ cmpq(left.reg(), right.reg());
  }
  return true_condition;
}
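
When the constant ends up on the left, the emitted cmp has its operands swapped, so the condition must be mirrored rather than negated. The distinction, as a sketch:

    // Mirroring (FlipCondition): compensates for swapped cmp operands.
    //   a <  b   is the same fact as   b >  a
    // Negation (NegateCondition, defined earlier in this file): logical not.
    //   !(a < b) is the same fact as   a >= b
    enum Cond { kLess, kLessEqual, kGreater, kGreaterEqual, kEqual, kNotEqual };

    Cond Flip(Cond c) {
      switch (c) {
        case kLess:         return kGreater;
        case kLessEqual:    return kGreaterEqual;
        case kGreater:      return kLess;
        case kGreaterEqual: return kLessEqual;
        default:            return c;  // EQUAL/NOT_EQUAL are symmetric
      }
    }
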


static Condition TokenKindToDoubleCondition(Token::Kind kind) {
(...skipping 43 matching lines...)
  ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE));

  Label is_true, is_false;
  BranchLabels labels = { &is_true, &is_false, &is_false };
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);

  Register result = locs()->out(0).reg();
  Label done;
  __ Bind(&is_false);
- __ LoadObject(result, Bool::False(), PP);
+ __ LoadObject(result, Bool::False());
  __ jmp(&done);
  __ Bind(&is_true);
- __ LoadObject(result, Bool::True(), PP);
+ __ LoadObject(result, Bool::True());
  __ Bind(&done);
}


void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
                                          BranchInstr* branch) {
  ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ));

  BranchLabels labels = compiler->CreateBranchLabels(branch);
  Condition true_condition = EmitComparisonCode(compiler, labels);
(...skipping 16 matching lines...)


Condition TestSmiInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
                                           BranchLabels labels) {
  Register left_reg = locs()->in(0).reg();
  Location right = locs()->in(1);
  if (right.IsConstant()) {
    ASSERT(right.constant().IsSmi());
    const int64_t imm =
        reinterpret_cast<int64_t>(right.constant().raw());
-   __ TestImmediate(left_reg, Immediate(imm), PP);
+   __ TestImmediate(left_reg, Immediate(imm));
  } else {
    __ testq(left_reg, right.reg());
  }
  Condition true_condition = (kind() == Token::kNE) ? NOT_ZERO : ZERO;
  return true_condition;
}
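
TestImmediate above works on the raw (tagged) Smi bits. That is sound because tagging shifts both operands left by one, which cannot change whether the AND is zero. A sketch of the invariant:

    #include <cassert>
    #include <cstdint>

    // (a << 1) & (b << 1) == (a & b) << 1, so testing tagged values gives
    // the same zero/non-zero answer as testing the untagged integers.
    bool TaggedTestMatchesUntagged(int64_t a, int64_t b) {
      bool untagged = ((a & b) != 0);
      bool tagged = (((a << 1) & (b << 1)) != 0);
      return untagged == tagged;
    }

    int main() {
      assert(TaggedTestMatchesUntagged(5, 4));
      assert(TaggedTestMatchesUntagged(2, 1));  // disjoint bits: both false
    }
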


void TestSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  // Never emitted outside of the BranchInstr.
(...skipping 67 matching lines...)
  EmitComparisonCode(compiler, labels);
}


void TestCidsInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  Register result_reg = locs()->out(0).reg();
  Label is_true, is_false, done;
  BranchLabels labels = { &is_true, &is_false, &is_false };
  EmitComparisonCode(compiler, labels);
  __ Bind(&is_false);
- __ LoadObject(result_reg, Bool::False(), PP);
+ __ LoadObject(result_reg, Bool::False());
  __ jmp(&done, Assembler::kNearJump);
  __ Bind(&is_true);
- __ LoadObject(result_reg, Bool::True(), PP);
+ __ LoadObject(result_reg, Bool::True());
  __ Bind(&done);
}


LocationSummary* RelationalOpInstr::MakeLocationSummary(Zone* zone,
                                                        bool opt) const {
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  if (operation_cid() == kDoubleCid) {
    LocationSummary* summary = new(zone) LocationSummary(
(...skipping 37 matching lines...)

void RelationalOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  Label is_true, is_false;
  BranchLabels labels = { &is_true, &is_false, &is_false };
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);

  Register result = locs()->out(0).reg();
  Label done;
  __ Bind(&is_false);
- __ LoadObject(result, Bool::False(), PP);
+ __ LoadObject(result, Bool::False());
  __ jmp(&done);
  __ Bind(&is_true);
- __ LoadObject(result, Bool::True(), PP);
+ __ LoadObject(result, Bool::True());
  __ Bind(&done);
}


void RelationalOpInstr::EmitBranchCode(FlowGraphCompiler* compiler,
                                       BranchInstr* branch) {
  BranchLabels labels = compiler->CreateBranchLabels(branch);
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);
}


LocationSummary* NativeCallInstr::MakeLocationSummary(Zone* zone,
                                                      bool opt) const {
  return MakeCallSummary(zone);
}

void NativeCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  Register result = locs()->out(0).reg();
  const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function());
  const bool is_leaf_call =
      (argc_tag & NativeArguments::AutoSetupScopeMask()) == 0;

  // Push the result place holder initialized to NULL.
- __ PushObject(Object::null_object(), PP);
+ __ PushObject(Object::null_object());
  // Pass a pointer to the first argument in RAX.
  if (!function().HasOptionalParameters()) {
    __ leaq(RAX, Address(RBP, (kParamEndSlotFromFp +
                               function().NumParameters()) * kWordSize));
  } else {
-   __ leaq(RAX,
-       Address(RBP, kFirstLocalSlotFromFp * kWordSize));
+   __ leaq(RAX, Address(RBP, kFirstLocalSlotFromFp * kWordSize));
  }
  __ LoadImmediate(
-     RBX, Immediate(reinterpret_cast<uword>(native_c_function())), PP);
- __ LoadImmediate(
-     R10, Immediate(argc_tag), PP);
+     RBX, Immediate(reinterpret_cast<uword>(native_c_function())));
+ __ LoadImmediate(R10, Immediate(argc_tag));
  const ExternalLabel* stub_entry = (is_bootstrap_native() || is_leaf_call) ?
      &StubCode::CallBootstrapCFunctionLabel() :
      &StubCode::CallNativeCFunctionLabel();
  compiler->GenerateCall(token_pos(),
                         stub_entry,
                         RawPcDescriptors::kOther,
                         locs());
  __ popq(result);
}
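
The is_leaf_call computation above reads flag bits out of argc_tag, which packs the argument count together with call-setup flags. A sketch of just the decision (the mask layout itself is the VM's and not shown in this diff):

    #include <cstdint>

    // A native call is "leaf" when the auto-setup-scope bit is clear; leaf
    // calls take the cheaper bootstrap-C-function stub above.
    bool IsLeafCall(intptr_t argc_tag, intptr_t auto_setup_scope_mask) {
      return (argc_tag & auto_setup_scope_mask) == 0;
    }
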

(...skipping 19 matching lines...)
                               Location::RequiresRegister(),
                               LocationSummary::kNoCall);
}


void StringFromCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  ASSERT(compiler->is_optimizing());
  Register char_code = locs()->in(0).reg();
  Register result = locs()->out(0).reg();
  __ LoadImmediate(result,
-     Immediate(reinterpret_cast<uword>(Symbols::PredefinedAddress())), PP);
+     Immediate(reinterpret_cast<uword>(Symbols::PredefinedAddress())));
  __ movq(result, Address(result,
                          char_code,
                          TIMES_HALF_WORD_SIZE,  // Char code is a smi.
                          Symbols::kNullCharCodeSymbolOffset * kWordSize));
}
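
The TIMES_HALF_WORD_SIZE scale above is Smi arithmetic: char_code arrives tagged (shifted left by one), so scaling by half a word yields one full word per table entry. Worked out as a sketch:

    #include <cstdint>

    constexpr intptr_t kWordSize = 8;  // x64

    // address = table + tagged_code * (kWordSize / 2) + offset
    //         = table + (code << 1) * 4 + offset
    //         = table + code * 8 + offset      -- one word per symbol
    uintptr_t SymbolEntryAddress(uintptr_t table, intptr_t char_code,
                                 intptr_t offset) {
      intptr_t tagged = char_code << 1;  // Smi tag
      return table + tagged * (kWordSize / 2) + offset;
    }
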


LocationSummary* StringToCharCodeInstr::MakeLocationSummary(Zone* zone,
                                                            bool opt) const {
  const intptr_t kNumInputs = 1;
(...skipping 84 matching lines...)
void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const Register object = locs()->in(0).reg();
  const Register result = locs()->out(0).reg();
  Label load, done;

  // We don't use Assembler::LoadTaggedClassIdMayBeSmi() here---which uses
  // a conditional move instead---because it is slower, probably due to
  // branch prediction usually working just fine in this case.
  __ testq(object, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &load, Assembler::kNearJump);
- __ LoadImmediate(result, Immediate(Smi::RawValue(kSmiCid)), PP);
+ __ LoadImmediate(result, Immediate(Smi::RawValue(kSmiCid)));
  __ jmp(&done);
  __ Bind(&load);
  __ LoadClassId(result, object);
  __ SmiTag(result);
  __ Bind(&done);
}
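
The result of LoadClassIdInstr is itself a Smi, hence the SmiTag on the heap-object path and the pre-tagged Smi::RawValue(kSmiCid) constant on the Smi path. As a C sketch:

    #include <cstdint>

    // Both paths above produce a tagged class id: either the constant Smi
    // cid (tagged at compile time) or the header cid, tagged after the load.
    intptr_t TaggedClassId(uintptr_t object, intptr_t smi_cid,
                           intptr_t (*class_id_of)(uintptr_t heap_object)) {
      if ((object & 1) == 0) return smi_cid << 1;  // Smi::RawValue(kSmiCid)
      return class_id_of(object) << 1;             // LoadClassId + SmiTag
    }
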


CompileType LoadIndexedInstr::ComputeType() const {
  switch (class_id_) {
(...skipping 372 matching lines...)
  if ((index_scale() == 1) && index.IsRegister()) {
    __ SmiUntag(index.reg());
  }
  switch (class_id()) {
    case kArrayCid:
      if (ShouldEmitStoreBarrier()) {
        Register value = locs()->in(2).reg();
        __ StoreIntoObject(array, element_address, value);
      } else if (locs()->in(2).IsConstant()) {
        const Object& constant = locs()->in(2).constant();
-       __ StoreIntoObjectNoBarrier(array, element_address, constant, PP);
+       __ StoreIntoObjectNoBarrier(array, element_address, constant);
      } else {
        Register value = locs()->in(2).reg();
        __ StoreIntoObjectNoBarrier(array, element_address, value);
      }
      break;
    case kTypedDataInt8ArrayCid:
    case kTypedDataUint8ArrayCid:
    case kExternalTypedDataUint8ArrayCid:
    case kOneByteStringCid:
      if (locs()->in(2).IsConstant()) {
(...skipping 16 matching lines...)
          value = 0xFF;
        } else if (value < 0) {
          value = 0;
        }
        __ movb(element_address,
                Immediate(static_cast<int8_t>(value)));
      } else {
        ASSERT(locs()->in(2).reg() == RAX);
        Label store_value, store_0xff;
        __ SmiUntag(RAX);
-       __ CompareImmediate(RAX, Immediate(0xFF), PP);
+       __ CompareImmediate(RAX, Immediate(0xFF));
        __ j(BELOW_EQUAL, &store_value, Assembler::kNearJump);
        // Clamp to 0x0 or 0xFF respectively.
        __ j(GREATER, &store_0xff);
        __ xorq(RAX, RAX);
        __ jmp(&store_value, Assembler::kNearJump);
        __ Bind(&store_0xff);
-       __ LoadImmediate(RAX, Immediate(0xFF), PP);
+       __ LoadImmediate(RAX, Immediate(0xFF));
        __ Bind(&store_value);
        __ movb(element_address, RAX);
      }
      break;
    }
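
The three-way branch above implements uint8-clamped store semantics: one unsigned compare catches the in-range values, and a signed compare then splits too-large from negative. A scalar model as a sketch:

    #include <cassert>
    #include <cstdint>

    // Saturating store for Uint8Clamped typed data: out-of-range values
    // clamp instead of wrapping. BELOW_EQUAL (unsigned) accepts [0, 0xFF]
    // in one test because negative values look huge unsigned; GREATER
    // (signed) then separates >0xFF from <0.
    uint8_t ClampToUint8(int64_t value) {
      if (static_cast<uint64_t>(value) <= 0xFF) {
        return static_cast<uint8_t>(value);
      }
      if (value > 0xFF) return 0xFF;
      return 0;
    }

    int main() {
      assert(ClampToUint8(-5) == 0);
      assert(ClampToUint8(300) == 0xFF);
      assert(ClampToUint8(42) == 42);
    }
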
    case kTypedDataInt16ArrayCid:
    case kTypedDataUint16ArrayCid: {
      Register value = locs()->in(2).reg();
      __ SmiUntag(value);
      __ movw(element_address, value);
(...skipping 88 matching lines...)
      locs()->temp(locs()->temp_count() - 1).reg() : kNoRegister;

  Label ok, fail_label;

  Label* deopt = compiler->is_optimizing() ?
      compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField) : NULL;

  Label* fail = (deopt != NULL) ? deopt : &fail_label;

  if (emit_full_guard) {
-   __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), PP);
+   __ LoadObject(field_reg, Field::ZoneHandle(field().raw()));

    FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset());
    FieldAddress field_nullability_operand(
        field_reg, Field::is_nullable_offset());

    if (value_cid == kDynamicCid) {
      LoadValueCid(compiler, value_cid_reg, value_reg);

      __ cmpl(value_cid_reg, field_cid_operand);
      __ j(EQUAL, &ok);
(...skipping 48 matching lines...)
    ASSERT(deopt != NULL);

    // Field guard class has been initialized and is known.
    if (value_cid == kDynamicCid) {
      // Value's class id is not known.
      __ testq(value_reg, Immediate(kSmiTagMask));

      if (field_cid != kSmiCid) {
        __ j(ZERO, fail);
        __ LoadClassId(value_cid_reg, value_reg);
-       __ CompareImmediate(value_cid_reg, Immediate(field_cid), PP);
+       __ CompareImmediate(value_cid_reg, Immediate(field_cid));
      }

      if (field().is_nullable() && (field_cid != kNullCid)) {
        __ j(EQUAL, &ok);
-       __ CompareObject(value_reg, Object::null_object(), PP);
+       __ CompareObject(value_reg, Object::null_object());
      }

      __ j(NOT_EQUAL, fail);
    } else {
      // Both value's and field's class id is known.
      ASSERT((value_cid != field_cid) && (value_cid != nullability));
      __ jmp(fail);
    }
  }
  __ Bind(&ok);
(...skipping 35 matching lines...)
  const Register value_reg = locs()->in(0).reg();

  if (!compiler->is_optimizing() ||
      (field().guarded_list_length() == Field::kUnknownFixedLength)) {
    const Register field_reg = locs()->temp(0).reg();
    const Register offset_reg = locs()->temp(1).reg();
    const Register length_reg = locs()->temp(2).reg();

    Label ok;

-   __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), PP);
+   __ LoadObject(field_reg, Field::ZoneHandle(field().raw()));

    __ movsxb(offset_reg, FieldAddress(field_reg,
        Field::guarded_list_length_in_object_offset_offset()));
    __ movq(length_reg, FieldAddress(field_reg,
        Field::guarded_list_length_offset()));

    __ cmpq(offset_reg, Immediate(0));
    __ j(NEGATIVE, &ok);

    // Load the length from the value. GuardFieldClass already verified that
(...skipping 16 matching lines...)
    __ Bind(&ok);
  } else {
    ASSERT(compiler->is_optimizing());
    ASSERT(field().guarded_list_length() >= 0);
    ASSERT(field().guarded_list_length_in_object_offset() !=
           Field::kUnknownLengthOffset);

    __ CompareImmediate(
        FieldAddress(value_reg,
                     field().guarded_list_length_in_object_offset()),
-       Immediate(Smi::RawValue(field().guarded_list_length())),
-       PP);
+       Immediate(Smi::RawValue(field().guarded_list_length())));
    __ j(NOT_EQUAL, deopt);
  }
}


class BoxAllocationSlowPath : public SlowPathCode {
 public:
  BoxAllocationSlowPath(Instruction* instruction,
                        const Class& cls,
                        Register result)
(...skipping 29 matching lines...)
  }

  static void Allocate(FlowGraphCompiler* compiler,
                       Instruction* instruction,
                       const Class& cls,
                       Register result) {
    if (compiler->intrinsic_mode()) {
      __ TryAllocate(cls,
                     compiler->intrinsic_slow_path_label(),
                     Assembler::kFarJump,
-                    result,
-                    PP);
+                    result);
    } else {
      BoxAllocationSlowPath* slow_path =
          new BoxAllocationSlowPath(instruction, cls, result);
      compiler->AddSlowPathCode(slow_path);

      __ TryAllocate(cls,
                     slow_path->entry_label(),
                     Assembler::kFarJump,
-                    result,
-                    PP);
+                    result);
      __ Bind(slow_path->exit_label());
    }
  }

 private:
  Instruction* instruction_;
  const Class& cls_;
  const Register result_;
};

(...skipping 35 matching lines...)

static void EnsureMutableBox(FlowGraphCompiler* compiler,
                             StoreInstanceFieldInstr* instruction,
                             Register box_reg,
                             const Class& cls,
                             Register instance_reg,
                             intptr_t offset,
                             Register temp) {
  Label done;
  __ movq(box_reg, FieldAddress(instance_reg, offset));
- __ CompareObject(box_reg, Object::null_object(), PP);
+ __ CompareObject(box_reg, Object::null_object());
  __ j(NOT_EQUAL, &done);
  BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg);
  __ movq(temp, box_reg);
  __ StoreIntoObject(instance_reg,
                     FieldAddress(instance_reg, offset),
                     temp);

  __ Bind(&done);
}
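
EnsureMutableBox above implements allocate-on-first-use for an unboxed field's backing box: reuse the box if the slot already holds one, otherwise allocate and publish it with a store barrier. The shape, as a sketch:

    // Pseudo-C++ shape of the sequence above (a sketch; allocation really
    // goes through BoxAllocationSlowPath and the store through a barrier).
    template <typename Box>
    Box* EnsureBox(Box** field_slot, Box* (*allocate_box)()) {
      if (*field_slot != nullptr) {   // CompareObject(box, null) / j(NOT_EQUAL)
        return *field_slot;
      }
      Box* box = allocate_box();      // BoxAllocationSlowPath::Allocate
      *field_slot = box;              // StoreIntoObject (with barrier)
      return box;
    }
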

(...skipping 63 matching lines...)
    // Value input is a writable register and should be manually preserved
    // across allocation slow-path.
    locs()->live_registers()->Add(locs()->in(1), kTagged);
  }

  Label store_pointer;
  Label store_double;
  Label store_float32x4;
  Label store_float64x2;

- __ LoadObject(temp, Field::ZoneHandle(field().raw()), PP);
+ __ LoadObject(temp, Field::ZoneHandle(field().raw()));

  __ cmpl(FieldAddress(temp, Field::is_nullable_offset()),
          Immediate(kNullCid));
  __ j(EQUAL, &store_pointer);

  __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset()));
  __ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit));
  __ j(ZERO, &store_pointer);

  __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()),
(...skipping 65 matching lines...)
    Register value_reg = locs()->in(1).reg();
    __ StoreIntoObject(instance_reg,
                       FieldAddress(instance_reg, offset_in_bytes_),
                       value_reg,
                       CanValueBeSmi());
  } else {
    if (locs()->in(1).IsConstant()) {
      __ StoreIntoObjectNoBarrier(instance_reg,
                                  FieldAddress(instance_reg, offset_in_bytes_),
                                  locs()->in(1).constant(),
-                                 PP,
                                  is_object_reference_initialization_ ?
                                      Assembler::kEmptyOrSmiOrNull :
                                      Assembler::kHeapObjectOrSmi);
    } else {
      Register value_reg = locs()->in(1).reg();
      __ StoreIntoObjectNoBarrier(instance_reg,
                                  FieldAddress(instance_reg, offset_in_bytes_),
                                  value_reg,
                                  is_object_reference_initialization_ ?
                                      Assembler::kEmptyOrSmiOrNull :
(...skipping 36 matching lines...)
          : Location::RequiresRegister());
  locs->set_temp(0, Location::RequiresRegister());
  return locs;
}


void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  Register value = locs()->in(0).reg();
  Register temp = locs()->temp(0).reg();

- __ LoadObject(temp, field(), PP);
+ __ LoadObject(temp, field());
  if (this->value()->NeedsStoreBuffer()) {
    __ StoreIntoObject(temp,
        FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi());
  } else {
    __ StoreIntoObjectNoBarrier(
        temp, FieldAddress(temp, Field::value_offset()), value);
  }
}

(...skipping 65 matching lines...)
                             FieldAddress(RAX, Array::length_offset()),
                             kLengthReg);

  // Initialize all array elements to raw_null.
  // RAX: new object start as a tagged pointer.
  // RCX: new object end address.
  // RDI: iterator which initially points to the start of the variable
  // data area to be initialized.
  if (num_elements > 0) {
    const intptr_t array_size = instance_size - sizeof(RawArray);
-   __ LoadObject(R12, Object::null_object(), PP);
+   __ LoadObject(R12, Object::null_object());
    __ leaq(RDI, FieldAddress(RAX, sizeof(RawArray)));
    if (array_size < (kInlineArraySize * kWordSize)) {
      intptr_t current_offset = 0;
      while (current_offset < array_size) {
        __ InitializeFieldNoBarrier(RAX, Address(RDI, current_offset), R12);
        current_offset += kWordSize;
      }
    } else {
      Label init_loop;
      __ Bind(&init_loop);
(...skipping 17 matching lines...)
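
The initialization above is fully unrolled for small arrays (one store per slot, the while loop over current_offset runs at compile time) and falls back to an emitted runtime loop (init_loop) past kInlineArraySize slots, trading code size for straight-line speed. The scalar semantics either way, as a sketch:

    #include <cstdint>

    // Every slot of the new array's variable data area becomes raw_null;
    // the compile-time decision above only changes the generated shape.
    void InitializeSlots(intptr_t* data, intptr_t count, intptr_t raw_null) {
      for (intptr_t i = 0; i < count; i++) {
        data[i] = raw_null;  // InitializeFieldNoBarrier(RAX, slot, R12)
      }
    }
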

  Label slow_path, done;
  if (compiler->is_optimizing() &&
      num_elements()->BindsToConstant() &&
      num_elements()->BoundConstant().IsSmi()) {
    const intptr_t length = Smi::Cast(num_elements()->BoundConstant()).Value();
    if ((length >= 0) && (length <= Array::kMaxElements)) {
      Label slow_path, done;
      InlineArrayAllocation(compiler, length, &slow_path, &done);
      __ Bind(&slow_path);
-     __ PushObject(Object::null_object(), PP);  // Make room for the result.
+     __ PushObject(Object::null_object());  // Make room for the result.
      __ pushq(kLengthReg);
      __ pushq(kElemTypeReg);
      compiler->GenerateRuntimeCall(token_pos(),
                                    deopt_id(),
                                    kAllocateArrayRuntimeEntry,
                                    2,
                                    locs());
      __ Drop(2);
      __ popq(kResultReg);
      __ Bind(&done);
(...skipping 69 matching lines...)
  Register result = locs()->out(0).reg();
  if (IsPotentialUnboxedLoad()) {
    Register temp = locs()->temp(1).reg();
    XmmRegister value = locs()->temp(0).fpu_reg();

    Label load_pointer;
    Label load_double;
    Label load_float32x4;
    Label load_float64x2;

-   __ LoadObject(result, Field::ZoneHandle(field()->raw()), PP);
+   __ LoadObject(result, Field::ZoneHandle(field()->raw()));

    __ cmpl(FieldAddress(result, Field::is_nullable_offset()),
            Immediate(kNullCid));
    __ j(EQUAL, &load_pointer);

    __ cmpl(FieldAddress(result, Field::guarded_cid_offset()),
            Immediate(kDoubleCid));
    __ j(EQUAL, &load_double);

    __ cmpl(FieldAddress(result, Field::guarded_cid_offset()),
(...skipping 59 matching lines...)
  return locs;
}


void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  Register instantiator_reg = locs()->in(0).reg();
  Register result_reg = locs()->out(0).reg();

  // 'instantiator_reg' is the instantiator TypeArguments object (or null).
  // A runtime call to instantiate the type is required.
- __ PushObject(Object::null_object(), PP);  // Make room for the result.
- __ PushObject(type(), PP);
+ __ PushObject(Object::null_object());  // Make room for the result.
+ __ PushObject(type());
  __ pushq(instantiator_reg);  // Push instantiator type arguments.
  compiler->GenerateRuntimeCall(token_pos(),
                                deopt_id(),
                                kInstantiateTypeRuntimeEntry,
                                2,
                                locs());
  __ Drop(2);  // Drop instantiator and uninstantiated type.
  __ popq(result_reg);  // Pop instantiated type.
  ASSERT(instantiator_reg == result_reg);
}
(...skipping 21 matching lines...)
  // 'instantiator_reg' is the instantiator TypeArguments object (or null).
  ASSERT(!type_arguments().IsUninstantiatedIdentity() &&
         !type_arguments().CanShareInstantiatorTypeArguments(
             instantiator_class()));
  // If the instantiator is null and if the type argument vector
  // instantiated from null becomes a vector of dynamic, then use null as
  // the type arguments.
  Label type_arguments_instantiated;
  const intptr_t len = type_arguments().Length();
  if (type_arguments().IsRawInstantiatedRaw(len)) {
-   __ CompareObject(instantiator_reg, Object::null_object(), PP);
+   __ CompareObject(instantiator_reg, Object::null_object());
    __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump);
  }

  // Lookup cache before calling runtime.
  // TODO(fschneider): Consider moving this into a shared stub to reduce
  // generated code size.
- __ LoadObject(RDI, type_arguments(), PP);
+ __ LoadObject(RDI, type_arguments());
  __ movq(RDI, FieldAddress(RDI, TypeArguments::instantiations_offset()));
  __ leaq(RDI, FieldAddress(RDI, Array::data_offset()));
  // The instantiations cache is initialized with Object::zero_array() and is
  // therefore guaranteed to contain kNoInstantiator. No length check needed.
  Label loop, found, slow_case;
  __ Bind(&loop);
  __ movq(RDX, Address(RDI, 0 * kWordSize));  // Cached instantiator.
  __ cmpq(RDX, RAX);
  __ j(EQUAL, &found, Assembler::kNearJump);
  __ addq(RDI, Immediate(2 * kWordSize));
  __ cmpq(RDX, Immediate(Smi::RawValue(StubCode::kNoInstantiator)));
  __ j(NOT_EQUAL, &loop, Assembler::kNearJump);
  __ jmp(&slow_case, Assembler::kNearJump);
  __ Bind(&found);
  __ movq(RAX, Address(RDI, 1 * kWordSize));  // Cached instantiated args.
  __ jmp(&type_arguments_instantiated, Assembler::kNearJump);

  __ Bind(&slow_case);
  // Instantiate non-null type arguments.
  // A runtime call to instantiate the type arguments is required.
- __ PushObject(Object::null_object(), PP);  // Make room for the result.
- __ PushObject(type_arguments(), PP);
+ __ PushObject(Object::null_object());  // Make room for the result.
+ __ PushObject(type_arguments());
  __ pushq(instantiator_reg);  // Push instantiator type arguments.
  compiler->GenerateRuntimeCall(token_pos(),
                                deopt_id(),
                                kInstantiateTypeArgumentsRuntimeEntry,
                                2,
                                locs());
  __ Drop(2);  // Drop instantiator and uninstantiated type arguments.
  __ popq(result_reg);  // Pop instantiated type arguments.
  __ Bind(&type_arguments_instantiated);
  ASSERT(instantiator_reg == result_reg);
(...skipping 22 matching lines...)
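
The loop above scans the instantiations cache, an array of (instantiator, result) pairs terminated by a kNoInstantiator sentinel. A C model of the lookup (sketch):

    #include <cstdint>

    // Linear scan over 2-word cache entries; the sentinel is guaranteed to
    // be present (the cache starts life as Object::zero_array()), so no
    // length check is needed, exactly as the comment above says.
    const intptr_t* LookupInstantiations(const intptr_t* cache,
                                         intptr_t instantiator,
                                         intptr_t sentinel) {
      for (const intptr_t* entry = cache; ; entry += 2) {
        if (entry[0] == instantiator) return &entry[1];  // cached result
        if (entry[0] == sentinel) return nullptr;        // miss: slow case
      }
    }
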

  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    __ Comment("AllocateContextSlowPath");
    __ Bind(entry_label());

    LocationSummary* locs = instruction_->locs();
    locs->live_registers()->Remove(locs->out(0));

    compiler->SaveLiveRegisters(locs);

-   __ LoadImmediate(R10, Immediate(instruction_->num_context_variables()), PP);
+   __ LoadImmediate(R10, Immediate(instruction_->num_context_variables()));
    const ExternalLabel label(StubCode::AllocateContextEntryPoint());
    compiler->GenerateCall(instruction_->token_pos(),
                           &label,
                           RawPcDescriptors::kOther,
                           locs);
    ASSERT(instruction_->locs()->out(0).reg() == RAX);
    compiler->RestoreLiveRegisters(instruction_->locs());
    __ jmp(exit_label());
  }

(...skipping 34 matching lines...)
2441 locs->set_temp(0, Location::RegisterLocation(R10)); 2439 locs->set_temp(0, Location::RegisterLocation(R10));
2442 locs->set_out(0, Location::RegisterLocation(RAX)); 2440 locs->set_out(0, Location::RegisterLocation(RAX));
2443 return locs; 2441 return locs;
2444 } 2442 }
2445 2443
2446 2444
2447 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2445 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2448 ASSERT(locs()->temp(0).reg() == R10); 2446 ASSERT(locs()->temp(0).reg() == R10);
2449 ASSERT(locs()->out(0).reg() == RAX); 2447 ASSERT(locs()->out(0).reg() == RAX);
2450 2448
2451 __ LoadImmediate(R10, Immediate(num_context_variables()), PP); 2449 __ LoadImmediate(R10, Immediate(num_context_variables()));
2452 const ExternalLabel label(StubCode::AllocateContextEntryPoint()); 2450 const ExternalLabel label(StubCode::AllocateContextEntryPoint());
2453 compiler->GenerateCall(token_pos(), 2451 compiler->GenerateCall(token_pos(),
2454 &label, 2452 &label,
2455 RawPcDescriptors::kOther, 2453 RawPcDescriptors::kOther,
2456 locs()); 2454 locs());
2457 } 2455 }
2458 2456
2459 2457
2460 LocationSummary* InitStaticFieldInstr::MakeLocationSummary(Zone* zone, 2458 LocationSummary* InitStaticFieldInstr::MakeLocationSummary(Zone* zone,
2461 bool opt) const { 2459 bool opt) const {
2462 const intptr_t kNumInputs = 1; 2460 const intptr_t kNumInputs = 1;
2463 const intptr_t kNumTemps = 1; 2461 const intptr_t kNumTemps = 1;
2464 LocationSummary* locs = new(zone) LocationSummary( 2462 LocationSummary* locs = new(zone) LocationSummary(
2465 zone, kNumInputs, kNumTemps, LocationSummary::kCall); 2463 zone, kNumInputs, kNumTemps, LocationSummary::kCall);
2466 locs->set_in(0, Location::RegisterLocation(RAX)); 2464 locs->set_in(0, Location::RegisterLocation(RAX));
2467 locs->set_temp(0, Location::RegisterLocation(RCX)); 2465 locs->set_temp(0, Location::RegisterLocation(RCX));
2468 return locs; 2466 return locs;
2469 } 2467 }
2470 2468
2471 2469
2472 void InitStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2470 void InitStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2473 Register field = locs()->in(0).reg(); 2471 Register field = locs()->in(0).reg();
2474 Register temp = locs()->temp(0).reg(); 2472 Register temp = locs()->temp(0).reg();
2475 2473
2476 Label call_runtime, no_call; 2474 Label call_runtime, no_call;
2477 2475
2478 __ movq(temp, FieldAddress(field, Field::value_offset())); 2476 __ movq(temp, FieldAddress(field, Field::value_offset()));
2479 __ CompareObject(temp, Object::sentinel(), PP); 2477 __ CompareObject(temp, Object::sentinel());
2480 __ j(EQUAL, &call_runtime); 2478 __ j(EQUAL, &call_runtime);
2481 2479
2482 __ CompareObject(temp, Object::transition_sentinel(), PP); 2480 __ CompareObject(temp, Object::transition_sentinel());
2483 __ j(NOT_EQUAL, &no_call); 2481 __ j(NOT_EQUAL, &no_call);
2484 2482
2485 __ Bind(&call_runtime); 2483 __ Bind(&call_runtime);
2486 __ PushObject(Object::null_object(), PP); // Make room for (unused) result. 2484 __ PushObject(Object::null_object()); // Make room for (unused) result.
2487 __ pushq(field); 2485 __ pushq(field);
2488 compiler->GenerateRuntimeCall(token_pos(), 2486 compiler->GenerateRuntimeCall(token_pos(),
2489 deopt_id(), 2487 deopt_id(),
2490 kInitStaticFieldRuntimeEntry, 2488 kInitStaticFieldRuntimeEntry,
2491 1, 2489 1,
2492 locs()); 2490 locs());
2493 __ Drop(2); // Remove argument and unused result. 2491 __ Drop(2); // Remove argument and unused result.
2494 __ Bind(&no_call); 2492 __ Bind(&no_call);
2495 } 2493 }
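The control flow above reads more easily as plain code: the field's value doubles as its initialization state, with two distinct sentinel objects marking "uninitialized" and "initialization in progress", and the runtime is called in both sentinel cases. A sketch of the same logic with stand-in types (hypothetical names, not the VM's):

  struct Object { Object() {} };
  const Object kSentinelObj;
  const Object kTransitionObj;
  const Object* const kSentinel = &kSentinelObj;              // uninitialized
  const Object* const kTransitionSentinel = &kTransitionObj;  // init in progress

  void InitStaticFieldCheck(const Object* value, void (*call_runtime)()) {
    if (value == kSentinel) { call_runtime(); return; }  // j(EQUAL, &call_runtime)
    if (value != kTransitionSentinel) return;            // j(NOT_EQUAL, &no_call)
    call_runtime();                                      // falls into &call_runtime
  }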
2496 2494
2497 2495
2498 LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone, 2496 LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone,
2499 bool opt) const { 2497 bool opt) const {
2500 const intptr_t kNumInputs = 1; 2498 const intptr_t kNumInputs = 1;
2501 const intptr_t kNumTemps = 0; 2499 const intptr_t kNumTemps = 0;
2502 LocationSummary* locs = new(zone) LocationSummary( 2500 LocationSummary* locs = new(zone) LocationSummary(
2503 zone, kNumInputs, kNumTemps, LocationSummary::kCall); 2501 zone, kNumInputs, kNumTemps, LocationSummary::kCall);
2504 locs->set_in(0, Location::RegisterLocation(RAX)); 2502 locs->set_in(0, Location::RegisterLocation(RAX));
2505 locs->set_out(0, Location::RegisterLocation(RAX)); 2503 locs->set_out(0, Location::RegisterLocation(RAX));
2506 return locs; 2504 return locs;
2507 } 2505 }
2508 2506
2509 2507
2510 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2508 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2511 Register context_value = locs()->in(0).reg(); 2509 Register context_value = locs()->in(0).reg();
2512 Register result = locs()->out(0).reg(); 2510 Register result = locs()->out(0).reg();
2513 2511
2514 __ PushObject(Object::null_object(), PP); // Make room for the result. 2512 __ PushObject(Object::null_object()); // Make room for the result.
2515 __ pushq(context_value); 2513 __ pushq(context_value);
2516 compiler->GenerateRuntimeCall(token_pos(), 2514 compiler->GenerateRuntimeCall(token_pos(),
2517 deopt_id(), 2515 deopt_id(),
2518 kCloneContextRuntimeEntry, 2516 kCloneContextRuntimeEntry,
2519 1, 2517 1,
2520 locs()); 2518 locs());
2521 __ popq(result); // Remove argument. 2519 __ popq(result); // Remove argument.
2522 __ popq(result); // Get result (cloned context). 2520 __ popq(result); // Get result (cloned context).
2523 } 2521 }
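The pattern around GenerateRuntimeCall is the same throughout this file: push null to reserve a stack slot for the result, push the arguments, call, then pop the arguments and finally the result. A toy stack model of that discipline (illustrative only, not the VM's calling convention code):

  #include <cstdint>
  #include <vector>

  intptr_t CallRuntimeStyle(std::vector<intptr_t>& stack, intptr_t arg,
                            intptr_t (*runtime)(intptr_t)) {
    stack.push_back(0);    // PushObject(null): room for the result
    stack.push_back(arg);  // pushq(argument)
    // The runtime entry reads the argument and writes the result slot:
    stack[stack.size() - 2] = runtime(stack.back());
    stack.pop_back();      // popq: discard the argument
    intptr_t result = stack.back();
    stack.pop_back();      // popq: fetch the result
    return result;
  }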
2524 2522
2525 2523
2526 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone, 2524 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
2527 bool opt) const { 2525 bool opt) const {
2528 UNREACHABLE(); 2526 UNREACHABLE();
2529 return NULL; 2527 return NULL;
2530 } 2528 }
2531 2529
2532 2530
2533 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2531 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2534 __ Bind(compiler->GetJumpLabel(this)); 2532 __ Bind(compiler->GetJumpLabel(this));
2535 compiler->AddExceptionHandler(catch_try_index(), 2533 compiler->AddExceptionHandler(catch_try_index(),
2536 try_index(), 2534 try_index(),
2537 compiler->assembler()->CodeSize(), 2535 compiler->assembler()->CodeSize(),
2538 catch_handler_types_, 2536 catch_handler_types_,
2539 needs_stacktrace()); 2537 needs_stacktrace());
2540 2538
2541 // Restore the pool pointer. 2539 // Restore the pool pointer.
2542 __ LoadPoolPointer(PP); 2540 __ LoadPoolPointer();
2543 2541
2544 if (HasParallelMove()) { 2542 if (HasParallelMove()) {
2545 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); 2543 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
2546 } 2544 }
2547 2545
2548 // Restore RSP from RBP as we are coming from a throw and the code for 2546 // Restore RSP from RBP as we are coming from a throw and the code for
2549 // popping arguments has not been run. 2547 // popping arguments has not been run.
2550 const intptr_t fp_sp_dist = 2548 const intptr_t fp_sp_dist =
2551 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; 2549 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize;
2552 ASSERT(fp_sp_dist <= 0); 2550 ASSERT(fp_sp_dist <= 0);
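As a concrete check of the fp_sp_dist expression: with a frame of, say, three spill slots and kFirstLocalSlotFromFp at -2 (an assumed value, for illustration only), the distance comes out negative, which is exactly what the ASSERT demands:

  #include <cassert>

  int main() {
    const int kWordSize = 8;
    const int kFirstLocalSlotFromFp = -2;  // assumption for this example
    const int stack_size = 3;              // spill slots in this frame
    const int fp_sp_dist =
        (kFirstLocalSlotFromFp + 1 - stack_size) * kWordSize;
    assert(fp_sp_dist == -32);  // RSP sits 32 bytes below RBP
    return 0;
  }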
(...skipping 25 matching lines...)
2578 public: 2576 public:
2579 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction) 2577 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction)
2580 : instruction_(instruction) { } 2578 : instruction_(instruction) { }
2581 2579
2582 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 2580 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
2583 if (FLAG_use_osr && osr_entry_label()->IsLinked()) { 2581 if (FLAG_use_osr && osr_entry_label()->IsLinked()) {
2584 uword flags_address = Isolate::Current()->stack_overflow_flags_address(); 2582 uword flags_address = Isolate::Current()->stack_overflow_flags_address();
2585 Register temp = instruction_->locs()->temp(0).reg(); 2583 Register temp = instruction_->locs()->temp(0).reg();
2586 __ Comment("CheckStackOverflowSlowPathOsr"); 2584 __ Comment("CheckStackOverflowSlowPathOsr");
2587 __ Bind(osr_entry_label()); 2585 __ Bind(osr_entry_label());
2588 __ LoadImmediate(temp, Immediate(flags_address), PP); 2586 __ LoadImmediate(temp, Immediate(flags_address));
2589 __ movq(Address(temp, 0), Immediate(Isolate::kOsrRequest)); 2587 __ movq(Address(temp, 0), Immediate(Isolate::kOsrRequest));
2590 } 2588 }
2591 __ Comment("CheckStackOverflowSlowPath"); 2589 __ Comment("CheckStackOverflowSlowPath");
2592 __ Bind(entry_label()); 2590 __ Bind(entry_label());
2593 compiler->SaveLiveRegisters(instruction_->locs()); 2591 compiler->SaveLiveRegisters(instruction_->locs());
2594 // pending_deoptimization_env_ is needed to generate a runtime call that 2592 // pending_deoptimization_env_ is needed to generate a runtime call that
2595 // may throw an exception. 2593 // may throw an exception.
2596 ASSERT(compiler->pending_deoptimization_env_ == NULL); 2594 ASSERT(compiler->pending_deoptimization_env_ == NULL);
2597 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); 2595 Environment* env = compiler->SlowPathEnvironmentFor(instruction_);
2598 compiler->pending_deoptimization_env_ = env; 2596 compiler->pending_deoptimization_env_ = env;
(...skipping 27 matching lines...)
2626 2624
2627 2625
2628 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2626 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2629 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); 2627 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this);
2630 compiler->AddSlowPathCode(slow_path); 2628 compiler->AddSlowPathCode(slow_path);
2631 2629
2632 Register temp = locs()->temp(0).reg(); 2630 Register temp = locs()->temp(0).reg();
2633 // Generate stack overflow check. 2631 // Generate stack overflow check.
2634 if (compiler->is_optimizing()) { 2632 if (compiler->is_optimizing()) {
2635 __ LoadImmediate( 2633 __ LoadImmediate(
2636 temp, Immediate(Isolate::Current()->stack_limit_address()), PP); 2634 temp, Immediate(Isolate::Current()->stack_limit_address()));
2637 __ cmpq(RSP, Address(temp, 0)); 2635 __ cmpq(RSP, Address(temp, 0));
2638 } else { 2636 } else {
2639 __ LoadIsolate(temp); 2637 __ LoadIsolate(temp);
2640 __ cmpq(RSP, Address(temp, Isolate::stack_limit_offset())); 2638 __ cmpq(RSP, Address(temp, Isolate::stack_limit_offset()));
2641 } 2639 }
2642 __ j(BELOW_EQUAL, slow_path->entry_label()); 2640 __ j(BELOW_EQUAL, slow_path->entry_label());
2643 if (compiler->CanOSRFunction() && in_loop()) { 2641 if (compiler->CanOSRFunction() && in_loop()) {
2644 // In unoptimized code check the usage counter to trigger OSR at loop 2642 // In unoptimized code check the usage counter to trigger OSR at loop
2645 // stack checks. Use progressively higher thresholds for more deeply 2643 // stack checks. Use progressively higher thresholds for more deeply
2646 // nested loops to attempt to hit outer loops with OSR when possible. 2644 // nested loops to attempt to hit outer loops with OSR when possible.
2647 __ LoadObject(temp, compiler->parsed_function().function(), PP); 2645 __ LoadObject(temp, compiler->parsed_function().function());
2648 int32_t threshold = 2646 int32_t threshold =
2649 FLAG_optimization_counter_threshold * (loop_depth() + 1); 2647 FLAG_optimization_counter_threshold * (loop_depth() + 1);
2650 __ cmpl(FieldAddress(temp, Function::usage_counter_offset()), 2648 __ cmpl(FieldAddress(temp, Function::usage_counter_offset()),
2651 Immediate(threshold)); 2649 Immediate(threshold));
2652 __ j(GREATER_EQUAL, slow_path->osr_entry_label()); 2650 __ j(GREATER_EQUAL, slow_path->osr_entry_label());
2653 } 2651 }
2654 if (compiler->ForceSlowPathForStackOverflow()) { 2652 if (compiler->ForceSlowPathForStackOverflow()) {
2655 __ jmp(slow_path->entry_label()); 2653 __ jmp(slow_path->entry_label());
2656 } 2654 }
2657 __ Bind(slow_path->exit_label()); 2655 __ Bind(slow_path->exit_label());
2658 } 2656 }
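The loop-depth scaling above means inner loops need proportionally more iterations before triggering OSR, which biases OSR entry toward outer loops. Numerically, assuming a flag default of 30000 (an assumption for illustration; the real value comes from FLAG_optimization_counter_threshold):

  #include <cstdint>

  // Usage-counter threshold for OSR at a loop of the given nesting depth,
  // mirroring the expression emitted above.
  int32_t OsrThreshold(intptr_t loop_depth) {
    const int32_t kOptimizationCounterThreshold = 30000;  // assumed default
    return kOptimizationCounterThreshold * static_cast<int32_t>(loop_depth + 1);
  }
  // depth 0 -> 30000, depth 1 -> 60000, depth 2 -> 90000, ...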
2659 2657
2660 2658
2661 static void EmitJavascriptOverflowCheck(FlowGraphCompiler* compiler, 2659 static void EmitJavascriptOverflowCheck(FlowGraphCompiler* compiler,
2662 Range* range, 2660 Range* range,
2663 Label* overflow, 2661 Label* overflow,
2664 Register result) { 2662 Register result) {
2665 if (!RangeUtils::IsWithin(range, -0x20000000000000LL, 0x20000000000000LL)) { 2663 if (!RangeUtils::IsWithin(range, -0x20000000000000LL, 0x20000000000000LL)) {
2666 ASSERT(overflow != NULL); 2664 ASSERT(overflow != NULL);
2667 // TODO(zra): This can be tightened to one compare/branch using: 2665 // TODO(zra): This can be tightened to one compare/branch using:
2668 // overflow = (result + 2^52) > 2^53 with an unsigned comparison. 2666 // overflow = (result + 2^52) > 2^53 with an unsigned comparison.
2669 __ CompareImmediate(result, Immediate(-0x20000000000000LL), PP); 2667 __ CompareImmediate(result, Immediate(-0x20000000000000LL));
2670 __ j(LESS, overflow); 2668 __ j(LESS, overflow);
2671 __ CompareImmediate(result, Immediate(0x20000000000000LL), PP); 2669 __ CompareImmediate(result, Immediate(0x20000000000000LL));
2672 __ j(GREATER, overflow); 2670 __ j(GREATER, overflow);
2673 } 2671 }
2674 } 2672 }
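The two compares above bound the result to the +/- 2^53 range of exact JavaScript integers (0x20000000000000 == 2^53). The single-compare idea the TODO gestures at works by shifting the range so it starts at zero, letting one unsigned comparison cover both ends; a sketch of both forms:

  #include <cstdint>

  const uint64_t kBound = 0x20000000000000ULL;  // 2^53

  // Two-compare form, as emitted above.
  bool JsIntOverflow(int64_t result) {
    return result < -static_cast<int64_t>(kBound) ||
           result > static_cast<int64_t>(kBound);
  }

  // One unsigned compare: shift [-2^53, 2^53] onto [0, 2^54] first.
  // Unsigned arithmetic wraps, so out-of-range negatives land above 2^54.
  bool JsIntOverflowOneCmp(int64_t result) {
    return static_cast<uint64_t>(result) + kBound > 2 * kBound;
  }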
2675 2673
2676 2674
2677 static void EmitSmiShiftLeft(FlowGraphCompiler* compiler, 2675 static void EmitSmiShiftLeft(FlowGraphCompiler* compiler,
2678 BinarySmiOpInstr* shift_left) { 2676 BinarySmiOpInstr* shift_left) {
2679 const LocationSummary& locs = *shift_left->locs(); 2677 const LocationSummary& locs = *shift_left->locs();
2680 Register left = locs.in(0).reg(); 2678 Register left = locs.in(0).reg();
2681 Register result = locs.out(0).reg(); 2679 Register result = locs.out(0).reg();
(...skipping 28 matching lines...)
2710 // Right (locs.in(1)) is not constant. 2708 // Right (locs.in(1)) is not constant.
2711 Register right = locs.in(1).reg(); 2709 Register right = locs.in(1).reg();
2712 Range* right_range = shift_left->right()->definition()->range(); 2710 Range* right_range = shift_left->right()->definition()->range();
2713 if (shift_left->left()->BindsToConstant() && shift_left->can_overflow()) { 2711 if (shift_left->left()->BindsToConstant() && shift_left->can_overflow()) {
2714 // TODO(srdjan): Implement code below for is_truncating(). 2712 // TODO(srdjan): Implement code below for is_truncating().
2715 // If left is constant, we know the maximal allowed size for right. 2713 // If left is constant, we know the maximal allowed size for right.
2716 const Object& obj = shift_left->left()->BoundConstant(); 2714 const Object& obj = shift_left->left()->BoundConstant();
2717 if (obj.IsSmi()) { 2715 if (obj.IsSmi()) {
2718 const intptr_t left_int = Smi::Cast(obj).Value(); 2716 const intptr_t left_int = Smi::Cast(obj).Value();
2719 if (left_int == 0) { 2717 if (left_int == 0) {
2720 __ CompareImmediate(right, Immediate(0), PP); 2718 __ CompareImmediate(right, Immediate(0));
2721 __ j(NEGATIVE, deopt); 2719 __ j(NEGATIVE, deopt);
2722 return; 2720 return;
2723 } 2721 }
2724 const intptr_t max_right = kSmiBits - Utils::HighestBit(left_int); 2722 const intptr_t max_right = kSmiBits - Utils::HighestBit(left_int);
2725 const bool right_needs_check = 2723 const bool right_needs_check =
2726 !RangeUtils::IsWithin(right_range, 0, max_right - 1); 2724 !RangeUtils::IsWithin(right_range, 0, max_right - 1);
2727 if (right_needs_check) { 2725 if (right_needs_check) {
2728 __ CompareImmediate(right, 2726 __ CompareImmediate(right,
2729 Immediate(reinterpret_cast<int64_t>(Smi::New(max_right))), PP); 2727 Immediate(reinterpret_cast<int64_t>(Smi::New(max_right))));
2730 __ j(ABOVE_EQUAL, deopt); 2728 __ j(ABOVE_EQUAL, deopt);
2731 } 2729 }
2732 __ SmiUntag(right); 2730 __ SmiUntag(right);
2733 __ shlq(left, right); 2731 __ shlq(left, right);
2734 } 2732 }
2735 if (FLAG_throw_on_javascript_int_overflow) { 2733 if (FLAG_throw_on_javascript_int_overflow) {
2736 EmitJavascriptOverflowCheck(compiler, shift_left->range(), deopt, result); 2734 EmitJavascriptOverflowCheck(compiler, shift_left->range(), deopt, result);
2737 } 2735 }
2738 return; 2736 return;
2739 } 2737 }
2740 2738
2741 const bool right_needs_check = 2739 const bool right_needs_check =
2742 !RangeUtils::IsWithin(right_range, 0, (Smi::kBits - 1)); 2740 !RangeUtils::IsWithin(right_range, 0, (Smi::kBits - 1));
2743 ASSERT(right == RCX); // Count must be in RCX 2741 ASSERT(right == RCX); // Count must be in RCX
2744 if (!shift_left->can_overflow()) { 2742 if (!shift_left->can_overflow()) {
2745 if (right_needs_check) { 2743 if (right_needs_check) {
2746 const bool right_may_be_negative = 2744 const bool right_may_be_negative =
2747 (right_range == NULL) || !right_range->IsPositive(); 2745 (right_range == NULL) || !right_range->IsPositive();
2748 if (right_may_be_negative) { 2746 if (right_may_be_negative) {
2749 ASSERT(shift_left->CanDeoptimize()); 2747 ASSERT(shift_left->CanDeoptimize());
2750 __ CompareImmediate(right, Immediate(0), PP); 2748 __ CompareImmediate(right, Immediate(0));
2751 __ j(NEGATIVE, deopt); 2749 __ j(NEGATIVE, deopt);
2752 } 2750 }
2753 Label done, is_not_zero; 2751 Label done, is_not_zero;
2754 __ CompareImmediate(right, 2752 __ CompareImmediate(right,
2755 Immediate(reinterpret_cast<int64_t>(Smi::New(Smi::kBits))), PP); 2753 Immediate(reinterpret_cast<int64_t>(Smi::New(Smi::kBits))));
2756 __ j(BELOW, &is_not_zero, Assembler::kNearJump); 2754 __ j(BELOW, &is_not_zero, Assembler::kNearJump);
2757 __ xorq(left, left); 2755 __ xorq(left, left);
2758 __ jmp(&done, Assembler::kNearJump); 2756 __ jmp(&done, Assembler::kNearJump);
2759 __ Bind(&is_not_zero); 2757 __ Bind(&is_not_zero);
2760 __ SmiUntag(right); 2758 __ SmiUntag(right);
2761 __ shlq(left, right); 2759 __ shlq(left, right);
2762 __ Bind(&done); 2760 __ Bind(&done);
2763 } else { 2761 } else {
2764 __ SmiUntag(right); 2762 __ SmiUntag(right);
2765 __ shlq(left, right); 2763 __ shlq(left, right);
2766 } 2764 }
2767 } else { 2765 } else {
2768 if (right_needs_check) { 2766 if (right_needs_check) {
2769 ASSERT(shift_left->CanDeoptimize()); 2767 ASSERT(shift_left->CanDeoptimize());
2770 __ CompareImmediate(right, 2768 __ CompareImmediate(right,
2771 Immediate(reinterpret_cast<int64_t>(Smi::New(Smi::kBits))), PP); 2769 Immediate(reinterpret_cast<int64_t>(Smi::New(Smi::kBits))));
2772 __ j(ABOVE_EQUAL, deopt); 2770 __ j(ABOVE_EQUAL, deopt);
2773 } 2771 }
2774 // Left is not a constant. 2772 // Left is not a constant.
2775 Register temp = locs.temp(0).reg(); 2773 Register temp = locs.temp(0).reg();
2776 // Check if the count is too large to handle inline. 2774 // Check if the count is too large to handle inline.
2777 __ movq(temp, left); 2775 __ movq(temp, left);
2778 __ SmiUntag(right); 2776 __ SmiUntag(right);
2779 // Overflow test (preserve temp and right). 2777 // Overflow test (preserve temp and right).
2780 __ shlq(left, right); 2778 __ shlq(left, right);
2781 __ sarq(left, right); 2779 __ sarq(left, right);
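The shlq/sarq pair is a round-trip overflow probe: shifting back down recovers the original value only if no significant bits were lost off the top, and temp holds the original so the two can be compared. In plain C++ the test is:

  #include <cstdint>

  // True when (x << n) overflows a signed 64-bit value, detected the same
  // way as above: shift up, shift back, compare with the original.
  // (The cast through uint64_t avoids UB on shifting negative values.)
  bool ShlOverflows(int64_t x, int n) {
    int64_t shifted = static_cast<int64_t>(static_cast<uint64_t>(x) << n);
    return (shifted >> n) != x;
  }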
(...skipping 112 matching lines...)
2894 if (CanDeoptimize()) { 2892 if (CanDeoptimize()) {
2895 deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp); 2893 deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp);
2896 } 2894 }
2897 2895
2898 if (locs()->in(1).IsConstant()) { 2896 if (locs()->in(1).IsConstant()) {
2899 const Object& constant = locs()->in(1).constant(); 2897 const Object& constant = locs()->in(1).constant();
2900 ASSERT(constant.IsSmi()); 2898 ASSERT(constant.IsSmi());
2901 const int64_t imm = reinterpret_cast<int64_t>(constant.raw()); 2899 const int64_t imm = reinterpret_cast<int64_t>(constant.raw());
2902 switch (op_kind()) { 2900 switch (op_kind()) {
2903 case Token::kADD: { 2901 case Token::kADD: {
2904 __ AddImmediate(left, Immediate(imm), PP); 2902 __ AddImmediate(left, Immediate(imm));
2905 if (deopt != NULL) __ j(OVERFLOW, deopt); 2903 if (deopt != NULL) __ j(OVERFLOW, deopt);
2906 break; 2904 break;
2907 } 2905 }
2908 case Token::kSUB: { 2906 case Token::kSUB: {
2909 __ SubImmediate(left, Immediate(imm), PP); 2907 __ SubImmediate(left, Immediate(imm));
2910 if (deopt != NULL) __ j(OVERFLOW, deopt); 2908 if (deopt != NULL) __ j(OVERFLOW, deopt);
2911 break; 2909 break;
2912 } 2910 }
2913 case Token::kMUL: { 2911 case Token::kMUL: {
2914 // Keep left value tagged and untag right value. 2912 // Keep left value tagged and untag right value.
2915 const intptr_t value = Smi::Cast(constant).Value(); 2913 const intptr_t value = Smi::Cast(constant).Value();
2916 __ MulImmediate(left, Immediate(value), PP); 2914 __ MulImmediate(left, Immediate(value));
2917 if (deopt != NULL) __ j(OVERFLOW, deopt); 2915 if (deopt != NULL) __ j(OVERFLOW, deopt);
2918 break; 2916 break;
2919 } 2917 }
2920 case Token::kTRUNCDIV: { 2918 case Token::kTRUNCDIV: {
2921 const intptr_t value = Smi::Cast(constant).Value(); 2919 const intptr_t value = Smi::Cast(constant).Value();
2922 ASSERT(Utils::IsPowerOfTwo(Utils::Abs(value))); 2920 ASSERT(Utils::IsPowerOfTwo(Utils::Abs(value)));
2923 const intptr_t shift_count = 2921 const intptr_t shift_count =
2924 Utils::ShiftForPowerOfTwo(Utils::Abs(value)) + kSmiTagSize; 2922 Utils::ShiftForPowerOfTwo(Utils::Abs(value)) + kSmiTagSize;
2925 ASSERT(kSmiTagSize == 1); 2923 ASSERT(kSmiTagSize == 1);
2926 Register temp = locs()->temp(0).reg(); 2924 Register temp = locs()->temp(0).reg();
2927 __ movq(temp, left); 2925 __ movq(temp, left);
2928 __ sarq(temp, Immediate(63)); 2926 __ sarq(temp, Immediate(63));
2929 ASSERT(shift_count > 1); // 1, -1 case handled above. 2927 ASSERT(shift_count > 1); // 1, -1 case handled above.
2930 __ shrq(temp, Immediate(64 - shift_count)); 2928 __ shrq(temp, Immediate(64 - shift_count));
2931 __ addq(left, temp); 2929 __ addq(left, temp);
2932 ASSERT(shift_count > 0); 2930 ASSERT(shift_count > 0);
2933 __ sarq(left, Immediate(shift_count)); 2931 __ sarq(left, Immediate(shift_count));
2934 if (value < 0) { 2932 if (value < 0) {
2935 __ negq(left); 2933 __ negq(left);
2936 } 2934 }
2937 __ SmiTag(left); 2935 __ SmiTag(left);
2938 break; 2936 break;
2939 } 2937 }
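The sarq/shrq/addq sequence above is the standard bias trick for round-toward-zero division by a power of two: an arithmetic shift alone rounds negative dividends toward minus infinity, so 2^shift - 1 is added first, but only when the dividend is negative. Equivalent C++ (a sketch of the trick, ignoring the smi tag handling):

  #include <cstdint>

  // Truncating (round-toward-zero) division of x by 2^shift, 0 < shift < 63.
  int64_t TruncDivPow2(int64_t x, int shift) {
    int64_t bias = x >> 63;  // all ones if x < 0, else zero
    bias = static_cast<int64_t>(
        static_cast<uint64_t>(bias) >> (64 - shift));  // 2^shift - 1 if x < 0
    return (x + bias) >> shift;
  }
  // TruncDivPow2(-5, 1) == -2, matching -5 / 2 in C++, not -3.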
2940 case Token::kBIT_AND: { 2938 case Token::kBIT_AND: {
2941 // No overflow check. 2939 // No overflow check.
2942 __ AndImmediate(left, Immediate(imm), PP); 2940 __ AndImmediate(left, Immediate(imm));
2943 break; 2941 break;
2944 } 2942 }
2945 case Token::kBIT_OR: { 2943 case Token::kBIT_OR: {
2946 // No overflow check. 2944 // No overflow check.
2947 __ OrImmediate(left, Immediate(imm), PP); 2945 __ OrImmediate(left, Immediate(imm));
2948 break; 2946 break;
2949 } 2947 }
2950 case Token::kBIT_XOR: { 2948 case Token::kBIT_XOR: {
2951 // No overflow check. 2949 // No overflow check.
2952 __ XorImmediate(left, Immediate(imm), PP); 2950 __ XorImmediate(left, Immediate(imm));
2953 break; 2951 break;
2954 } 2952 }
2955 2953
2956 case Token::kSHR: { 2954 case Token::kSHR: {
2957 // sarq operation masks the count to 6 bits. 2955 // sarq operation masks the count to 6 bits.
2958 const intptr_t kCountLimit = 0x3F; 2956 const intptr_t kCountLimit = 0x3F;
2959 const intptr_t value = Smi::Cast(constant).Value(); 2957 const intptr_t value = Smi::Cast(constant).Value();
2960 __ sarq(left, Immediate( 2958 __ sarq(left, Immediate(
2961 Utils::Minimum(value + kSmiTagSize, kCountLimit))); 2959 Utils::Minimum(value + kSmiTagSize, kCountLimit)));
2962 __ SmiTag(left); 2960 __ SmiTag(left);
(...skipping 124 matching lines...)
3087 __ jmp(&done); 3085 __ jmp(&done);
3088 3086
3089 // Divide using 64bit idiv. 3087 // Divide using 64bit idiv.
3090 __ Bind(&not_32bit); 3088 __ Bind(&not_32bit);
3091 __ SmiUntag(left); 3089 __ SmiUntag(left);
3092 __ SmiUntag(right); 3090 __ SmiUntag(right);
3093 __ cqo(); // Sign extend RAX -> RDX:RAX. 3091 __ cqo(); // Sign extend RAX -> RDX:RAX.
3094 __ idivq(right); // RAX: quotient, RDX: remainder. 3092 __ idivq(right); // RAX: quotient, RDX: remainder.
3095 // Check the corner case of dividing 'MIN_SMI' by -1, in which 3093 // Check the corner case of dividing 'MIN_SMI' by -1, in which
3096 // case we cannot tag the result. 3094 // case we cannot tag the result.
3097 __ CompareImmediate(result, Immediate(0x4000000000000000), PP); 3095 __ CompareImmediate(result, Immediate(0x4000000000000000));
3098 __ j(EQUAL, deopt); 3096 __ j(EQUAL, deopt);
3099 __ Bind(&done); 3097 __ Bind(&done);
3100 __ SmiTag(result); 3098 __ SmiTag(result);
3101 break; 3099 break;
3102 } 3100 }
3103 case Token::kMOD: { 3101 case Token::kMOD: {
3104 Label not_32bit, div_done; 3102 Label not_32bit, div_done;
3105 3103
3106 Register temp = locs()->temp(0).reg(); 3104 Register temp = locs()->temp(0).reg();
3107 ASSERT(left == RDX); 3105 ASSERT(left == RDX);
(...skipping 59 matching lines...)
3167 } else { 3165 } else {
3168 // Right is negative. 3166 // Right is negative.
3169 __ subq(result, right); 3167 __ subq(result, right);
3170 } 3168 }
3171 __ Bind(&all_done); 3169 __ Bind(&all_done);
3172 __ SmiTag(result); 3170 __ SmiTag(result);
3173 break; 3171 break;
3174 } 3172 }
3175 case Token::kSHR: { 3173 case Token::kSHR: {
3176 if (CanDeoptimize()) { 3174 if (CanDeoptimize()) {
3177 __ CompareImmediate(right, Immediate(0), PP); 3175 __ CompareImmediate(right, Immediate(0));
3178 __ j(LESS, deopt); 3176 __ j(LESS, deopt);
3179 } 3177 }
3180 __ SmiUntag(right); 3178 __ SmiUntag(right);
3181 // sarq operation masks the count to 6 bits. 3179 // sarq operation masks the count to 6 bits.
3182 const intptr_t kCountLimit = 0x3F; 3180 const intptr_t kCountLimit = 0x3F;
3183 if ((right_range == NULL) || 3181 if ((right_range == NULL) ||
3184 !right_range->OnlyLessThanOrEqualTo(kCountLimit)) { 3182 !right_range->OnlyLessThanOrEqualTo(kCountLimit)) {
3185 __ CompareImmediate(right, Immediate(kCountLimit), PP); 3183 __ CompareImmediate(right, Immediate(kCountLimit));
3186 Label count_ok; 3184 Label count_ok;
3187 __ j(LESS, &count_ok, Assembler::kNearJump); 3185 __ j(LESS, &count_ok, Assembler::kNearJump);
3188 __ LoadImmediate(right, Immediate(kCountLimit), PP); 3186 __ LoadImmediate(right, Immediate(kCountLimit));
3189 __ Bind(&count_ok); 3187 __ Bind(&count_ok);
3190 } 3188 }
3191 ASSERT(right == RCX); // Count must be in RCX 3189 ASSERT(right == RCX); // Count must be in RCX
3192 __ SmiUntag(left); 3190 __ SmiUntag(left);
3193 __ sarq(left, right); 3191 __ sarq(left, right);
3194 __ SmiTag(left); 3192 __ SmiTag(left);
3195 break; 3193 break;
3196 } 3194 }
3197 case Token::kDIV: { 3195 case Token::kDIV: {
3198 // Dispatches to 'Double./'. 3196 // Dispatches to 'Double./'.
(...skipping 529 matching lines...)
3728 return summary; 3726 return summary;
3729 } 3727 }
3730 3728
3731 3729
3732 void Float32x4ConstructorInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 3730 void Float32x4ConstructorInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
3733 XmmRegister v0 = locs()->in(0).fpu_reg(); 3731 XmmRegister v0 = locs()->in(0).fpu_reg();
3734 XmmRegister v1 = locs()->in(1).fpu_reg(); 3732 XmmRegister v1 = locs()->in(1).fpu_reg();
3735 XmmRegister v2 = locs()->in(2).fpu_reg(); 3733 XmmRegister v2 = locs()->in(2).fpu_reg();
3736 XmmRegister v3 = locs()->in(3).fpu_reg(); 3734 XmmRegister v3 = locs()->in(3).fpu_reg();
3737 ASSERT(v0 == locs()->out(0).fpu_reg()); 3735 ASSERT(v0 == locs()->out(0).fpu_reg());
3738 __ AddImmediate(RSP, Immediate(-16), PP); 3736 __ AddImmediate(RSP, Immediate(-16));
3739 __ cvtsd2ss(v0, v0); 3737 __ cvtsd2ss(v0, v0);
3740 __ movss(Address(RSP, 0), v0); 3738 __ movss(Address(RSP, 0), v0);
3741 __ movsd(v0, v1); 3739 __ movsd(v0, v1);
3742 __ cvtsd2ss(v0, v0); 3740 __ cvtsd2ss(v0, v0);
3743 __ movss(Address(RSP, 4), v0); 3741 __ movss(Address(RSP, 4), v0);
3744 __ movsd(v0, v2); 3742 __ movsd(v0, v2);
3745 __ cvtsd2ss(v0, v0); 3743 __ cvtsd2ss(v0, v0);
3746 __ movss(Address(RSP, 8), v0); 3744 __ movss(Address(RSP, 8), v0);
3747 __ movsd(v0, v3); 3745 __ movsd(v0, v3);
3748 __ cvtsd2ss(v0, v0); 3746 __ cvtsd2ss(v0, v0);
3749 __ movss(Address(RSP, 12), v0); 3747 __ movss(Address(RSP, 12), v0);
3750 __ movups(v0, Address(RSP, 0)); 3748 __ movups(v0, Address(RSP, 0));
3751 __ AddImmediate(RSP, Immediate(16), PP); 3749 __ AddImmediate(RSP, Immediate(16));
3752 } 3750 }
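Rather than shuffling lanes within registers, the constructor above stages each cvtsd2ss result through a 16-byte stack buffer at offsets 0/4/8/12 and reloads the whole vector with one movups. The same dataflow in scalar C++ (a model of the staging, not the SIMD path itself):

  #include <cstring>

  struct Simd128 { float lanes[4]; };

  Simd128 PackFloat32x4(double v0, double v1, double v2, double v3) {
    unsigned char buf[16];  // stands in for the 16 bytes reserved on RSP
    float f;
    f = static_cast<float>(v0); std::memcpy(buf + 0,  &f, 4);
    f = static_cast<float>(v1); std::memcpy(buf + 4,  &f, 4);
    f = static_cast<float>(v2); std::memcpy(buf + 8,  &f, 4);
    f = static_cast<float>(v3); std::memcpy(buf + 12, &f, 4);
    Simd128 out;
    std::memcpy(&out, buf, 16);  // movups(v0, Address(RSP, 0))
    return out;
  }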
3753 3751
3754 3752
3755 LocationSummary* Float32x4ZeroInstr::MakeLocationSummary(Zone* zone, 3753 LocationSummary* Float32x4ZeroInstr::MakeLocationSummary(Zone* zone,
3756 bool opt) const { 3754 bool opt) const {
3757 const intptr_t kNumInputs = 0; 3755 const intptr_t kNumInputs = 0;
3758 const intptr_t kNumTemps = 0; 3756 const intptr_t kNumTemps = 0;
3759 LocationSummary* summary = new(zone) LocationSummary( 3757 LocationSummary* summary = new(zone) LocationSummary(
3760 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 3758 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
3761 summary->set_out(0, Location::RequiresFpuRegister()); 3759 summary->set_out(0, Location::RequiresFpuRegister());
(...skipping 233 matching lines...)
3995 3993
3996 void Float32x4WithInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 3994 void Float32x4WithInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
3997 XmmRegister replacement = locs()->in(0).fpu_reg(); 3995 XmmRegister replacement = locs()->in(0).fpu_reg();
3998 XmmRegister value = locs()->in(1).fpu_reg(); 3996 XmmRegister value = locs()->in(1).fpu_reg();
3999 3997
4000 ASSERT(locs()->out(0).fpu_reg() == replacement); 3998 ASSERT(locs()->out(0).fpu_reg() == replacement);
4001 3999
4002 switch (op_kind()) { 4000 switch (op_kind()) {
4003 case MethodRecognizer::kFloat32x4WithX: 4001 case MethodRecognizer::kFloat32x4WithX:
4004 __ cvtsd2ss(replacement, replacement); 4002 __ cvtsd2ss(replacement, replacement);
4005 __ AddImmediate(RSP, Immediate(-16), PP); 4003 __ AddImmediate(RSP, Immediate(-16));
4006 // Move value to stack. 4004 // Move value to stack.
4007 __ movups(Address(RSP, 0), value); 4005 __ movups(Address(RSP, 0), value);
4008 // Write over X value. 4006 // Write over X value.
4009 __ movss(Address(RSP, 0), replacement); 4007 __ movss(Address(RSP, 0), replacement);
4010 // Move updated value into output register. 4008 // Move updated value into output register.
4011 __ movups(replacement, Address(RSP, 0)); 4009 __ movups(replacement, Address(RSP, 0));
4012 __ AddImmediate(RSP, Immediate(16), PP); 4010 __ AddImmediate(RSP, Immediate(16));
4013 break; 4011 break;
4014 case MethodRecognizer::kFloat32x4WithY: 4012 case MethodRecognizer::kFloat32x4WithY:
4015 __ cvtsd2ss(replacement, replacement); 4013 __ cvtsd2ss(replacement, replacement);
4016 __ AddImmediate(RSP, Immediate(-16), PP); 4014 __ AddImmediate(RSP, Immediate(-16));
4017 // Move value to stack. 4015 // Move value to stack.
4018 __ movups(Address(RSP, 0), value); 4016 __ movups(Address(RSP, 0), value);
4019 // Write over Y value. 4017 // Write over Y value.
4020 __ movss(Address(RSP, 4), replacement); 4018 __ movss(Address(RSP, 4), replacement);
4021 // Move updated value into output register. 4019 // Move updated value into output register.
4022 __ movups(replacement, Address(RSP, 0)); 4020 __ movups(replacement, Address(RSP, 0));
4023 __ AddImmediate(RSP, Immediate(16), PP); 4021 __ AddImmediate(RSP, Immediate(16));
4024 break; 4022 break;
4025 case MethodRecognizer::kFloat32x4WithZ: 4023 case MethodRecognizer::kFloat32x4WithZ:
4026 __ cvtsd2ss(replacement, replacement); 4024 __ cvtsd2ss(replacement, replacement);
4027 __ AddImmediate(RSP, Immediate(-16), PP); 4025 __ AddImmediate(RSP, Immediate(-16));
4028 // Move value to stack. 4026 // Move value to stack.
4029 __ movups(Address(RSP, 0), value); 4027 __ movups(Address(RSP, 0), value);
4030 // Write over Z value. 4028 // Write over Z value.
4031 __ movss(Address(RSP, 8), replacement); 4029 __ movss(Address(RSP, 8), replacement);
4032 // Move updated value into output register. 4030 // Move updated value into output register.
4033 __ movups(replacement, Address(RSP, 0)); 4031 __ movups(replacement, Address(RSP, 0));
4034 __ AddImmediate(RSP, Immediate(16), PP); 4032 __ AddImmediate(RSP, Immediate(16));
4035 break; 4033 break;
4036 case MethodRecognizer::kFloat32x4WithW: 4034 case MethodRecognizer::kFloat32x4WithW:
4037 __ cvtsd2ss(replacement, replacement); 4035 __ cvtsd2ss(replacement, replacement);
4038 __ AddImmediate(RSP, Immediate(-16), PP); 4036 __ AddImmediate(RSP, Immediate(-16));
4039 // Move value to stack. 4037 // Move value to stack.
4040 __ movups(Address(RSP, 0), value); 4038 __ movups(Address(RSP, 0), value);
4041 // Write over W value. 4039 // Write over W value.
4042 __ movss(Address(RSP, 12), replacement); 4040 __ movss(Address(RSP, 12), replacement);
4043 // Move updated value into output register. 4041 // Move updated value into output register.
4044 __ movups(replacement, Address(RSP, 0)); 4042 __ movups(replacement, Address(RSP, 0));
4045 __ AddImmediate(RSP, Immediate(16), PP); 4043 __ AddImmediate(RSP, Immediate(16));
4046 break; 4044 break;
4047 default: UNREACHABLE(); 4045 default: UNREACHABLE();
4048 } 4046 }
4049 } 4047 }
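Each WithX/Y/Z/W case above is the same move with a different byte offset: spill the vector, overwrite one 4-byte lane with the narrowed replacement, reload. Sketched generically (illustrative only):

  #include <cstring>

  struct Simd128 { float lanes[4]; };

  // lane is 0..3, matching the stack offsets 0, 4, 8 and 12 above.
  Simd128 WithLane(Simd128 v, int lane, double replacement) {
    float f = static_cast<float>(replacement);  // cvtsd2ss
    std::memcpy(reinterpret_cast<unsigned char*>(&v) + 4 * lane, &f, 4);
    return v;
  }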
4050 4048
4051 4049
4052 LocationSummary* Float32x4ToInt32x4Instr::MakeLocationSummary(Zone* zone, 4050 LocationSummary* Float32x4ToInt32x4Instr::MakeLocationSummary(Zone* zone,
4053 bool opt) const { 4051 bool opt) const {
4054 const intptr_t kNumInputs = 1; 4052 const intptr_t kNumInputs = 1;
4055 const intptr_t kNumTemps = 0; 4053 const intptr_t kNumTemps = 0;
(...skipping 244 matching lines...)
4300 return summary; 4298 return summary;
4301 } 4299 }
4302 4300
4303 4301
4304 void Int32x4ConstructorInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 4302 void Int32x4ConstructorInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
4305 Register v0 = locs()->in(0).reg(); 4303 Register v0 = locs()->in(0).reg();
4306 Register v1 = locs()->in(1).reg(); 4304 Register v1 = locs()->in(1).reg();
4307 Register v2 = locs()->in(2).reg(); 4305 Register v2 = locs()->in(2).reg();
4308 Register v3 = locs()->in(3).reg(); 4306 Register v3 = locs()->in(3).reg();
4309 XmmRegister result = locs()->out(0).fpu_reg(); 4307 XmmRegister result = locs()->out(0).fpu_reg();
4310 __ AddImmediate(RSP, Immediate(-4 * kInt32Size), PP); 4308 __ AddImmediate(RSP, Immediate(-4 * kInt32Size));
4311 __ movl(Address(RSP, 0 * kInt32Size), v0); 4309 __ movl(Address(RSP, 0 * kInt32Size), v0);
4312 __ movl(Address(RSP, 1 * kInt32Size), v1); 4310 __ movl(Address(RSP, 1 * kInt32Size), v1);
4313 __ movl(Address(RSP, 2 * kInt32Size), v2); 4311 __ movl(Address(RSP, 2 * kInt32Size), v2);
4314 __ movl(Address(RSP, 3 * kInt32Size), v3); 4312 __ movl(Address(RSP, 3 * kInt32Size), v3);
4315 __ movups(result, Address(RSP, 0)); 4313 __ movups(result, Address(RSP, 0));
4316 __ AddImmediate(RSP, Immediate(4 * kInt32Size), PP); 4314 __ AddImmediate(RSP, Immediate(4 * kInt32Size));
4317 } 4315 }
4318 4316
4319 4317
4320 LocationSummary* Int32x4BoolConstructorInstr::MakeLocationSummary( 4318 LocationSummary* Int32x4BoolConstructorInstr::MakeLocationSummary(
4321 Zone* zone, bool opt) const { 4319 Zone* zone, bool opt) const {
4322 const intptr_t kNumInputs = 4; 4320 const intptr_t kNumInputs = 4;
4323 const intptr_t kNumTemps = 1; 4321 const intptr_t kNumTemps = 1;
4324 LocationSummary* summary = new(zone) LocationSummary( 4322 LocationSummary* summary = new(zone) LocationSummary(
4325 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 4323 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
4326 summary->set_in(0, Location::RequiresRegister()); 4324 summary->set_in(0, Location::RequiresRegister());
(...skipping 10 matching lines...)
4337 Register v0 = locs()->in(0).reg(); 4335 Register v0 = locs()->in(0).reg();
4338 Register v1 = locs()->in(1).reg(); 4336 Register v1 = locs()->in(1).reg();
4339 Register v2 = locs()->in(2).reg(); 4337 Register v2 = locs()->in(2).reg();
4340 Register v3 = locs()->in(3).reg(); 4338 Register v3 = locs()->in(3).reg();
4341 Register temp = locs()->temp(0).reg(); 4339 Register temp = locs()->temp(0).reg();
4342 XmmRegister result = locs()->out(0).fpu_reg(); 4340 XmmRegister result = locs()->out(0).fpu_reg();
4343 Label x_false, x_done; 4341 Label x_false, x_done;
4344 Label y_false, y_done; 4342 Label y_false, y_done;
4345 Label z_false, z_done; 4343 Label z_false, z_done;
4346 Label w_false, w_done; 4344 Label w_false, w_done;
4347 __ AddImmediate(RSP, Immediate(-16), PP); 4345 __ AddImmediate(RSP, Immediate(-16));
4348 4346
4349 __ CompareObject(v0, Bool::True(), PP); 4347 __ CompareObject(v0, Bool::True());
4350 __ j(NOT_EQUAL, &x_false); 4348 __ j(NOT_EQUAL, &x_false);
4351 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); 4349 __ LoadImmediate(temp, Immediate(0xFFFFFFFF));
4352 __ jmp(&x_done); 4350 __ jmp(&x_done);
4353 __ Bind(&x_false); 4351 __ Bind(&x_false);
4354 __ LoadImmediate(temp, Immediate(0x0), PP); 4352 __ LoadImmediate(temp, Immediate(0x0));
4355 __ Bind(&x_done); 4353 __ Bind(&x_done);
4356 __ movl(Address(RSP, 0), temp); 4354 __ movl(Address(RSP, 0), temp);
4357 4355
4358 __ CompareObject(v1, Bool::True(), PP); 4356 __ CompareObject(v1, Bool::True());
4359 __ j(NOT_EQUAL, &y_false); 4357 __ j(NOT_EQUAL, &y_false);
4360 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); 4358 __ LoadImmediate(temp, Immediate(0xFFFFFFFF));
4361 __ jmp(&y_done); 4359 __ jmp(&y_done);
4362 __ Bind(&y_false); 4360 __ Bind(&y_false);
4363 __ LoadImmediate(temp, Immediate(0x0), PP); 4361 __ LoadImmediate(temp, Immediate(0x0));
4364 __ Bind(&y_done); 4362 __ Bind(&y_done);
4365 __ movl(Address(RSP, 4), temp); 4363 __ movl(Address(RSP, 4), temp);
4366 4364
4367 __ CompareObject(v2, Bool::True(), PP); 4365 __ CompareObject(v2, Bool::True());
4368 __ j(NOT_EQUAL, &z_false); 4366 __ j(NOT_EQUAL, &z_false);
4369 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); 4367 __ LoadImmediate(temp, Immediate(0xFFFFFFFF));
4370 __ jmp(&z_done); 4368 __ jmp(&z_done);
4371 __ Bind(&z_false); 4369 __ Bind(&z_false);
4372 __ LoadImmediate(temp, Immediate(0x0), PP); 4370 __ LoadImmediate(temp, Immediate(0x0));
4373 __ Bind(&z_done); 4371 __ Bind(&z_done);
4374 __ movl(Address(RSP, 8), temp); 4372 __ movl(Address(RSP, 8), temp);
4375 4373
4376 __ CompareObject(v3, Bool::True(), PP); 4374 __ CompareObject(v3, Bool::True());
4377 __ j(NOT_EQUAL, &w_false); 4375 __ j(NOT_EQUAL, &w_false);
4378 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); 4376 __ LoadImmediate(temp, Immediate(0xFFFFFFFF));
4379 __ jmp(&w_done); 4377 __ jmp(&w_done);
4380 __ Bind(&w_false); 4378 __ Bind(&w_false);
4381 __ LoadImmediate(temp, Immediate(0x0), PP); 4379 __ LoadImmediate(temp, Immediate(0x0));
4382 __ Bind(&w_done); 4380 __ Bind(&w_done);
4383 __ movl(Address(RSP, 12), temp); 4381 __ movl(Address(RSP, 12), temp);
4384 4382
4385 __ movups(result, Address(RSP, 0)); 4383 __ movups(result, Address(RSP, 0));
4386 __ AddImmediate(RSP, Immediate(16), PP); 4384 __ AddImmediate(RSP, Immediate(16));
4387 } 4385 }
4388 4386
4389 4387
4390 LocationSummary* Int32x4GetFlagInstr::MakeLocationSummary(Zone* zone, 4388 LocationSummary* Int32x4GetFlagInstr::MakeLocationSummary(Zone* zone,
4391 bool opt) const { 4389 bool opt) const {
4392 const intptr_t kNumInputs = 1; 4390 const intptr_t kNumInputs = 1;
4393 const intptr_t kNumTemps = 0; 4391 const intptr_t kNumTemps = 0;
4394 LocationSummary* summary = new(zone) LocationSummary( 4392 LocationSummary* summary = new(zone) LocationSummary(
4395 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 4393 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
4396 summary->set_in(0, Location::RequiresFpuRegister()); 4394 summary->set_in(0, Location::RequiresFpuRegister());
4397 summary->set_out(0, Location::RequiresRegister()); 4395 summary->set_out(0, Location::RequiresRegister());
4398 return summary; 4396 return summary;
4399 } 4397 }
4400 4398
4401 4399
4402 void Int32x4GetFlagInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 4400 void Int32x4GetFlagInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
4403 XmmRegister value = locs()->in(0).fpu_reg(); 4401 XmmRegister value = locs()->in(0).fpu_reg();
4404 Register result = locs()->out(0).reg(); 4402 Register result = locs()->out(0).reg();
4405 Label done; 4403 Label done;
4406 Label non_zero; 4404 Label non_zero;
4407 __ AddImmediate(RSP, Immediate(-16), PP); 4405 __ AddImmediate(RSP, Immediate(-16));
4408 // Move value to stack. 4406 // Move value to stack.
4409 __ movups(Address(RSP, 0), value); 4407 __ movups(Address(RSP, 0), value);
4410 switch (op_kind()) { 4408 switch (op_kind()) {
4411 case MethodRecognizer::kInt32x4GetFlagX: 4409 case MethodRecognizer::kInt32x4GetFlagX:
4412 __ movl(result, Address(RSP, 0)); 4410 __ movl(result, Address(RSP, 0));
4413 break; 4411 break;
4414 case MethodRecognizer::kInt32x4GetFlagY: 4412 case MethodRecognizer::kInt32x4GetFlagY:
4415 __ movl(result, Address(RSP, 4)); 4413 __ movl(result, Address(RSP, 4));
4416 break; 4414 break;
4417 case MethodRecognizer::kInt32x4GetFlagZ: 4415 case MethodRecognizer::kInt32x4GetFlagZ:
4418 __ movl(result, Address(RSP, 8)); 4416 __ movl(result, Address(RSP, 8));
4419 break; 4417 break;
4420 case MethodRecognizer::kInt32x4GetFlagW: 4418 case MethodRecognizer::kInt32x4GetFlagW:
4421 __ movl(result, Address(RSP, 12)); 4419 __ movl(result, Address(RSP, 12));
4422 break; 4420 break;
4423 default: UNREACHABLE(); 4421 default: UNREACHABLE();
4424 } 4422 }
4425 __ AddImmediate(RSP, Immediate(16), PP); 4423 __ AddImmediate(RSP, Immediate(16));
4426 __ testl(result, result); 4424 __ testl(result, result);
4427 __ j(NOT_ZERO, &non_zero, Assembler::kNearJump); 4425 __ j(NOT_ZERO, &non_zero, Assembler::kNearJump);
4428 __ LoadObject(result, Bool::False(), PP); 4426 __ LoadObject(result, Bool::False());
4429 __ jmp(&done); 4427 __ jmp(&done);
4430 __ Bind(&non_zero); 4428 __ Bind(&non_zero);
4431 __ LoadObject(result, Bool::True(), PP); 4429 __ LoadObject(result, Bool::True());
4432 __ Bind(&done); 4430 __ Bind(&done);
4433 } 4431 }
4434 4432
4435 4433
4436 LocationSummary* Int32x4SelectInstr::MakeLocationSummary(Zone* zone, 4434 LocationSummary* Int32x4SelectInstr::MakeLocationSummary(Zone* zone,
4437 bool opt) const { 4435 bool opt) const {
4438 const intptr_t kNumInputs = 3; 4436 const intptr_t kNumInputs = 3;
4439 const intptr_t kNumTemps = 1; 4437 const intptr_t kNumTemps = 1;
4440 LocationSummary* summary = new(zone) LocationSummary( 4438 LocationSummary* summary = new(zone) LocationSummary(
4441 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 4439 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
(...skipping 38 matching lines...)
4480 summary->set_out(0, Location::SameAsFirstInput()); 4478 summary->set_out(0, Location::SameAsFirstInput());
4481 return summary; 4479 return summary;
4482 } 4480 }
4483 4481
4484 4482
4485 void Int32x4SetFlagInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 4483 void Int32x4SetFlagInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
4486 XmmRegister mask = locs()->in(0).fpu_reg(); 4484 XmmRegister mask = locs()->in(0).fpu_reg();
4487 Register flag = locs()->in(1).reg(); 4485 Register flag = locs()->in(1).reg();
4488 Register temp = locs()->temp(0).reg(); 4486 Register temp = locs()->temp(0).reg();
4489 ASSERT(mask == locs()->out(0).fpu_reg()); 4487 ASSERT(mask == locs()->out(0).fpu_reg());
4490 __ AddImmediate(RSP, Immediate(-16), PP); 4488 __ AddImmediate(RSP, Immediate(-16));
4491 // Copy mask to stack. 4489 // Copy mask to stack.
4492 __ movups(Address(RSP, 0), mask); 4490 __ movups(Address(RSP, 0), mask);
4493 Label falsePath, exitPath; 4491 Label falsePath, exitPath;
4494 __ CompareObject(flag, Bool::True(), PP); 4492 __ CompareObject(flag, Bool::True());
4495 __ j(NOT_EQUAL, &falsePath); 4493 __ j(NOT_EQUAL, &falsePath);
4496 switch (op_kind()) { 4494 switch (op_kind()) {
4497 case MethodRecognizer::kInt32x4WithFlagX: 4495 case MethodRecognizer::kInt32x4WithFlagX:
4498 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); 4496 __ LoadImmediate(temp, Immediate(0xFFFFFFFF));
4499 __ movl(Address(RSP, 0), temp); 4497 __ movl(Address(RSP, 0), temp);
4500 __ jmp(&exitPath); 4498 __ jmp(&exitPath);
4501 __ Bind(&falsePath); 4499 __ Bind(&falsePath);
4502 __ LoadImmediate(temp, Immediate(0x0), PP); 4500 __ LoadImmediate(temp, Immediate(0x0));
4503 __ movl(Address(RSP, 0), temp); 4501 __ movl(Address(RSP, 0), temp);
4504 break; 4502 break;
4505 case MethodRecognizer::kInt32x4WithFlagY: 4503 case MethodRecognizer::kInt32x4WithFlagY:
4506 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); 4504 __ LoadImmediate(temp, Immediate(0xFFFFFFFF));
4507 __ movl(Address(RSP, 4), temp); 4505 __ movl(Address(RSP, 4), temp);
4508 __ jmp(&exitPath); 4506 __ jmp(&exitPath);
4509 __ Bind(&falsePath); 4507 __ Bind(&falsePath);
4510 __ LoadImmediate(temp, Immediate(0x0), PP); 4508 __ LoadImmediate(temp, Immediate(0x0));
4511 __ movl(Address(RSP, 4), temp); 4509 __ movl(Address(RSP, 4), temp);
4512 break; 4510 break;
4513 case MethodRecognizer::kInt32x4WithFlagZ: 4511 case MethodRecognizer::kInt32x4WithFlagZ:
4514 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); 4512 __ LoadImmediate(temp, Immediate(0xFFFFFFFF));
4515 __ movl(Address(RSP, 8), temp); 4513 __ movl(Address(RSP, 8), temp);
4516 __ jmp(&exitPath); 4514 __ jmp(&exitPath);
4517 __ Bind(&falsePath); 4515 __ Bind(&falsePath);
4518 __ LoadImmediate(temp, Immediate(0x0), PP); 4516 __ LoadImmediate(temp, Immediate(0x0));
4519 __ movl(Address(RSP, 8), temp); 4517 __ movl(Address(RSP, 8), temp);
4520 break; 4518 break;
4521 case MethodRecognizer::kInt32x4WithFlagW: 4519 case MethodRecognizer::kInt32x4WithFlagW:
4522 __ LoadImmediate(temp, Immediate(0xFFFFFFFF), PP); 4520 __ LoadImmediate(temp, Immediate(0xFFFFFFFF));
4523 __ movl(Address(RSP, 12), temp); 4521 __ movl(Address(RSP, 12), temp);
4524 __ jmp(&exitPath); 4522 __ jmp(&exitPath);
4525 __ Bind(&falsePath); 4523 __ Bind(&falsePath);
4526 __ LoadImmediate(temp, Immediate(0x0), PP); 4524 __ LoadImmediate(temp, Immediate(0x0));
4527 __ movl(Address(RSP, 12), temp); 4525 __ movl(Address(RSP, 12), temp);
4528 break; 4526 break;
4529 default: UNREACHABLE(); 4527 default: UNREACHABLE();
4530 } 4528 }
4531 __ Bind(&exitPath); 4529 __ Bind(&exitPath);
4532 // Copy mask back to register. 4530 // Copy mask back to register.
4533 __ movups(mask, Address(RSP, 0)); 4531 __ movups(mask, Address(RSP, 0));
4534 __ AddImmediate(RSP, Immediate(16), PP); 4532 __ AddImmediate(RSP, Immediate(16));
4535 } 4533 }
4536 4534
4537 4535
4538 LocationSummary* Int32x4ToFloat32x4Instr::MakeLocationSummary(Zone* zone, 4536 LocationSummary* Int32x4ToFloat32x4Instr::MakeLocationSummary(Zone* zone,
4539 bool opt) const { 4537 bool opt) const {
4540 const intptr_t kNumInputs = 1; 4538 const intptr_t kNumInputs = 1;
4541 const intptr_t kNumTemps = 0; 4539 const intptr_t kNumTemps = 0;
4542 LocationSummary* summary = new(zone) LocationSummary( 4540 LocationSummary* summary = new(zone) LocationSummary(
4543 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 4541 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
4544 summary->set_in(0, Location::RequiresFpuRegister()); 4542 summary->set_in(0, Location::RequiresFpuRegister());
(...skipping 154 matching lines...)
4699 __ negq(value); 4697 __ negq(value);
4700 __ j(OVERFLOW, deopt); 4698 __ j(OVERFLOW, deopt);
4701 if (FLAG_throw_on_javascript_int_overflow) { 4699 if (FLAG_throw_on_javascript_int_overflow) {
4702 EmitJavascriptOverflowCheck(compiler, range(), deopt, value); 4700 EmitJavascriptOverflowCheck(compiler, range(), deopt, value);
4703 } 4701 }
4704 break; 4702 break;
4705 } 4703 }
4706 case Token::kBIT_NOT: 4704 case Token::kBIT_NOT:
4707 __ notq(value); 4705 __ notq(value);
4708 // Remove inverted smi-tag. 4706 // Remove inverted smi-tag.
4709 __ AndImmediate(value, Immediate(~kSmiTagMask), PP); 4707 __ AndImmediate(value, Immediate(~kSmiTagMask));
4710 break; 4708 break;
4711 default: 4709 default:
4712 UNREACHABLE(); 4710 UNREACHABLE();
4713 } 4711 }
4714 } 4712 }
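The notq/AndImmediate pair works because smis carry a zero low tag bit: for a tagged value t == v << 1, ~t equals ((~v) << 1) | 1, so clearing the tag bit yields exactly the tagged ~v with no untag/retag round trip. A quick check:

  #include <cassert>
  #include <cstdint>

  const intptr_t kSmiTagMask = 1;  // low bit is the tag; a smi is value << 1

  intptr_t SmiBitNot(intptr_t tagged) {
    return ~tagged & ~kSmiTagMask;  // == (~value) << 1
  }

  int main() {
    for (intptr_t v = -100; v <= 100; ++v) {
      assert(SmiBitNot(v * 2) == (~v) * 2);  // tagging is just *2
    }
    return 0;
  }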
4715 4713
4716 4714
4717 LocationSummary* UnaryDoubleOpInstr::MakeLocationSummary(Zone* zone, 4715 LocationSummary* UnaryDoubleOpInstr::MakeLocationSummary(Zone* zone,
4718 bool opt) const { 4716 bool opt) const {
4719 const intptr_t kNumInputs = 1; 4717 const intptr_t kNumInputs = 1;
(...skipping 56 matching lines...)
4776 const Condition double_condition = 4774 const Condition double_condition =
4777 is_min ? TokenKindToDoubleCondition(Token::kLT) 4775 is_min ? TokenKindToDoubleCondition(Token::kLT)
4778 : TokenKindToDoubleCondition(Token::kGT); 4776 : TokenKindToDoubleCondition(Token::kGT);
4779 ASSERT(left == result); 4777 ASSERT(left == result);
4780 __ j(double_condition, &done, Assembler::kNearJump); 4778 __ j(double_condition, &done, Assembler::kNearJump);
4781 __ movsd(result, right); 4779 __ movsd(result, right);
4782 __ jmp(&done, Assembler::kNearJump); 4780 __ jmp(&done, Assembler::kNearJump);
4783 4781
4784 __ Bind(&returns_nan); 4782 __ Bind(&returns_nan);
4785 static double kNaN = NAN; 4783 static double kNaN = NAN;
4786 __ LoadImmediate(temp, Immediate(reinterpret_cast<intptr_t>(&kNaN)), PP); 4784 __ LoadImmediate(temp, Immediate(reinterpret_cast<intptr_t>(&kNaN)));
4787 __ movsd(result, Address(temp, 0)); 4785 __ movsd(result, Address(temp, 0));
4788 __ jmp(&done, Assembler::kNearJump); 4786 __ jmp(&done, Assembler::kNearJump);
4789 4787
4790 __ Bind(&are_equal); 4788 __ Bind(&are_equal);
4791 Label left_is_negative; 4789 Label left_is_negative;
4792 // Check for negative zero: -0.0 is equal 0.0 but min or max must return 4790 // Check for negative zero: -0.0 is equal 0.0 but min or max must return
4793 // -0.0 or 0.0 respectively. 4791 // -0.0 or 0.0 respectively.
4794 // Check for negative left value (get the sign bit): 4792 // Check for negative left value (get the sign bit):
4795 // - min -> left is negative ? left : right. 4793 // - min -> left is negative ? left : right.
4796 // - max -> left is negative ? right : left 4794 // - max -> left is negative ? right : left
(...skipping 280 matching lines...)
5077 5075
5078 XmmRegister base = locs->in(0).fpu_reg(); 5076 XmmRegister base = locs->in(0).fpu_reg();
5079 XmmRegister exp = locs->in(1).fpu_reg(); 5077 XmmRegister exp = locs->in(1).fpu_reg();
5080 XmmRegister result = locs->out(0).fpu_reg(); 5078 XmmRegister result = locs->out(0).fpu_reg();
5081 Register temp = 5079 Register temp =
5082 locs->temp(InvokeMathCFunctionInstr::kObjectTempIndex).reg(); 5080 locs->temp(InvokeMathCFunctionInstr::kObjectTempIndex).reg();
5083 XmmRegister zero_temp = 5081 XmmRegister zero_temp =
5084 locs->temp(InvokeMathCFunctionInstr::kDoubleTempIndex).fpu_reg(); 5082 locs->temp(InvokeMathCFunctionInstr::kDoubleTempIndex).fpu_reg();
5085 5083
5086 __ xorps(zero_temp, zero_temp); 5084 __ xorps(zero_temp, zero_temp);
5087 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(1)), PP); 5085 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(1)));
5088 __ movsd(result, FieldAddress(temp, Double::value_offset())); 5086 __ movsd(result, FieldAddress(temp, Double::value_offset()));
5089 5087
5090 Label check_base, skip_call; 5088 Label check_base, skip_call;
5091 // exponent == 0.0 -> return 1.0; 5089 // exponent == 0.0 -> return 1.0;
5092 __ comisd(exp, zero_temp); 5090 __ comisd(exp, zero_temp);
5093 __ j(PARITY_EVEN, &check_base, Assembler::kNearJump); 5091 __ j(PARITY_EVEN, &check_base, Assembler::kNearJump);
5094 __ j(EQUAL, &skip_call); // 'result' is 1.0. 5092 __ j(EQUAL, &skip_call); // 'result' is 1.0.
5095 5093
5096 // exponent == 1.0 ? 5094 // exponent == 1.0 ?
5097 __ comisd(exp, result); 5095 __ comisd(exp, result);
5098 Label return_base; 5096 Label return_base;
5099 __ j(EQUAL, &return_base, Assembler::kNearJump); 5097 __ j(EQUAL, &return_base, Assembler::kNearJump);
5100 5098
5101 // exponent == 2.0 ? 5099 // exponent == 2.0 ?
5102 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(2.0)), PP); 5100 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(2.0)));
5103 __ movsd(XMM0, FieldAddress(temp, Double::value_offset())); 5101 __ movsd(XMM0, FieldAddress(temp, Double::value_offset()));
5104 __ comisd(exp, XMM0); 5102 __ comisd(exp, XMM0);
5105 Label return_base_times_2; 5103 Label return_base_times_2;
5106 __ j(EQUAL, &return_base_times_2, Assembler::kNearJump); 5104 __ j(EQUAL, &return_base_times_2, Assembler::kNearJump);
5107 5105
5108 // exponent == 3.0 ? 5106 // exponent == 3.0 ?
5109 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(3.0)), PP); 5107 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(3.0)));
5110 __ movsd(XMM0, FieldAddress(temp, Double::value_offset())); 5108 __ movsd(XMM0, FieldAddress(temp, Double::value_offset()));
5111 __ comisd(exp, XMM0); 5109 __ comisd(exp, XMM0);
5112 __ j(NOT_EQUAL, &check_base); 5110 __ j(NOT_EQUAL, &check_base);
5113 5111
5114 // Base times 3. 5112 // Base times 3.
5115 __ movsd(result, base); 5113 __ movsd(result, base);
5116 __ mulsd(result, base); 5114 __ mulsd(result, base);
5117 __ mulsd(result, base); 5115 __ mulsd(result, base);
5118 __ jmp(&skip_call); 5116 __ jmp(&skip_call);
5119 5117
(...skipping 14 matching lines...)
5134 __ comisd(base, result); 5132 __ comisd(base, result);
5135 __ j(PARITY_EVEN, &return_nan, Assembler::kNearJump); 5133 __ j(PARITY_EVEN, &return_nan, Assembler::kNearJump);
5136 __ j(EQUAL, &skip_call, Assembler::kNearJump); 5134 __ j(EQUAL, &skip_call, Assembler::kNearJump);
5137 // Note: 'base' could be NaN. 5135 // Note: 'base' could be NaN.
5138 __ comisd(exp, base); 5136 __ comisd(exp, base);
5139 // Neither 'exp' nor 'base' is NaN. 5137 // Neither 'exp' nor 'base' is NaN.
5140 Label try_sqrt; 5138 Label try_sqrt;
5141 __ j(PARITY_ODD, &try_sqrt, Assembler::kNearJump); 5139 __ j(PARITY_ODD, &try_sqrt, Assembler::kNearJump);
5142 // Return NaN. 5140 // Return NaN.
5143 __ Bind(&return_nan); 5141 __ Bind(&return_nan);
5144 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(NAN)), PP); 5142 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(NAN)));
5145 __ movsd(result, FieldAddress(temp, Double::value_offset())); 5143 __ movsd(result, FieldAddress(temp, Double::value_offset()));
5146 __ jmp(&skip_call); 5144 __ jmp(&skip_call);
5147 5145
5148 Label do_pow, return_zero; 5146 Label do_pow, return_zero;
5149 __ Bind(&try_sqrt); 5147 __ Bind(&try_sqrt);
5150 // Before calling pow, check if we could use sqrt instead of pow. 5148 // Before calling pow, check if we could use sqrt instead of pow.
5151 __ LoadObject(temp, 5149 __ LoadObject(temp,
5152 Double::ZoneHandle(Double::NewCanonical(kNegInfinity)), PP); 5150 Double::ZoneHandle(Double::NewCanonical(kNegInfinity)));
5153 __ movsd(result, FieldAddress(temp, Double::value_offset())); 5151 __ movsd(result, FieldAddress(temp, Double::value_offset()));
5154 // base == -Infinity -> call pow; 5152 // base == -Infinity -> call pow;
5155 __ comisd(base, result); 5153 __ comisd(base, result);
5156 __ j(EQUAL, &do_pow, Assembler::kNearJump); 5154 __ j(EQUAL, &do_pow, Assembler::kNearJump);
5157 5155
5158 // exponent == 0.5 ? 5156 // exponent == 0.5 ?
5159 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(0.5)), PP); 5157 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(0.5)));
5160 __ movsd(result, FieldAddress(temp, Double::value_offset())); 5158 __ movsd(result, FieldAddress(temp, Double::value_offset()));
5161 __ comisd(exp, result); 5159 __ comisd(exp, result);
5162 __ j(NOT_EQUAL, &do_pow, Assembler::kNearJump); 5160 __ j(NOT_EQUAL, &do_pow, Assembler::kNearJump);
5163 5161
5164 // base == 0 -> return 0; 5162 // base == 0 -> return 0;
5165 __ comisd(base, zero_temp); 5163 __ comisd(base, zero_temp);
5166 __ j(EQUAL, &return_zero, Assembler::kNearJump); 5164 __ j(EQUAL, &return_zero, Assembler::kNearJump);
5167 5165
5168 __ sqrtsd(result, base); 5166 __ sqrtsd(result, base);
5169 __ jmp(&skip_call, Assembler::kNearJump); 5167 __ jmp(&skip_call, Assembler::kNearJump);
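The guards before sqrtsd exist because pow(x, 0.5) and sqrt(x) disagree on exactly two inputs, so those are peeled off before using the cheaper instruction. In C++ terms (a sketch of the intent, not the emitted code; the -infinity case above actually falls through to the pow call, which produces the same +infinity):

  #include <cmath>

  // pow(base, 0.5) via sqrt, with the two disagreeing inputs peeled off:
  //   pow(-inf, 0.5) == +inf   but sqrt(-inf) is NaN
  //   pow(-0.0, 0.5) == +0.0   but sqrt(-0.0) == -0.0
  double PowHalf(double base) {
    if (base == -INFINITY) return INFINITY;  // the "do_pow" path above
    if (base == 0.0) return 0.0;             // == also matches -0.0
    return std::sqrt(base);
  }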
(...skipping 176 matching lines...)
5346 __ jmp(&done); 5344 __ jmp(&done);
5347 5345
5348 // Divide using 64bit idiv. 5346 // Divide using 64bit idiv.
5349 __ Bind(&not_32bit); 5347 __ Bind(&not_32bit);
5350 __ SmiUntag(left); 5348 __ SmiUntag(left);
5351 __ SmiUntag(right); 5349 __ SmiUntag(right);
5352 __ cqo(); // Sign extend RAX -> RDX:RAX. 5350 __ cqo(); // Sign extend RAX -> RDX:RAX.
5353 __ idivq(right); // RAX: quotient, RDX: remainder. 5351 __ idivq(right); // RAX: quotient, RDX: remainder.
5354 // Check the corner case of dividing 'MIN_SMI' by -1, in which 5352 // Check the corner case of dividing 'MIN_SMI' by -1, in which
5355 // case we cannot tag the result. 5353 // case we cannot tag the result.
5356 __ CompareImmediate(RAX, Immediate(0x4000000000000000), PP); 5354 __ CompareImmediate(RAX, Immediate(0x4000000000000000));
5357 __ j(EQUAL, deopt); 5355 __ j(EQUAL, deopt);
5358 __ Bind(&done); 5356 __ Bind(&done);
5359 5357
5360 // Modulo correction (RDX). 5358 // Modulo correction (RDX).
5361 // res = left % right; 5359 // res = left % right;
5362 // if (res < 0) { 5360 // if (res < 0) {
5363 // if (right < 0) { 5361 // if (right < 0) {
5364 // res = res - right; 5362 // res = res - right;
5365 // } else { 5363 // } else {
5366 // res = res + right; 5364 // res = res + right;
(...skipping 103 matching lines...) Expand 10 before | Expand all | Expand 10 after
5470 return summary; 5468 return summary;
5471 } 5469 }
5472 5470
5473 5471
5474 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 5472 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
5475 Label* deopt = compiler->AddDeoptStub(deopt_id(), 5473 Label* deopt = compiler->AddDeoptStub(deopt_id(),
5476 ICData::kDeoptCheckClass, 5474 ICData::kDeoptCheckClass,
5477 licm_hoisted_ ? ICData::kHoisted : 0); 5475 licm_hoisted_ ? ICData::kHoisted : 0);
5478 if (IsNullCheck()) { 5476 if (IsNullCheck()) {
5479 __ CompareObject(locs()->in(0).reg(), 5477 __ CompareObject(locs()->in(0).reg(),
5480 Object::null_object(), PP); 5478 Object::null_object());
5481 Condition cond = DeoptIfNull() ? EQUAL : NOT_EQUAL; 5479 Condition cond = DeoptIfNull() ? EQUAL : NOT_EQUAL;
5482 __ j(cond, deopt); 5480 __ j(cond, deopt);
5483 return; 5481 return;
5484 } 5482 }
5485 5483
5486 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || 5484 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) ||
5487 (unary_checks().NumberOfChecks() > 1)); 5485 (unary_checks().NumberOfChecks() > 1));
5488 Register value = locs()->in(0).reg(); 5486 Register value = locs()->in(0).reg();
5489 Register temp = locs()->temp(0).reg(); 5487 Register temp = locs()->temp(0).reg();
5490 Label is_ok; 5488 Label is_ok;
(...skipping 74 matching lines...)
5565 LocationSummary* summary = new(zone) LocationSummary( 5563 LocationSummary* summary = new(zone) LocationSummary(
5566 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 5564 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
5567 summary->set_in(0, Location::RequiresRegister()); 5565 summary->set_in(0, Location::RequiresRegister());
5568 return summary; 5566 return summary;
5569 } 5567 }
5570 5568
5571 5569
5572 void CheckClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 5570 void CheckClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
5573 Register value = locs()->in(0).reg(); 5571 Register value = locs()->in(0).reg();
5574 Label* deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckClass); 5572 Label* deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckClass);
5575 __ CompareImmediate(value, Immediate(Smi::RawValue(cid_)), PP); 5573 __ CompareImmediate(value, Immediate(Smi::RawValue(cid_)));
5576 __ j(NOT_ZERO, deopt); 5574 __ j(NOT_ZERO, deopt);
5577 } 5575 }
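Comparing against Smi::RawValue(cid_) suggests the input register still holds the class id in Smi-tagged form, so no untagging is emitted, and NOT_ZERO acts as not-equal after the compare. A small sketch under that assumption (SmiRawValue is a hand-rolled stand-in for one-bit tagging, and cid 57 is made up):

#include <cstdint>
#include <cstdio>

int64_t SmiRawValue(int64_t value) { return value << 1; }  // tag bit is 0

int main() {
  const int64_t cid = 57;                        // hypothetical class id
  const int64_t tagged_input = SmiRawValue(57);  // what the register holds
  // Mirrors CompareImmediate(value, Immediate(Smi::RawValue(cid_))).
  std::printf("%s\n", tagged_input == SmiRawValue(cid) ? "ok" : "deopt");
  return 0;
}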
5578 5576
5579 5577
5580 LocationSummary* CheckArrayBoundInstr::MakeLocationSummary(Zone* zone, 5578 LocationSummary* CheckArrayBoundInstr::MakeLocationSummary(Zone* zone,
5581 bool opt) const { 5579 bool opt) const {
5582 const intptr_t kNumInputs = 2; 5580 const intptr_t kNumInputs = 2;
5583 const intptr_t kNumTemps = 0; 5581 const intptr_t kNumTemps = 0;
5584 LocationSummary* locs = new(zone) LocationSummary( 5582 LocationSummary* locs = new(zone) LocationSummary(
5585 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 5583 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
(...skipping 21 matching lines...)
5607 // Unconditionally deoptimize for constant bounds checks because they 5605 // Unconditionally deoptimize for constant bounds checks because they
5608 // only occur when the index is out-of-bounds. 5606 // only occur when the index is out-of-bounds.
5609 __ jmp(deopt); 5607 __ jmp(deopt);
5610 return; 5608 return;
5611 } 5609 }
5612 5610
5613 if (index_loc.IsConstant()) { 5611 if (index_loc.IsConstant()) {
5614 Register length = length_loc.reg(); 5612 Register length = length_loc.reg();
5615 const Smi& index = Smi::Cast(index_loc.constant()); 5613 const Smi& index = Smi::Cast(index_loc.constant());
5616 __ CompareImmediate( 5614 __ CompareImmediate(
5617 length, Immediate(reinterpret_cast<int64_t>(index.raw())), PP); 5615 length, Immediate(reinterpret_cast<int64_t>(index.raw())));
5618 __ j(BELOW_EQUAL, deopt); 5616 __ j(BELOW_EQUAL, deopt);
5619 } else if (length_loc.IsConstant()) { 5617 } else if (length_loc.IsConstant()) {
5620 const Smi& length = Smi::Cast(length_loc.constant()); 5618 const Smi& length = Smi::Cast(length_loc.constant());
5621 Register index = index_loc.reg(); 5619 Register index = index_loc.reg();
5622 if (length.Value() == Smi::kMaxValue) { 5620 if (length.Value() == Smi::kMaxValue) {
5623 __ testq(index, index); 5621 __ testq(index, index);
5624 __ j(NEGATIVE, deopt); 5622 __ j(NEGATIVE, deopt);
5625 } else { 5623 } else {
5626 __ CompareImmediate( 5624 __ CompareImmediate(
5627 index, Immediate(reinterpret_cast<int64_t>(length.raw())), PP); 5625 index, Immediate(reinterpret_cast<int64_t>(length.raw())));
5628 __ j(ABOVE_EQUAL, deopt); 5626 __ j(ABOVE_EQUAL, deopt);
5629 } 5627 }
5630 } else { 5628 } else {
5631 Register length = length_loc.reg(); 5629 Register length = length_loc.reg();
5632 Register index = index_loc.reg(); 5630 Register index = index_loc.reg();
5633 __ cmpq(index, length); 5631 __ cmpq(index, length);
5634 __ j(ABOVE_EQUAL, deopt); 5632 __ j(ABOVE_EQUAL, deopt);
5635 } 5633 }
5636 } 5634 }
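The register/register and constant-index paths above use unsigned conditions (ABOVE_EQUAL, BELOW_EQUAL) so a single branch rejects both an index that is too large and a negative one, whose two's-complement pattern reads as a huge unsigned value; only the length == Smi::kMaxValue case needs a plain sign test. A standalone sketch of the trick (in_bounds is an illustrative helper):

#include <cstdint>
#include <cstdio>

bool in_bounds(int64_t index, int64_t length) {
  // Mirrors `cmpq index, length` + j(ABOVE_EQUAL, deopt): ABOVE/BELOW are
  // the unsigned flavors of the x64 condition codes.
  return static_cast<uint64_t>(index) < static_cast<uint64_t>(length);
}

int main() {
  std::printf("%d\n", in_bounds(3, 10));   // 1: in bounds
  std::printf("%d\n", in_bounds(10, 10));  // 0: index == length fails
  std::printf("%d\n", in_bounds(-1, 10));  // 0: wraps to a huge unsigned value
  return 0;
}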
5637 5635
(...skipping 636 matching lines...)
6274 6272
6275 Label is_true, is_false; 6273 Label is_true, is_false;
6276 BranchLabels labels = { &is_true, &is_false, &is_false }; 6274 BranchLabels labels = { &is_true, &is_false, &is_false };
6277 6275
6278 Condition true_condition = EmitComparisonCode(compiler, labels); 6276 Condition true_condition = EmitComparisonCode(compiler, labels);
6279 EmitBranchOnCondition(compiler, true_condition, labels); 6277 EmitBranchOnCondition(compiler, true_condition, labels);
6280 6278
6281 Register result = locs()->out(0).reg(); 6279 Register result = locs()->out(0).reg();
6282 Label done; 6280 Label done;
6283 __ Bind(&is_false); 6281 __ Bind(&is_false);
6284 __ LoadObject(result, Bool::False(), PP); 6282 __ LoadObject(result, Bool::False());
6285 __ jmp(&done); 6283 __ jmp(&done);
6286 __ Bind(&is_true); 6284 __ Bind(&is_true);
6287 __ LoadObject(result, Bool::True(), PP); 6285 __ LoadObject(result, Bool::True());
6288 __ Bind(&done); 6286 __ Bind(&done);
6289 } 6287 }
6290 6288
6291 6289
6292 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, 6290 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
6293 BranchInstr* branch) { 6291 BranchInstr* branch) {
6294 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); 6292 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT);
6295 6293
6296 BranchLabels labels = compiler->CreateBranchLabels(branch); 6294 BranchLabels labels = compiler->CreateBranchLabels(branch);
6297 Condition true_condition = EmitComparisonCode(compiler, labels); 6295 Condition true_condition = EmitComparisonCode(compiler, labels);
(...skipping 12 matching lines...)
6310 return summary; 6308 return summary;
6311 } 6309 }
6312 6310
6313 6311
6314 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 6312 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
6315 // Arguments descriptor is expected in R10. 6313 // Arguments descriptor is expected in R10.
6316 intptr_t argument_count = ArgumentCount(); 6314 intptr_t argument_count = ArgumentCount();
6317 const Array& arguments_descriptor = 6315 const Array& arguments_descriptor =
6318 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, 6316 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
6319 argument_names())); 6317 argument_names()));
6320 __ LoadObject(R10, arguments_descriptor, PP); 6318 __ LoadObject(R10, arguments_descriptor);
6321 6319
6322 // Function in RAX. 6320 // Function in RAX.
6323 ASSERT(locs()->in(0).reg() == RAX); 6321 ASSERT(locs()->in(0).reg() == RAX);
6324 __ movq(RCX, FieldAddress(RAX, Function::instructions_offset())); 6322 __ movq(RCX, FieldAddress(RAX, Function::instructions_offset()));
6325 6323
6326 // RAX: Function. 6324 // RAX: Function.
6327 // R10: Arguments descriptor array. 6325 // R10: Arguments descriptor array.
6328 // RBX: Smi 0 (no IC data; the lazy-compile stub expects a GC-safe value). 6326 // RBX: Smi 0 (no IC data; the lazy-compile stub expects a GC-safe value).
6329 __ xorq(RBX, RBX); 6327 __ xorq(RBX, RBX);
6330 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); 6328 __ addq(RCX, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
(...skipping 22 matching lines...)
6353 Location::RequiresRegister(), 6351 Location::RequiresRegister(),
6354 LocationSummary::kNoCall); 6352 LocationSummary::kNoCall);
6355 } 6353 }
6356 6354
6357 6355
6358 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 6356 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
6359 Register value = locs()->in(0).reg(); 6357 Register value = locs()->in(0).reg();
6360 Register result = locs()->out(0).reg(); 6358 Register result = locs()->out(0).reg();
6361 6359
6362 Label done; 6360 Label done;
6363 __ LoadObject(result, Bool::True(), PP); 6361 __ LoadObject(result, Bool::True());
6364 __ CompareRegisters(result, value); 6362 __ CompareRegisters(result, value);
6365 __ j(NOT_EQUAL, &done, Assembler::kNearJump); 6363 __ j(NOT_EQUAL, &done, Assembler::kNearJump);
6366 __ LoadObject(result, Bool::False(), PP); 6364 __ LoadObject(result, Bool::False());
6367 __ Bind(&done); 6365 __ Bind(&done);
6368 } 6366 }
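The negation above needs no second branch: the result is preloaded with true and only overwritten when the input compared equal to true. The same control flow in C++ (negate is an illustrative name):

#include <cstdio>

bool negate(bool value) {
  bool result = true;     // LoadObject(result, Bool::True())
  if (result == value) {  // CompareRegisters + j(NOT_EQUAL, &done)
    result = false;       // LoadObject(result, Bool::False())
  }
  return result;          // Bind(&done)
}

int main() {
  std::printf("%d %d\n", negate(true), negate(false));  // 0 1
  return 0;
}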
6369 6367
6370 6368
6371 LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone, 6369 LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone,
6372 bool opt) const { 6370 bool opt) const {
6373 return MakeCallSummary(zone); 6371 return MakeCallSummary(zone);
6374 } 6372 }
6375 6373
6376 6374
(...skipping 28 matching lines...)
6405 zone, kNumInputs, kNumTemps, LocationSummary::kCall); 6403 zone, kNumInputs, kNumTemps, LocationSummary::kCall);
6406 locs->set_in(0, Location::RegisterLocation(RAX)); 6404 locs->set_in(0, Location::RegisterLocation(RAX));
6407 locs->set_out(0, Location::RegisterLocation(RAX)); 6405 locs->set_out(0, Location::RegisterLocation(RAX));
6408 return locs; 6406 return locs;
6409 } 6407 }
6410 6408
6411 6409
6412 void GrowRegExpStackInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 6410 void GrowRegExpStackInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
6413 const Register typed_data = locs()->in(0).reg(); 6411 const Register typed_data = locs()->in(0).reg();
6414 const Register result = locs()->out(0).reg(); 6412 const Register result = locs()->out(0).reg();
6415 __ PushObject(Object::null_object(), PP); 6413 __ PushObject(Object::null_object());
6416 __ pushq(typed_data); 6414 __ pushq(typed_data);
6417 compiler->GenerateRuntimeCall(Scanner::kNoSourcePos, // No token position. 6415 compiler->GenerateRuntimeCall(Scanner::kNoSourcePos, // No token position.
6418 deopt_id(), 6416 deopt_id(),
6419 kGrowRegExpStackRuntimeEntry, 6417 kGrowRegExpStackRuntimeEntry,
6420 1, 6418 1,
6421 locs()); 6419 locs());
6422 __ Drop(1); 6420 __ Drop(1);
6423 __ popq(result); 6421 __ popq(result);
6424 } 6422 }
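The sequence above is the usual runtime-call convention: push a null slot to receive the result, push the argument, call, drop the argument, pop the result. A sketch with a std::vector standing in for the machine stack (all names and values here are illustrative, including kNewStack playing the value the runtime entry writes back):

#include <cassert>
#include <vector>

int main() {
  std::vector<int> stack;
  const int kNull = 0, kTypedData = 42, kNewStack = 7;

  stack.push_back(kNull);               // PushObject(null): result slot
  stack.push_back(kTypedData);          // pushq(typed_data): the argument
  stack[stack.size() - 2] = kNewStack;  // runtime entry fills the result slot
  stack.pop_back();                     // Drop(1): discard the argument
  const int result = stack.back();      // popq(result): fetch the result
  stack.pop_back();
  assert(result == kNewStack);
  return 0;
}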
6425 6423
6426 6424
6427 } // namespace dart 6425 } // namespace dart
6428 6426
6429 #undef __ 6427 #undef __
6430 6428
6431 #endif // defined TARGET_ARCH_X64 6429 #endif // defined TARGET_ARCH_X64