Chromium Code Reviews

Unified diff: src/crankshaft/mips64/lithium-codegen-mips64.cc (changed lines shown as - old / + new)

Issue 2829073002: MIPS64: Move load/store instructions to macro-assembler. (Closed)
Patch Set: Created 3 years, 8 months ago
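
Note on the pattern (reviewer summary, not part of the patch): the CL mechanically renames the raw MIPS64 assembler load/store emitters to their capitalized MacroAssembler counterparts throughout this file: ld/sd -> Ld/Sd, lw/sw -> Lw/Sw, lb/lbu/lh/lhu/sb/sh -> Lb/Lbu/Lh/Lhu/Sb/Sh, and lwc1/ldc1/sdc1 -> Lwc1/Ldc1/Sdc1. A minimal sketch of the shape of the change at a call site (illustrative only; `offset` and `f0` are stand-ins):

    // Before: a single hardware instruction; the signed 16-bit immediate
    // offset of the MIPS encoding must fit as-is.
    __ sdc1(f0, MemOperand(sp, offset));
    // After: a MacroAssembler wrapper, which is free to emit a longer
    // sequence, e.g. materializing an out-of-range offset in a scratch
    // register first.
    __ Sdc1(f0, MemOperand(sp, offset));

Presumably the point of routing everything through the macro layer is exactly that freedom to legalize operands in one place.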
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/mips64/lithium-codegen-mips64.h"

#include "src/builtins/builtins-constructor.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/crankshaft/hydrogen-osr.h"
(...skipping 73 matching lines...)


void LCodeGen::SaveCallerDoubles() {
  DCHECK(info()->saves_caller_doubles());
  DCHECK(NeedsEagerFrame());
  Comment(";;; Save clobbered callee double registers");
  int count = 0;
  BitVector* doubles = chunk()->allocated_double_registers();
  BitVector::Iterator save_iterator(doubles);
  while (!save_iterator.Done()) {
-    __ sdc1(DoubleRegister::from_code(save_iterator.Current()),
+    __ Sdc1(DoubleRegister::from_code(save_iterator.Current()),
             MemOperand(sp, count * kDoubleSize));
    save_iterator.Advance();
    count++;
  }
}


void LCodeGen::RestoreCallerDoubles() {
  DCHECK(info()->saves_caller_doubles());
  DCHECK(NeedsEagerFrame());
  Comment(";;; Restore clobbered callee double registers");
  BitVector* doubles = chunk()->allocated_double_registers();
  BitVector::Iterator save_iterator(doubles);
  int count = 0;
  while (!save_iterator.Done()) {
-    __ ldc1(DoubleRegister::from_code(save_iterator.Current()),
+    __ Ldc1(DoubleRegister::from_code(save_iterator.Current()),
             MemOperand(sp, count * kDoubleSize));
    save_iterator.Advance();
    count++;
  }
}
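
Reviewer note (illustration, not from the patch): SaveCallerDoubles and RestoreCallerDoubles walk the same allocated_double_registers() bit vector in the same order, so the k-th set bit is always paired with the same stack slot, roughly:

    // save:    for (k = 0; !it.Done(); it.Advance(), ++k)
    //              mem[sp + k * kDoubleSize] = fpr[it.Current()];
    // restore: for (k = 0; !it.Done(); it.Advance(), ++k)
    //              fpr[it.Current()] = mem[sp + k * kDoubleSize];

which is what makes the Sdc1 in the save loop and the Ldc1 in the restore loop exact mirrors.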


bool LCodeGen::GeneratePrologue() {
  DCHECK(is_generating());

(...skipping 20 matching lines...)
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ Dsubu(sp, sp, Operand(slots * kPointerSize));
      __ Push(a0, a1);
      __ Daddu(a0, sp, Operand(slots * kPointerSize));
      __ li(a1, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ Dsubu(a0, a0, Operand(kPointerSize));
-      __ sd(a1, MemOperand(a0, 2 * kPointerSize));
+      __ Sd(a1, MemOperand(a0, 2 * kPointerSize));
      __ Branch(&loop, ne, a0, Operand(sp));
      __ Pop(a0, a1);
    } else {
      __ Dsubu(sp, sp, Operand(slots * kPointerSize));
    }
  }
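
Reviewer note (arithmetic worked out, not from the patch): after the two pushes, a0 starts at old_sp - 2 * kPointerSize, so the store offset of 2 * kPointerSize in the zap loop skips the just-saved a0/a1 pair. The loop decrements a0 until it reaches sp, which gives exactly `slots` stores covering [sp + 2 * kPointerSize, old_sp - kPointerSize], i.e. precisely the freshly allocated slots, each filled with kSlotsZapValue.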

  if (info()->saves_caller_doubles()) {
    SaveCallerDoubles();
  }
(...skipping 29 matching lines...)
        __ push(a1);
        __ Push(Smi::FromInt(info()->scope()->scope_type()));
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
    }
    RecordSafepoint(deopt_mode);

    // Context is returned in both v0 and cp. It replaces the context
    // passed to us. It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
-    __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
+    __ Sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info()->scope()->num_parameters();
    int first_parameter = info()->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? info()->scope()->receiver()
                                : info()->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
-        __ ld(a0, MemOperand(fp, parameter_offset));
+        __ Ld(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
-        __ sd(a0, target);
+        __ Sd(a0, target);
        // Update the write barrier. This clobbers a3 and a0.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, GetRAState(), kSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
(...skipping 184 matching lines...)
      DCHECK(constant->HasSmiValue());
      __ li(scratch, Operand(Smi::FromInt(constant->Integer32Value())));
    } else if (r.IsDouble()) {
      Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
    } else {
      DCHECK(r.IsSmiOrTagged());
      __ li(scratch, literal);
    }
    return scratch;
  } else if (op->IsStackSlot()) {
-    __ ld(scratch, ToMemOperand(op));
+    __ Ld(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  DCHECK(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
(...skipping 16 matching lines...)
      __ mtc1(at, flt_scratch);
      __ cvt_d_w(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort(kUnsupportedDoubleImmediate);
    } else if (r.IsTagged()) {
      Abort(kUnsupportedTaggedImmediate);
    }
  } else if (op->IsStackSlot()) {
    MemOperand mem_op = ToMemOperand(op);
-    __ ldc1(dbl_scratch, mem_op);
+    __ Ldc1(dbl_scratch, mem_op);
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
(...skipping 222 matching lines...)
  __ CallRuntime(function, num_arguments, save_doubles);

  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::LoadContextFromDeferred(LOperand* context) {
  if (context->IsRegister()) {
    __ Move(cp, ToRegister(context));
  } else if (context->IsStackSlot()) {
-    __ ld(cp, ToMemOperand(context));
+    __ Ld(cp, ToMemOperand(context));
  } else if (context->IsConstantOperand()) {
    HConstant* constant =
        chunk_->LookupConstant(LConstantOperand::cast(context));
    __ li(cp, Handle<Object>::cast(constant->handle(isolate())));
  } else {
    UNREACHABLE();
  }
}


(...skipping 58 matching lines...)
    Abort(kBailoutWasNotPrepared);
    return;
  }

  if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
    Register scratch = scratch0();
    ExternalReference count = ExternalReference::stress_deopt_count(isolate());
    Label no_deopt;
    __ Push(a1, scratch);
    __ li(scratch, Operand(count));
-    __ lw(a1, MemOperand(scratch));
+    __ Lw(a1, MemOperand(scratch));
    __ Subu(a1, a1, Operand(1));
    __ Branch(&no_deopt, ne, a1, Operand(zero_reg));
    __ li(a1, Operand(FLAG_deopt_every_n_times));
-    __ sw(a1, MemOperand(scratch));
+    __ Sw(a1, MemOperand(scratch));
    __ Pop(a1, scratch);

    __ Call(entry, RelocInfo::RUNTIME_ENTRY);
    __ bind(&no_deopt);
-    __ sw(a1, MemOperand(scratch));
+    __ Sw(a1, MemOperand(scratch));
    __ Pop(a1, scratch);
  }

  if (info()->ShouldTrapOnDeopt()) {
    Label skip;
    if (condition != al) {
      __ Branch(&skip, NegateCondition(condition), src1, src2);
    }
    __ stop("trap_on_deopt");
    __ bind(&skip);
(...skipping 959 matching lines...)
}


void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) {
  String::Encoding encoding = instr->hydrogen()->encoding();
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());

  if (FLAG_debug_code) {
    Register scratch = scratch0();
-    __ ld(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
-    __ lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
+    __ Ld(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
+    __ Lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));

    __ And(scratch, scratch,
           Operand(kStringRepresentationMask | kStringEncodingMask));
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ Dsubu(at, scratch, Operand(encoding == String::ONE_BYTE_ENCODING
                                      ? one_byte_seq_type : two_byte_seq_type));
    __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
  }

  MemOperand operand = BuildSeqStringOperand(string, instr->index(), encoding);
  if (encoding == String::ONE_BYTE_ENCODING) {
-    __ lbu(result, operand);
+    __ Lbu(result, operand);
  } else {
-    __ lhu(result, operand);
+    __ Lhu(result, operand);
  }
}


void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
  String::Encoding encoding = instr->hydrogen()->encoding();
  Register string = ToRegister(instr->string());
  Register value = ToRegister(instr->value());

  if (FLAG_debug_code) {
    Register scratch = scratch0();
    Register index = ToRegister(instr->index());
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    int encoding_mask =
        instr->hydrogen()->encoding() == String::ONE_BYTE_ENCODING
            ? one_byte_seq_type : two_byte_seq_type;
    __ EmitSeqStringSetCharCheck(string, index, value, scratch, encoding_mask);
  }

  MemOperand operand = BuildSeqStringOperand(string, instr->index(), encoding);
  if (encoding == String::ONE_BYTE_ENCODING) {
-    __ sb(value, operand);
+    __ Sb(value, operand);
  } else {
-    __ sh(value, operand);
+    __ Sh(value, operand);
  }
}


void LCodeGen::DoAddE(LAddE* instr) {
  LOperand* result = instr->result();
  LOperand* left = instr->left();
  LOperand* right = instr->right();

  DCHECK(!instr->hydrogen()->CheckFlag(HValue::kCanOverflow));
(...skipping 242 matching lines...)
    EmitBranch(instr, eq, reg, Operand(at));
  } else if (type.IsSmi()) {
    DCHECK(!info()->IsStub());
    EmitBranch(instr, ne, reg, Operand(zero_reg));
  } else if (type.IsJSArray()) {
    DCHECK(!info()->IsStub());
    EmitBranch(instr, al, zero_reg, Operand(zero_reg));
  } else if (type.IsHeapNumber()) {
    DCHECK(!info()->IsStub());
    DoubleRegister dbl_scratch = double_scratch0();
-    __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
+    __ Ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
    // Test the double value. Zero and NaN are false.
    EmitBranchF(instr, ogl, dbl_scratch, kDoubleRegZero);
  } else if (type.IsString()) {
    DCHECK(!info()->IsStub());
-    __ ld(at, FieldMemOperand(reg, String::kLengthOffset));
+    __ Ld(at, FieldMemOperand(reg, String::kLengthOffset));
    EmitBranch(instr, ne, at, Operand(zero_reg));
  } else {
    ToBooleanHints expected = instr->hydrogen()->expected_input_types();
    // Avoid deopts in the case where we've never executed this path before.
    if (expected == ToBooleanHint::kNone) expected = ToBooleanHint::kAny;

    if (expected & ToBooleanHint::kUndefined) {
      // undefined -> false.
      __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
      __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at));
(...skipping 16 matching lines...)
      __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(zero_reg));
      __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
    } else if (expected & ToBooleanHint::kNeedsMap) {
      // If we need a map later and have a Smi -> deopt.
      __ SmiTst(reg, at);
      DeoptimizeIf(eq, instr, DeoptimizeReason::kSmi, at, Operand(zero_reg));
    }

    const Register map = scratch0();
    if (expected & ToBooleanHint::kNeedsMap) {
-      __ ld(map, FieldMemOperand(reg, HeapObject::kMapOffset));
+      __ Ld(map, FieldMemOperand(reg, HeapObject::kMapOffset));
      if (expected & ToBooleanHint::kCanBeUndetectable) {
        // Undetectable -> false.
-        __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset));
+        __ Lbu(at, FieldMemOperand(map, Map::kBitFieldOffset));
        __ And(at, at, Operand(1 << Map::kIsUndetectable));
        __ Branch(instr->FalseLabel(chunk_), ne, at, Operand(zero_reg));
      }
    }

    if (expected & ToBooleanHint::kReceiver) {
      // spec object -> true.
-      __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
+      __ Lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
      __ Branch(instr->TrueLabel(chunk_),
                ge, at, Operand(FIRST_JS_RECEIVER_TYPE));
    }

    if (expected & ToBooleanHint::kString) {
      // String value -> false iff empty.
      Label not_string;
-      __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
+      __ Lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
      __ Branch(&not_string, ge, at, Operand(FIRST_NONSTRING_TYPE));
-      __ ld(at, FieldMemOperand(reg, String::kLengthOffset));
+      __ Ld(at, FieldMemOperand(reg, String::kLengthOffset));
      __ Branch(instr->TrueLabel(chunk_), ne, at, Operand(zero_reg));
      __ Branch(instr->FalseLabel(chunk_));
      __ bind(&not_string);
    }

    if (expected & ToBooleanHint::kSymbol) {
      // Symbol value -> true.
      const Register scratch = scratch1();
-      __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
+      __ Lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
      __ Branch(instr->TrueLabel(chunk_), eq, scratch, Operand(SYMBOL_TYPE));
    }

    if (expected & ToBooleanHint::kHeapNumber) {
      // heap number -> false iff +0, -0, or NaN.
      DoubleRegister dbl_scratch = double_scratch0();
      Label not_heap_number;
      __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
      __ Branch(&not_heap_number, ne, map, Operand(at));
-      __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
+      __ Ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
      __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_),
                 ne, dbl_scratch, kDoubleRegZero);
      // Falls through if dbl_scratch == 0.
      __ Branch(instr->FalseLabel(chunk_));
      __ bind(&not_heap_number);
    }

    if (expected != ToBooleanHint::kAny) {
      // We've seen something for the first time -> deopt.
      // This can only happen if we are not generic already.
(...skipping 173 matching lines...)
}


void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  Register input = ToRegister(instr->value());
  Register temp = ToRegister(instr->temp());

  if (!instr->hydrogen()->value()->type().IsHeapObject()) {
    __ JumpIfSmi(input, instr->FalseLabel(chunk_));
  }
-  __ ld(temp, FieldMemOperand(input, HeapObject::kMapOffset));
-  __ lbu(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
+  __ Ld(temp, FieldMemOperand(input, HeapObject::kMapOffset));
+  __ Lbu(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
  __ And(at, temp, Operand(1 << Map::kIsUndetectable));
  EmitBranch(instr, ne, at, Operand(zero_reg));
}


static Condition ComputeCompareCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
(...skipping 86 matching lines...)

  // Objects with a non-function constructor have class 'Object'.
  if (String::Equals(class_name, isolate()->factory()->Object_string())) {
    __ Branch(is_true, ne, instance_type, Operand(JS_FUNCTION_TYPE));
  } else {
    __ Branch(is_false, ne, instance_type, Operand(JS_FUNCTION_TYPE));
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
-  __ ld(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
+  __ Ld(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
-  __ ld(temp,
+  __ Ld(temp,
        FieldMemOperand(temp, SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is internalized since it's a literal.
  // The name in the constructor is internalized because of the way the context
  // is booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are internalized it is sufficient to use an
  // identity comparison.

  // End with the address of this class_name instance in temp register.
  // On MIPS, the caller must do the comparison with Handle<String> class_name.
}
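
Reviewer sketch of why the identity comparison at the end is sound (standalone C++, not V8 code): once both names are internalized, i.e. deduplicated into a single table, string equality collapses to pointer equality.

    #include <cassert>
    #include <string>
    #include <unordered_set>

    int main() {
      std::unordered_set<std::string> table;  // stand-in for the string table
      const std::string* a = &*table.insert("Object").first;
      const std::string* b = &*table.insert("Object").first;
      assert(a == b);  // the second insert finds the existing entry
    }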

void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->value());
  Register temp = scratch0();
  Register temp2 = ToRegister(instr->temp());
  Handle<String> class_name = instr->hydrogen()->class_name();

  EmitClassOfTest(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_),
                  class_name, input, temp, temp2);

  EmitBranch(instr, eq, temp, Operand(class_name));
}

void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->value());
  Register temp = ToRegister(instr->temp());

-  __ ld(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
+  __ Ld(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  EmitBranch(instr, eq, temp, Operand(instr->map()));
}

void LCodeGen::DoHasInPrototypeChainAndBranch(
    LHasInPrototypeChainAndBranch* instr) {
  Register const object = ToRegister(instr->object());
  Register const object_map = scratch0();
  Register const object_instance_type = scratch1();
  Register const object_prototype = object_map;
  Register const prototype = ToRegister(instr->prototype());

  // The {object} must be a spec object. It's sufficient to know that {object}
  // is not a smi, since all other non-spec objects have {null} prototypes and
  // will be ruled out below.
  if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
    __ SmiTst(object, at);
    EmitFalseBranch(instr, eq, at, Operand(zero_reg));
  }

  // Loop through the {object}s prototype chain looking for the {prototype}.
-  __ ld(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
+  __ Ld(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
  Label loop;
  __ bind(&loop);

  // Deoptimize if the object needs to be access checked.
-  __ lbu(object_instance_type,
+  __ Lbu(object_instance_type,
         FieldMemOperand(object_map, Map::kBitFieldOffset));
  __ And(object_instance_type, object_instance_type,
         Operand(1 << Map::kIsAccessCheckNeeded));
  DeoptimizeIf(ne, instr, DeoptimizeReason::kAccessCheck, object_instance_type,
               Operand(zero_reg));
-  __ lbu(object_instance_type,
+  __ Lbu(object_instance_type,
         FieldMemOperand(object_map, Map::kInstanceTypeOffset));
  DeoptimizeIf(eq, instr, DeoptimizeReason::kProxy, object_instance_type,
               Operand(JS_PROXY_TYPE));

-  __ ld(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
+  __ Ld(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  EmitFalseBranch(instr, eq, object_prototype, Operand(at));
  EmitTrueBranch(instr, eq, object_prototype, Operand(prototype));
  __ Branch(&loop, USE_DELAY_SLOT);
-  __ ld(object_map, FieldMemOperand(object_prototype,
+  __ Ld(object_map, FieldMemOperand(object_prototype,
                                     HeapObject::kMapOffset));  // In delay slot.
}
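
Reviewer sketch (standalone C++, not V8 code): the emitted loop is the standard prototype walk, following map->prototype until it reaches the target prototype (true branch) or null (false branch); the deopts guard the access-checked and proxy cases that the model below ignores.

    #include <cassert>

    struct Obj { const Obj* prototype; };

    static bool HasInPrototypeChain(const Obj& o, const Obj* target) {
      for (const Obj* p = o.prototype; p != nullptr; p = p->prototype) {
        if (p == target) return true;  // EmitTrueBranch case
      }
      return false;  // hit null: EmitFalseBranch case
    }

    int main() {
      Obj root{nullptr}, mid{&root}, leaf{&mid};
      assert(HasInPrototypeChain(leaf, &root));
      assert(!HasInPrototypeChain(root, &leaf));
    }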


void LCodeGen::DoCmpT(LCmpT* instr) {
  DCHECK(ToRegister(instr->context()).is(cp));
  Token::Value op = instr->op();

  Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
(...skipping 13 matching lines...)
}


void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace && info()->IsOptimizing()) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns its parameter in v0. We're leaving the code
    // managed by the register allocator and tearing down the frame, it's
    // safe to write to the context register.
    __ push(v0);
-    __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+    __ Ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kTraceExit);
  }
  if (info()->saves_caller_doubles()) {
    RestoreCallerDoubles();
  }
  if (NeedsEagerFrame()) {
    __ mov(sp, fp);
    __ Pop(ra, fp);
  }
  if (instr->has_constant_parameter_count()) {
(...skipping 11 matching lines...)
  }

  __ Jump(ra);
}


void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());

-  __ ld(result, ContextMemOperand(context, instr->slot_index()));
+  __ Ld(result, ContextMemOperand(context, instr->slot_index()));
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);

    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(eq, instr, DeoptimizeReason::kHole, result, Operand(at));
    } else {
      Label is_not_hole;
      __ Branch(&is_not_hole, ne, result, Operand(at));
      __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
      __ bind(&is_not_hole);
    }
  }
}

void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  MemOperand target = ContextMemOperand(context, instr->slot_index());

  Label skip_assignment;

  if (instr->hydrogen()->RequiresHoleCheck()) {
-    __ ld(scratch, target);
+    __ Ld(scratch, target);
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);

    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(eq, instr, DeoptimizeReason::kHole, scratch, Operand(at));
    } else {
      __ Branch(&skip_assignment, ne, scratch, Operand(at));
    }
  }

-  __ sd(value, target);
+  __ Sd(value, target);
  if (instr->hydrogen()->NeedsWriteBarrier()) {
    SmiCheck check_needed =
        instr->hydrogen()->value()->type().IsHeapObject()
            ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    __ RecordWriteContextSlot(context,
                              target.offset(),
                              value,
                              scratch0(),
                              GetRAState(),
                              kSaveFPRegs,
(...skipping 11 matching lines...)
  Register object = ToRegister(instr->object());
  if (access.IsExternalMemory()) {
    Register result = ToRegister(instr->result());
    MemOperand operand = MemOperand(object, offset);
    __ Load(result, operand, access.representation());
    return;
  }

  if (instr->hydrogen()->representation().IsDouble()) {
    DoubleRegister result = ToDoubleRegister(instr->result());
-    __ ldc1(result, FieldMemOperand(object, offset));
+    __ Ldc1(result, FieldMemOperand(object, offset));
    return;
  }

  Register result = ToRegister(instr->result());
  if (!access.IsInobject()) {
-    __ ld(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
+    __ Ld(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    object = result;
  }

  Representation representation = access.representation();
  if (representation.IsSmi() && SmiValuesAre32Bits() &&
      instr->hydrogen()->representation().IsInteger32()) {
    if (FLAG_debug_code) {
      // Verify this is really a Smi.
      Register scratch = scratch0();
      __ Load(scratch, FieldMemOperand(object, offset), representation);
      __ AssertSmi(scratch);
    }

    // Read the int value directly from the upper half of the smi.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32);
    offset = SmiWordOffset(offset);
    representation = Representation::Integer32();
  }
  __ Load(result, FieldMemOperand(object, offset), representation);
}
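
Reviewer sketch of the layout assumption behind the Smi fast path above (standalone C++, not V8 code): with 32-bit Smis in a 64-bit word (kSmiTagSize + kSmiShiftSize == 32), the payload lives entirely in the upper word, so a 32-bit load from the adjusted offset recovers it directly.

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      int32_t payload = -42;
      // A Smi is the payload shifted into the upper 32 bits of the word.
      uint64_t smi = static_cast<uint64_t>(static_cast<int64_t>(payload)) << 32;
      // On little-endian MIPS64 the upper word sits 4 bytes in; presumably
      // SmiWordOffset applies that adjustment to the field offset.
      int32_t upper;
      std::memcpy(&upper, reinterpret_cast<const char*>(&smi) + 4, sizeof(upper));
      assert(upper == payload);
    }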


void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Get the prototype or initial map from the function.
-  __ ld(result,
+  __ Ld(result,
        FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  DeoptimizeIf(eq, instr, DeoptimizeReason::kHole, result, Operand(at));

  // If the function does not have an initial map, we're done.
  Label done;
  __ GetObjectType(result, scratch, scratch);
  __ Branch(&done, ne, scratch, Operand(MAP_TYPE));

  // Get the prototype from the initial map.
-  __ ld(result, FieldMemOperand(result, Map::kPrototypeOffset));
+  __ Ld(result, FieldMemOperand(result, Map::kPrototypeOffset));

  // All done.
  __ bind(&done);
}


void LCodeGen::DoLoadRoot(LLoadRoot* instr) {
  Register result = ToRegister(instr->result());
  __ LoadRoot(result, instr->index());
}


void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register result = ToRegister(instr->result());
  // There are two words between the frame pointer and the last argument.
  // Subtracting from the length accounts for one of them; add one more.
  if (instr->length()->IsConstantOperand()) {
    int const_length = ToInteger32(LConstantOperand::cast(instr->length()));
    if (instr->index()->IsConstantOperand()) {
      int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
      int index = (const_length - const_index) + 1;
-      __ ld(result, MemOperand(arguments, index * kPointerSize));
+      __ Ld(result, MemOperand(arguments, index * kPointerSize));
    } else {
      Register index = ToRegister(instr->index());
      __ li(at, Operand(const_length + 1));
      __ Dsubu(result, at, index);
      __ Dlsa(at, arguments, result, kPointerSizeLog2);
-      __ ld(result, MemOperand(at));
+      __ Ld(result, MemOperand(at));
    }
  } else if (instr->index()->IsConstantOperand()) {
    Register length = ToRegister(instr->length());
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    int loc = const_index - 1;
    if (loc != 0) {
      __ Dsubu(result, length, Operand(loc));
      __ Dlsa(at, arguments, result, kPointerSizeLog2);
-      __ ld(result, MemOperand(at));
+      __ Ld(result, MemOperand(at));
    } else {
      __ Dlsa(at, arguments, length, kPointerSizeLog2);
-      __ ld(result, MemOperand(at));
+      __ Ld(result, MemOperand(at));
    }
  } else {
    Register length = ToRegister(instr->length());
    Register index = ToRegister(instr->index());
    __ Dsubu(result, length, index);
    __ Daddu(result, result, 1);
    __ Dlsa(at, arguments, result, kPointerSizeLog2);
-    __ ld(result, MemOperand(at));
+    __ Ld(result, MemOperand(at));
  }
}
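
Worked example of the index arithmetic above (illustration, not from the patch): with const_length == 3 and const_index == 0, index = (3 - 0) + 1 = 4, so the load reads MemOperand(arguments, 4 * kPointerSize). Subtracting the index from the length accounts for one of the two words between the frame pointer and the last argument; the explicit + 1 accounts for the other.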


void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
  Register external_pointer = ToRegister(instr->elements());
  Register key = no_reg;
  ElementsKind elements_kind = instr->elements_kind();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
(...skipping 22 matching lines...)
          __ dsra32(scratch0(), key, 0);
        } else {
          __ dsra(scratch0(), key, -shift_size);
        }
      } else {
        __ dsll(scratch0(), key, shift_size);
      }
      __ Daddu(scratch0(), scratch0(), external_pointer);
    }
    if (elements_kind == FLOAT32_ELEMENTS) {
-      __ lwc1(result, MemOperand(scratch0(), base_offset));
+      __ Lwc1(result, MemOperand(scratch0(), base_offset));
      __ cvt_d_s(result, result);
    } else {  // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
-      __ ldc1(result, MemOperand(scratch0(), base_offset));
+      __ Ldc1(result, MemOperand(scratch0(), base_offset));
    }
  } else {
    Register result = ToRegister(instr->result());
    MemOperand mem_operand = PrepareKeyedOperand(
        key, external_pointer, key_is_constant, constant_key,
        element_size_shift, shift_size, base_offset);
    switch (elements_kind) {
      case INT8_ELEMENTS:
-        __ lb(result, mem_operand);
+        __ Lb(result, mem_operand);
        break;
      case UINT8_ELEMENTS:
      case UINT8_CLAMPED_ELEMENTS:
-        __ lbu(result, mem_operand);
+        __ Lbu(result, mem_operand);
        break;
      case INT16_ELEMENTS:
-        __ lh(result, mem_operand);
+        __ Lh(result, mem_operand);
        break;
      case UINT16_ELEMENTS:
-        __ lhu(result, mem_operand);
+        __ Lhu(result, mem_operand);
        break;
      case INT32_ELEMENTS:
-        __ lw(result, mem_operand);
+        __ Lw(result, mem_operand);
        break;
      case UINT32_ELEMENTS:
-        __ lw(result, mem_operand);
+        __ Lw(result, mem_operand);
        if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
          DeoptimizeIf(Ugreater_equal, instr, DeoptimizeReason::kNegativeValue,
                       result, Operand(0x80000000));
        }
        break;
      case FLOAT32_ELEMENTS:
      case FLOAT64_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_SMI_ELEMENTS:
(...skipping 40 matching lines...)
    if (shift_size > 0) {
      __ dsll(at, key, shift_size);
    } else if (shift_size == -32) {
      __ dsra32(at, key, 0);
    } else {
      __ dsra(at, key, -shift_size);
    }
    __ Daddu(scratch, scratch, at);
  }

-  __ ldc1(result, MemOperand(scratch));
+  __ Ldc1(result, MemOperand(scratch));

  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ FmoveHigh(scratch, result);
    DeoptimizeIf(eq, instr, DeoptimizeReason::kHole, scratch,
                 Operand(static_cast<int32_t>(kHoleNanUpper32)));
  }
}
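
Reviewer sketch of the hole check above (standalone C++, not V8 code): the hole is a reserved NaN bit pattern, and its upper 32 bits, which is all FmoveHigh exposes, are enough to identify it. The constant below is assumed for the sketch; V8 defines the real kHoleNanUpper32.

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      const uint32_t kHoleNanUpper32 = 0xFFF7FFFF;  // assumed value
      const uint64_t hole_bits =
          (static_cast<uint64_t>(kHoleNanUpper32) << 32) | 0xFFF7FFFFu;
      double d;
      std::memcpy(&d, &hole_bits, sizeof(d));
      assert(d != d);  // the hole is a NaN
      // The upper word alone distinguishes it, mirroring FmoveHigh + compare.
      assert(static_cast<uint32_t>(hole_bits >> 32) == kHoleNanUpper32);
    }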


void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
(...skipping 55 matching lines...)
    DCHECK(instr->hydrogen()->elements_kind() == FAST_HOLEY_ELEMENTS);
    Label done;
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    __ Branch(&done, ne, result, Operand(scratch));
    if (info()->IsStub()) {
      // A stub can safely convert the hole to undefined only if the array
      // protector cell contains (Smi) Isolate::kProtectorValid. Otherwise
      // it needs to bail out.
      __ LoadRoot(result, Heap::kArrayProtectorRootIndex);
      // The comparison only needs LS bits of value, which is a smi.
-      __ ld(result, FieldMemOperand(result, PropertyCell::kValueOffset));
+      __ Ld(result, FieldMemOperand(result, PropertyCell::kValueOffset));
      DeoptimizeIf(ne, instr, DeoptimizeReason::kHole, result,
                   Operand(Smi::FromInt(Isolate::kProtectorValid)));
    }
    __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
    __ bind(&done);
  }
}


void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
(...skipping 53 matching lines...)
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register scratch = scratch0();
  Register temp = scratch1();
  Register result = ToRegister(instr->result());

  if (instr->hydrogen()->from_inlined()) {
    __ Dsubu(result, sp, 2 * kPointerSize);
  } else if (instr->hydrogen()->arguments_adaptor()) {
    // Check if the calling frame is an arguments adaptor frame.
    Label done, adapted;
-    __ ld(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+    __ Ld(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
-    __ ld(result,
+    __ Ld(result,
          MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ Xor(temp, result,
           Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));

    // Result is the frame pointer for the frame if not adapted and for the
    // real frame below the adaptor frame if adapted.
    __ Movn(result, fp, temp);  // Move only if temp is not equal to zero (ne).
    __ Movz(result, scratch, temp);  // Move only if temp is equal to zero (eq).
  } else {
    __ mov(result, fp);
  }
}
3056 3056
3057 3057
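Note: the Movn/Movz pair is a branchless select keyed on the Xor result, which is zero exactly when the caller frame is an arguments adaptor frame. The same logic in C++, with hypothetical names:

  #include <cstdint>

  // Pick the adaptor frame's fp when the frame-type marker matches,
  // otherwise keep the current fp (Movn covers ne, Movz covers eq).
  uintptr_t SelectArgumentsFrame(uintptr_t fp, uintptr_t caller_fp,
                                 uintptr_t marker, uintptr_t adaptor_marker) {
    uintptr_t temp = marker ^ adaptor_marker;  // zero iff adapted
    return (temp != 0) ? fp : caller_fp;
  }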
3058 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { 3058 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
3059 Register elem = ToRegister(instr->elements()); 3059 Register elem = ToRegister(instr->elements());
3060 Register result = ToRegister(instr->result()); 3060 Register result = ToRegister(instr->result());
3061 3061
3062 Label done; 3062 Label done;
3063 3063
3064 // If no arguments adaptor frame the number of arguments is fixed. 3064 // If no arguments adaptor frame the number of arguments is fixed.
3065 __ Daddu(result, zero_reg, Operand(scope()->num_parameters())); 3065 __ Daddu(result, zero_reg, Operand(scope()->num_parameters()));
3066 __ Branch(&done, eq, fp, Operand(elem)); 3066 __ Branch(&done, eq, fp, Operand(elem));
3067 3067
3068 // Arguments adaptor frame present. Get argument length from there. 3068 // Arguments adaptor frame present. Get argument length from there.
3069 __ ld(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3069 __ Ld(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3070 __ ld(result, 3070 __ Ld(result,
3071 MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset)); 3071 MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
3072 __ SmiUntag(result); 3072 __ SmiUntag(result);
3073 3073
3074 // Argument length is in result register. 3074 // Argument length is in result register.
3075 __ bind(&done); 3075 __ bind(&done);
3076 } 3076 }
3077 3077
3078 3078
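Note: the length slot of an adaptor frame holds a smi, and with 32-bit smis on MIPS64 the SmiUntag above is just an arithmetic right shift. A sketch under that layout assumption:

  #include <cstdint>

  // Assumption: 64-bit V8 keeps a smi's 32-bit payload in the upper word.
  constexpr int kSmiShiftGuess = 32;

  int64_t SmiUntagSketch(int64_t tagged) {
    return tagged >> kSmiShiftGuess;  // arithmetic shift recovers the integer
  }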
3079 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { 3079 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
3080 Register receiver = ToRegister(instr->receiver()); 3080 Register receiver = ToRegister(instr->receiver());
3081 Register function = ToRegister(instr->function()); 3081 Register function = ToRegister(instr->function());
3082 Register result = ToRegister(instr->result()); 3082 Register result = ToRegister(instr->result());
3083 Register scratch = scratch0(); 3083 Register scratch = scratch0();
3084 3084
3085 // If the receiver is null or undefined, we have to pass the global 3085 // If the receiver is null or undefined, we have to pass the global
3086 // object as a receiver to normal functions. Values have to be 3086 // object as a receiver to normal functions. Values have to be
3087 // passed unchanged to builtins and strict-mode functions. 3087 // passed unchanged to builtins and strict-mode functions.
3088 Label global_object, result_in_receiver; 3088 Label global_object, result_in_receiver;
3089 3089
3090 if (!instr->hydrogen()->known_function()) { 3090 if (!instr->hydrogen()->known_function()) {
3091 // Do not transform the receiver to object for strict mode functions. 3091 // Do not transform the receiver to object for strict mode functions.
3092 __ ld(scratch, 3092 __ Ld(scratch,
3093 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); 3093 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
3094 3094
3095 // Do not transform the receiver to object for builtins. 3095 // Do not transform the receiver to object for builtins.
3096 int32_t strict_mode_function_mask = 3096 int32_t strict_mode_function_mask =
3097 1 << SharedFunctionInfo::kStrictModeBitWithinByte; 3097 1 << SharedFunctionInfo::kStrictModeBitWithinByte;
3098 int32_t native_mask = 1 << SharedFunctionInfo::kNativeBitWithinByte; 3098 int32_t native_mask = 1 << SharedFunctionInfo::kNativeBitWithinByte;
3099 3099
3100 __ lbu(at, 3100 __ Lbu(at,
3101 FieldMemOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset)); 3101 FieldMemOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset));
3102 __ And(at, at, Operand(strict_mode_function_mask)); 3102 __ And(at, at, Operand(strict_mode_function_mask));
3103 __ Branch(&result_in_receiver, ne, at, Operand(zero_reg)); 3103 __ Branch(&result_in_receiver, ne, at, Operand(zero_reg));
3104 __ lbu(at, 3104 __ Lbu(at, FieldMemOperand(scratch, SharedFunctionInfo::kNativeByteOffset));
3105 FieldMemOperand(scratch, SharedFunctionInfo::kNativeByteOffset));
3106 __ And(at, at, Operand(native_mask)); 3105 __ And(at, at, Operand(native_mask));
3107 __ Branch(&result_in_receiver, ne, at, Operand(zero_reg)); 3106 __ Branch(&result_in_receiver, ne, at, Operand(zero_reg));
3108 } 3107 }
3109 3108
3110 // Normal function. Replace undefined or null with global receiver. 3109 // Normal function. Replace undefined or null with global receiver.
3111 __ LoadRoot(scratch, Heap::kNullValueRootIndex); 3110 __ LoadRoot(scratch, Heap::kNullValueRootIndex);
3112 __ Branch(&global_object, eq, receiver, Operand(scratch)); 3111 __ Branch(&global_object, eq, receiver, Operand(scratch));
3113 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); 3112 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
3114 __ Branch(&global_object, eq, receiver, Operand(scratch)); 3113 __ Branch(&global_object, eq, receiver, Operand(scratch));
3115 3114
3116 // Deoptimize if the receiver is not a JS object. 3115 // Deoptimize if the receiver is not a JS object.
3117 __ SmiTst(receiver, scratch); 3116 __ SmiTst(receiver, scratch);
3118 DeoptimizeIf(eq, instr, DeoptimizeReason::kSmi, scratch, Operand(zero_reg)); 3117 DeoptimizeIf(eq, instr, DeoptimizeReason::kSmi, scratch, Operand(zero_reg));
3119 3118
3120 __ GetObjectType(receiver, scratch, scratch); 3119 __ GetObjectType(receiver, scratch, scratch);
3121 DeoptimizeIf(lt, instr, DeoptimizeReason::kNotAJavaScriptObject, scratch, 3120 DeoptimizeIf(lt, instr, DeoptimizeReason::kNotAJavaScriptObject, scratch,
3122 Operand(FIRST_JS_RECEIVER_TYPE)); 3121 Operand(FIRST_JS_RECEIVER_TYPE));
3123 __ Branch(&result_in_receiver); 3122 __ Branch(&result_in_receiver);
3124 3123
3125 __ bind(&global_object); 3124 __ bind(&global_object);
3126 __ ld(result, FieldMemOperand(function, JSFunction::kContextOffset)); 3125 __ Ld(result, FieldMemOperand(function, JSFunction::kContextOffset));
3127 __ ld(result, ContextMemOperand(result, Context::NATIVE_CONTEXT_INDEX)); 3126 __ Ld(result, ContextMemOperand(result, Context::NATIVE_CONTEXT_INDEX));
3128 __ ld(result, ContextMemOperand(result, Context::GLOBAL_PROXY_INDEX)); 3127 __ Ld(result, ContextMemOperand(result, Context::GLOBAL_PROXY_INDEX));
3129 3128
3130 if (result.is(receiver)) { 3129 if (result.is(receiver)) {
3131 __ bind(&result_in_receiver); 3130 __ bind(&result_in_receiver);
3132 } else { 3131 } else {
3133 Label result_ok; 3132 Label result_ok;
3134 __ Branch(&result_ok); 3133 __ Branch(&result_ok);
3135 __ bind(&result_in_receiver); 3134 __ bind(&result_in_receiver);
3136 __ mov(result, receiver); 3135 __ mov(result, receiver);
3137 __ bind(&result_ok); 3136 __ bind(&result_ok);
3138 } 3137 }
(...skipping 24 matching lines...)
3163 __ Daddu(elements, elements, Operand(1 * kPointerSize)); 3162 __ Daddu(elements, elements, Operand(1 * kPointerSize));
3164 3163
3165 // Loop through the arguments pushing them onto the execution 3164 // Loop through the arguments pushing them onto the execution
3166 // stack. 3165 // stack.
3167 Label invoke, loop; 3166 Label invoke, loop;
3168 // length is a small non-negative integer, due to the test above. 3167 // length is a small non-negative integer, due to the test above.
3169 __ Branch(USE_DELAY_SLOT, &invoke, eq, length, Operand(zero_reg)); 3168 __ Branch(USE_DELAY_SLOT, &invoke, eq, length, Operand(zero_reg));
3170 __ dsll(scratch, length, kPointerSizeLog2); 3169 __ dsll(scratch, length, kPointerSizeLog2);
3171 __ bind(&loop); 3170 __ bind(&loop);
3172 __ Daddu(scratch, elements, scratch); 3171 __ Daddu(scratch, elements, scratch);
3173 __ ld(scratch, MemOperand(scratch)); 3172 __ Ld(scratch, MemOperand(scratch));
3174 __ push(scratch); 3173 __ push(scratch);
3175 __ Dsubu(length, length, Operand(1)); 3174 __ Dsubu(length, length, Operand(1));
3176 __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg)); 3175 __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg));
3177 __ dsll(scratch, length, kPointerSizeLog2); 3176 __ dsll(scratch, length, kPointerSizeLog2);
3178 3177
3179 __ bind(&invoke); 3178 __ bind(&invoke);
3180 3179
3181 InvokeFlag flag = CALL_FUNCTION; 3180 InvokeFlag flag = CALL_FUNCTION;
3182 if (instr->hydrogen()->tail_call_mode() == TailCallMode::kAllow) { 3181 if (instr->hydrogen()->tail_call_mode() == TailCallMode::kAllow) {
3183 DCHECK(!info()->saves_caller_doubles()); 3182 DCHECK(!info()->saves_caller_doubles());
(...skipping 26 matching lines...)
3210 } 3209 }
3211 3210
3212 3211
3213 void LCodeGen::DoDrop(LDrop* instr) { 3212 void LCodeGen::DoDrop(LDrop* instr) {
3214 __ Drop(instr->count()); 3213 __ Drop(instr->count());
3215 } 3214 }
3216 3215
3217 3216
3218 void LCodeGen::DoThisFunction(LThisFunction* instr) { 3217 void LCodeGen::DoThisFunction(LThisFunction* instr) {
3219 Register result = ToRegister(instr->result()); 3218 Register result = ToRegister(instr->result());
3220 __ ld(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 3219 __ Ld(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3221 } 3220 }
3222 3221
3223 3222
3224 void LCodeGen::DoContext(LContext* instr) { 3223 void LCodeGen::DoContext(LContext* instr) {
3225 // If there is a non-return use, the context must be moved to a register. 3224 // If there is a non-return use, the context must be moved to a register.
3226 Register result = ToRegister(instr->result()); 3225 Register result = ToRegister(instr->result());
3227 if (info()->IsOptimizing()) { 3226 if (info()->IsOptimizing()) {
3228 __ ld(result, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3227 __ Ld(result, MemOperand(fp, StandardFrameConstants::kContextOffset));
3229 } else { 3228 } else {
3230 // If there is no frame, the context must be in cp. 3229 // If there is no frame, the context must be in cp.
3231 DCHECK(result.is(cp)); 3230 DCHECK(result.is(cp));
3232 } 3231 }
3233 } 3232 }
3234 3233
3235 3234
3236 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { 3235 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
3237 DCHECK(ToRegister(instr->context()).is(cp)); 3236 DCHECK(ToRegister(instr->context()).is(cp));
3238 __ li(scratch0(), instr->hydrogen()->declarations()); 3237 __ li(scratch0(), instr->hydrogen()->declarations());
(...skipping 10 matching lines...)
3249 bool dont_adapt_arguments = 3248 bool dont_adapt_arguments =
3250 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; 3249 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3251 bool can_invoke_directly = 3250 bool can_invoke_directly =
3252 dont_adapt_arguments || formal_parameter_count == arity; 3251 dont_adapt_arguments || formal_parameter_count == arity;
3253 3252
3254 Register function_reg = a1; 3253 Register function_reg = a1;
3255 LPointerMap* pointers = instr->pointer_map(); 3254 LPointerMap* pointers = instr->pointer_map();
3256 3255
3257 if (can_invoke_directly) { 3256 if (can_invoke_directly) {
3258 // Change context. 3257 // Change context.
3259 __ ld(cp, FieldMemOperand(function_reg, JSFunction::kContextOffset)); 3258 __ Ld(cp, FieldMemOperand(function_reg, JSFunction::kContextOffset));
3260 3259
3261 // Always initialize new target and number of actual arguments. 3260 // Always initialize new target and number of actual arguments.
3262 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); 3261 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
3263 __ li(a0, Operand(arity)); 3262 __ li(a0, Operand(arity));
3264 3263
3265 bool is_self_call = function.is_identical_to(info()->closure()); 3264 bool is_self_call = function.is_identical_to(info()->closure());
3266 3265
3267 // Invoke function. 3266 // Invoke function.
3268 if (is_self_call) { 3267 if (is_self_call) {
3269 Handle<Code> self(reinterpret_cast<Code**>(__ CodeObject().location())); 3268 Handle<Code> self(reinterpret_cast<Code**>(__ CodeObject().location()));
3270 if (is_tail_call) { 3269 if (is_tail_call) {
3271 __ Jump(self, RelocInfo::CODE_TARGET); 3270 __ Jump(self, RelocInfo::CODE_TARGET);
3272 } else { 3271 } else {
3273 __ Call(self, RelocInfo::CODE_TARGET); 3272 __ Call(self, RelocInfo::CODE_TARGET);
3274 } 3273 }
3275 } else { 3274 } else {
3276 __ ld(at, FieldMemOperand(function_reg, JSFunction::kCodeEntryOffset)); 3275 __ Ld(at, FieldMemOperand(function_reg, JSFunction::kCodeEntryOffset));
3277 if (is_tail_call) { 3276 if (is_tail_call) {
3278 __ Jump(at); 3277 __ Jump(at);
3279 } else { 3278 } else {
3280 __ Call(at); 3279 __ Call(at);
3281 } 3280 }
3282 } 3281 }
3283 3282
3284 if (!is_tail_call) { 3283 if (!is_tail_call) {
3285 // Set up deoptimization. 3284 // Set up deoptimization.
3286 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); 3285 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
3287 } 3286 }
3288 } else { 3287 } else {
3289 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 3288 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3290 ParameterCount actual(arity); 3289 ParameterCount actual(arity);
3291 ParameterCount expected(formal_parameter_count); 3290 ParameterCount expected(formal_parameter_count);
3292 InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION; 3291 InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION;
3293 __ InvokeFunction(function_reg, expected, actual, flag, generator); 3292 __ InvokeFunction(function_reg, expected, actual, flag, generator);
3294 } 3293 }
3295 } 3294 }
3296 3295
3297 3296
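Note: the fast path above only fires when adaption is provably unnecessary; otherwise InvokeFunction goes through the arguments adaptor. The guard is exactly:

  // Mirror of the can_invoke_directly computation in this function; the
  // sentinel parameter stands in for
  // SharedFunctionInfo::kDontAdaptArgumentsSentinel.
  bool CanInvokeDirectly(int formal_parameter_count, int arity,
                         int dont_adapt_sentinel) {
    bool dont_adapt_arguments = formal_parameter_count == dont_adapt_sentinel;
    return dont_adapt_arguments || formal_parameter_count == arity;
  }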
3298 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { 3297 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
3299 DCHECK(instr->context() != NULL); 3298 DCHECK(instr->context() != NULL);
3300 DCHECK(ToRegister(instr->context()).is(cp)); 3299 DCHECK(ToRegister(instr->context()).is(cp));
3301 Register input = ToRegister(instr->value()); 3300 Register input = ToRegister(instr->value());
3302 Register result = ToRegister(instr->result()); 3301 Register result = ToRegister(instr->result());
3303 Register scratch = scratch0(); 3302 Register scratch = scratch0();
3304 3303
3305 // Deoptimize if not a heap number. 3304 // Deoptimize if not a heap number.
3306 __ ld(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); 3305 __ Ld(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
3307 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); 3306 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3308 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber, scratch, 3307 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber, scratch,
3309 Operand(at)); 3308 Operand(at));
3310 3309
3311 Label done; 3310 Label done;
3312 Register exponent = scratch0(); 3311 Register exponent = scratch0();
3313 scratch = no_reg; 3312 scratch = no_reg;
3314 __ lwu(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); 3313 __ Lwu(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
3315 // Check the sign of the argument. If the argument is positive, just 3314 // Check the sign of the argument. If the argument is positive, just
3316 // return it. 3315 // return it.
3317 __ Move(result, input); 3316 __ Move(result, input);
3318 __ And(at, exponent, Operand(HeapNumber::kSignMask)); 3317 __ And(at, exponent, Operand(HeapNumber::kSignMask));
3319 __ Branch(&done, eq, at, Operand(zero_reg)); 3318 __ Branch(&done, eq, at, Operand(zero_reg));
3320 3319
3321 // Input is negative. Reverse its sign. 3320 // Input is negative. Reverse its sign.
3322 // Preserve the value of all registers. 3321 // Preserve the value of all registers.
3323 { 3322 {
3324 PushSafepointRegistersScope scope(this); 3323 PushSafepointRegistersScope scope(this);
(...skipping 15 matching lines...)
3340 // Slow case: Call the runtime system to do the number allocation. 3339 // Slow case: Call the runtime system to do the number allocation.
3341 __ bind(&slow); 3340 __ bind(&slow);
3342 3341
3343 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr, 3342 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr,
3344 instr->context()); 3343 instr->context());
3345 // Set the pointer to the new heap number in tmp. 3344 // Set the pointer to the new heap number in tmp.
3346 if (!tmp1.is(v0)) 3345 if (!tmp1.is(v0))
3347 __ mov(tmp1, v0); 3346 __ mov(tmp1, v0);
3348 // Restore input_reg after call to runtime. 3347 // Restore input_reg after call to runtime.
3349 __ LoadFromSafepointRegisterSlot(input, input); 3348 __ LoadFromSafepointRegisterSlot(input, input);
3350 __ lwu(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); 3349 __ Lwu(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
3351 3350
3352 __ bind(&allocated); 3351 __ bind(&allocated);
3353 // exponent: floating point exponent value. 3352 // exponent: floating point exponent value.
3354 // tmp1: allocated heap number. 3353 // tmp1: allocated heap number.
3355 __ And(exponent, exponent, Operand(~HeapNumber::kSignMask)); 3354 __ And(exponent, exponent, Operand(~HeapNumber::kSignMask));
3356 __ sw(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset)); 3355 __ Sw(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
3357 __ lwu(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset)); 3356 __ Lwu(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
3358 __ sw(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset)); 3357 __ Sw(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));
3359 3358
3360 __ StoreToSafepointRegisterSlot(tmp1, result); 3359 __ StoreToSafepointRegisterSlot(tmp1, result);
3361 } 3360 }
3362 3361
3363 __ bind(&done); 3362 __ bind(&done);
3364 } 3363 }
3365 3364
3366 3365
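Note: for a negative heap number, abs reduces to clearing the sign bit; the code copies the mantissa word verbatim and stores the exponent word with HeapNumber::kSignMask cleared. A host sketch of the bit manipulation:

  #include <cstdint>
  #include <cstring>

  // Clear the IEEE-754 sign bit, as the masked Sw of the exponent word does.
  double AbsViaSignBit(double value) {
    uint64_t bits;
    std::memcpy(&bits, &value, sizeof(bits));
    bits &= ~(uint64_t{1} << 63);  // kSignMask sits in the upper word
    std::memcpy(&value, &bits, sizeof(bits));
    return value;
  }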
3367 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { 3366 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
3368 Register input = ToRegister(instr->value()); 3367 Register input = ToRegister(instr->value());
(...skipping 225 matching lines...)
3594 DCHECK(ToDoubleRegister(instr->left()).is(f2)); 3593 DCHECK(ToDoubleRegister(instr->left()).is(f2));
3595 DCHECK(ToDoubleRegister(instr->result()).is(f0)); 3594 DCHECK(ToDoubleRegister(instr->result()).is(f0));
3596 3595
3597 if (exponent_type.IsSmi()) { 3596 if (exponent_type.IsSmi()) {
3598 MathPowStub stub(isolate(), MathPowStub::TAGGED); 3597 MathPowStub stub(isolate(), MathPowStub::TAGGED);
3599 __ CallStub(&stub); 3598 __ CallStub(&stub);
3600 } else if (exponent_type.IsTagged()) { 3599 } else if (exponent_type.IsTagged()) {
3601 Label no_deopt; 3600 Label no_deopt;
3602 __ JumpIfSmi(tagged_exponent, &no_deopt); 3601 __ JumpIfSmi(tagged_exponent, &no_deopt);
3603 DCHECK(!a7.is(tagged_exponent)); 3602 DCHECK(!a7.is(tagged_exponent));
3604 __ lw(a7, FieldMemOperand(tagged_exponent, HeapObject::kMapOffset)); 3603 __ Lw(a7, FieldMemOperand(tagged_exponent, HeapObject::kMapOffset));
3605 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); 3604 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3606 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber, a7, Operand(at)); 3605 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber, a7, Operand(at));
3607 __ bind(&no_deopt); 3606 __ bind(&no_deopt);
3608 MathPowStub stub(isolate(), MathPowStub::TAGGED); 3607 MathPowStub stub(isolate(), MathPowStub::TAGGED);
3609 __ CallStub(&stub); 3608 __ CallStub(&stub);
3610 } else if (exponent_type.IsInteger32()) { 3609 } else if (exponent_type.IsInteger32()) {
3611 MathPowStub stub(isolate(), MathPowStub::INTEGER); 3610 MathPowStub stub(isolate(), MathPowStub::INTEGER);
3612 __ CallStub(&stub); 3611 __ CallStub(&stub);
3613 } else { 3612 } else {
3614 DCHECK(exponent_type.IsDouble()); 3613 DCHECK(exponent_type.IsDouble());
(...skipping 54 matching lines...)
3669 RegisterConfiguration::Crankshaft()->GetGeneralRegisterName( 3668 RegisterConfiguration::Crankshaft()->GetGeneralRegisterName(
3670 actual.reg().code())); 3669 actual.reg().code()));
3671 } else { 3670 } else {
3672 Comment(";;; PrepareForTailCall, actual: %d {", actual.immediate()); 3671 Comment(";;; PrepareForTailCall, actual: %d {", actual.immediate());
3673 } 3672 }
3674 } 3673 }
3675 3674
3676 // Check if next frame is an arguments adaptor frame. 3675 // Check if next frame is an arguments adaptor frame.
3677 Register caller_args_count_reg = scratch1; 3676 Register caller_args_count_reg = scratch1;
3678 Label no_arguments_adaptor, formal_parameter_count_loaded; 3677 Label no_arguments_adaptor, formal_parameter_count_loaded;
3679 __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3678 __ Ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3680 __ ld(scratch3, MemOperand(scratch2, StandardFrameConstants::kContextOffset)); 3679 __ Ld(scratch3, MemOperand(scratch2, StandardFrameConstants::kContextOffset));
3681 __ Branch(&no_arguments_adaptor, ne, scratch3, 3680 __ Branch(&no_arguments_adaptor, ne, scratch3,
3682 Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); 3681 Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
3683 3682
3684 // Drop current frame and load arguments count from arguments adaptor frame. 3683 // Drop current frame and load arguments count from arguments adaptor frame.
3685 __ mov(fp, scratch2); 3684 __ mov(fp, scratch2);
3686 __ ld(caller_args_count_reg, 3685 __ Ld(caller_args_count_reg,
3687 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); 3686 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
3688 __ SmiUntag(caller_args_count_reg); 3687 __ SmiUntag(caller_args_count_reg);
3689 __ Branch(&formal_parameter_count_loaded); 3688 __ Branch(&formal_parameter_count_loaded);
3690 3689
3691 __ bind(&no_arguments_adaptor); 3690 __ bind(&no_arguments_adaptor);
3692 // Load caller's formal parameter count 3691 // Load caller's formal parameter count
3693 __ li(caller_args_count_reg, Operand(info()->literal()->parameter_count())); 3692 __ li(caller_args_count_reg, Operand(info()->literal()->parameter_count()));
3694 3693
3695 __ bind(&formal_parameter_count_loaded); 3694 __ bind(&formal_parameter_count_loaded);
3696 __ PrepareForTailCall(actual, caller_args_count_reg, scratch2, scratch3); 3695 __ PrepareForTailCall(actual, caller_args_count_reg, scratch2, scratch3);
(...skipping 84 matching lines...)
3781 3780
3782 if (instr->arity() == 0) { 3781 if (instr->arity() == 0) {
3783 ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode); 3782 ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
3784 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3783 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3785 } else if (instr->arity() == 1) { 3784 } else if (instr->arity() == 1) {
3786 Label done; 3785 Label done;
3787 if (IsFastPackedElementsKind(kind)) { 3786 if (IsFastPackedElementsKind(kind)) {
3788 Label packed_case; 3787 Label packed_case;
3789 // We might need a change here, 3788 // We might need a change here,
3790 // look at the first argument. 3789 // look at the first argument.
3791 __ ld(a5, MemOperand(sp, 0)); 3790 __ Ld(a5, MemOperand(sp, 0));
3792 __ Branch(&packed_case, eq, a5, Operand(zero_reg)); 3791 __ Branch(&packed_case, eq, a5, Operand(zero_reg));
3793 3792
3794 ElementsKind holey_kind = GetHoleyElementsKind(kind); 3793 ElementsKind holey_kind = GetHoleyElementsKind(kind);
3795 ArraySingleArgumentConstructorStub stub(isolate(), 3794 ArraySingleArgumentConstructorStub stub(isolate(),
3796 holey_kind, 3795 holey_kind,
3797 override_mode); 3796 override_mode);
3798 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3797 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3799 __ jmp(&done); 3798 __ jmp(&done);
3800 __ bind(&packed_case); 3799 __ bind(&packed_case);
3801 } 3800 }
(...skipping 11 matching lines...)
3813 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { 3812 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
3814 CallRuntime(instr->function(), instr->arity(), instr); 3813 CallRuntime(instr->function(), instr->arity(), instr);
3815 } 3814 }
3816 3815
3817 3816
3818 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { 3817 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) {
3819 Register function = ToRegister(instr->function()); 3818 Register function = ToRegister(instr->function());
3820 Register code_object = ToRegister(instr->code_object()); 3819 Register code_object = ToRegister(instr->code_object());
3821 __ Daddu(code_object, code_object, 3820 __ Daddu(code_object, code_object,
3822 Operand(Code::kHeaderSize - kHeapObjectTag)); 3821 Operand(Code::kHeaderSize - kHeapObjectTag));
3823 __ sd(code_object, 3822 __ Sd(code_object, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
3824 FieldMemOperand(function, JSFunction::kCodeEntryOffset));
3825 } 3823 }
3826 3824
3827 3825
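Note: the stored entry is the raw instruction start: the tagged Code pointer advanced past the header and stripped of the heap-object tag, so later calls can jump through it directly. Sketch with illustrative constants (the real values come from Code::kHeaderSize and kHeapObjectTag):

  #include <cstdint>

  constexpr intptr_t kCodeHeaderSizeGuess = 64;  // assumed, varies by build
  constexpr intptr_t kHeapObjectTagGuess = 1;    // assumed V8 tag value

  intptr_t CodeEntryFromCodeObject(intptr_t tagged_code_object) {
    // Matches the Daddu above: skip the Code header, drop the tag.
    return tagged_code_object + kCodeHeaderSizeGuess - kHeapObjectTagGuess;
  }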
3828 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) { 3826 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
3829 Register result = ToRegister(instr->result()); 3827 Register result = ToRegister(instr->result());
3830 Register base = ToRegister(instr->base_object()); 3828 Register base = ToRegister(instr->base_object());
3831 if (instr->offset()->IsConstantOperand()) { 3829 if (instr->offset()->IsConstantOperand()) {
3832 LConstantOperand* offset = LConstantOperand::cast(instr->offset()); 3830 LConstantOperand* offset = LConstantOperand::cast(instr->offset());
3833 __ Daddu(result, base, Operand(ToInteger32(offset))); 3831 __ Daddu(result, base, Operand(ToInteger32(offset)));
3834 } else { 3832 } else {
(...skipping 22 matching lines...)
3857 __ AssertNotSmi(object); 3855 __ AssertNotSmi(object);
3858 3856
3859 DCHECK(!representation.IsSmi() || 3857 DCHECK(!representation.IsSmi() ||
3860 !instr->value()->IsConstantOperand() || 3858 !instr->value()->IsConstantOperand() ||
3861 IsSmi(LConstantOperand::cast(instr->value()))); 3859 IsSmi(LConstantOperand::cast(instr->value())));
3862 if (!FLAG_unbox_double_fields && representation.IsDouble()) { 3860 if (!FLAG_unbox_double_fields && representation.IsDouble()) {
3863 DCHECK(access.IsInobject()); 3861 DCHECK(access.IsInobject());
3864 DCHECK(!instr->hydrogen()->has_transition()); 3862 DCHECK(!instr->hydrogen()->has_transition());
3865 DCHECK(!instr->hydrogen()->NeedsWriteBarrier()); 3863 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
3866 DoubleRegister value = ToDoubleRegister(instr->value()); 3864 DoubleRegister value = ToDoubleRegister(instr->value());
3867 __ sdc1(value, FieldMemOperand(object, offset)); 3865 __ Sdc1(value, FieldMemOperand(object, offset));
3868 return; 3866 return;
3869 } 3867 }
3870 3868
3871 if (instr->hydrogen()->has_transition()) { 3869 if (instr->hydrogen()->has_transition()) {
3872 Handle<Map> transition = instr->hydrogen()->transition_map(); 3870 Handle<Map> transition = instr->hydrogen()->transition_map();
3873 AddDeprecationDependency(transition); 3871 AddDeprecationDependency(transition);
3874 __ li(scratch1, Operand(transition)); 3872 __ li(scratch1, Operand(transition));
3875 __ sd(scratch1, FieldMemOperand(object, HeapObject::kMapOffset)); 3873 __ Sd(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
3876 if (instr->hydrogen()->NeedsWriteBarrierForMap()) { 3874 if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
3877 Register temp = ToRegister(instr->temp()); 3875 Register temp = ToRegister(instr->temp());
3878 // Update the write barrier for the map field. 3876 // Update the write barrier for the map field.
3879 __ RecordWriteForMap(object, 3877 __ RecordWriteForMap(object,
3880 scratch1, 3878 scratch1,
3881 temp, 3879 temp,
3882 GetRAState(), 3880 GetRAState(),
3883 kSaveFPRegs); 3881 kSaveFPRegs);
3884 } 3882 }
3885 } 3883 }
3886 3884
3887 // Do the store. 3885 // Do the store.
3888 Register destination = object; 3886 Register destination = object;
3889 if (!access.IsInobject()) { 3887 if (!access.IsInobject()) {
3890 destination = scratch1; 3888 destination = scratch1;
3891 __ ld(destination, FieldMemOperand(object, JSObject::kPropertiesOffset)); 3889 __ Ld(destination, FieldMemOperand(object, JSObject::kPropertiesOffset));
3892 } 3890 }
3893 3891
3894 if (representation.IsSmi() && SmiValuesAre32Bits() && 3892 if (representation.IsSmi() && SmiValuesAre32Bits() &&
3895 instr->hydrogen()->value()->representation().IsInteger32()) { 3893 instr->hydrogen()->value()->representation().IsInteger32()) {
3896 DCHECK(instr->hydrogen()->store_mode() == STORE_TO_INITIALIZED_ENTRY); 3894 DCHECK(instr->hydrogen()->store_mode() == STORE_TO_INITIALIZED_ENTRY);
3897 if (FLAG_debug_code) { 3895 if (FLAG_debug_code) {
3898 __ Load(scratch2, FieldMemOperand(destination, offset), representation); 3896 __ Load(scratch2, FieldMemOperand(destination, offset), representation);
3899 __ AssertSmi(scratch2); 3897 __ AssertSmi(scratch2);
3900 } 3898 }
3901 // Store int value directly to upper half of the smi. 3899 // Store int value directly to upper half of the smi.
3902 offset = SmiWordOffset(offset); 3900 offset = SmiWordOffset(offset);
3903 representation = Representation::Integer32(); 3901 representation = Representation::Integer32();
3904 } 3902 }
3905 MemOperand operand = FieldMemOperand(destination, offset); 3903 MemOperand operand = FieldMemOperand(destination, offset);
3906 3904
3907 if (FLAG_unbox_double_fields && representation.IsDouble()) { 3905 if (FLAG_unbox_double_fields && representation.IsDouble()) {
3908 DCHECK(access.IsInobject()); 3906 DCHECK(access.IsInobject());
3909 DoubleRegister value = ToDoubleRegister(instr->value()); 3907 DoubleRegister value = ToDoubleRegister(instr->value());
3910 __ sdc1(value, operand); 3908 __ Sdc1(value, operand);
3911 } else { 3909 } else {
3912 DCHECK(instr->value()->IsRegister()); 3910 DCHECK(instr->value()->IsRegister());
3913 Register value = ToRegister(instr->value()); 3911 Register value = ToRegister(instr->value());
3914 __ Store(value, operand, representation); 3912 __ Store(value, operand, representation);
3915 } 3913 }
3916 3914
3917 if (instr->hydrogen()->NeedsWriteBarrier()) { 3915 if (instr->hydrogen()->NeedsWriteBarrier()) {
3918 // Update the write barrier for the object for in-object properties. 3916 // Update the write barrier for the object for in-object properties.
3919 Register value = ToRegister(instr->value()); 3917 Register value = ToRegister(instr->value());
3920 __ RecordWriteField(destination, 3918 __ RecordWriteField(destination,
(...skipping 70 matching lines...)
3991 __ dsra(address, key, -shift_size); 3989 __ dsra(address, key, -shift_size);
3992 } 3990 }
3993 } else { 3991 } else {
3994 __ dsll(address, key, shift_size); 3992 __ dsll(address, key, shift_size);
3995 } 3993 }
3996 __ Daddu(address, external_pointer, address); 3994 __ Daddu(address, external_pointer, address);
3997 } 3995 }
3998 3996
3999 if (elements_kind == FLOAT32_ELEMENTS) { 3997 if (elements_kind == FLOAT32_ELEMENTS) {
4000 __ cvt_s_d(double_scratch0(), value); 3998 __ cvt_s_d(double_scratch0(), value);
4001 __ swc1(double_scratch0(), MemOperand(address, base_offset)); 3999 __ Swc1(double_scratch0(), MemOperand(address, base_offset));
4002 } else { // Storing doubles, not floats. 4000 } else { // Storing doubles, not floats.
4003 __ sdc1(value, MemOperand(address, base_offset)); 4001 __ Sdc1(value, MemOperand(address, base_offset));
4004 } 4002 }
4005 } else { 4003 } else {
4006 Register value(ToRegister(instr->value())); 4004 Register value(ToRegister(instr->value()));
4007 MemOperand mem_operand = PrepareKeyedOperand( 4005 MemOperand mem_operand = PrepareKeyedOperand(
4008 key, external_pointer, key_is_constant, constant_key, 4006 key, external_pointer, key_is_constant, constant_key,
4009 element_size_shift, shift_size, 4007 element_size_shift, shift_size,
4010 base_offset); 4008 base_offset);
4011 switch (elements_kind) { 4009 switch (elements_kind) {
4012 case UINT8_ELEMENTS: 4010 case UINT8_ELEMENTS:
4013 case UINT8_CLAMPED_ELEMENTS: 4011 case UINT8_CLAMPED_ELEMENTS:
4014 case INT8_ELEMENTS: 4012 case INT8_ELEMENTS:
4015 __ sb(value, mem_operand); 4013 __ Sb(value, mem_operand);
4016 break; 4014 break;
4017 case INT16_ELEMENTS: 4015 case INT16_ELEMENTS:
4018 case UINT16_ELEMENTS: 4016 case UINT16_ELEMENTS:
4019 __ sh(value, mem_operand); 4017 __ Sh(value, mem_operand);
4020 break; 4018 break;
4021 case INT32_ELEMENTS: 4019 case INT32_ELEMENTS:
4022 case UINT32_ELEMENTS: 4020 case UINT32_ELEMENTS:
4023 __ sw(value, mem_operand); 4021 __ Sw(value, mem_operand);
4024 break; 4022 break;
4025 case FLOAT32_ELEMENTS: 4023 case FLOAT32_ELEMENTS:
4026 case FLOAT64_ELEMENTS: 4024 case FLOAT64_ELEMENTS:
4027 case FAST_DOUBLE_ELEMENTS: 4025 case FAST_DOUBLE_ELEMENTS:
4028 case FAST_ELEMENTS: 4026 case FAST_ELEMENTS:
4029 case FAST_SMI_ELEMENTS: 4027 case FAST_SMI_ELEMENTS:
4030 case FAST_HOLEY_DOUBLE_ELEMENTS: 4028 case FAST_HOLEY_DOUBLE_ELEMENTS:
4031 case FAST_HOLEY_ELEMENTS: 4029 case FAST_HOLEY_ELEMENTS:
4032 case FAST_HOLEY_SMI_ELEMENTS: 4030 case FAST_HOLEY_SMI_ELEMENTS:
4033 case DICTIONARY_ELEMENTS: 4031 case DICTIONARY_ELEMENTS:
(...skipping 37 matching lines...)
4071 if (shift_size == 3) { 4069 if (shift_size == 3) {
4072 __ dsll(at, ToRegister(instr->key()), 3); 4070 __ dsll(at, ToRegister(instr->key()), 3);
4073 } else if (shift_size == -29) { 4071 } else if (shift_size == -29) {
4074 __ dsra(at, ToRegister(instr->key()), 29); 4072 __ dsra(at, ToRegister(instr->key()), 29);
4075 } 4073 }
4076 __ Daddu(scratch, scratch, at); 4074 __ Daddu(scratch, scratch, at);
4077 } 4075 }
4078 4076
4079 if (instr->NeedsCanonicalization()) { 4077 if (instr->NeedsCanonicalization()) {
4080 __ FPUCanonicalizeNaN(double_scratch, value); 4078 __ FPUCanonicalizeNaN(double_scratch, value);
4081 __ sdc1(double_scratch, MemOperand(scratch, 0)); 4079 __ Sdc1(double_scratch, MemOperand(scratch, 0));
4082 } else { 4080 } else {
4083 __ sdc1(value, MemOperand(scratch, 0)); 4081 __ Sdc1(value, MemOperand(scratch, 0));
4084 } 4082 }
4085 } 4083 }
4086 4084
4087 4085
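Note: canonicalizing NaNs before the Sdc1 keeps user-supplied NaN bit patterns out of FAST_DOUBLE_ELEMENTS backing stores, so they can never alias the hole encoding that the load path tests for. A sketch of the intent (V8 pins one specific quiet-NaN pattern; the host default is used here as an assumption):

  #include <cmath>
  #include <limits>

  double CanonicalizeNaNSketch(double value) {
    // Any NaN collapses to a single canonical representation before storage.
    return std::isnan(value) ? std::numeric_limits<double>::quiet_NaN() : value;
  }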
4088 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { 4086 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
4089 Register value = ToRegister(instr->value()); 4087 Register value = ToRegister(instr->value());
4090 Register elements = ToRegister(instr->elements()); 4088 Register elements = ToRegister(instr->elements());
4091 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) 4089 Register key = instr->key()->IsRegister() ? ToRegister(instr->key())
4092 : no_reg; 4090 : no_reg;
4093 Register scratch = scratch0(); 4091 Register scratch = scratch0();
(...skipping 110 matching lines...)
4204 __ Branch(deferred->entry(), ge, ToRegister(key), 4202 __ Branch(deferred->entry(), ge, ToRegister(key),
4205 Operand(constant_capacity)); 4203 Operand(constant_capacity));
4206 } else { 4204 } else {
4207 __ Branch(deferred->entry(), ge, ToRegister(key), 4205 __ Branch(deferred->entry(), ge, ToRegister(key),
4208 Operand(ToRegister(current_capacity))); 4206 Operand(ToRegister(current_capacity)));
4209 } 4207 }
4210 4208
4211 if (instr->elements()->IsRegister()) { 4209 if (instr->elements()->IsRegister()) {
4212 __ mov(result, ToRegister(instr->elements())); 4210 __ mov(result, ToRegister(instr->elements()));
4213 } else { 4211 } else {
4214 __ ld(result, ToMemOperand(instr->elements())); 4212 __ Ld(result, ToMemOperand(instr->elements()));
4215 } 4213 }
4216 4214
4217 __ bind(deferred->exit()); 4215 __ bind(deferred->exit());
4218 } 4216 }
4219 4217
4220 4218
4221 void LCodeGen::DoDeferredMaybeGrowElements(LMaybeGrowElements* instr) { 4219 void LCodeGen::DoDeferredMaybeGrowElements(LMaybeGrowElements* instr) {
4222 // TODO(3095996): Get rid of this. For now, we need to make the 4220 // TODO(3095996): Get rid of this. For now, we need to make the
4223 // result register contain a valid pointer because it is already 4221 // result register contain a valid pointer because it is already
4224 // contained in the register pointer map. 4222 // contained in the register pointer map.
4225 Register result = v0; 4223 Register result = v0;
4226 __ mov(result, zero_reg); 4224 __ mov(result, zero_reg);
4227 4225
4228 // We have to call a stub. 4226 // We have to call a stub.
4229 { 4227 {
4230 PushSafepointRegistersScope scope(this); 4228 PushSafepointRegistersScope scope(this);
4231 if (instr->object()->IsRegister()) { 4229 if (instr->object()->IsRegister()) {
4232 __ mov(result, ToRegister(instr->object())); 4230 __ mov(result, ToRegister(instr->object()));
4233 } else { 4231 } else {
4234 __ ld(result, ToMemOperand(instr->object())); 4232 __ Ld(result, ToMemOperand(instr->object()));
4235 } 4233 }
4236 4234
4237 LOperand* key = instr->key(); 4235 LOperand* key = instr->key();
4238 if (key->IsConstantOperand()) { 4236 if (key->IsConstantOperand()) {
4239 __ li(a3, Operand(ToSmi(LConstantOperand::cast(key)))); 4237 __ li(a3, Operand(ToSmi(LConstantOperand::cast(key))));
4240 } else { 4238 } else {
4241 __ mov(a3, ToRegister(key)); 4239 __ mov(a3, ToRegister(key));
4242 __ SmiTag(a3); 4240 __ SmiTag(a3);
4243 } 4241 }
4244 4242
(...skipping 14 matching lines...)
4259 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { 4257 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
4260 Register object_reg = ToRegister(instr->object()); 4258 Register object_reg = ToRegister(instr->object());
4261 Register scratch = scratch0(); 4259 Register scratch = scratch0();
4262 4260
4263 Handle<Map> from_map = instr->original_map(); 4261 Handle<Map> from_map = instr->original_map();
4264 Handle<Map> to_map = instr->transitioned_map(); 4262 Handle<Map> to_map = instr->transitioned_map();
4265 ElementsKind from_kind = instr->from_kind(); 4263 ElementsKind from_kind = instr->from_kind();
4266 ElementsKind to_kind = instr->to_kind(); 4264 ElementsKind to_kind = instr->to_kind();
4267 4265
4268 Label not_applicable; 4266 Label not_applicable;
4269 __ ld(scratch, FieldMemOperand(object_reg, HeapObject::kMapOffset)); 4267 __ Ld(scratch, FieldMemOperand(object_reg, HeapObject::kMapOffset));
4270 __ Branch(&not_applicable, ne, scratch, Operand(from_map)); 4268 __ Branch(&not_applicable, ne, scratch, Operand(from_map));
4271 4269
4272 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { 4270 if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
4273 Register new_map_reg = ToRegister(instr->new_map_temp()); 4271 Register new_map_reg = ToRegister(instr->new_map_temp());
4274 __ li(new_map_reg, Operand(to_map)); 4272 __ li(new_map_reg, Operand(to_map));
4275 __ sd(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); 4273 __ Sd(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
4276 // Write barrier. 4274 // Write barrier.
4277 __ RecordWriteForMap(object_reg, 4275 __ RecordWriteForMap(object_reg,
4278 new_map_reg, 4276 new_map_reg,
4279 scratch, 4277 scratch,
4280 GetRAState(), 4278 GetRAState(),
4281 kDontSaveFPRegs); 4279 kDontSaveFPRegs);
4282 } else { 4280 } else {
4283 DCHECK(object_reg.is(a0)); 4281 DCHECK(object_reg.is(a0));
4284 DCHECK(ToRegister(instr->context()).is(cp)); 4282 DCHECK(ToRegister(instr->context()).is(cp));
4285 PushSafepointRegistersScope scope(this); 4283 PushSafepointRegistersScope scope(this);
(...skipping 102 matching lines...)
4388 DCHECK(instr->hydrogen()->value()->representation().IsInteger32()); 4386 DCHECK(instr->hydrogen()->value()->representation().IsInteger32());
4389 Register char_code = ToRegister(instr->char_code()); 4387 Register char_code = ToRegister(instr->char_code());
4390 Register result = ToRegister(instr->result()); 4388 Register result = ToRegister(instr->result());
4391 Register scratch = scratch0(); 4389 Register scratch = scratch0();
4392 DCHECK(!char_code.is(result)); 4390 DCHECK(!char_code.is(result));
4393 4391
4394 __ Branch(deferred->entry(), hi, 4392 __ Branch(deferred->entry(), hi,
4395 char_code, Operand(String::kMaxOneByteCharCode)); 4393 char_code, Operand(String::kMaxOneByteCharCode));
4396 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex); 4394 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
4397 __ Dlsa(result, result, char_code, kPointerSizeLog2); 4395 __ Dlsa(result, result, char_code, kPointerSizeLog2);
4398 __ ld(result, FieldMemOperand(result, FixedArray::kHeaderSize)); 4396 __ Ld(result, FieldMemOperand(result, FixedArray::kHeaderSize));
4399 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); 4397 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4400 __ Branch(deferred->entry(), eq, result, Operand(scratch)); 4398 __ Branch(deferred->entry(), eq, result, Operand(scratch));
4401 __ bind(deferred->exit()); 4399 __ bind(deferred->exit());
4402 } 4400 }
4403 4401
4404 4402
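Note: the fast path indexes the SingleCharacterStringCache root array (the Dlsa computes result + (char_code << kPointerSizeLog2)) and bails to the deferred code either for char codes above kMaxOneByteCharCode or on a cache miss, where the slot holds undefined. A shape sketch with hypothetical types:

  #include <cstddef>
  #include <cstdint>

  // nullptr plays the role of undefined, i.e. the deferred slow path.
  const char* LookupSingleCharString(const char* const* cache,
                                     size_t cache_size, uint32_t char_code) {
    if (char_code >= cache_size) return nullptr;  // above kMaxOneByteCharCode
    return cache[char_code];                      // may still miss (nullptr)
  }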
4405 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) { 4403 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
4406 Register char_code = ToRegister(instr->char_code()); 4404 Register char_code = ToRegister(instr->char_code());
4407 Register result = ToRegister(instr->result()); 4405 Register result = ToRegister(instr->result());
4408 4406
(...skipping 12 matching lines...)
4421 4419
4422 4420
4423 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { 4421 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4424 LOperand* input = instr->value(); 4422 LOperand* input = instr->value();
4425 DCHECK(input->IsRegister() || input->IsStackSlot()); 4423 DCHECK(input->IsRegister() || input->IsStackSlot());
4426 LOperand* output = instr->result(); 4424 LOperand* output = instr->result();
4427 DCHECK(output->IsDoubleRegister()); 4425 DCHECK(output->IsDoubleRegister());
4428 FPURegister single_scratch = double_scratch0().low(); 4426 FPURegister single_scratch = double_scratch0().low();
4429 if (input->IsStackSlot()) { 4427 if (input->IsStackSlot()) {
4430 Register scratch = scratch0(); 4428 Register scratch = scratch0();
4431 __ ld(scratch, ToMemOperand(input)); 4429 __ Ld(scratch, ToMemOperand(input));
4432 __ mtc1(scratch, single_scratch); 4430 __ mtc1(scratch, single_scratch);
4433 } else { 4431 } else {
4434 __ mtc1(ToRegister(input), single_scratch); 4432 __ mtc1(ToRegister(input), single_scratch);
4435 } 4433 }
4436 __ cvt_d_w(ToDoubleRegister(output), single_scratch); 4434 __ cvt_d_w(ToDoubleRegister(output), single_scratch);
4437 } 4435 }
4438 4436
4439 4437
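Note: mtc1 moves the 32-bit integer into an FPU register and cvt_d_w converts it in place; a stack-slot input takes one extra Ld into a scratch GPR first. The computed value is simply:

  #include <cstdint>

  double Int32ToDouble(int32_t value) {
    return static_cast<double>(value);  // what mtc1 + cvt_d_w produce
  }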
4440 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) { 4438 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
4441 LOperand* input = instr->value(); 4439 LOperand* input = instr->value();
(...skipping 82 matching lines...)
4524 } 4522 }
4525 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); 4523 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4526 RecordSafepointWithRegisters( 4524 RecordSafepointWithRegisters(
4527 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); 4525 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4528 __ StoreToSafepointRegisterSlot(v0, dst); 4526 __ StoreToSafepointRegisterSlot(v0, dst);
4529 } 4527 }
4530 4528
4531 // Done. Put the value in dbl_scratch into the value of the allocated heap 4529 // Done. Put the value in dbl_scratch into the value of the allocated heap
4532 // number. 4530 // number.
4533 __ bind(&done); 4531 __ bind(&done);
4534 __ sdc1(dbl_scratch, FieldMemOperand(dst, HeapNumber::kValueOffset)); 4532 __ Sdc1(dbl_scratch, FieldMemOperand(dst, HeapNumber::kValueOffset));
4535 } 4533 }
4536 4534
4537 4535
4538 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { 4536 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
4539 class DeferredNumberTagD final : public LDeferredCode { 4537 class DeferredNumberTagD final : public LDeferredCode {
4540 public: 4538 public:
4541 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) 4539 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
4542 : LDeferredCode(codegen), instr_(instr) { } 4540 : LDeferredCode(codegen), instr_(instr) { }
4543 void Generate() override { codegen()->DoDeferredNumberTagD(instr_); } 4541 void Generate() override { codegen()->DoDeferredNumberTagD(instr_); }
4544 LInstruction* instr() override { return instr_; } 4542 LInstruction* instr() override { return instr_; }
(...skipping 10 matching lines...) Expand all
4555 4553
4556 DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr); 4554 DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
4557 if (FLAG_inline_new) { 4555 if (FLAG_inline_new) {
4558 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex); 4556 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
4559 // We want the untagged address first for performance 4557 // We want the untagged address first for performance
4560 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry()); 4558 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
4561 } else { 4559 } else {
4562 __ Branch(deferred->entry()); 4560 __ Branch(deferred->entry());
4563 } 4561 }
4564 __ bind(deferred->exit()); 4562 __ bind(deferred->exit());
4565 __ sdc1(input_reg, FieldMemOperand(reg, HeapNumber::kValueOffset)); 4563 __ Sdc1(input_reg, FieldMemOperand(reg, HeapNumber::kValueOffset));
4566 } 4564 }
4567 4565
4568 4566
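Note: tagging a double means allocating a HeapNumber (inline when FLAG_inline_new, else via the deferred runtime call) and then writing the payload with Sdc1 at kValueOffset. A rough shape, with a plain struct standing in for the real HeapNumber layout:

  #include <cstdlib>

  struct BoxedNumber { double value; };  // hypothetical HeapNumber stand-in

  BoxedNumber* TagDouble(double input) {
    // Allocation here models only the slow path; V8 tries inline new first.
    BoxedNumber* box =
        static_cast<BoxedNumber*>(std::malloc(sizeof(BoxedNumber)));
    if (box != nullptr) box->value = input;  // the Sdc1 at kValueOffset
    return box;
  }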
4569 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { 4567 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
4570 // TODO(3095996): Get rid of this. For now, we need to make the 4568 // TODO(3095996): Get rid of this. For now, we need to make the
4571 // result register contain a valid pointer because it is already 4569 // result register contain a valid pointer because it is already
4572 // contained in the register pointer map. 4570 // contained in the register pointer map.
4573 Register reg = ToRegister(instr->result()); 4571 Register reg = ToRegister(instr->result());
4574 __ mov(reg, zero_reg); 4572 __ mov(reg, zero_reg);
4575 4573
(...skipping 50 matching lines...)
4626 NumberUntagDMode mode) { 4624 NumberUntagDMode mode) {
4627 bool can_convert_undefined_to_nan = instr->truncating(); 4625 bool can_convert_undefined_to_nan = instr->truncating();
4628 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); 4626 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero();
4629 4627
4630 Register scratch = scratch0(); 4628 Register scratch = scratch0();
4631 Label convert, load_smi, done; 4629 Label convert, load_smi, done;
4632 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { 4630 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
4633 // Smi check. 4631 // Smi check.
4634 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); 4632 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
4635 // Heap number map check. 4633 // Heap number map check.
4636 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); 4634 __ Ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
4637 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); 4635 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4638 if (can_convert_undefined_to_nan) { 4636 if (can_convert_undefined_to_nan) {
4639 __ Branch(&convert, ne, scratch, Operand(at)); 4637 __ Branch(&convert, ne, scratch, Operand(at));
4640 } else { 4638 } else {
4641 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber, scratch, 4639 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber, scratch,
4642 Operand(at)); 4640 Operand(at));
4643 } 4641 }
4644 // Load heap number. 4642 // Load heap number.
4645 __ ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); 4643 __ Ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset));
4646 if (deoptimize_on_minus_zero) { 4644 if (deoptimize_on_minus_zero) {
4647 __ mfc1(at, result_reg); 4645 __ mfc1(at, result_reg);
4648 __ Branch(&done, ne, at, Operand(zero_reg)); 4646 __ Branch(&done, ne, at, Operand(zero_reg));
4649 __ mfhc1(scratch, result_reg); // Get exponent/sign bits. 4647 __ mfhc1(scratch, result_reg); // Get exponent/sign bits.
4650 DeoptimizeIf(eq, instr, DeoptimizeReason::kMinusZero, scratch, 4648 DeoptimizeIf(eq, instr, DeoptimizeReason::kMinusZero, scratch,
4651 Operand(HeapNumber::kSignMask)); 4649 Operand(HeapNumber::kSignMask));
4652 } 4650 }
4653 __ Branch(&done); 4651 __ Branch(&done);
4654 if (can_convert_undefined_to_nan) { 4652 if (can_convert_undefined_to_nan) {
4655 __ bind(&convert); 4653 __ bind(&convert);
4656 // Convert undefined (and hole) to NaN. 4654 // Convert undefined (and hole) to NaN.
4657 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 4655 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4658 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined, 4656 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined,
4659 input_reg, Operand(at)); 4657 input_reg, Operand(at));
4660 __ LoadRoot(scratch, Heap::kNanValueRootIndex); 4658 __ LoadRoot(scratch, Heap::kNanValueRootIndex);
4661 __ ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset)); 4659 __ Ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset));
4662 __ Branch(&done); 4660 __ Branch(&done);
4663 } 4661 }
4664 } else { 4662 } else {
4665 __ SmiUntag(scratch, input_reg); 4663 __ SmiUntag(scratch, input_reg);
4666 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); 4664 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI);
4667 } 4665 }
4668 // Smi to double register conversion 4666 // Smi to double register conversion
4669 __ bind(&load_smi); 4667 __ bind(&load_smi);
4670 // scratch: untagged value of input_reg 4668 // scratch: untagged value of input_reg
4671 __ mtc1(scratch, result_reg); 4669 __ mtc1(scratch, result_reg);
4672 __ cvt_d_w(result_reg, result_reg); 4670 __ cvt_d_w(result_reg, result_reg);
4673 __ bind(&done); 4671 __ bind(&done);
4674 } 4672 }
4675 4673
4676 4674
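Note: the minus-zero deopt above is a pure bit test: a nonzero low word (mfc1) rules out -0.0 immediately, otherwise the upper word (mfhc1) must equal kSignMask exactly. Equivalent host code:

  #include <cstdint>
  #include <cstring>

  bool IsMinusZero(double value) {
    uint64_t bits;
    std::memcpy(&bits, &value, sizeof(bits));
    if (static_cast<uint32_t>(bits) != 0) return false;       // low word, mfc1
    return static_cast<uint32_t>(bits >> 32) == 0x80000000u;  // upper == kSignMask
  }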
4677 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { 4675 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
4678 Register input_reg = ToRegister(instr->value()); 4676 Register input_reg = ToRegister(instr->value());
4679 Register scratch1 = scratch0(); 4677 Register scratch1 = scratch0();
4680 Register scratch2 = ToRegister(instr->temp()); 4678 Register scratch2 = ToRegister(instr->temp());
4681 DoubleRegister double_scratch = double_scratch0(); 4679 DoubleRegister double_scratch = double_scratch0();
4682 DoubleRegister double_scratch2 = ToDoubleRegister(instr->temp2()); 4680 DoubleRegister double_scratch2 = ToDoubleRegister(instr->temp2());
4683 4681
4684 DCHECK(!scratch1.is(input_reg) && !scratch1.is(scratch2)); 4682 DCHECK(!scratch1.is(input_reg) && !scratch1.is(scratch2));
4685 DCHECK(!scratch2.is(input_reg) && !scratch2.is(scratch1)); 4683 DCHECK(!scratch2.is(input_reg) && !scratch2.is(scratch1));
4686 4684
4687 Label done; 4685 Label done;
4688 4686
4689 // The input is a tagged HeapObject. 4687 // The input is a tagged HeapObject.
4690 // Heap number map check. 4688 // Heap number map check.
4691 __ ld(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset)); 4689 __ Ld(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
4692 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); 4690 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4693 // This 'at' value and scratch1 map value are used for tests in both clauses 4691 // This 'at' value and scratch1 map value are used for tests in both clauses
4694 // of the if. 4692 // of the if.
4695 4693
4696 if (instr->truncating()) { 4694 if (instr->truncating()) {
4697 Label truncate; 4695 Label truncate;
4698 __ Branch(USE_DELAY_SLOT, &truncate, eq, scratch1, Operand(at)); 4696 __ Branch(USE_DELAY_SLOT, &truncate, eq, scratch1, Operand(at));
4699 __ mov(scratch2, input_reg); // In delay slot. 4697 __ mov(scratch2, input_reg); // In delay slot.
4700 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); 4698 __ Lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4701 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotANumberOrOddball, scratch1, 4699 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotANumberOrOddball, scratch1,
4702 Operand(ODDBALL_TYPE)); 4700 Operand(ODDBALL_TYPE));
4703 __ bind(&truncate); 4701 __ bind(&truncate);
4704 __ TruncateHeapNumberToI(input_reg, scratch2); 4702 __ TruncateHeapNumberToI(input_reg, scratch2);
4705 } else { 4703 } else {
4706 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber, scratch1, 4704 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber, scratch1,
4707 Operand(at)); 4705 Operand(at));
4708 4706
4709 // Load the double value. 4707 // Load the double value.
4710 __ ldc1(double_scratch, 4708 __ Ldc1(double_scratch,
4711 FieldMemOperand(input_reg, HeapNumber::kValueOffset)); 4709 FieldMemOperand(input_reg, HeapNumber::kValueOffset));
4712 4710
4713 Register except_flag = scratch2; 4711 Register except_flag = scratch2;
4714 __ EmitFPUTruncate(kRoundToZero, 4712 __ EmitFPUTruncate(kRoundToZero,
4715 input_reg, 4713 input_reg,
4716 double_scratch, 4714 double_scratch,
4717 scratch1, 4715 scratch1,
4718 double_scratch2, 4716 double_scratch2,
4719 except_flag, 4717 except_flag,
4720 kCheckForInexactConversion); 4718 kCheckForInexactConversion);
(...skipping 149 matching lines...)
4870 DeoptimizeIf(eq, instr, DeoptimizeReason::kSmi, at, Operand(zero_reg)); 4868 DeoptimizeIf(eq, instr, DeoptimizeReason::kSmi, at, Operand(zero_reg));
4871 } 4869 }
4872 } 4870 }
4873 4871
4874 4872
4875 void LCodeGen::DoCheckArrayBufferNotNeutered( 4873 void LCodeGen::DoCheckArrayBufferNotNeutered(
4876 LCheckArrayBufferNotNeutered* instr) { 4874 LCheckArrayBufferNotNeutered* instr) {
4877 Register view = ToRegister(instr->view()); 4875 Register view = ToRegister(instr->view());
4878 Register scratch = scratch0(); 4876 Register scratch = scratch0();
4879 4877
4880 __ ld(scratch, FieldMemOperand(view, JSArrayBufferView::kBufferOffset)); 4878 __ Ld(scratch, FieldMemOperand(view, JSArrayBufferView::kBufferOffset));
4881 __ lw(scratch, FieldMemOperand(scratch, JSArrayBuffer::kBitFieldOffset)); 4879 __ Lw(scratch, FieldMemOperand(scratch, JSArrayBuffer::kBitFieldOffset));
4882 __ And(at, scratch, 1 << JSArrayBuffer::WasNeutered::kShift); 4880 __ And(at, scratch, 1 << JSArrayBuffer::WasNeutered::kShift);
4883 DeoptimizeIf(ne, instr, DeoptimizeReason::kOutOfBounds, at, 4881 DeoptimizeIf(ne, instr, DeoptimizeReason::kOutOfBounds, at,
4884 Operand(zero_reg)); 4882 Operand(zero_reg));
4885 } 4883 }
4886 4884
4887 4885
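Note: the check dereferences view -> buffer, loads the 32-bit bit field, and deopts with kOutOfBounds when the WasNeutered bit is set, so a detached ArrayBuffer can never be accessed through a stale view. Sketch with an assumed bit position:

  #include <cstdint>

  constexpr uint32_t kWasNeuteredShiftGuess = 3;  // stand-in for WasNeutered::kShift

  bool BufferWasNeutered(uint32_t bit_field) {
    return (bit_field & (1u << kWasNeuteredShiftGuess)) != 0;  // the And above
  }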
4888 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { 4886 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
4889 Register input = ToRegister(instr->value()); 4887 Register input = ToRegister(instr->value());
4890 Register scratch = scratch0(); 4888 Register scratch = scratch0();
4891 4889
(...skipping 37 matching lines...)
4929 4927
4930 4928
4931 void LCodeGen::DoCheckValue(LCheckValue* instr) { 4929 void LCodeGen::DoCheckValue(LCheckValue* instr) {
4932 Register reg = ToRegister(instr->value()); 4930 Register reg = ToRegister(instr->value());
4933 Handle<HeapObject> object = instr->hydrogen()->object().handle(); 4931 Handle<HeapObject> object = instr->hydrogen()->object().handle();
4934 AllowDeferredHandleDereference smi_check; 4932 AllowDeferredHandleDereference smi_check;
4935 if (isolate()->heap()->InNewSpace(*object)) { 4933 if (isolate()->heap()->InNewSpace(*object)) {
4936 Register reg = ToRegister(instr->value()); 4934 Register reg = ToRegister(instr->value());
4937 Handle<Cell> cell = isolate()->factory()->NewCell(object); 4935 Handle<Cell> cell = isolate()->factory()->NewCell(object);
4938 __ li(at, Operand(cell)); 4936 __ li(at, Operand(cell));
4939 __ ld(at, FieldMemOperand(at, Cell::kValueOffset)); 4937 __ Ld(at, FieldMemOperand(at, Cell::kValueOffset));
4940 DeoptimizeIf(ne, instr, DeoptimizeReason::kValueMismatch, reg, Operand(at)); 4938 DeoptimizeIf(ne, instr, DeoptimizeReason::kValueMismatch, reg, Operand(at));
4941 } else { 4939 } else {
4942 DeoptimizeIf(ne, instr, DeoptimizeReason::kValueMismatch, reg, 4940 DeoptimizeIf(ne, instr, DeoptimizeReason::kValueMismatch, reg,
4943 Operand(object)); 4941 Operand(object));
4944 } 4942 }
4945 } 4943 }
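The InNewSpace split above exists because generated code cannot safely embed a direct reference to a new-space object, which may move on every scavenge. Pinning the value in a Cell gives the code a stable address to load through, and the GC keeps the cell's contents current. A rough sketch of the two compare shapes, with deopt_if standing in for DeoptimizeIf (hypothetical helper):

    if (isolate()->heap()->InNewSpace(*object)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(object);
      deopt_if(reg != cell->value());   // one extra load, compare through the cell
    } else {
      deopt_if(reg != *object);         // stable constant, compared directly
    }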
4946 4944
4947 4945
4948 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { 4946 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
4949 Label deopt, done; 4947 Label deopt, done;
4950 // If the map is not deprecated, the migration attempt does not make sense. 4948 // If the map is not deprecated, the migration attempt does not make sense.
4951 __ ld(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); 4949 __ Ld(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset));
4952 __ lwu(scratch0(), FieldMemOperand(scratch0(), Map::kBitField3Offset)); 4950 __ Lwu(scratch0(), FieldMemOperand(scratch0(), Map::kBitField3Offset));
4953 __ And(at, scratch0(), Operand(Map::Deprecated::kMask)); 4951 __ And(at, scratch0(), Operand(Map::Deprecated::kMask));
4954 __ Branch(&deopt, eq, at, Operand(zero_reg)); 4952 __ Branch(&deopt, eq, at, Operand(zero_reg));
4955 4953
4956 { 4954 {
4957 PushSafepointRegistersScope scope(this); 4955 PushSafepointRegistersScope scope(this);
4958 __ push(object); 4956 __ push(object);
4959 __ mov(cp, zero_reg); 4957 __ mov(cp, zero_reg);
4960 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); 4958 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
4961 RecordSafepointWithRegisters( 4959 RecordSafepointWithRegisters(
4962 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); 4960 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
(...skipping 36 matching lines...)
4999 for (int i = 0; i < maps->size(); ++i) { 4997 for (int i = 0; i < maps->size(); ++i) {
5000 AddStabilityDependency(maps->at(i).handle()); 4998 AddStabilityDependency(maps->at(i).handle());
5001 } 4999 }
5002 return; 5000 return;
5003 } 5001 }
5004 5002
5005 Register map_reg = scratch0(); 5003 Register map_reg = scratch0();
5006 LOperand* input = instr->value(); 5004 LOperand* input = instr->value();
5007 DCHECK(input->IsRegister()); 5005 DCHECK(input->IsRegister());
5008 Register reg = ToRegister(input); 5006 Register reg = ToRegister(input);
5009 __ ld(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); 5007 __ Ld(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
5010 5008
5011 DeferredCheckMaps* deferred = NULL; 5009 DeferredCheckMaps* deferred = NULL;
5012 if (instr->hydrogen()->HasMigrationTarget()) { 5010 if (instr->hydrogen()->HasMigrationTarget()) {
5013 deferred = new(zone()) DeferredCheckMaps(this, instr, reg); 5011 deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
5014 __ bind(deferred->check_maps()); 5012 __ bind(deferred->check_maps());
5015 } 5013 }
5016 5014
5017 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); 5015 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
5018 Label success; 5016 Label success;
5019 for (int i = 0; i < maps->size() - 1; i++) { 5017 for (int i = 0; i < maps->size() - 1; i++) {
(...skipping 31 matching lines...)
5051 Register scratch = scratch0(); 5049 Register scratch = scratch0();
5052 Register input_reg = ToRegister(instr->unclamped()); 5050 Register input_reg = ToRegister(instr->unclamped());
5053 Register result_reg = ToRegister(instr->result()); 5051 Register result_reg = ToRegister(instr->result());
5054 DoubleRegister temp_reg = ToDoubleRegister(instr->temp()); 5052 DoubleRegister temp_reg = ToDoubleRegister(instr->temp());
5055 Label is_smi, done, heap_number; 5053 Label is_smi, done, heap_number;
5056 5054
5057 // Both smi and heap number cases are handled. 5055 // Both smi and heap number cases are handled.
5058 __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi); 5056 __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi);
5059 5057
5060 // Check for heap number 5058 // Check for heap number
5061 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); 5059 __ Ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
5062 __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map())); 5060 __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map()));
5063 5061
5064 // Check for undefined. Undefined is converted to zero for clamping 5062 // Check for undefined. Undefined is converted to zero for clamping
5065 // conversions. 5063 // conversions.
5066 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined, input_reg, 5064 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined, input_reg,
5067 Operand(factory()->undefined_value())); 5065 Operand(factory()->undefined_value()));
5068 __ mov(result_reg, zero_reg); 5066 __ mov(result_reg, zero_reg);
5069 __ jmp(&done); 5067 __ jmp(&done);
5070 5068
5071 // Heap number 5069 // Heap number
5072 __ bind(&heap_number); 5070 __ bind(&heap_number);
5073 __ ldc1(double_scratch0(), FieldMemOperand(input_reg, 5071 __ Ldc1(double_scratch0(),
5074 HeapNumber::kValueOffset)); 5072 FieldMemOperand(input_reg, HeapNumber::kValueOffset));
5075 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg); 5073 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
5076 __ jmp(&done); 5074 __ jmp(&done);
5077 5075
5078 __ bind(&is_smi); 5076 __ bind(&is_smi);
5079 __ ClampUint8(result_reg, scratch); 5077 __ ClampUint8(result_reg, scratch);
5080 5078
5081 __ bind(&done); 5079 __ bind(&done);
5082 } 5080 }
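The three paths above implement uint8 clamping: smis clamp directly, heap numbers clamp through their double value, and undefined becomes zero. A standalone C++ sketch of the double case; the real rounding is done by the macro-assembler's ClampDoubleToUint8 in the FPU's round-to-nearest mode, which std::nearbyint mirrors under the default rounding mode (the function name is hypothetical):

    #include <cmath>
    #include <cstdint>

    static uint8_t ClampDoubleToUint8Sketch(double v) {
      if (!(v > 0.0)) return 0;                        // NaN and values <= 0 clamp to 0
      if (v >= 255.0) return 255;                      // upper clamp
      return static_cast<uint8_t>(std::nearbyint(v));  // round to nearest
    }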
5083 5081
5084 5082
(...skipping 48 matching lines...)
5133 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); 5131 int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5134 __ li(scratch, Operand(size - kHeapObjectTag)); 5132 __ li(scratch, Operand(size - kHeapObjectTag));
5135 } else { 5133 } else {
5136 __ Dsubu(scratch, ToRegister(instr->size()), Operand(kHeapObjectTag)); 5134 __ Dsubu(scratch, ToRegister(instr->size()), Operand(kHeapObjectTag));
5137 } 5135 }
5138 __ li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); 5136 __ li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
5139 Label loop; 5137 Label loop;
5140 __ bind(&loop); 5138 __ bind(&loop);
5141 __ Dsubu(scratch, scratch, Operand(kPointerSize)); 5139 __ Dsubu(scratch, scratch, Operand(kPointerSize));
5142 __ Daddu(at, result, Operand(scratch)); 5140 __ Daddu(at, result, Operand(scratch));
5143 __ sd(scratch2, MemOperand(at)); 5141 __ Sd(scratch2, MemOperand(at));
5144 __ Branch(&loop, ge, scratch, Operand(zero_reg)); 5142 __ Branch(&loop, ge, scratch, Operand(zero_reg));
5145 } 5143 }
5146 } 5144 }
5147 5145
5148 5146
5149 void LCodeGen::DoDeferredAllocate(LAllocate* instr) { 5147 void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
5150 Register result = ToRegister(instr->result()); 5148 Register result = ToRegister(instr->result());
5151 5149
5152 // TODO(3095996): Get rid of this. For now, we need to make the 5150 // TODO(3095996): Get rid of this. For now, we need to make the
5153 // result register contain a valid pointer because it is already 5151 // result register contain a valid pointer because it is already
(...skipping 40 matching lines...)
5194 allocation_flags = static_cast<AllocationFlags>(flags | PRETENURE); 5192 allocation_flags = static_cast<AllocationFlags>(flags | PRETENURE);
5195 } 5193 }
5196 // If the allocation folding dominator allocation triggered a GC, allocation 5194 // If the allocation folding dominator allocation triggered a GC, allocation
5197 // happened in the runtime. We have to reset the top pointer to virtually 5195 // happened in the runtime. We have to reset the top pointer to virtually
5198 // undo the allocation. 5196 // undo the allocation.
5199 ExternalReference allocation_top = 5197 ExternalReference allocation_top =
5200 AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags); 5198 AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
5201 Register top_address = scratch0(); 5199 Register top_address = scratch0();
5202 __ Dsubu(v0, v0, Operand(kHeapObjectTag)); 5200 __ Dsubu(v0, v0, Operand(kHeapObjectTag));
5203 __ li(top_address, Operand(allocation_top)); 5201 __ li(top_address, Operand(allocation_top));
5204 __ sd(v0, MemOperand(top_address)); 5202 __ Sd(v0, MemOperand(top_address));
5205 __ Daddu(v0, v0, Operand(kHeapObjectTag)); 5203 __ Daddu(v0, v0, Operand(kHeapObjectTag));
5206 } 5204 }
5207 } 5205 }
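The last three instructions above undo the tagging around the store: the allocation top external reference holds an untagged address, while v0 carries a tagged heap pointer (kHeapObjectTag is 1). A sketch of the arithmetic with hypothetical names:

    static uintptr_t ResetTopSketch(uintptr_t tagged_result,
                                    uintptr_t* allocation_top) {
      uintptr_t raw = tagged_result - kHeapObjectTag;  // strip the tag bit
      *allocation_top = raw;   // top back at the object's start address,
                               // virtually undoing the allocation
      return raw + kHeapObjectTag;  // caller still sees the tagged pointer
    }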
5208 5206
5209 void LCodeGen::DoFastAllocate(LFastAllocate* instr) { 5207 void LCodeGen::DoFastAllocate(LFastAllocate* instr) {
5210 DCHECK(instr->hydrogen()->IsAllocationFolded()); 5208 DCHECK(instr->hydrogen()->IsAllocationFolded());
5211 DCHECK(!instr->hydrogen()->IsAllocationFoldingDominator()); 5209 DCHECK(!instr->hydrogen()->IsAllocationFoldingDominator());
5212 Register result = ToRegister(instr->result()); 5210 Register result = ToRegister(instr->result());
5213 Register scratch1 = ToRegister(instr->temp1()); 5211 Register scratch1 = ToRegister(instr->temp1());
5214 Register scratch2 = ToRegister(instr->temp2()); 5212 Register scratch2 = ToRegister(instr->temp2());
(...skipping 61 matching lines...)
5276 Register* cmp1, 5274 Register* cmp1,
5277 Operand* cmp2) { 5275 Operand* cmp2) {
5278 // This function utilizes the delay slot heavily. This is used to load 5276 // This function utilizes the delay slot heavily. This is used to load
5279 // values that are always usable without depending on the type of the input 5277 // values that are always usable without depending on the type of the input
5280 // register. 5278 // register.
5281 Condition final_branch_condition = kNoCondition; 5279 Condition final_branch_condition = kNoCondition;
5282 Register scratch = scratch0(); 5280 Register scratch = scratch0();
5283 Factory* factory = isolate()->factory(); 5281 Factory* factory = isolate()->factory();
5284 if (String::Equals(type_name, factory->number_string())) { 5282 if (String::Equals(type_name, factory->number_string())) {
5285 __ JumpIfSmi(input, true_label); 5283 __ JumpIfSmi(input, true_label);
5286 __ ld(input, FieldMemOperand(input, HeapObject::kMapOffset)); 5284 __ Ld(input, FieldMemOperand(input, HeapObject::kMapOffset));
5287 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); 5285 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5288 *cmp1 = input; 5286 *cmp1 = input;
5289 *cmp2 = Operand(at); 5287 *cmp2 = Operand(at);
5290 final_branch_condition = eq; 5288 final_branch_condition = eq;
5291 5289
5292 } else if (String::Equals(type_name, factory->string_string())) { 5290 } else if (String::Equals(type_name, factory->string_string())) {
5293 __ JumpIfSmi(input, false_label); 5291 __ JumpIfSmi(input, false_label);
5294 __ GetObjectType(input, input, scratch); 5292 __ GetObjectType(input, input, scratch);
5295 *cmp1 = scratch; 5293 *cmp1 = scratch;
5296 *cmp2 = Operand(FIRST_NONSTRING_TYPE); 5294 *cmp2 = Operand(FIRST_NONSTRING_TYPE);
(...skipping 14 matching lines...)
5311 *cmp2 = Operand(input); 5309 *cmp2 = Operand(input);
5312 final_branch_condition = eq; 5310 final_branch_condition = eq;
5313 5311
5314 } else if (String::Equals(type_name, factory->undefined_string())) { 5312 } else if (String::Equals(type_name, factory->undefined_string())) {
5315 __ LoadRoot(at, Heap::kNullValueRootIndex); 5313 __ LoadRoot(at, Heap::kNullValueRootIndex);
5316 __ Branch(USE_DELAY_SLOT, false_label, eq, at, Operand(input)); 5314 __ Branch(USE_DELAY_SLOT, false_label, eq, at, Operand(input));
5317 // The first instruction of JumpIfSmi is an And - it is safe in the delay 5315 // The first instruction of JumpIfSmi is an And - it is safe in the delay
5318 // slot. 5316 // slot.
5319 __ JumpIfSmi(input, false_label); 5317 __ JumpIfSmi(input, false_label);
5320 // Check for undetectable objects => true. 5318 // Check for undetectable objects => true.
5321 __ ld(input, FieldMemOperand(input, HeapObject::kMapOffset)); 5319 __ Ld(input, FieldMemOperand(input, HeapObject::kMapOffset));
5322 __ lbu(at, FieldMemOperand(input, Map::kBitFieldOffset)); 5320 __ Lbu(at, FieldMemOperand(input, Map::kBitFieldOffset));
5323 __ And(at, at, 1 << Map::kIsUndetectable); 5321 __ And(at, at, 1 << Map::kIsUndetectable);
5324 *cmp1 = at; 5322 *cmp1 = at;
5325 *cmp2 = Operand(zero_reg); 5323 *cmp2 = Operand(zero_reg);
5326 final_branch_condition = ne; 5324 final_branch_condition = ne;
5327 5325
5328 } else if (String::Equals(type_name, factory->function_string())) { 5326 } else if (String::Equals(type_name, factory->function_string())) {
5329 __ JumpIfSmi(input, false_label); 5327 __ JumpIfSmi(input, false_label);
5330 __ ld(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); 5328 __ Ld(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
5331 __ lbu(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset)); 5329 __ Lbu(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
5332 __ And(scratch, scratch, 5330 __ And(scratch, scratch,
5333 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); 5331 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
5334 *cmp1 = scratch; 5332 *cmp1 = scratch;
5335 *cmp2 = Operand(1 << Map::kIsCallable); 5333 *cmp2 = Operand(1 << Map::kIsCallable);
5336 final_branch_condition = eq; 5334 final_branch_condition = eq;
5337 5335
5338 } else if (String::Equals(type_name, factory->object_string())) { 5336 } else if (String::Equals(type_name, factory->object_string())) {
5339 __ JumpIfSmi(input, false_label); 5337 __ JumpIfSmi(input, false_label);
5340 __ LoadRoot(at, Heap::kNullValueRootIndex); 5338 __ LoadRoot(at, Heap::kNullValueRootIndex);
5341 __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input)); 5339 __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input));
5342 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); 5340 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
5343 __ GetObjectType(input, scratch, scratch1()); 5341 __ GetObjectType(input, scratch, scratch1());
5344 __ Branch(false_label, lt, scratch1(), Operand(FIRST_JS_RECEIVER_TYPE)); 5342 __ Branch(false_label, lt, scratch1(), Operand(FIRST_JS_RECEIVER_TYPE));
5345 // Check for callable or undetectable objects => false. 5343 // Check for callable or undetectable objects => false.
5346 __ lbu(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset)); 5344 __ Lbu(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
5347 __ And(at, scratch, 5345 __ And(at, scratch,
5348 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); 5346 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
5349 *cmp1 = at; 5347 *cmp1 = at;
5350 *cmp2 = Operand(zero_reg); 5348 *cmp2 = Operand(zero_reg);
5351 final_branch_condition = eq; 5349 final_branch_condition = eq;
5352 5350
5353 } else { 5351 } else {
5354 *cmp1 = at; 5352 *cmp1 = at;
5355 *cmp2 = Operand(zero_reg); // Set to valid regs, to avoid caller assertion. 5353 *cmp2 = Operand(zero_reg); // Set to valid regs, to avoid caller assertion.
5356 __ Branch(false_label); 5354 __ Branch(false_label);
(...skipping 128 matching lines...)
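One detail in the function_string branch above: And-ing with both kIsCallable and kIsUndetectable and then comparing against 1 << Map::kIsCallable tests two bits with a single compare, i.e. callable and not undetectable; the object_string branch uses the same mask but compares against zero (neither bit set). A minimal sketch of the two predicates (variable names are hypothetical):

    uint32_t mask = (1u << Map::kIsCallable) | (1u << Map::kIsUndetectable);
    bool is_function = (bit_field & mask) == (1u << Map::kIsCallable);
    bool is_object_bits = (bit_field & mask) == 0;  // plus the instance-type range check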
5485 5483
5486 5484
5487 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { 5485 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5488 Register result = ToRegister(instr->result()); 5486 Register result = ToRegister(instr->result());
5489 Register object = ToRegister(instr->object()); 5487 Register object = ToRegister(instr->object());
5490 5488
5491 Label use_cache, call_runtime; 5489 Label use_cache, call_runtime;
5492 DCHECK(object.is(a0)); 5490 DCHECK(object.is(a0));
5493 __ CheckEnumCache(&call_runtime); 5491 __ CheckEnumCache(&call_runtime);
5494 5492
5495 __ ld(result, FieldMemOperand(object, HeapObject::kMapOffset)); 5493 __ Ld(result, FieldMemOperand(object, HeapObject::kMapOffset));
5496 __ Branch(&use_cache); 5494 __ Branch(&use_cache);
5497 5495
5498 // Get the set of properties to enumerate. 5496 // Get the set of properties to enumerate.
5499 __ bind(&call_runtime); 5497 __ bind(&call_runtime);
5500 __ push(object); 5498 __ push(object);
5501 CallRuntime(Runtime::kForInEnumerate, instr); 5499 CallRuntime(Runtime::kForInEnumerate, instr);
5502 __ bind(&use_cache); 5500 __ bind(&use_cache);
5503 } 5501 }
5504 5502
5505 5503
5506 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { 5504 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5507 Register map = ToRegister(instr->map()); 5505 Register map = ToRegister(instr->map());
5508 Register result = ToRegister(instr->result()); 5506 Register result = ToRegister(instr->result());
5509 Label load_cache, done; 5507 Label load_cache, done;
5510 __ EnumLength(result, map); 5508 __ EnumLength(result, map);
5511 __ Branch(&load_cache, ne, result, Operand(Smi::kZero)); 5509 __ Branch(&load_cache, ne, result, Operand(Smi::kZero));
5512 __ li(result, Operand(isolate()->factory()->empty_fixed_array())); 5510 __ li(result, Operand(isolate()->factory()->empty_fixed_array()));
5513 __ jmp(&done); 5511 __ jmp(&done);
5514 5512
5515 __ bind(&load_cache); 5513 __ bind(&load_cache);
5516 __ LoadInstanceDescriptors(map, result); 5514 __ LoadInstanceDescriptors(map, result);
5517 __ ld(result, 5515 __ Ld(result, FieldMemOperand(result, DescriptorArray::kEnumCacheOffset));
5518 FieldMemOperand(result, DescriptorArray::kEnumCacheOffset)); 5516 __ Ld(result, FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
5519 __ ld(result,
5520 FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
5521 DeoptimizeIf(eq, instr, DeoptimizeReason::kNoCache, result, 5517 DeoptimizeIf(eq, instr, DeoptimizeReason::kNoCache, result,
5522 Operand(zero_reg)); 5518 Operand(zero_reg));
5523 5519
5524 __ bind(&done); 5520 __ bind(&done);
5525 } 5521 }
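The load_cache path above chases a fixed chain of fields. A rough sketch of the walk, with hypothetical accessors in place of the raw field loads (offsets as in the diff):

    static Object* EnumCacheSketch(Map* map, int idx) {
      DescriptorArray* descs = map->instance_descriptors();  // LoadInstanceDescriptors
      Object* cache = descs->enum_cache();            // DescriptorArray::kEnumCacheOffset
      return FixedArray::cast(cache)->get(idx);       // offset FixedArray::SizeFor(idx)
    }

A zero result means the map has no cache for this index, and the DeoptimizeIf above bails out with kNoCache.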
5526 5522
5527 5523
5528 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { 5524 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5529 Register object = ToRegister(instr->value()); 5525 Register object = ToRegister(instr->value());
5530 Register map = ToRegister(instr->map()); 5526 Register map = ToRegister(instr->map());
5531 __ ld(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); 5527 __ Ld(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset));
5532 DeoptimizeIf(ne, instr, DeoptimizeReason::kWrongMap, map, 5528 DeoptimizeIf(ne, instr, DeoptimizeReason::kWrongMap, map,
5533 Operand(scratch0())); 5529 Operand(scratch0()));
5534 } 5530 }
5535 5531
5536 5532
5537 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, 5533 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
5538 Register result, 5534 Register result,
5539 Register object, 5535 Register object,
5540 Register index) { 5536 Register index) {
5541 PushSafepointRegistersScope scope(this); 5537 PushSafepointRegistersScope scope(this);
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
5585 5581
5586 Label out_of_object, done; 5582 Label out_of_object, done;
5587 5583
5588 __ And(scratch, index, Operand(Smi::FromInt(1))); 5584 __ And(scratch, index, Operand(Smi::FromInt(1)));
5589 __ Branch(deferred->entry(), ne, scratch, Operand(zero_reg)); 5585 __ Branch(deferred->entry(), ne, scratch, Operand(zero_reg));
5590 __ dsra(index, index, 1); 5586 __ dsra(index, index, 1);
5591 5587
5592 __ Branch(USE_DELAY_SLOT, &out_of_object, lt, index, Operand(zero_reg)); 5588 __ Branch(USE_DELAY_SLOT, &out_of_object, lt, index, Operand(zero_reg));
5593 __ SmiScale(scratch, index, kPointerSizeLog2); // In delay slot. 5589 __ SmiScale(scratch, index, kPointerSizeLog2); // In delay slot.
5594 __ Daddu(scratch, object, scratch); 5590 __ Daddu(scratch, object, scratch);
5595 __ ld(result, FieldMemOperand(scratch, JSObject::kHeaderSize)); 5591 __ Ld(result, FieldMemOperand(scratch, JSObject::kHeaderSize));
5596 5592
5597 __ Branch(&done); 5593 __ Branch(&done);
5598 5594
5599 __ bind(&out_of_object); 5595 __ bind(&out_of_object);
5600 __ ld(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); 5596 __ Ld(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
5601 // Index is equal to the negated out-of-object property index plus 1. 5597 // Index is equal to the negated out-of-object property index plus 1.
5602 __ Dsubu(scratch, result, scratch); 5598 __ Dsubu(scratch, result, scratch);
5603 __ ld(result, FieldMemOperand(scratch, 5599 __ Ld(result,
5604 FixedArray::kHeaderSize - kPointerSize)); 5600 FieldMemOperand(scratch, FixedArray::kHeaderSize - kPointerSize));
5605 __ bind(deferred->exit()); 5601 __ bind(deferred->exit());
5606 __ bind(&done); 5602 __ bind(&done);
5607 } 5603 }
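The smi loaded as the field index above packs three pieces of information. A rough sketch of the decode, with hypothetical helpers and smi-untagging elided; offsets are the ones used in the diff:

    Object* LoadFieldByIndexSketch(JSObject* object, intptr_t encoded) {
      if (encoded & 1) {
        // Low bit set: the field is a mutable HeapNumber, loaded on the
        // deferred path (DoDeferredLoadMutableDouble).
        return LoadMutableDoubleSketch(object, encoded);
      }
      intptr_t index = encoded >> 1;
      if (index >= 0) {
        // Non-negative: in-object property; slots start at JSObject::kHeaderSize.
        return ReadFieldSketch(object, JSObject::kHeaderSize + index * kPointerSize);
      }
      // Negative: out-of-object. index == -(slot + 1), so slot == -index - 1 in
      // the properties FixedArray; the code above folds the header offset and
      // the negation into one subtraction.
      return ReadFieldSketch(object->properties(),
                             FixedArray::kHeaderSize + (-index - 1) * kPointerSize);
    }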
5608 5604
5609 #undef __ 5605 #undef __
5610 5606
5611 } // namespace internal 5607 } // namespace internal
5612 } // namespace v8 5608 } // namespace v8