Chromium Code Reviews

Side by Side Diff: src/full-codegen/mips64/full-codegen-mips64.cc

Issue 2829073002: MIPS64: Move load/store instructions to macro-assembler. (Closed)
Patch Set: Created 3 years, 8 months ago
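
For context: this patch set mechanically switches full-codegen from the raw assembler mnemonics (ld, sd) to the MacroAssembler wrappers (Ld, Sd). Below is a minimal sketch of what such a wrapper plausibly does, assuming the usual MIPS constraint of a 16-bit signed immediate in memory instructions; the real implementation lives in macro-assembler-mips64.cc and may handle more cases than shown here.

  // Sketch only: the offset-fixup logic is an assumption for illustration,
  // not the verbatim V8 source.
  void MacroAssembler::Ld(Register rd, const MemOperand& rs) {
    if (is_int16(rs.offset())) {
      ld(rd, rs);  // Offset fits the 16-bit signed immediate field.
    } else {
      // Materialize the out-of-range offset in the scratch register 'at',
      // add the base register, and load through 'at' instead.
      li(at, Operand(rs.offset()));
      daddu(at, at, rs.rm());
      ld(rd, MemOperand(at, 0));
    }
  }

With the wrapper in place, call sites like the ones in this file no longer need to know whether a given offset is encodable in a single instruction.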
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_MIPS64 5 #if V8_TARGET_ARCH_MIPS64
6 6
7 // Note on Mips implementation: 7 // Note on Mips implementation:
8 // 8 //
9 // The result_register() for mips is the 'v0' register, which is defined 9 // The result_register() for mips is the 'v0' register, which is defined
10 // by the ABI to contain function return values. However, the first 10 // by the ABI to contain function return values. However, the first
(...skipping 105 matching lines...)
116 CompilationInfo* info = info_; 116 CompilationInfo* info = info_;
117 profiling_counter_ = isolate()->factory()->NewCell( 117 profiling_counter_ = isolate()->factory()->NewCell(
118 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); 118 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
119 SetFunctionPosition(literal()); 119 SetFunctionPosition(literal());
120 Comment cmnt(masm_, "[ function compiled by full code generator"); 120 Comment cmnt(masm_, "[ function compiled by full code generator");
121 121
122 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 122 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
123 123
124 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) { 124 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
125 int receiver_offset = info->scope()->num_parameters() * kPointerSize; 125 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
126 __ ld(a2, MemOperand(sp, receiver_offset)); 126 __ Ld(a2, MemOperand(sp, receiver_offset));
127 __ AssertNotSmi(a2); 127 __ AssertNotSmi(a2);
128 __ GetObjectType(a2, a2, a2); 128 __ GetObjectType(a2, a2, a2);
129 __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2, 129 __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
130 Operand(FIRST_JS_RECEIVER_TYPE)); 130 Operand(FIRST_JS_RECEIVER_TYPE));
131 } 131 }
132 132
133 // Open a frame scope to indicate that there is a frame on the stack. The 133 // Open a frame scope to indicate that there is a frame on the stack. The
134 // MANUAL indicates that the scope shouldn't actually generate code to set up 134 // MANUAL indicates that the scope shouldn't actually generate code to set up
135 // the frame (that is done below). 135 // the frame (that is done below).
136 FrameScope frame_scope(masm_, StackFrame::MANUAL); 136 FrameScope frame_scope(masm_, StackFrame::MANUAL);
137 info->set_prologue_offset(masm_->pc_offset()); 137 info->set_prologue_offset(masm_->pc_offset());
138 __ Prologue(info->GeneratePreagedPrologue()); 138 __ Prologue(info->GeneratePreagedPrologue());
139 139
140 // Increment invocation count for the function. 140 // Increment invocation count for the function.
141 { 141 {
142 Comment cmnt(masm_, "[ Increment invocation count"); 142 Comment cmnt(masm_, "[ Increment invocation count");
143 __ ld(a0, FieldMemOperand(a1, JSFunction::kFeedbackVectorOffset)); 143 __ Ld(a0, FieldMemOperand(a1, JSFunction::kFeedbackVectorOffset));
144 __ ld(a0, FieldMemOperand(a0, Cell::kValueOffset)); 144 __ Ld(a0, FieldMemOperand(a0, Cell::kValueOffset));
145 __ ld(a4, FieldMemOperand( 145 __ Ld(a4, FieldMemOperand(
146 a0, FeedbackVector::kInvocationCountIndex * kPointerSize + 146 a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
147 FeedbackVector::kHeaderSize)); 147 FeedbackVector::kHeaderSize));
148 __ Daddu(a4, a4, Operand(Smi::FromInt(1))); 148 __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
149 __ sd(a4, FieldMemOperand( 149 __ Sd(a4, FieldMemOperand(
150 a0, FeedbackVector::kInvocationCountIndex * kPointerSize + 150 a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
151 FeedbackVector::kHeaderSize)); 151 FeedbackVector::kHeaderSize));
152 } 152 }
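
For readers new to this code: FieldMemOperand, used throughout this chunk, is (to a close approximation) the following helper; this matches the usual V8 definition but is reproduced here from memory.

  // Heap pointers carry kHeapObjectTag (== 1), so a field access simply
  // subtracts the tag from the field's offset:
  inline MemOperand FieldMemOperand(Register object, int offset) {
    return MemOperand(object, offset - kHeapObjectTag);
  }

The invocation count itself is stored as a Smi, which is why the increment above uses Operand(Smi::FromInt(1)) rather than Operand(1).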
153 153
154 { Comment cmnt(masm_, "[ Allocate locals"); 154 { Comment cmnt(masm_, "[ Allocate locals");
155 int locals_count = info->scope()->num_stack_slots(); 155 int locals_count = info->scope()->num_stack_slots();
156 OperandStackDepthIncrement(locals_count); 156 OperandStackDepthIncrement(locals_count);
157 if (locals_count > 0) { 157 if (locals_count > 0) {
158 if (locals_count >= 128) { 158 if (locals_count >= 128) {
159 Label ok; 159 Label ok;
160 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize)); 160 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
161 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); 161 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
162 __ Branch(&ok, hs, t1, Operand(a2)); 162 __ Branch(&ok, hs, t1, Operand(a2));
163 __ CallRuntime(Runtime::kThrowStackOverflow); 163 __ CallRuntime(Runtime::kThrowStackOverflow);
164 __ bind(&ok); 164 __ bind(&ok);
165 } 165 }
166 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); 166 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
167 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; 167 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
168 if (locals_count >= kMaxPushes) { 168 if (locals_count >= kMaxPushes) {
169 int loop_iterations = locals_count / kMaxPushes; 169 int loop_iterations = locals_count / kMaxPushes;
170 __ li(a2, Operand(loop_iterations)); 170 __ li(a2, Operand(loop_iterations));
171 Label loop_header; 171 Label loop_header;
172 __ bind(&loop_header); 172 __ bind(&loop_header);
173 // Do pushes. 173 // Do pushes.
174 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize)); 174 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
175 for (int i = 0; i < kMaxPushes; i++) { 175 for (int i = 0; i < kMaxPushes; i++) {
176 __ sd(t1, MemOperand(sp, i * kPointerSize)); 176 __ Sd(t1, MemOperand(sp, i * kPointerSize));
177 } 177 }
178 // Continue loop if not done. 178 // Continue loop if not done.
179 __ Dsubu(a2, a2, Operand(1)); 179 __ Dsubu(a2, a2, Operand(1));
180 __ Branch(&loop_header, ne, a2, Operand(zero_reg)); 180 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
181 } 181 }
182 int remaining = locals_count % kMaxPushes; 182 int remaining = locals_count % kMaxPushes;
183 // Emit the remaining pushes. 183 // Emit the remaining pushes.
184 __ Dsubu(sp, sp, Operand(remaining * kPointerSize)); 184 __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
185 for (int i = 0; i < remaining; i++) { 185 for (int i = 0; i < remaining; i++) {
186 __ sd(t1, MemOperand(sp, i * kPointerSize)); 186 __ Sd(t1, MemOperand(sp, i * kPointerSize));
187 } 187 }
188 } 188 }
189 } 189 }
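
A worked example of the push-loop split above, with hypothetical numbers: for locals_count = 70 and kMaxPushes = 32 (the non-optimize-for-size value),

  int loop_iterations = 70 / 32;  // 2 iterations x 32 stores = 64 slots
  int remaining       = 70 % 32;  // 6 straight-line stores afterwards

so 64 of the 70 undefined fills go through the loop and the last 6 are emitted unrolled.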
190 190
191 bool function_in_register_a1 = true; 191 bool function_in_register_a1 = true;
192 192
193 // Possibly allocate a local context. 193 // Possibly allocate a local context.
194 if (info->scope()->NeedsContext()) { 194 if (info->scope()->NeedsContext()) {
195 Comment cmnt(masm_, "[ Allocate context"); 195 Comment cmnt(masm_, "[ Allocate context");
196 // Argument to NewContext is the function, which is still in a1. 196 // Argument to NewContext is the function, which is still in a1.
(...skipping 25 matching lines...)
222 __ CallRuntime(Runtime::kNewFunctionContext); 222 __ CallRuntime(Runtime::kNewFunctionContext);
223 } 223 }
224 if (info->scope()->new_target_var() != nullptr) { 224 if (info->scope()->new_target_var() != nullptr) {
225 __ pop(a3); // Restore new target. 225 __ pop(a3); // Restore new target.
226 } 226 }
227 } 227 }
228 function_in_register_a1 = false; 228 function_in_register_a1 = false;
229 // Context is returned in v0. It replaces the context passed to us. 229 // Context is returned in v0. It replaces the context passed to us.
230 // It's saved in the stack and kept live in cp. 230 // It's saved in the stack and kept live in cp.
231 __ mov(cp, v0); 231 __ mov(cp, v0);
232 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset)); 232 __ Sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
233 // Copy any necessary parameters into the context. 233 // Copy any necessary parameters into the context.
234 int num_parameters = info->scope()->num_parameters(); 234 int num_parameters = info->scope()->num_parameters();
235 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0; 235 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
236 for (int i = first_parameter; i < num_parameters; i++) { 236 for (int i = first_parameter; i < num_parameters; i++) {
237 Variable* var = 237 Variable* var =
238 (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i); 238 (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
239 if (var->IsContextSlot()) { 239 if (var->IsContextSlot()) {
240 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 240 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
241 (num_parameters - 1 - i) * kPointerSize; 241 (num_parameters - 1 - i) * kPointerSize;
242 // Load parameter from stack. 242 // Load parameter from stack.
243 __ ld(a0, MemOperand(fp, parameter_offset)); 243 __ Ld(a0, MemOperand(fp, parameter_offset));
244 // Store it in the context. 244 // Store it in the context.
245 MemOperand target = ContextMemOperand(cp, var->index()); 245 MemOperand target = ContextMemOperand(cp, var->index());
246 __ sd(a0, target); 246 __ Sd(a0, target);
247 247
248 // Update the write barrier. 248 // Update the write barrier.
249 if (need_write_barrier) { 249 if (need_write_barrier) {
250 __ RecordWriteContextSlot(cp, target.offset(), a0, a2, 250 __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
251 kRAHasBeenSaved, kDontSaveFPRegs); 251 kRAHasBeenSaved, kDontSaveFPRegs);
252 } else if (FLAG_debug_code) { 252 } else if (FLAG_debug_code) {
253 Label done; 253 Label done;
254 __ JumpIfInNewSpace(cp, a0, &done); 254 __ JumpIfInNewSpace(cp, a0, &done);
255 __ Abort(kExpectedNewSpaceObject); 255 __ Abort(kExpectedNewSpaceObject);
256 __ bind(&done); 256 __ bind(&done);
(...skipping 12 matching lines...)
269 DCHECK_NULL(info->scope()->new_target_var()); 269 DCHECK_NULL(info->scope()->new_target_var());
270 DCHECK_NULL(info->scope()->rest_parameter()); 270 DCHECK_NULL(info->scope()->rest_parameter());
271 DCHECK_NULL(info->scope()->this_function_var()); 271 DCHECK_NULL(info->scope()->this_function_var());
272 272
273 Variable* arguments = info->scope()->arguments(); 273 Variable* arguments = info->scope()->arguments();
274 if (arguments != NULL) { 274 if (arguments != NULL) {
275 // Function uses arguments object. 275 // Function uses arguments object.
276 Comment cmnt(masm_, "[ Allocate arguments object"); 276 Comment cmnt(masm_, "[ Allocate arguments object");
277 if (!function_in_register_a1) { 277 if (!function_in_register_a1) {
278 // Load this again, if it's used by the local context below. 278 // Load this again, if it's used by the local context below.
279 __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 279 __ Ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
280 } 280 }
281 if (is_strict(language_mode()) || !has_simple_parameters()) { 281 if (is_strict(language_mode()) || !has_simple_parameters()) {
282 Callable callable = CodeFactory::FastNewStrictArguments(isolate()); 282 Callable callable = CodeFactory::FastNewStrictArguments(isolate());
283 __ Call(callable.code(), RelocInfo::CODE_TARGET); 283 __ Call(callable.code(), RelocInfo::CODE_TARGET);
284 RestoreContext(); 284 RestoreContext();
285 } else if (literal()->has_duplicate_parameters()) { 285 } else if (literal()->has_duplicate_parameters()) {
286 __ Push(a1); 286 __ Push(a1);
287 __ CallRuntime(Runtime::kNewSloppyArguments_Generic); 287 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
288 } else { 288 } else {
289 Callable callable = CodeFactory::FastNewSloppyArguments(isolate()); 289 Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
(...skipping 54 matching lines...)
344 344
345 345
346 void FullCodeGenerator::ClearAccumulator() { 346 void FullCodeGenerator::ClearAccumulator() {
347 DCHECK(Smi::kZero == 0); 347 DCHECK(Smi::kZero == 0);
348 __ mov(v0, zero_reg); 348 __ mov(v0, zero_reg);
349 } 349 }
350 350
351 351
352 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { 352 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
353 __ li(a2, Operand(profiling_counter_)); 353 __ li(a2, Operand(profiling_counter_));
354 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset)); 354 __ Ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
355 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta))); 355 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
356 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset)); 356 __ Sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
357 } 357 }
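
Why the counter can be adjusted without untagging (a note, assuming the usual 64-bit Smi encoding where a Smi n is stored as n << 32): tagged subtraction distributes over the shift,

  Smi(a) - Smi(b) == (a << 32) - (b << 32) == (a - b) << 32 == Smi(a - b)

so Dsubu on the raw 64-bit values yields the correctly tagged result as long as the true value stays in Smi range.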
358 358
359 359
360 void FullCodeGenerator::EmitProfilingCounterReset() { 360 void FullCodeGenerator::EmitProfilingCounterReset() {
361 int reset_value = FLAG_interrupt_budget; 361 int reset_value = FLAG_interrupt_budget;
362 if (info_->is_debug()) { 362 if (info_->is_debug()) {
363 // Detect debug break requests as soon as possible. 363 // Detect debug break requests as soon as possible.
364 reset_value = FLAG_interrupt_budget >> 4; 364 reset_value = FLAG_interrupt_budget >> 4;
365 } 365 }
366 __ li(a2, Operand(profiling_counter_)); 366 __ li(a2, Operand(profiling_counter_));
367 __ li(a3, Operand(Smi::FromInt(reset_value))); 367 __ li(a3, Operand(Smi::FromInt(reset_value)));
368 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset)); 368 __ Sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
369 } 369 }
370 370
371 371
372 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, 372 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
373 Label* back_edge_target) { 373 Label* back_edge_target) {
374 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need 374 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
375 // to make sure it is constant. Branch may emit a skip-or-jump sequence 375 // to make sure it is constant. Branch may emit a skip-or-jump sequence
376 // instead of the normal Branch. It seems that the "skip" part of that 376 // instead of the normal Branch. It seems that the "skip" part of that
377 // sequence is about as long as this Branch would be so it is safe to ignore 377 // sequence is about as long as this Branch would be so it is safe to ignore
378 // that. 378 // that.
(...skipping 70 matching lines...)
449 SetReturnPosition(literal()); 449 SetReturnPosition(literal());
450 __ mov(sp, fp); 450 __ mov(sp, fp);
451 __ MultiPop(static_cast<RegList>(fp.bit() | ra.bit())); 451 __ MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
452 __ Daddu(sp, sp, Operand(sp_delta)); 452 __ Daddu(sp, sp, Operand(sp_delta));
453 __ Jump(ra); 453 __ Jump(ra);
454 } 454 }
455 } 455 }
456 } 456 }
457 457
458 void FullCodeGenerator::RestoreContext() { 458 void FullCodeGenerator::RestoreContext() {
459 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 459 __ Ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
460 } 460 }
461 461
462 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { 462 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
463 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 463 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
464 codegen()->GetVar(result_register(), var); 464 codegen()->GetVar(result_register(), var);
465 codegen()->PushOperand(result_register()); 465 codegen()->PushOperand(result_register());
466 } 466 }
467 467
468 468
469 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 469 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
(...skipping 75 matching lines...)
545 __ li(result_register(), Operand(lit)); 545 __ li(result_register(), Operand(lit));
546 codegen()->DoTest(this); 546 codegen()->DoTest(this);
547 } 547 }
548 } 548 }
549 549
550 550
551 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, 551 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
552 Register reg) const { 552 Register reg) const {
553 DCHECK(count > 0); 553 DCHECK(count > 0);
554 if (count > 1) codegen()->DropOperands(count - 1); 554 if (count > 1) codegen()->DropOperands(count - 1);
555 __ sd(reg, MemOperand(sp, 0)); 555 __ Sd(reg, MemOperand(sp, 0));
556 } 556 }
557 557
558 558
559 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 559 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
560 Label* materialize_false) const { 560 Label* materialize_false) const {
561 DCHECK(materialize_true == materialize_false); 561 DCHECK(materialize_true == materialize_false);
562 __ bind(materialize_true); 562 __ bind(materialize_true);
563 } 563 }
564 564
565 565
(...skipping 113 matching lines...)
679 return ContextMemOperand(scratch, var->index()); 679 return ContextMemOperand(scratch, var->index());
680 } else { 680 } else {
681 return StackOperand(var); 681 return StackOperand(var);
682 } 682 }
683 } 683 }
684 684
685 685
686 void FullCodeGenerator::GetVar(Register dest, Variable* var) { 686 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
687 // Use destination as scratch. 687 // Use destination as scratch.
688 MemOperand location = VarOperand(var, dest); 688 MemOperand location = VarOperand(var, dest);
689 __ ld(dest, location); 689 __ Ld(dest, location);
690 } 690 }
691 691
692 692
693 void FullCodeGenerator::SetVar(Variable* var, 693 void FullCodeGenerator::SetVar(Variable* var,
694 Register src, 694 Register src,
695 Register scratch0, 695 Register scratch0,
696 Register scratch1) { 696 Register scratch1) {
697 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); 697 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
698 DCHECK(!scratch0.is(src)); 698 DCHECK(!scratch0.is(src));
699 DCHECK(!scratch0.is(scratch1)); 699 DCHECK(!scratch0.is(scratch1));
700 DCHECK(!scratch1.is(src)); 700 DCHECK(!scratch1.is(src));
701 MemOperand location = VarOperand(var, scratch0); 701 MemOperand location = VarOperand(var, scratch0);
702 __ sd(src, location); 702 __ Sd(src, location);
703 // Emit the write barrier code if the location is in the heap. 703 // Emit the write barrier code if the location is in the heap.
704 if (var->IsContextSlot()) { 704 if (var->IsContextSlot()) {
705 __ RecordWriteContextSlot(scratch0, 705 __ RecordWriteContextSlot(scratch0,
706 location.offset(), 706 location.offset(),
707 src, 707 src,
708 scratch1, 708 scratch1,
709 kRAHasBeenSaved, 709 kRAHasBeenSaved,
710 kDontSaveFPRegs); 710 kDontSaveFPRegs);
711 } 711 }
712 } 712 }
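
Note (not from the patch): the write barrier in SetVar is emitted only for context slots because a Context is a heap object, so storing a new pointer into it may require a remembered-set update for generational/incremental GC; stack slots are scanned directly by the GC and need no barrier.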
(...skipping 18 matching lines...)
731 } 731 }
732 } 732 }
733 733
734 734
735 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 735 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
736 // The variable in the declaration always resides in the current function 736 // The variable in the declaration always resides in the current function
737 // context. 737 // context.
738 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope())); 738 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
739 if (FLAG_debug_code) { 739 if (FLAG_debug_code) {
740 // Check that we're not inside a with or catch context. 740 // Check that we're not inside a with or catch context.
741 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); 741 __ Ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
742 __ LoadRoot(a4, Heap::kWithContextMapRootIndex); 742 __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
743 __ Check(ne, kDeclarationInWithContext, 743 __ Check(ne, kDeclarationInWithContext,
744 a1, Operand(a4)); 744 a1, Operand(a4));
745 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex); 745 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
746 __ Check(ne, kDeclarationInCatchContext, 746 __ Check(ne, kDeclarationInCatchContext,
747 a1, Operand(a4)); 747 a1, Operand(a4));
748 } 748 }
749 } 749 }
750 750
751 751
(...skipping 10 matching lines...)
762 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone()); 762 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
763 globals_->Add(isolate()->factory()->undefined_value(), zone()); 763 globals_->Add(isolate()->factory()->undefined_value(), zone());
764 globals_->Add(isolate()->factory()->undefined_value(), zone()); 764 globals_->Add(isolate()->factory()->undefined_value(), zone());
765 break; 765 break;
766 } 766 }
767 case VariableLocation::PARAMETER: 767 case VariableLocation::PARAMETER:
768 case VariableLocation::LOCAL: 768 case VariableLocation::LOCAL:
769 if (variable->binding_needs_init()) { 769 if (variable->binding_needs_init()) {
770 Comment cmnt(masm_, "[ VariableDeclaration"); 770 Comment cmnt(masm_, "[ VariableDeclaration");
771 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); 771 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
772 __ sd(a4, StackOperand(variable)); 772 __ Sd(a4, StackOperand(variable));
773 } 773 }
774 break; 774 break;
775 775
776 case VariableLocation::CONTEXT: 776 case VariableLocation::CONTEXT:
777 if (variable->binding_needs_init()) { 777 if (variable->binding_needs_init()) {
778 Comment cmnt(masm_, "[ VariableDeclaration"); 778 Comment cmnt(masm_, "[ VariableDeclaration");
779 EmitDebugCheckDeclarationContext(variable); 779 EmitDebugCheckDeclarationContext(variable);
780 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 780 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
781 __ sd(at, ContextMemOperand(cp, variable->index())); 781 __ Sd(at, ContextMemOperand(cp, variable->index()));
782 // No write barrier since the_hole_value is in old space. 782 // No write barrier since the_hole_value is in old space.
783 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS); 783 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
784 } 784 }
785 break; 785 break;
786 786
787 case VariableLocation::LOOKUP: 787 case VariableLocation::LOOKUP:
788 case VariableLocation::MODULE: 788 case VariableLocation::MODULE:
789 UNREACHABLE(); 789 UNREACHABLE();
790 } 790 }
791 } 791 }
(...skipping 20 matching lines...)
812 // Check for stack-overflow exception. 812 // Check for stack-overflow exception.
813 if (function.is_null()) return SetStackOverflow(); 813 if (function.is_null()) return SetStackOverflow();
814 globals_->Add(function, zone()); 814 globals_->Add(function, zone());
815 break; 815 break;
816 } 816 }
817 817
818 case VariableLocation::PARAMETER: 818 case VariableLocation::PARAMETER:
819 case VariableLocation::LOCAL: { 819 case VariableLocation::LOCAL: {
820 Comment cmnt(masm_, "[ FunctionDeclaration"); 820 Comment cmnt(masm_, "[ FunctionDeclaration");
821 VisitForAccumulatorValue(declaration->fun()); 821 VisitForAccumulatorValue(declaration->fun());
822 __ sd(result_register(), StackOperand(variable)); 822 __ Sd(result_register(), StackOperand(variable));
823 break; 823 break;
824 } 824 }
825 825
826 case VariableLocation::CONTEXT: { 826 case VariableLocation::CONTEXT: {
827 Comment cmnt(masm_, "[ FunctionDeclaration"); 827 Comment cmnt(masm_, "[ FunctionDeclaration");
828 EmitDebugCheckDeclarationContext(variable); 828 EmitDebugCheckDeclarationContext(variable);
829 VisitForAccumulatorValue(declaration->fun()); 829 VisitForAccumulatorValue(declaration->fun());
830 __ sd(result_register(), ContextMemOperand(cp, variable->index())); 830 __ Sd(result_register(), ContextMemOperand(cp, variable->index()));
831 int offset = Context::SlotOffset(variable->index()); 831 int offset = Context::SlotOffset(variable->index());
832 // We know that we have written a function, which is not a smi. 832 // We know that we have written a function, which is not a smi.
833 __ RecordWriteContextSlot(cp, 833 __ RecordWriteContextSlot(cp,
834 offset, 834 offset,
835 result_register(), 835 result_register(),
836 a2, 836 a2,
837 kRAHasBeenSaved, 837 kRAHasBeenSaved,
838 kDontSaveFPRegs, 838 kDontSaveFPRegs,
839 EMIT_REMEMBERED_SET, 839 EMIT_REMEMBERED_SET,
840 OMIT_SMI_CHECK); 840 OMIT_SMI_CHECK);
(...skipping 45 matching lines...)
886 886
887 Comment cmnt(masm_, "[ Case comparison"); 887 Comment cmnt(masm_, "[ Case comparison");
888 __ bind(&next_test); 888 __ bind(&next_test);
889 next_test.Unuse(); 889 next_test.Unuse();
890 890
891 // Compile the label expression. 891 // Compile the label expression.
892 VisitForAccumulatorValue(clause->label()); 892 VisitForAccumulatorValue(clause->label());
893 __ mov(a0, result_register()); // CompareStub requires args in a0, a1. 893 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
894 894
895 // Perform the comparison as if via '==='. 895 // Perform the comparison as if via '==='.
896 __ ld(a1, MemOperand(sp, 0)); // Switch value. 896 __ Ld(a1, MemOperand(sp, 0)); // Switch value.
897 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); 897 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
898 JumpPatchSite patch_site(masm_); 898 JumpPatchSite patch_site(masm_);
899 if (inline_smi_code) { 899 if (inline_smi_code) {
900 Label slow_case; 900 Label slow_case;
901 __ or_(a2, a1, a0); 901 __ or_(a2, a1, a0);
902 patch_site.EmitJumpIfNotSmi(a2, &slow_case); 902 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
903 903
904 __ Branch(&next_test, ne, a1, Operand(a0)); 904 __ Branch(&next_test, ne, a1, Operand(a0));
905 __ Drop(1); // Switch value is no longer needed. 905 __ Drop(1); // Switch value is no longer needed.
906 __ Branch(clause->body_target()); 906 __ Branch(clause->body_target());
(...skipping 85 matching lines...)
992 // Check cache validity in generated code. If we cannot guarantee cache 992 // Check cache validity in generated code. If we cannot guarantee cache
993 // validity, call the runtime system to check cache validity or get the 993 // validity, call the runtime system to check cache validity or get the
994 // property names in a fixed array. Note: Proxies never have an enum cache, 994 // property names in a fixed array. Note: Proxies never have an enum cache,
995 // so will always take the slow path. 995 // so will always take the slow path.
996 Label call_runtime; 996 Label call_runtime;
997 __ CheckEnumCache(&call_runtime); 997 __ CheckEnumCache(&call_runtime);
998 998
999 // The enum cache is valid. Load the map of the object being 999 // The enum cache is valid. Load the map of the object being
1000 // iterated over and use the cache for the iteration. 1000 // iterated over and use the cache for the iteration.
1001 Label use_cache; 1001 Label use_cache;
1002 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset)); 1002 __ Ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1003 __ Branch(&use_cache); 1003 __ Branch(&use_cache);
1004 1004
1005 // Get the set of properties to enumerate. 1005 // Get the set of properties to enumerate.
1006 __ bind(&call_runtime); 1006 __ bind(&call_runtime);
1007 __ push(a0); // Duplicate the enumerable object on the stack. 1007 __ push(a0); // Duplicate the enumerable object on the stack.
1008 __ CallRuntime(Runtime::kForInEnumerate); 1008 __ CallRuntime(Runtime::kForInEnumerate);
1009 PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER); 1009 PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
1010 1010
1011 // If we got a map from the runtime call, we can do a fast 1011 // If we got a map from the runtime call, we can do a fast
1012 // modification check. Otherwise, we got a fixed array, and we have 1012 // modification check. Otherwise, we got a fixed array, and we have
1013 // to do a slow check. 1013 // to do a slow check.
1014 Label fixed_array; 1014 Label fixed_array;
1015 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); 1015 __ Ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1016 __ LoadRoot(at, Heap::kMetaMapRootIndex); 1016 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1017 __ Branch(&fixed_array, ne, a2, Operand(at)); 1017 __ Branch(&fixed_array, ne, a2, Operand(at));
1018 1018
1019 // We got a map in register v0. Get the enumeration cache from it. 1019 // We got a map in register v0. Get the enumeration cache from it.
1020 Label no_descriptors; 1020 Label no_descriptors;
1021 __ bind(&use_cache); 1021 __ bind(&use_cache);
1022 1022
1023 __ EnumLength(a1, v0); 1023 __ EnumLength(a1, v0);
1024 __ Branch(&no_descriptors, eq, a1, Operand(Smi::kZero)); 1024 __ Branch(&no_descriptors, eq, a1, Operand(Smi::kZero));
1025 1025
1026 __ LoadInstanceDescriptors(v0, a2); 1026 __ LoadInstanceDescriptors(v0, a2);
1027 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset)); 1027 __ Ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1028 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset)); 1028 __ Ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1029 1029
1030 // Set up the four remaining stack slots. 1030 // Set up the four remaining stack slots.
1031 __ li(a0, Operand(Smi::kZero)); 1031 __ li(a0, Operand(Smi::kZero));
1032 // Push map, enumeration cache, enumeration cache length (as smi) and zero. 1032 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1033 __ Push(v0, a2, a1, a0); 1033 __ Push(v0, a2, a1, a0);
1034 __ jmp(&loop); 1034 __ jmp(&loop);
1035 1035
1036 __ bind(&no_descriptors); 1036 __ bind(&no_descriptors);
1037 __ Drop(1); 1037 __ Drop(1);
1038 __ jmp(&exit); 1038 __ jmp(&exit);
1039 1039
1040 // We got a fixed array in register v0. Iterate through that. 1040 // We got a fixed array in register v0. Iterate through that.
1041 __ bind(&fixed_array); 1041 __ bind(&fixed_array);
1042 1042
1043 __ li(a1, Operand(Smi::FromInt(1))); // Smi(1) indicates slow check 1043 __ li(a1, Operand(Smi::FromInt(1))); // Smi(1) indicates slow check
1044 __ Push(a1, v0); // Smi and array 1044 __ Push(a1, v0); // Smi and array
1045 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset)); 1045 __ Ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1046 __ Push(a1); // Fixed array length (as smi). 1046 __ Push(a1); // Fixed array length (as smi).
1047 PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS); 1047 PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1048 __ li(a0, Operand(Smi::kZero)); 1048 __ li(a0, Operand(Smi::kZero));
1049 __ Push(a0); // Initial index. 1049 __ Push(a0); // Initial index.
1050 1050
1051 // Generate code for doing the condition check. 1051 // Generate code for doing the condition check.
1052 __ bind(&loop); 1052 __ bind(&loop);
1053 SetExpressionAsStatementPosition(stmt->each()); 1053 SetExpressionAsStatementPosition(stmt->each());
1054 1054
1055 // Load the current count to a0, load the length to a1. 1055 // Load the current count to a0, load the length to a1.
1056 __ ld(a0, MemOperand(sp, 0 * kPointerSize)); 1056 __ Ld(a0, MemOperand(sp, 0 * kPointerSize));
1057 __ ld(a1, MemOperand(sp, 1 * kPointerSize)); 1057 __ Ld(a1, MemOperand(sp, 1 * kPointerSize));
1058 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1)); 1058 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1059 1059
1060 // Get the current entry of the array into register a3. 1060 // Get the current entry of the array into register a3.
1061 __ ld(a2, MemOperand(sp, 2 * kPointerSize)); 1061 __ Ld(a2, MemOperand(sp, 2 * kPointerSize));
1062 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 1062 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1063 __ SmiScale(a4, a0, kPointerSizeLog2); 1063 __ SmiScale(a4, a0, kPointerSizeLog2);
1064 __ daddu(a4, a2, a4); // Array base + scaled (smi) index. 1064 __ daddu(a4, a2, a4); // Array base + scaled (smi) index.
1065 __ ld(result_register(), MemOperand(a4)); // Current entry. 1065 __ Ld(result_register(), MemOperand(a4)); // Current entry.
1066 1066
1067 // Get the expected map from the stack or a smi in the 1067 // Get the expected map from the stack or a smi in the
1068 // permanent slow case into register a2. 1068 // permanent slow case into register a2.
1069 __ ld(a2, MemOperand(sp, 3 * kPointerSize)); 1069 __ Ld(a2, MemOperand(sp, 3 * kPointerSize));
1070 1070
1071 // Check if the expected map still matches that of the enumerable. 1071 // Check if the expected map still matches that of the enumerable.
1072 // If not, we may have to filter the key. 1072 // If not, we may have to filter the key.
1073 Label update_each; 1073 Label update_each;
1074 __ ld(a1, MemOperand(sp, 4 * kPointerSize)); 1074 __ Ld(a1, MemOperand(sp, 4 * kPointerSize));
1075 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset)); 1075 __ Ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1076 __ Branch(&update_each, eq, a4, Operand(a2)); 1076 __ Branch(&update_each, eq, a4, Operand(a2));
1077 1077
1078 // We need to filter the key, record slow-path here. 1078 // We need to filter the key, record slow-path here.
1079 int const vector_index = SmiFromSlot(slot)->value(); 1079 int const vector_index = SmiFromSlot(slot)->value();
1080 __ EmitLoadFeedbackVector(a3); 1080 __ EmitLoadFeedbackVector(a3);
1081 __ li(a2, Operand(FeedbackVector::MegamorphicSentinel(isolate()))); 1081 __ li(a2, Operand(FeedbackVector::MegamorphicSentinel(isolate())));
1082 __ sd(a2, FieldMemOperand(a3, FixedArray::OffsetOfElementAt(vector_index))); 1082 __ Sd(a2, FieldMemOperand(a3, FixedArray::OffsetOfElementAt(vector_index)));
1083 1083
1084 __ mov(a0, result_register()); 1084 __ mov(a0, result_register());
1085 // a0 contains the key. The receiver in a1 is the second argument to the 1085 // a0 contains the key. The receiver in a1 is the second argument to the
1086 // ForInFilter. ForInFilter returns undefined if the receiver doesn't 1086 // ForInFilter. ForInFilter returns undefined if the receiver doesn't
1087 // have the key or returns the name-converted key. 1087 // have the key or returns the name-converted key.
1088 __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET); 1088 __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
1089 RestoreContext(); 1089 RestoreContext();
1090 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER); 1090 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1091 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 1091 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1092 __ Branch(loop_statement.continue_label(), eq, result_register(), 1092 __ Branch(loop_statement.continue_label(), eq, result_register(),
(...skipping 30 matching lines...)
1123 1123
1124 // Exit and decrement the loop depth. 1124 // Exit and decrement the loop depth.
1125 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS); 1125 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1126 __ bind(&exit); 1126 __ bind(&exit);
1127 decrement_loop_depth(); 1127 decrement_loop_depth();
1128 } 1128 }
1129 1129
1130 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset, 1130 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1131 FeedbackSlot slot) { 1131 FeedbackSlot slot) {
1132 DCHECK(NeedsHomeObject(initializer)); 1132 DCHECK(NeedsHomeObject(initializer));
1133 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); 1133 __ Ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1134 __ ld(StoreDescriptor::ValueRegister(), 1134 __ Ld(StoreDescriptor::ValueRegister(),
1135 MemOperand(sp, offset * kPointerSize)); 1135 MemOperand(sp, offset * kPointerSize));
1136 CallStoreIC(slot, isolate()->factory()->home_object_symbol()); 1136 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1137 } 1137 }
1138 1138
1139 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer, 1139 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1140 int offset, 1140 int offset,
1141 FeedbackSlot slot) { 1141 FeedbackSlot slot) {
1142 DCHECK(NeedsHomeObject(initializer)); 1142 DCHECK(NeedsHomeObject(initializer));
1143 __ Move(StoreDescriptor::ReceiverRegister(), v0); 1143 __ Move(StoreDescriptor::ReceiverRegister(), v0);
1144 __ ld(StoreDescriptor::ValueRegister(), 1144 __ Ld(StoreDescriptor::ValueRegister(),
1145 MemOperand(sp, offset * kPointerSize)); 1145 MemOperand(sp, offset * kPointerSize));
1146 CallStoreIC(slot, isolate()->factory()->home_object_symbol()); 1146 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1147 } 1147 }
1148 1148
1149 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, 1149 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1150 TypeofMode typeof_mode) { 1150 TypeofMode typeof_mode) {
1151 // Record position before possible IC call. 1151 // Record position before possible IC call.
1152 SetExpressionPosition(proxy); 1152 SetExpressionPosition(proxy);
1153 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS); 1153 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1154 Variable* var = proxy->var(); 1154 Variable* var = proxy->var();
(...skipping 54 matching lines...)
1209 } 1209 }
1210 } 1210 }
1211 } 1211 }
1212 1212
1213 1213
1214 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { 1214 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1215 Comment cmnt(masm_, "[ ObjectLiteral"); 1215 Comment cmnt(masm_, "[ ObjectLiteral");
1216 1216
1217 Handle<BoilerplateDescription> constant_properties = 1217 Handle<BoilerplateDescription> constant_properties =
1218 expr->GetOrBuildConstantProperties(isolate()); 1218 expr->GetOrBuildConstantProperties(isolate());
1219 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1219 __ Ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1220 __ li(a2, Operand(SmiFromSlot(expr->literal_slot()))); 1220 __ li(a2, Operand(SmiFromSlot(expr->literal_slot())));
1221 __ li(a1, Operand(constant_properties)); 1221 __ li(a1, Operand(constant_properties));
1222 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags()))); 1222 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1223 if (MustCreateObjectLiteralWithRuntime(expr)) { 1223 if (MustCreateObjectLiteralWithRuntime(expr)) {
1224 __ Push(a3, a2, a1, a0); 1224 __ Push(a3, a2, a1, a0);
1225 __ CallRuntime(Runtime::kCreateObjectLiteral); 1225 __ CallRuntime(Runtime::kCreateObjectLiteral);
1226 } else { 1226 } else {
1227 Callable callable = CodeFactory::FastCloneShallowObject( 1227 Callable callable = CodeFactory::FastCloneShallowObject(
1228 isolate(), expr->properties_count()); 1228 isolate(), expr->properties_count());
1229 __ Call(callable.code(), RelocInfo::CODE_TARGET); 1229 __ Call(callable.code(), RelocInfo::CODE_TARGET);
(...skipping 26 matching lines...)
1256 // Fall through. 1256 // Fall through.
1257 case ObjectLiteral::Property::COMPUTED: 1257 case ObjectLiteral::Property::COMPUTED:
1258 // It is safe to use [[Put]] here because the boilerplate already 1258 // It is safe to use [[Put]] here because the boilerplate already
1259 // contains computed properties with an uninitialized value. 1259 // contains computed properties with an uninitialized value.
1260 if (key->IsStringLiteral()) { 1260 if (key->IsStringLiteral()) {
1261 DCHECK(key->IsPropertyName()); 1261 DCHECK(key->IsPropertyName());
1262 if (property->emit_store()) { 1262 if (property->emit_store()) {
1263 VisitForAccumulatorValue(value); 1263 VisitForAccumulatorValue(value);
1264 __ mov(StoreDescriptor::ValueRegister(), result_register()); 1264 __ mov(StoreDescriptor::ValueRegister(), result_register());
1265 DCHECK(StoreDescriptor::ValueRegister().is(a0)); 1265 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1266 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); 1266 __ Ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1267 CallStoreIC(property->GetSlot(0), key->value(), kStoreOwn); 1267 CallStoreIC(property->GetSlot(0), key->value(), kStoreOwn);
1268 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS); 1268 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1269 1269
1270 if (NeedsHomeObject(value)) { 1270 if (NeedsHomeObject(value)) {
1271 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1)); 1271 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1272 } 1272 }
1273 } else { 1273 } else {
1274 VisitForEffect(value); 1274 VisitForEffect(value);
1275 } 1275 }
1276 break; 1276 break;
1277 } 1277 }
1278 // Duplicate receiver on stack. 1278 // Duplicate receiver on stack.
1279 __ ld(a0, MemOperand(sp)); 1279 __ Ld(a0, MemOperand(sp));
1280 PushOperand(a0); 1280 PushOperand(a0);
1281 VisitForStackValue(key); 1281 VisitForStackValue(key);
1282 VisitForStackValue(value); 1282 VisitForStackValue(value);
1283 if (property->emit_store()) { 1283 if (property->emit_store()) {
1284 if (NeedsHomeObject(value)) { 1284 if (NeedsHomeObject(value)) {
1285 EmitSetHomeObject(value, 2, property->GetSlot()); 1285 EmitSetHomeObject(value, 2, property->GetSlot());
1286 } 1286 }
1287 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes. 1287 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1288 PushOperand(a0); 1288 PushOperand(a0);
1289 CallRuntimeWithOperands(Runtime::kSetProperty); 1289 CallRuntimeWithOperands(Runtime::kSetProperty);
1290 } else { 1290 } else {
1291 DropOperands(3); 1291 DropOperands(3);
1292 } 1292 }
1293 break; 1293 break;
1294 case ObjectLiteral::Property::PROTOTYPE: 1294 case ObjectLiteral::Property::PROTOTYPE:
1295 // Duplicate receiver on stack. 1295 // Duplicate receiver on stack.
1296 __ ld(a0, MemOperand(sp)); 1296 __ Ld(a0, MemOperand(sp));
1297 PushOperand(a0); 1297 PushOperand(a0);
1298 VisitForStackValue(value); 1298 VisitForStackValue(value);
1299 DCHECK(property->emit_store()); 1299 DCHECK(property->emit_store());
1300 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); 1300 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1301 PrepareForBailoutForId(expr->GetIdForPropertySet(i), 1301 PrepareForBailoutForId(expr->GetIdForPropertySet(i),
1302 BailoutState::NO_REGISTERS); 1302 BailoutState::NO_REGISTERS);
1303 break; 1303 break;
1304 case ObjectLiteral::Property::GETTER: 1304 case ObjectLiteral::Property::GETTER:
1305 if (property->emit_store()) { 1305 if (property->emit_store()) {
1306 AccessorTable::Iterator it = accessor_table.lookup(key); 1306 AccessorTable::Iterator it = accessor_table.lookup(key);
1307 it->second->bailout_id = expr->GetIdForPropertySet(i); 1307 it->second->bailout_id = expr->GetIdForPropertySet(i);
1308 it->second->getter = property; 1308 it->second->getter = property;
1309 } 1309 }
1310 break; 1310 break;
1311 case ObjectLiteral::Property::SETTER: 1311 case ObjectLiteral::Property::SETTER:
1312 if (property->emit_store()) { 1312 if (property->emit_store()) {
1313 AccessorTable::Iterator it = accessor_table.lookup(key); 1313 AccessorTable::Iterator it = accessor_table.lookup(key);
1314 it->second->bailout_id = expr->GetIdForPropertySet(i); 1314 it->second->bailout_id = expr->GetIdForPropertySet(i);
1315 it->second->setter = property; 1315 it->second->setter = property;
1316 } 1316 }
1317 break; 1317 break;
1318 } 1318 }
1319 } 1319 }
1320 1320
1321 // Emit code to define accessors, using only a single call to the runtime for 1321 // Emit code to define accessors, using only a single call to the runtime for
1322 // each pair of corresponding getters and setters. 1322 // each pair of corresponding getters and setters.
1323 for (AccessorTable::Iterator it = accessor_table.begin(); 1323 for (AccessorTable::Iterator it = accessor_table.begin();
1324 it != accessor_table.end(); 1324 it != accessor_table.end();
1325 ++it) { 1325 ++it) {
1326 __ ld(a0, MemOperand(sp)); // Duplicate receiver. 1326 __ Ld(a0, MemOperand(sp)); // Duplicate receiver.
1327 PushOperand(a0); 1327 PushOperand(a0);
1328 VisitForStackValue(it->first); 1328 VisitForStackValue(it->first);
1329 EmitAccessor(it->second->getter); 1329 EmitAccessor(it->second->getter);
1330 EmitAccessor(it->second->setter); 1330 EmitAccessor(it->second->setter);
1331 __ li(a0, Operand(Smi::FromInt(NONE))); 1331 __ li(a0, Operand(Smi::FromInt(NONE)));
1332 PushOperand(a0); 1332 PushOperand(a0);
1333 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked); 1333 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1334 PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS); 1334 PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1335 } 1335 }
1336 1336
1337 if (result_saved) { 1337 if (result_saved) {
1338 context()->PlugTOS(); 1338 context()->PlugTOS();
1339 } else { 1339 } else {
1340 context()->Plug(v0); 1340 context()->Plug(v0);
1341 } 1341 }
1342 } 1342 }
1343 1343
1344 1344
1345 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1345 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1346 Comment cmnt(masm_, "[ ArrayLiteral"); 1346 Comment cmnt(masm_, "[ ArrayLiteral");
1347 1347
1348 Handle<ConstantElementsPair> constant_elements = 1348 Handle<ConstantElementsPair> constant_elements =
1349 expr->GetOrBuildConstantElements(isolate()); 1349 expr->GetOrBuildConstantElements(isolate());
1350 1350
1351 __ mov(a0, result_register()); 1351 __ mov(a0, result_register());
1352 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1352 __ Ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1353 __ li(a2, Operand(SmiFromSlot(expr->literal_slot()))); 1353 __ li(a2, Operand(SmiFromSlot(expr->literal_slot())));
1354 __ li(a1, Operand(constant_elements)); 1354 __ li(a1, Operand(constant_elements));
1355 if (MustCreateArrayLiteralWithRuntime(expr)) { 1355 if (MustCreateArrayLiteralWithRuntime(expr)) {
1356 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags()))); 1356 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1357 __ Push(a3, a2, a1, a0); 1357 __ Push(a3, a2, a1, a0);
1358 __ CallRuntime(Runtime::kCreateArrayLiteral); 1358 __ CallRuntime(Runtime::kCreateArrayLiteral);
1359 } else { 1359 } else {
1360 Callable callable = 1360 Callable callable =
1361 CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE); 1361 CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
1362 __ Call(callable.code(), RelocInfo::CODE_TARGET); 1362 __ Call(callable.code(), RelocInfo::CODE_TARGET);
(...skipping 16 matching lines...)
1379 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; 1379 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1380 1380
1381 if (!result_saved) { 1381 if (!result_saved) {
1382 PushOperand(v0); // array literal 1382 PushOperand(v0); // array literal
1383 result_saved = true; 1383 result_saved = true;
1384 } 1384 }
1385 1385
1386 VisitForAccumulatorValue(subexpr); 1386 VisitForAccumulatorValue(subexpr);
1387 1387
1388 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index))); 1388 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1389 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 1389 __ Ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1390 __ mov(StoreDescriptor::ValueRegister(), result_register()); 1390 __ mov(StoreDescriptor::ValueRegister(), result_register());
1391 CallKeyedStoreIC(expr->LiteralFeedbackSlot()); 1391 CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1392 1392
1393 PrepareForBailoutForId(expr->GetIdForElement(array_index), 1393 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1394 BailoutState::NO_REGISTERS); 1394 BailoutState::NO_REGISTERS);
1395 } 1395 }
1396 1396
1397 if (result_saved) { 1397 if (result_saved) {
1398 context()->PlugTOS(); 1398 context()->PlugTOS();
1399 } else { 1399 } else {
(...skipping 12 matching lines...)
1412 1412
1413 // Evaluate LHS expression. 1413 // Evaluate LHS expression.
1414 switch (assign_type) { 1414 switch (assign_type) {
1415 case VARIABLE: 1415 case VARIABLE:
1416 // Nothing to do here. 1416 // Nothing to do here.
1417 break; 1417 break;
1418 case NAMED_PROPERTY: 1418 case NAMED_PROPERTY:
1419 if (expr->is_compound()) { 1419 if (expr->is_compound()) {
1420 // We need the receiver both on the stack and in the register. 1420 // We need the receiver both on the stack and in the register.
1421 VisitForStackValue(property->obj()); 1421 VisitForStackValue(property->obj());
1422 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 1422 __ Ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1423 } else { 1423 } else {
1424 VisitForStackValue(property->obj()); 1424 VisitForStackValue(property->obj());
1425 } 1425 }
1426 break; 1426 break;
1427 case KEYED_PROPERTY: 1427 case KEYED_PROPERTY:
1428 // We need the key and receiver on both the stack and in v0 and a1. 1428 // We need the key and receiver on both the stack and in v0 and a1.
1429 if (expr->is_compound()) { 1429 if (expr->is_compound()) {
1430 VisitForStackValue(property->obj()); 1430 VisitForStackValue(property->obj());
1431 VisitForStackValue(property->key()); 1431 VisitForStackValue(property->key());
1432 __ ld(LoadDescriptor::ReceiverRegister(), 1432 __ Ld(LoadDescriptor::ReceiverRegister(),
1433 MemOperand(sp, 1 * kPointerSize)); 1433 MemOperand(sp, 1 * kPointerSize));
1434 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); 1434 __ Ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1435 } else { 1435 } else {
1436 VisitForStackValue(property->obj()); 1436 VisitForStackValue(property->obj());
1437 VisitForStackValue(property->key()); 1437 VisitForStackValue(property->key());
1438 } 1438 }
1439 break; 1439 break;
1440 case NAMED_SUPER_PROPERTY: 1440 case NAMED_SUPER_PROPERTY:
1441 case KEYED_SUPER_PROPERTY: 1441 case KEYED_SUPER_PROPERTY:
1442 UNREACHABLE(); 1442 UNREACHABLE();
1443 break; 1443 break;
1444 } 1444 }
(...skipping 115 matching lines...)
1560 __ bind(&allocate); 1560 __ bind(&allocate);
1561 __ Push(Smi::FromInt(JSIteratorResult::kSize)); 1561 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1562 __ CallRuntime(Runtime::kAllocateInNewSpace); 1562 __ CallRuntime(Runtime::kAllocateInNewSpace);
1563 1563
1564 __ bind(&done_allocate); 1564 __ bind(&done_allocate);
1565 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1); 1565 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
1566 PopOperand(a2); 1566 PopOperand(a2);
1567 __ LoadRoot(a3, 1567 __ LoadRoot(a3,
1568 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); 1568 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1569 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex); 1569 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
1570 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); 1570 __ Sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
1571 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset)); 1571 __ Sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
1572 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset)); 1572 __ Sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
1573 __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset)); 1573 __ Sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
1574 __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset)); 1574 __ Sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
1575 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); 1575 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
1576 } 1576 }
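
A quick size sanity check (an aside, not part of the patch): the five Sd stores above initialize map, properties, elements, value and done, i.e. 5 * kPointerSize = 40 bytes on MIPS64, which is exactly what the STATIC_ASSERT on JSIteratorResult::kSize pins down.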
1577 1577
1578 1578
1579 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 1579 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1580 Token::Value op, 1580 Token::Value op,
1581 Expression* left_expr, 1581 Expression* left_expr,
1582 Expression* right_expr) { 1582 Expression* right_expr) {
1583 Label done, smi_case, stub_call; 1583 Label done, smi_case, stub_call;
1584 1584
(...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after
1710 case KEYED_SUPER_PROPERTY: 1710 case KEYED_SUPER_PROPERTY:
1711 UNREACHABLE(); 1711 UNREACHABLE();
1712 break; 1712 break;
1713 } 1713 }
1714 context()->Plug(v0); 1714 context()->Plug(v0);
1715 } 1715 }
1716 1716
1717 1717
1718 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( 1718 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
1719 Variable* var, MemOperand location) { 1719 Variable* var, MemOperand location) {
1720 __ sd(result_register(), location); 1720 __ Sd(result_register(), location);
1721 if (var->IsContextSlot()) { 1721 if (var->IsContextSlot()) {
1722 // RecordWrite may destroy all its register arguments. 1722 // RecordWrite may destroy all its register arguments.
1723 __ Move(a3, result_register()); 1723 __ Move(a3, result_register());
1724 int offset = Context::SlotOffset(var->index()); 1724 int offset = Context::SlotOffset(var->index());
1725 __ RecordWriteContextSlot( 1725 __ RecordWriteContextSlot(
1726 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); 1726 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
1727 } 1727 }
1728 } 1728 }
1729 1729
1730 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, 1730 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
1731 FeedbackSlot slot, 1731 FeedbackSlot slot,
1732 HoleCheckMode hole_check_mode) { 1732 HoleCheckMode hole_check_mode) {
1733 if (var->IsUnallocated()) { 1733 if (var->IsUnallocated()) {
1734 // Global var, const, or let. 1734 // Global var, const, or let.
1735 __ mov(StoreDescriptor::ValueRegister(), result_register()); 1735 __ mov(StoreDescriptor::ValueRegister(), result_register());
1736 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister()); 1736 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
1737 CallStoreIC(slot, var->name(), kStoreGlobal); 1737 CallStoreIC(slot, var->name(), kStoreGlobal);
1738 1738
1739 } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) { 1739 } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
1740 DCHECK(!var->IsLookupSlot()); 1740 DCHECK(!var->IsLookupSlot());
1741 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 1741 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
1742 MemOperand location = VarOperand(var, a1); 1742 MemOperand location = VarOperand(var, a1);
1743 // Perform an initialization check for lexically declared variables. 1743 // Perform an initialization check for lexically declared variables.
1744 if (hole_check_mode == HoleCheckMode::kRequired) { 1744 if (hole_check_mode == HoleCheckMode::kRequired) {
1745 Label assign; 1745 Label assign;
1746 __ ld(a3, location); 1746 __ Ld(a3, location);
1747 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); 1747 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
1748 __ Branch(&assign, ne, a3, Operand(a4)); 1748 __ Branch(&assign, ne, a3, Operand(a4));
1749 __ li(a3, Operand(var->name())); 1749 __ li(a3, Operand(var->name()));
1750 __ push(a3); 1750 __ push(a3);
1751 __ CallRuntime(Runtime::kThrowReferenceError); 1751 __ CallRuntime(Runtime::kThrowReferenceError);
1752 __ bind(&assign); 1752 __ bind(&assign);
1753 } 1753 }
1754 if (var->mode() != CONST) { 1754 if (var->mode() != CONST) {
1755 EmitStoreToStackLocalOrContextSlot(var, location); 1755 EmitStoreToStackLocalOrContextSlot(var, location);
1756 } else if (var->throw_on_const_assignment(language_mode())) { 1756 } else if (var->throw_on_const_assignment(language_mode())) {
1757 __ CallRuntime(Runtime::kThrowConstAssignError); 1757 __ CallRuntime(Runtime::kThrowConstAssignError);
1758 } 1758 }
1759 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) { 1759 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
1760 // Initializing assignment to const {this} needs a write barrier. 1760 // Initializing assignment to const {this} needs a write barrier.
1761 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 1761 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
1762 Label uninitialized_this; 1762 Label uninitialized_this;
1763 MemOperand location = VarOperand(var, a1); 1763 MemOperand location = VarOperand(var, a1);
1764 __ ld(a3, location); 1764 __ Ld(a3, location);
1765 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 1765 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1766 __ Branch(&uninitialized_this, eq, a3, Operand(at)); 1766 __ Branch(&uninitialized_this, eq, a3, Operand(at));
1767 __ li(a0, Operand(var->name())); 1767 __ li(a0, Operand(var->name()));
1768 __ Push(a0); 1768 __ Push(a0);
1769 __ CallRuntime(Runtime::kThrowReferenceError); 1769 __ CallRuntime(Runtime::kThrowReferenceError);
1770 __ bind(&uninitialized_this); 1770 __ bind(&uninitialized_this);
1771 EmitStoreToStackLocalOrContextSlot(var, location); 1771 EmitStoreToStackLocalOrContextSlot(var, location);
1772 1772
1773 } else { 1773 } else {
1774 DCHECK(var->mode() != CONST || op == Token::INIT); 1774 DCHECK(var->mode() != CONST || op == Token::INIT);
1775 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); 1775 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
1776 DCHECK(!var->IsLookupSlot()); 1776 DCHECK(!var->IsLookupSlot());
1777 // Assignment to var or initializing assignment to let/const in harmony 1777 // Assignment to var or initializing assignment to let/const in harmony
1778 // mode. 1778 // mode.
1779 MemOperand location = VarOperand(var, a1); 1779 MemOperand location = VarOperand(var, a1);
1780 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) { 1780 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
1781 // Check for an uninitialized let binding. 1781 // Check for an uninitialized let binding.
1782 __ ld(a2, location); 1782 __ Ld(a2, location);
1783 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); 1783 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
1784 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4)); 1784 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
1785 } 1785 }
1786 EmitStoreToStackLocalOrContextSlot(var, location); 1786 EmitStoreToStackLocalOrContextSlot(var, location);
1787 } 1787 }
1788 } 1788 }
1789 1789
1790 1790
1791 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 1791 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1792 // Assignment to a property, using a named store IC. 1792 // Assignment to a property, using a named store IC.
(...skipping 41 matching lines...)
1834 } 1834 }
1835 // Push undefined as receiver. This is patched in the method prologue if it 1835 // Push undefined as receiver. This is patched in the method prologue if it
1836 // is a sloppy mode method. 1836 // is a sloppy mode method.
1837 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 1837 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1838 PushOperand(at); 1838 PushOperand(at);
1839 convert_mode = ConvertReceiverMode::kNullOrUndefined; 1839 convert_mode = ConvertReceiverMode::kNullOrUndefined;
1840 } else { 1840 } else {
1841 // Load the function from the receiver. 1841 // Load the function from the receiver.
1842 DCHECK(callee->IsProperty()); 1842 DCHECK(callee->IsProperty());
1843 DCHECK(!callee->AsProperty()->IsSuperAccess()); 1843 DCHECK(!callee->AsProperty()->IsSuperAccess());
1844 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 1844 __ Ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1845 EmitNamedPropertyLoad(callee->AsProperty()); 1845 EmitNamedPropertyLoad(callee->AsProperty());
1846 PrepareForBailoutForId(callee->AsProperty()->LoadId(), 1846 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
1847 BailoutState::TOS_REGISTER); 1847 BailoutState::TOS_REGISTER);
1848 // Push the target function under the receiver. 1848 // Push the target function under the receiver.
1849 __ ld(at, MemOperand(sp, 0)); 1849 __ Ld(at, MemOperand(sp, 0));
1850 PushOperand(at); 1850 PushOperand(at);
1851 __ sd(v0, MemOperand(sp, kPointerSize)); 1851 __ Sd(v0, MemOperand(sp, kPointerSize));
1852 convert_mode = ConvertReceiverMode::kNotNullOrUndefined; 1852 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
1853 } 1853 }
1854 1854
1855 EmitCall(expr, convert_mode); 1855 EmitCall(expr, convert_mode);
1856 } 1856 }
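Effect of the Ld/Push/Sd shuffle above ("push the target function under the receiver"): before it, the receiver sits at sp + 0 and the freshly loaded function is in v0; afterwards the stack reads

    //   sp + kPointerSize : target function (stored from v0)
    //   sp + 0            : receiver (re-pushed copy)

which is the [target, receiver, args...] order EmitCall expects. The same shuffle recurs in EmitKeyedCallWithLoadIC below.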
1857 1857
1858 1858
1859 // Code common for calls using the IC. 1859 // Code common for calls using the IC.
1860 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 1860 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
1861 Expression* key) { 1861 Expression* key) {
1862 // Load the key. 1862 // Load the key.
1863 VisitForAccumulatorValue(key); 1863 VisitForAccumulatorValue(key);
1864 1864
1865 Expression* callee = expr->expression(); 1865 Expression* callee = expr->expression();
1866 1866
1867 // Load the function from the receiver. 1867 // Load the function from the receiver.
1868 DCHECK(callee->IsProperty()); 1868 DCHECK(callee->IsProperty());
1869 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 1869 __ Ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1870 __ Move(LoadDescriptor::NameRegister(), v0); 1870 __ Move(LoadDescriptor::NameRegister(), v0);
1871 EmitKeyedPropertyLoad(callee->AsProperty()); 1871 EmitKeyedPropertyLoad(callee->AsProperty());
1872 PrepareForBailoutForId(callee->AsProperty()->LoadId(), 1872 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
1873 BailoutState::TOS_REGISTER); 1873 BailoutState::TOS_REGISTER);
1874 1874
1875 // Push the target function under the receiver. 1875 // Push the target function under the receiver.
1876 __ ld(at, MemOperand(sp, 0)); 1876 __ Ld(at, MemOperand(sp, 0));
1877 PushOperand(at); 1877 PushOperand(at);
1878 __ sd(v0, MemOperand(sp, kPointerSize)); 1878 __ Sd(v0, MemOperand(sp, kPointerSize));
1879 1879
1880 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined); 1880 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
1881 } 1881 }
1882 1882
1883 1883
1884 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { 1884 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
1885 // Load the arguments. 1885 // Load the arguments.
1886 ZoneList<Expression*>* args = expr->arguments(); 1886 ZoneList<Expression*>* args = expr->arguments();
1887 int arg_count = args->length(); 1887 int arg_count = args->length();
1888 for (int i = 0; i < arg_count; i++) { 1888 for (int i = 0; i < arg_count; i++) {
1889 VisitForStackValue(args->at(i)); 1889 VisitForStackValue(args->at(i));
1890 } 1890 }
1891 1891
1892 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS); 1892 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
1893 // Record source position of the IC call. 1893 // Record source position of the IC call.
1894 SetCallPosition(expr, expr->tail_call_mode()); 1894 SetCallPosition(expr, expr->tail_call_mode());
1895 if (expr->tail_call_mode() == TailCallMode::kAllow) { 1895 if (expr->tail_call_mode() == TailCallMode::kAllow) {
1896 if (FLAG_trace) { 1896 if (FLAG_trace) {
1897 __ CallRuntime(Runtime::kTraceTailCall); 1897 __ CallRuntime(Runtime::kTraceTailCall);
1898 } 1898 }
1899 // Update profiling counters before the tail call since we will 1899 // Update profiling counters before the tail call since we will
1900 // not return to this function. 1900 // not return to this function.
1901 EmitProfilingCounterHandlingForReturnSequence(true); 1901 EmitProfilingCounterHandlingForReturnSequence(true);
1902 } 1902 }
1903 Handle<Code> code = 1903 Handle<Code> code =
1904 CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode()) 1904 CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
1905 .code(); 1905 .code();
1906 __ li(a3, Operand(IntFromSlot(expr->CallFeedbackICSlot()))); 1906 __ li(a3, Operand(IntFromSlot(expr->CallFeedbackICSlot())));
1907 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 1907 __ Ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
1908 __ li(a0, Operand(arg_count)); 1908 __ li(a0, Operand(arg_count));
1909 CallIC(code); 1909 CallIC(code);
1910 OperandStackDepthDecrement(arg_count + 1); 1910 OperandStackDepthDecrement(arg_count + 1);
1911 1911
1912 RecordJSReturnSite(expr); 1912 RecordJSReturnSite(expr);
1913 RestoreContext(); 1913 RestoreContext();
1914 context()->DropAndPlug(1, v0); 1914 context()->DropAndPlug(1, v0);
1915 } 1915 }
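Operand-stack layout that EmitCall's reload of the target relies on; this follows directly from the pushes above (target, then receiver, then arg_count arguments):

    // Stack at the CallIC site (sp points at the last pushed argument):
    //   sp + (argc + 1) * kPointerSize : target function  <- reloaded into a1
    //   sp +  argc      * kPointerSize : receiver
    //   sp + (argc - 1) * kPointerSize : argument 0
    //   ...
    //   sp + 0                         : argument argc - 1

The call consumes the receiver and the arguments (hence OperandStackDepthDecrement(arg_count + 1)), and DropAndPlug(1, v0) discards the target left on top while plugging the result.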
1916 1916
1917 void FullCodeGenerator::VisitCallNew(CallNew* expr) { 1917 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
(...skipping 14 matching lines...)
1932 for (int i = 0; i < arg_count; i++) { 1932 for (int i = 0; i < arg_count; i++) {
1933 VisitForStackValue(args->at(i)); 1933 VisitForStackValue(args->at(i));
1934 } 1934 }
1935 1935
1936 // Call the construct call builtin that handles allocation and 1936 // Call the construct call builtin that handles allocation and
1937 // constructor invocation. 1937 // constructor invocation.
1938 SetConstructCallPosition(expr); 1938 SetConstructCallPosition(expr);
1939 1939
1940 // Load function and argument count into a1 and a0. 1940 // Load function and argument count into a1 and a0.
1941 __ li(a0, Operand(arg_count)); 1941 __ li(a0, Operand(arg_count));
1942 __ ld(a1, MemOperand(sp, arg_count * kPointerSize)); 1942 __ Ld(a1, MemOperand(sp, arg_count * kPointerSize));
1943 1943
1944 // Record call targets in unoptimized code. 1944 // Record call targets in unoptimized code.
1945 __ EmitLoadFeedbackVector(a2); 1945 __ EmitLoadFeedbackVector(a2);
1946 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot()))); 1946 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
1947 1947
1948 CallConstructStub stub(isolate()); 1948 CallConstructStub stub(isolate());
1949 CallIC(stub.GetCode()); 1949 CallIC(stub.GetCode());
1950 OperandStackDepthDecrement(arg_count + 1); 1950 OperandStackDepthDecrement(arg_count + 1);
1951 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER); 1951 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
1952 RestoreContext(); 1952 RestoreContext();
(...skipping 129 matching lines...)
2082 __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE)); 2082 __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));
2083 2083
2084 // Check if the constructor in the map is a JS function. 2084 // Check if the constructor in the map is a JS function.
2085 Register instance_type = a2; 2085 Register instance_type = a2;
2086 __ GetMapConstructor(v0, v0, a1, instance_type); 2086 __ GetMapConstructor(v0, v0, a1, instance_type);
2087 __ Branch(&non_function_constructor, ne, instance_type, 2087 __ Branch(&non_function_constructor, ne, instance_type,
2088 Operand(JS_FUNCTION_TYPE)); 2088 Operand(JS_FUNCTION_TYPE));
2089 2089
2090 // v0 now contains the constructor function. Grab the 2090 // v0 now contains the constructor function. Grab the
2091 // instance class name from there. 2091 // instance class name from there.
2092 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset)); 2092 __ Ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
2093 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset)); 2093 __ Ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
2094 __ Branch(&done); 2094 __ Branch(&done);
2095 2095
2096 // Functions have class 'Function'. 2096 // Functions have class 'Function'.
2097 __ bind(&function); 2097 __ bind(&function);
2098 __ LoadRoot(v0, Heap::kFunction_stringRootIndex); 2098 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
2099 __ jmp(&done); 2099 __ jmp(&done);
2100 2100
2101 // Objects with a non-function constructor have class 'Object'. 2101 // Objects with a non-function constructor have class 'Object'.
2102 __ bind(&non_function_constructor); 2102 __ bind(&non_function_constructor);
2103 __ LoadRoot(v0, Heap::kObject_stringRootIndex); 2103 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
(...skipping 54 matching lines...)
2158 void FullCodeGenerator::EmitCall(CallRuntime* expr) { 2158 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2159 ZoneList<Expression*>* args = expr->arguments(); 2159 ZoneList<Expression*>* args = expr->arguments();
2160 DCHECK_LE(2, args->length()); 2160 DCHECK_LE(2, args->length());
2161 // Push target, receiver and arguments onto the stack. 2161 // Push target, receiver and arguments onto the stack.
2162 for (Expression* const arg : *args) { 2162 for (Expression* const arg : *args) {
2163 VisitForStackValue(arg); 2163 VisitForStackValue(arg);
2164 } 2164 }
2165 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS); 2165 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2166 // Move target to a1. 2166 // Move target to a1.
2167 int const argc = args->length() - 2; 2167 int const argc = args->length() - 2;
2168 __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize)); 2168 __ Ld(a1, MemOperand(sp, (argc + 1) * kPointerSize));
2169 // Call the target. 2169 // Call the target.
2170 __ li(a0, Operand(argc)); 2170 __ li(a0, Operand(argc));
2171 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 2171 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2172 OperandStackDepthDecrement(argc + 1); 2172 OperandStackDepthDecrement(argc + 1);
2173 RestoreContext(); 2173 RestoreContext();
2174 // Discard the function left on TOS. 2174 // Discard the function left on TOS.
2175 context()->DropAndPlug(1, v0); 2175 context()->DropAndPlug(1, v0);
2176 } 2176 }
2177 2177
2178 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) { 2178 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2179 ZoneList<Expression*>* args = expr->arguments(); 2179 ZoneList<Expression*>* args = expr->arguments();
2180 DCHECK_EQ(1, args->length()); 2180 DCHECK_EQ(1, args->length());
2181 VisitForAccumulatorValue(args->at(0)); 2181 VisitForAccumulatorValue(args->at(0));
2182 __ AssertFunction(v0); 2182 __ AssertFunction(v0);
2183 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); 2183 __ Ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
2184 __ ld(v0, FieldMemOperand(v0, Map::kPrototypeOffset)); 2184 __ Ld(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
2185 context()->Plug(v0); 2185 context()->Plug(v0);
2186 } 2186 }
2187 2187
2188 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { 2188 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2189 DCHECK(expr->arguments()->length() == 0); 2189 DCHECK(expr->arguments()->length() == 0);
2190 ExternalReference debug_is_active = 2190 ExternalReference debug_is_active =
2191 ExternalReference::debug_is_active_address(isolate()); 2191 ExternalReference::debug_is_active_address(isolate());
2192 __ li(at, Operand(debug_is_active)); 2192 __ li(at, Operand(debug_is_active));
2193 __ lbu(v0, MemOperand(at)); 2193 __ Lbu(v0, MemOperand(at));
2194 __ SmiTag(v0); 2194 __ SmiTag(v0);
2195 context()->Plug(v0); 2195 context()->Plug(v0);
2196 } 2196 }
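EmitDebugIsActive loads one byte and returns it as a Smi. A hedged note on what SmiTag(v0) amounts to on this port, assuming the 32-bit-Smi-in-upper-word layout V8 uses on 64-bit targets (the actual macro is defined in the MIPS64 macro assembler):

    // Assumed SmiTag expansion: shift the payload into bits 32..63, leaving
    // the low tag bit 0, which marks the value as a Smi.
    dsll32(v0, v0, 0);  // tagged = value << 32

If that assumption holds, the flag byte 0/1 comes back to JS as the Smi 0 or 1.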
2197 2197
2198 2198
2199 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) { 2199 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2200 ZoneList<Expression*>* args = expr->arguments(); 2200 ZoneList<Expression*>* args = expr->arguments();
2201 DCHECK_EQ(2, args->length()); 2201 DCHECK_EQ(2, args->length());
2202 VisitForStackValue(args->at(0)); 2202 VisitForStackValue(args->at(0));
2203 VisitForStackValue(args->at(1)); 2203 VisitForStackValue(args->at(1));
2204 2204
2205 Label runtime, done; 2205 Label runtime, done;
2206 2206
2207 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime, 2207 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime,
2208 NO_ALLOCATION_FLAGS); 2208 NO_ALLOCATION_FLAGS);
2209 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1); 2209 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
2210 __ Pop(a2, a3); 2210 __ Pop(a2, a3);
2211 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex); 2211 __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
2212 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); 2212 __ Sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2213 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset)); 2213 __ Sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2214 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset)); 2214 __ Sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2215 __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset)); 2215 __ Sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
2216 __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset)); 2216 __ Sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
2217 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); 2217 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2218 __ jmp(&done); 2218 __ jmp(&done);
2219 2219
2220 __ bind(&runtime); 2220 __ bind(&runtime);
2221 CallRuntimeWithOperands(Runtime::kCreateIterResultObject); 2221 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2222 2222
2223 __ bind(&done); 2223 __ bind(&done);
2224 context()->Plug(v0); 2224 context()->Plug(v0);
2225 } 2225 }
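The five Sd stores above initialize every field of the iterator result, which is exactly what the STATIC_ASSERT on JSIteratorResult::kSize == 5 * kPointerSize pins down:

    // Object layout implied by the stores above (offsets before the -1 tag
    // adjustment that FieldMemOperand applies):
    //   +0 * kPointerSize : map         (iterator-result map from the context)
    //   +1 * kPointerSize : properties  (empty fixed array)
    //   +2 * kPointerSize : elements    (empty fixed array)
    //   +3 * kPointerSize : value       (first pushed argument, popped to a2)
    //   +4 * kPointerSize : done        (second pushed argument, popped to a3)

The runtime fallback produces the same shape when Allocate bails out to kCreateIterResultObject.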
2226 2226
2227 2227
2228 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) { 2228 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2229 // Push function. 2229 // Push function.
2230 __ LoadNativeContextSlot(expr->context_index(), v0); 2230 __ LoadNativeContextSlot(expr->context_index(), v0);
2231 PushOperand(v0); 2231 PushOperand(v0);
2232 2232
2233 // Push undefined as the receiver. 2233 // Push undefined as the receiver.
2234 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); 2234 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2235 PushOperand(v0); 2235 PushOperand(v0);
2236 } 2236 }
2237 2237
2238 2238
2239 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) { 2239 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2240 ZoneList<Expression*>* args = expr->arguments(); 2240 ZoneList<Expression*>* args = expr->arguments();
2241 int arg_count = args->length(); 2241 int arg_count = args->length();
2242 2242
2243 SetCallPosition(expr); 2243 SetCallPosition(expr);
2244 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2244 __ Ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2245 __ li(a0, Operand(arg_count)); 2245 __ li(a0, Operand(arg_count));
2246 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined), 2246 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2247 RelocInfo::CODE_TARGET); 2247 RelocInfo::CODE_TARGET);
2248 OperandStackDepthDecrement(arg_count + 1); 2248 OperandStackDepthDecrement(arg_count + 1);
2249 RestoreContext(); 2249 RestoreContext();
2250 } 2250 }
2251 2251
2252 2252
2253 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 2253 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2254 switch (expr->op()) { 2254 switch (expr->op()) {
(...skipping 120 matching lines...)
2375 } else { 2375 } else {
2376 // Reserve space for result of postfix operation. 2376 // Reserve space for result of postfix operation.
2377 if (expr->is_postfix() && !context()->IsEffect()) { 2377 if (expr->is_postfix() && !context()->IsEffect()) {
2378 __ li(at, Operand(Smi::kZero)); 2378 __ li(at, Operand(Smi::kZero));
2379 PushOperand(at); 2379 PushOperand(at);
2380 } 2380 }
2381 switch (assign_type) { 2381 switch (assign_type) {
2382 case NAMED_PROPERTY: { 2382 case NAMED_PROPERTY: {
2383 // Put the object both on the stack and in the register. 2383 // Put the object both on the stack and in the register.
2384 VisitForStackValue(prop->obj()); 2384 VisitForStackValue(prop->obj());
2385 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2385 __ Ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2386 EmitNamedPropertyLoad(prop); 2386 EmitNamedPropertyLoad(prop);
2387 break; 2387 break;
2388 } 2388 }
2389 2389
2390 case KEYED_PROPERTY: { 2390 case KEYED_PROPERTY: {
2391 VisitForStackValue(prop->obj()); 2391 VisitForStackValue(prop->obj());
2392 VisitForStackValue(prop->key()); 2392 VisitForStackValue(prop->key());
2393 __ ld(LoadDescriptor::ReceiverRegister(), 2393 __ Ld(LoadDescriptor::ReceiverRegister(),
2394 MemOperand(sp, 1 * kPointerSize)); 2394 MemOperand(sp, 1 * kPointerSize));
2395 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); 2395 __ Ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
2396 EmitKeyedPropertyLoad(prop); 2396 EmitKeyedPropertyLoad(prop);
2397 break; 2397 break;
2398 } 2398 }
2399 2399
2400 case NAMED_SUPER_PROPERTY: 2400 case NAMED_SUPER_PROPERTY:
2401 case KEYED_SUPER_PROPERTY: 2401 case KEYED_SUPER_PROPERTY:
2402 case VARIABLE: 2402 case VARIABLE:
2403 UNREACHABLE(); 2403 UNREACHABLE();
2404 } 2404 }
2405 } 2405 }
(...skipping 20 matching lines...)
2426 if (expr->is_postfix()) { 2426 if (expr->is_postfix()) {
2427 if (!context()->IsEffect()) { 2427 if (!context()->IsEffect()) {
2428 // Save the result on the stack. If we have a named or keyed property 2428 // Save the result on the stack. If we have a named or keyed property
2429 // we store the result under the receiver that is currently on top 2429 // we store the result under the receiver that is currently on top
2430 // of the stack. 2430 // of the stack.
2431 switch (assign_type) { 2431 switch (assign_type) {
2432 case VARIABLE: 2432 case VARIABLE:
2433 __ push(v0); 2433 __ push(v0);
2434 break; 2434 break;
2435 case NAMED_PROPERTY: 2435 case NAMED_PROPERTY:
2436 __ sd(v0, MemOperand(sp, kPointerSize)); 2436 __ Sd(v0, MemOperand(sp, kPointerSize));
2437 break; 2437 break;
2438 case KEYED_PROPERTY: 2438 case KEYED_PROPERTY:
2439 __ sd(v0, MemOperand(sp, 2 * kPointerSize)); 2439 __ Sd(v0, MemOperand(sp, 2 * kPointerSize));
2440 break; 2440 break;
2441 case NAMED_SUPER_PROPERTY: 2441 case NAMED_SUPER_PROPERTY:
2442 case KEYED_SUPER_PROPERTY: 2442 case KEYED_SUPER_PROPERTY:
2443 UNREACHABLE(); 2443 UNREACHABLE();
2444 break; 2444 break;
2445 } 2445 }
2446 } 2446 }
2447 } 2447 }
2448 2448
2449 Register scratch1 = a1; 2449 Register scratch1 = a1;
(...skipping 14 matching lines...)
2464 if (expr->is_postfix()) { 2464 if (expr->is_postfix()) {
2465 if (!context()->IsEffect()) { 2465 if (!context()->IsEffect()) {
2466 // Save the result on the stack. If we have a named or keyed property 2466 // Save the result on the stack. If we have a named or keyed property
2467 // we store the result under the receiver that is currently on top 2467 // we store the result under the receiver that is currently on top
2468 // of the stack. 2468 // of the stack.
2469 switch (assign_type) { 2469 switch (assign_type) {
2470 case VARIABLE: 2470 case VARIABLE:
2471 PushOperand(v0); 2471 PushOperand(v0);
2472 break; 2472 break;
2473 case NAMED_PROPERTY: 2473 case NAMED_PROPERTY:
2474 __ sd(v0, MemOperand(sp, kPointerSize)); 2474 __ Sd(v0, MemOperand(sp, kPointerSize));
2475 break; 2475 break;
2476 case KEYED_PROPERTY: 2476 case KEYED_PROPERTY:
2477 __ sd(v0, MemOperand(sp, 2 * kPointerSize)); 2477 __ Sd(v0, MemOperand(sp, 2 * kPointerSize));
2478 break; 2478 break;
2479 case NAMED_SUPER_PROPERTY: 2479 case NAMED_SUPER_PROPERTY:
2480 case KEYED_SUPER_PROPERTY: 2480 case KEYED_SUPER_PROPERTY:
2481 UNREACHABLE(); 2481 UNREACHABLE();
2482 break; 2482 break;
2483 } 2483 }
2484 } 2484 }
2485 } 2485 }
2486 2486
2487 __ bind(&stub_call); 2487 __ bind(&stub_call);
(...skipping 81 matching lines...)
2569 &if_true, &if_false, &fall_through); 2569 &if_true, &if_false, &fall_through);
2570 2570
2571 { AccumulatorValueContext context(this); 2571 { AccumulatorValueContext context(this);
2572 VisitForTypeofValue(sub_expr); 2572 VisitForTypeofValue(sub_expr);
2573 } 2573 }
2574 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2574 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2575 2575
2576 Factory* factory = isolate()->factory(); 2576 Factory* factory = isolate()->factory();
2577 if (String::Equals(check, factory->number_string())) { 2577 if (String::Equals(check, factory->number_string())) {
2578 __ JumpIfSmi(v0, if_true); 2578 __ JumpIfSmi(v0, if_true);
2579 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); 2579 __ Ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
2580 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); 2580 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
2581 Split(eq, v0, Operand(at), if_true, if_false, fall_through); 2581 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
2582 } else if (String::Equals(check, factory->string_string())) { 2582 } else if (String::Equals(check, factory->string_string())) {
2583 __ JumpIfSmi(v0, if_false); 2583 __ JumpIfSmi(v0, if_false);
2584 __ GetObjectType(v0, v0, a1); 2584 __ GetObjectType(v0, v0, a1);
2585 Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false, 2585 Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
2586 fall_through); 2586 fall_through);
2587 } else if (String::Equals(check, factory->symbol_string())) { 2587 } else if (String::Equals(check, factory->symbol_string())) {
2588 __ JumpIfSmi(v0, if_false); 2588 __ JumpIfSmi(v0, if_false);
2589 __ GetObjectType(v0, v0, a1); 2589 __ GetObjectType(v0, v0, a1);
2590 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through); 2590 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
2591 } else if (String::Equals(check, factory->boolean_string())) { 2591 } else if (String::Equals(check, factory->boolean_string())) {
2592 __ LoadRoot(at, Heap::kTrueValueRootIndex); 2592 __ LoadRoot(at, Heap::kTrueValueRootIndex);
2593 __ Branch(if_true, eq, v0, Operand(at)); 2593 __ Branch(if_true, eq, v0, Operand(at));
2594 __ LoadRoot(at, Heap::kFalseValueRootIndex); 2594 __ LoadRoot(at, Heap::kFalseValueRootIndex);
2595 Split(eq, v0, Operand(at), if_true, if_false, fall_through); 2595 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
2596 } else if (String::Equals(check, factory->undefined_string())) { 2596 } else if (String::Equals(check, factory->undefined_string())) {
2597 __ LoadRoot(at, Heap::kNullValueRootIndex); 2597 __ LoadRoot(at, Heap::kNullValueRootIndex);
2598 __ Branch(if_false, eq, v0, Operand(at)); 2598 __ Branch(if_false, eq, v0, Operand(at));
2599 __ JumpIfSmi(v0, if_false); 2599 __ JumpIfSmi(v0, if_false);
2600 // Check for undetectable objects => true. 2600 // Check for undetectable objects => true.
2601 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); 2601 __ Ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
2602 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); 2602 __ Lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
2603 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); 2603 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
2604 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); 2604 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
2605 } else if (String::Equals(check, factory->function_string())) { 2605 } else if (String::Equals(check, factory->function_string())) {
2606 __ JumpIfSmi(v0, if_false); 2606 __ JumpIfSmi(v0, if_false);
2607 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); 2607 __ Ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
2608 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); 2608 __ Lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
2609 __ And(a1, a1, 2609 __ And(a1, a1,
2610 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); 2610 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
2611 Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false, 2611 Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
2612 fall_through); 2612 fall_through);
2613 } else if (String::Equals(check, factory->object_string())) { 2613 } else if (String::Equals(check, factory->object_string())) {
2614 __ JumpIfSmi(v0, if_false); 2614 __ JumpIfSmi(v0, if_false);
2615 __ LoadRoot(at, Heap::kNullValueRootIndex); 2615 __ LoadRoot(at, Heap::kNullValueRootIndex);
2616 __ Branch(if_true, eq, v0, Operand(at)); 2616 __ Branch(if_true, eq, v0, Operand(at));
2617 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); 2617 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2618 __ GetObjectType(v0, v0, a1); 2618 __ GetObjectType(v0, v0, a1);
2619 __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE)); 2619 __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
2620 // Check for callable or undetectable objects => false. 2620 // Check for callable or undetectable objects => false.
2621 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); 2621 __ Lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
2622 __ And(a1, a1, 2622 __ And(a1, a1,
2623 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); 2623 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
2624 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through); 2624 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
2625 } else { 2625 } else {
2626 if (if_false != fall_through) __ jmp(if_false); 2626 if (if_false != fall_through) __ jmp(if_false);
2627 } 2627 }
2628 context()->Plug(if_true, if_false); 2628 context()->Plug(if_true, if_false);
2629 } 2629 }
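The Map bit-field tests above read most easily as mask comparisons; spelled out for the two composite cases, grounded in the And/Split pairs in the code:

    // mask = (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)
    // bits = bit_field & mask
    //
    // typeof v == "function"  iff  bits == (1 << Map::kIsCallable)
    //   (callable and not undetectable)
    // typeof v == "object"    iff  bits == 0
    //   (neither callable nor undetectable, after the JSReceiver range check
    //   and the null special case)

The undetectable-only test for "undefined" is the single-bit And/Split earlier in the same function.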
2630 2630
2631 2631
(...skipping 82 matching lines...)
2714 VisitForAccumulatorValue(sub_expr); 2714 VisitForAccumulatorValue(sub_expr);
2715 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2715 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2716 if (expr->op() == Token::EQ_STRICT) { 2716 if (expr->op() == Token::EQ_STRICT) {
2717 Heap::RootListIndex nil_value = nil == kNullValue ? 2717 Heap::RootListIndex nil_value = nil == kNullValue ?
2718 Heap::kNullValueRootIndex : 2718 Heap::kNullValueRootIndex :
2719 Heap::kUndefinedValueRootIndex; 2719 Heap::kUndefinedValueRootIndex;
2720 __ LoadRoot(a1, nil_value); 2720 __ LoadRoot(a1, nil_value);
2721 Split(eq, v0, Operand(a1), if_true, if_false, fall_through); 2721 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
2722 } else { 2722 } else {
2723 __ JumpIfSmi(v0, if_false); 2723 __ JumpIfSmi(v0, if_false);
2724 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); 2724 __ Ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
2725 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); 2725 __ Lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
2726 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); 2726 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
2727 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); 2727 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
2728 } 2728 }
2729 context()->Plug(if_true, if_false); 2729 context()->Plug(if_true, if_false);
2730 } 2730 }
2731 2731
2732 2732
2733 Register FullCodeGenerator::result_register() { 2733 Register FullCodeGenerator::result_register() {
2734 return v0; 2734 return v0;
2735 } 2735 }
2736 2736
2737 2737
2738 Register FullCodeGenerator::context_register() { 2738 Register FullCodeGenerator::context_register() {
2739 return cp; 2739 return cp;
2740 } 2740 }
2741 2741
2742 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) { 2742 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
2743 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 2743 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
2744 DCHECK(IsAligned(frame_offset, kPointerSize)); 2744 DCHECK(IsAligned(frame_offset, kPointerSize));
2745 // __ sw(value, MemOperand(fp, frame_offset)); 2745 // __ Sw(value, MemOperand(fp, frame_offset));
2746 __ ld(value, MemOperand(fp, frame_offset)); 2746 __ Ld(value, MemOperand(fp, frame_offset));
2747 } 2747 }
2748 2748
2749 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 2749 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
2750 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 2750 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
2751 DCHECK(IsAligned(frame_offset, kPointerSize)); 2751 DCHECK(IsAligned(frame_offset, kPointerSize));
2752 // __ sw(value, MemOperand(fp, frame_offset)); 2752 // __ Sw(value, MemOperand(fp, frame_offset));
2753 __ sd(value, MemOperand(fp, frame_offset)); 2753 __ Sd(value, MemOperand(fp, frame_offset));
2754 } 2754 }
2755 2755
2756 2756
2757 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 2757 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
2758 __ ld(dst, ContextMemOperand(cp, context_index)); 2758 __ Ld(dst, ContextMemOperand(cp, context_index));
2759 } 2759 }
2760 2760
2761 2761
2762 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { 2762 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
2763 DeclarationScope* closure_scope = scope()->GetClosureScope(); 2763 DeclarationScope* closure_scope = scope()->GetClosureScope();
2764 if (closure_scope->is_script_scope() || 2764 if (closure_scope->is_script_scope() ||
2765 closure_scope->is_module_scope()) { 2765 closure_scope->is_module_scope()) {
2766 // Contexts nested in the native context have a canonical empty function 2766 // Contexts nested in the native context have a canonical empty function
2767 // as their closure, not the anonymous closure containing the global 2767 // as their closure, not the anonymous closure containing the global
2768 // code. 2768 // code.
2769 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at); 2769 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
2770 } else if (closure_scope->is_eval_scope()) { 2770 } else if (closure_scope->is_eval_scope()) {
2771 // Contexts created by a call to eval have the same closure as the 2771 // Contexts created by a call to eval have the same closure as the
2772 // context calling eval, not the anonymous closure containing the eval 2772 // context calling eval, not the anonymous closure containing the eval
2773 // code. Fetch it from the context. 2773 // code. Fetch it from the context.
2774 __ ld(at, ContextMemOperand(cp, Context::CLOSURE_INDEX)); 2774 __ Ld(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
2775 } else { 2775 } else {
2776 DCHECK(closure_scope->is_function_scope()); 2776 DCHECK(closure_scope->is_function_scope());
2777 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 2777 __ Ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2778 } 2778 }
2779 PushOperand(at); 2779 PushOperand(at);
2780 } 2780 }
2781 2781
2782 2782
2783 #undef __ 2783 #undef __
2784 2784
2785 2785
2786 void BackEdgeTable::PatchAt(Code* unoptimized_code, 2786 void BackEdgeTable::PatchAt(Code* unoptimized_code,
2787 Address pc, 2787 Address pc,
(...skipping 67 matching lines...)
2855 reinterpret_cast<uint64_t>( 2855 reinterpret_cast<uint64_t>(
2856 isolate->builtins()->OnStackReplacement()->entry())); 2856 isolate->builtins()->OnStackReplacement()->entry()));
2857 return ON_STACK_REPLACEMENT; 2857 return ON_STACK_REPLACEMENT;
2858 } 2858 }
2859 2859
2860 2860
2861 } // namespace internal 2861 } // namespace internal
2862 } // namespace v8 2862 } // namespace v8
2863 2863
2864 #endif // V8_TARGET_ARCH_MIPS64 2864 #endif // V8_TARGET_ARCH_MIPS64