Chromium Code Reviews

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 24795002: Thumb2 Backend: Use sp-relative offsets for stack slot operations in Crankshaft
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 2 months ago
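
What the patch does, in brief: Crankshaft currently addresses its spill slots and the fixed frame fields relative to fp; this change switches those accesses to sp-relative MemOperands, with LCodeGen tracking the current fp-to-sp distance in a new fp_sp_delta_ field (set in the prologue and at block labels via SetFpSpDelta, grown by the Push helpers, and shrunk by Drop and by calls that pop their arguments). The sketch below is only an illustration of that bookkeeping under assumed names, not the real LCodeGen interface.

// A minimal, self-contained sketch (not V8 code) of the idea behind this
// patch: address stack slots relative to sp instead of fp, and keep a
// running fp_sp_delta_ -- the current distance from sp up to fp -- so any
// fp-relative offset X becomes the sp-relative offset X + fp_sp_delta_.
// Every push, drop and call that moves sp must update the delta.
// All names here are illustrative.
#include <cassert>

constexpr int kPointerSize = 4;  // ARM32 word size.

class FrameTracker {
 public:
  // After the prologue, fp = sp + 2 * kPointerSize; reserving the spill
  // area then pushes sp further down, growing the delta.
  void EnterFrame(int spill_slots) {
    fp_sp_delta_ = 2 * kPointerSize + spill_slots * kPointerSize;
  }

  // Pushing a word moves sp down, so fp is now further from sp.
  void Push() { fp_sp_delta_ += kPointerSize; }

  // Dropping n words (or a callee popping its arguments) moves sp back up.
  void Drop(int count) { fp_sp_delta_ -= count * kPointerSize; }

  // Translate an fp-relative offset into the equivalent sp-relative one.
  int ToSpOffset(int fp_offset) const { return fp_offset + fp_sp_delta_; }

 private:
  int fp_sp_delta_ = -1;  // -1 means "no frame built", as in the patch.
};

int main() {
  FrameTracker frame;
  frame.EnterFrame(/*spill_slots=*/3);
  const int kSomeFpOffset = -2 * kPointerSize;  // hypothetical slot
  int sp_offset_before = frame.ToSpOffset(kSomeFpOffset);
  frame.Push();   // e.g. Push(r1) before a runtime call
  frame.Drop(1);  // the call consumed that argument
  // Once the delta is back in sync, the same fp-relative slot is again
  // reachable at the same sp-relative offset.
  assert(sp_offset_before == frame.ToSpOffset(kSomeFpOffset));
  return 0;
}
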
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 132 matching lines...)
143 __ b(eq, &ok); 143 __ b(eq, &ok);
144 int receiver_offset = scope()->num_parameters() * kPointerSize; 144 int receiver_offset = scope()->num_parameters() * kPointerSize;
145 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 145 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
146 __ str(r2, MemOperand(sp, receiver_offset)); 146 __ str(r2, MemOperand(sp, receiver_offset));
147 __ bind(&ok); 147 __ bind(&ok);
148 } 148 }
149 } 149 }
150 150
151 info()->set_prologue_offset(masm_->pc_offset()); 151 info()->set_prologue_offset(masm_->pc_offset());
152 if (NeedsEagerFrame()) { 152 if (NeedsEagerFrame()) {
153 fp_sp_delta_ = 2 * kPointerSize;
153 if (info()->IsStub()) { 154 if (info()->IsStub()) {
154 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); 155 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
155 __ Push(Smi::FromInt(StackFrame::STUB)); 156 __ Push(Smi::FromInt(StackFrame::STUB));
156 // Adjust FP to point to saved FP. 157 // Adjust FP to point to saved FP.
157 __ add(fp, sp, Operand(2 * kPointerSize)); 158 __ add(fp, sp, Operand(fp_sp_delta_));
158 } else { 159 } else {
159 PredictableCodeSizeScope predictible_code_size_scope( 160 PredictableCodeSizeScope predictible_code_size_scope(
160 masm_, kNoCodeAgeSequenceLength * Assembler::kInstrSize); 161 masm_, kNoCodeAgeSequenceLength * Assembler::kInstrSize);
161 // The following three instructions must remain together and unmodified 162 // The following three instructions must remain together and unmodified
162 // for code aging to work properly. 163 // for code aging to work properly.
163 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit()); 164 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
164 __ nop(ip.code()); 165 __ nop(ip.code());
165 if (masm()->is_thumb_mode()) { 166 if (masm()->is_thumb_mode()) {
166 __ nop(ip.code()); 167 __ nop(ip.code());
167 } 168 }
168 // Adjust FP to point to saved FP. 169 // Adjust FP to point to saved FP.
169 __ add(fp, sp, Operand(2 * kPointerSize)); 170 __ add(fp, sp, Operand(fp_sp_delta_));
170 } 171 }
171 frame_is_built_ = true; 172 frame_is_built_ = true;
172 info_->AddNoFrameRange(0, masm_->pc_offset()); 173 info_->AddNoFrameRange(0, masm_->pc_offset());
173 } 174 }
174 175
175 // Reserve space for the stack slots needed by the code. 176 // Reserve space for the stack slots needed by the code.
176 int slots = GetStackSlotCount(); 177 int slots = GetStackSlotCount();
177 if (slots > 0) { 178 if (slots > 0) {
179 int slots_size = slots * kPointerSize;
180 fp_sp_delta_ += slots_size;
178 if (FLAG_debug_code) { 181 if (FLAG_debug_code) {
179 __ sub(sp, sp, Operand(slots * kPointerSize)); 182 __ sub(sp, sp, Operand(slots_size));
180 __ push(r0); 183 __ push(r0);
181 __ push(r1); 184 __ push(r1);
182 __ add(r0, sp, Operand(slots * kPointerSize)); 185 __ add(r0, sp, Operand(slots_size));
183 __ mov(r1, Operand(kSlotsZapValue)); 186 __ mov(r1, Operand(kSlotsZapValue));
184 Label loop; 187 Label loop;
185 __ bind(&loop); 188 __ bind(&loop);
186 __ sub(r0, r0, Operand(kPointerSize)); 189 __ sub(r0, r0, Operand(kPointerSize));
187 __ str(r1, MemOperand(r0, 2 * kPointerSize)); 190 __ str(r1, MemOperand(r0, 2 * kPointerSize));
188 __ cmp(r0, sp); 191 __ cmp(r0, sp);
189 __ b(ne, &loop); 192 __ b(ne, &loop);
190 __ pop(r1); 193 __ pop(r1);
191 __ pop(r0); 194 __ pop(r0);
192 } else { 195 } else {
193 __ sub(sp, sp, Operand(slots * kPointerSize)); 196 __ sub(sp, sp, Operand(slots_size));
194 } 197 }
195 } 198 }
196 199
197 if (info()->saves_caller_doubles()) { 200 if (info()->saves_caller_doubles()) {
198 Comment(";;; Save clobbered callee double registers"); 201 Comment(";;; Save clobbered callee double registers");
199 int count = 0; 202 int count = 0;
200 BitVector* doubles = chunk()->allocated_double_registers(); 203 BitVector* doubles = chunk()->allocated_double_registers();
201 BitVector::Iterator save_iterator(doubles); 204 BitVector::Iterator save_iterator(doubles);
202 while (!save_iterator.Done()) { 205 while (!save_iterator.Done()) {
203 __ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()), 206 __ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
204 MemOperand(sp, count * kDoubleSize)); 207 MemOperand(sp, count * kDoubleSize));
205 save_iterator.Advance(); 208 save_iterator.Advance();
206 count++; 209 count++;
207 } 210 }
208 } 211 }
209 212
210 // Possibly allocate a local context. 213 // Possibly allocate a local context.
211 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 214 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
212 if (heap_slots > 0) { 215 if (heap_slots > 0) {
216 ASSERT(frame_is_built_);
213 Comment(";;; Allocate local context"); 217 Comment(";;; Allocate local context");
214 // Argument to NewContext is the function, which is in r1. 218 // Argument to NewContext is the function, which is in r1.
215 __ push(r1); 219 __ push(r1);
216 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 220 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
217 FastNewContextStub stub(heap_slots); 221 FastNewContextStub stub(heap_slots);
218 __ CallStub(&stub); 222 __ CallStub(&stub);
219 } else { 223 } else {
220 __ CallRuntime(Runtime::kNewFunctionContext, 1); 224 __ CallRuntime(Runtime::kNewFunctionContext, 1);
221 } 225 }
222 RecordSafepoint(Safepoint::kNoLazyDeopt); 226 RecordSafepoint(Safepoint::kNoLazyDeopt);
223 // Context is returned in both r0 and cp. It replaces the context 227 // Context is returned in both r0 and cp. It replaces the context
224 // passed to us. It's saved in the stack and kept live in cp. 228 // passed to us. It's saved in the stack and kept live in cp.
225 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 229 int cp_offset = StandardFrameConstants::kContextOffset + fp_sp_delta_;
230 __ str(cp, MemOperand(sp, cp_offset));
226 // Copy any necessary parameters into the context. 231 // Copy any necessary parameters into the context.
227 int num_parameters = scope()->num_parameters(); 232 int num_parameters = scope()->num_parameters();
228 for (int i = 0; i < num_parameters; i++) { 233 for (int i = 0; i < num_parameters; i++) {
229 Variable* var = scope()->parameter(i); 234 Variable* var = scope()->parameter(i);
230 if (var->IsContextSlot()) { 235 if (var->IsContextSlot()) {
231 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 236 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
232 (num_parameters - 1 - i) * kPointerSize; 237 (num_parameters - 1 - i) * kPointerSize + fp_sp_delta_;
233 // Load parameter from stack. 238 // Load parameter from stack.
234 __ ldr(r0, MemOperand(fp, parameter_offset)); 239 __ ldr(r0, MemOperand(sp, parameter_offset));
235 // Store it in the context. 240 // Store it in the context.
236 MemOperand target = ContextOperand(cp, var->index()); 241 MemOperand target = ContextOperand(cp, var->index());
237 __ str(r0, target); 242 __ str(r0, target);
238 // Update the write barrier. This clobbers r3 and r0. 243 // Update the write barrier. This clobbers r3 and r0.
239 __ RecordWriteContextSlot( 244 __ RecordWriteContextSlot(
240 cp, 245 cp,
241 target.offset(), 246 target.offset(),
242 r0, 247 r0,
243 r3, 248 r3,
244 GetLinkRegisterState(), 249 GetLinkRegisterState(),
(...skipping 14 matching lines...)
259 bool LCodeGen::GenerateBody() { 264 bool LCodeGen::GenerateBody() {
260 ASSERT(is_generating()); 265 ASSERT(is_generating());
261 bool emit_instructions = true; 266 bool emit_instructions = true;
262 for (current_instruction_ = 0; 267 for (current_instruction_ = 0;
263 !is_aborted() && current_instruction_ < instructions_->length(); 268 !is_aborted() && current_instruction_ < instructions_->length();
264 current_instruction_++) { 269 current_instruction_++) {
265 LInstruction* instr = instructions_->at(current_instruction_); 270 LInstruction* instr = instructions_->at(current_instruction_);
266 271
267 // Don't emit code for basic blocks with a replacement. 272 // Don't emit code for basic blocks with a replacement.
268 if (instr->IsLabel()) { 273 if (instr->IsLabel()) {
269 emit_instructions = !LLabel::cast(instr)->HasReplacement(); 274 LLabel* label = LLabel::cast(instr);
275 emit_instructions = !label->HasReplacement();
276 if (emit_instructions) {
277 HBasicBlock* block = label->block();
278 int argument_count = block->HasPredecessor()
279 ? block->predecessors()->at(0)->argument_count()
280 : 0;
281 SetFpSpDelta(argument_count);
282 }
270 } 283 }
271 if (!emit_instructions) continue; 284 if (!emit_instructions) continue;
272 285
273 if (FLAG_code_comments && instr->HasInterestingComment(this)) { 286 if (FLAG_code_comments && instr->HasInterestingComment(this)) {
274 Comment(";;; <@%d,#%d> %s", 287 Comment(";;; <@%d,#%d> %s",
275 current_instruction_, 288 current_instruction_,
276 instr->hydrogen_value()->id(), 289 instr->hydrogen_value()->id(),
277 instr->Mnemonic()); 290 instr->Mnemonic());
278 } 291 }
279 292
(...skipping 20 matching lines...)
300 "-------------------- Deferred %s --------------------", 313 "-------------------- Deferred %s --------------------",
301 code->instruction_index(), 314 code->instruction_index(),
302 code->instr()->hydrogen_value()->id(), 315 code->instr()->hydrogen_value()->id(),
303 code->instr()->Mnemonic()); 316 code->instr()->Mnemonic());
304 __ bind(code->entry()); 317 __ bind(code->entry());
305 if (NeedsDeferredFrame()) { 318 if (NeedsDeferredFrame()) {
306 Comment(";;; Build frame"); 319 Comment(";;; Build frame");
307 ASSERT(!frame_is_built_); 320 ASSERT(!frame_is_built_);
308 ASSERT(info()->IsStub()); 321 ASSERT(info()->IsStub());
309 frame_is_built_ = true; 322 frame_is_built_ = true;
323 fp_sp_delta_ = 2 * kPointerSize;
310 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); 324 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
311 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); 325 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
312 __ push(scratch0()); 326 __ push(scratch0());
313 __ add(fp, sp, Operand(2 * kPointerSize)); 327 __ add(fp, sp, Operand(fp_sp_delta_));
314 Comment(";;; Deferred code"); 328 Comment(";;; Deferred code");
315 } 329 }
316 code->Generate(); 330 code->Generate();
317 if (NeedsDeferredFrame()) { 331 if (NeedsDeferredFrame()) {
318 Comment(";;; Destroy frame"); 332 Comment(";;; Destroy frame");
319 ASSERT(frame_is_built_); 333 ASSERT(frame_is_built_);
320 __ pop(ip); 334 __ pop(ip);
321 __ ldm(ia_w, sp, cp.bit() | fp.bit() | lr.bit()); 335 __ ldm(ia_w, sp, cp.bit() | fp.bit() | lr.bit());
322 frame_is_built_ = false; 336 frame_is_built_ = false;
337 fp_sp_delta_ = -1;
323 } 338 }
324 __ jmp(code->exit()); 339 __ jmp(code->exit());
325 } 340 }
326 } 341 }
327 342
328 // Force constant pool emission at the end of the deferred code to make 343 // Force constant pool emission at the end of the deferred code to make
329 // sure that no constant pools are emitted after. 344 // sure that no constant pools are emitted after.
330 masm()->CheckConstPool(true, false); 345 masm()->CheckConstPool(true, false);
331 346
332 return !is_aborted(); 347 return !is_aborted();
(...skipping 35 matching lines...)
368 __ b(&needs_frame); 383 __ b(&needs_frame);
369 } else { 384 } else {
370 __ bind(&needs_frame); 385 __ bind(&needs_frame);
371 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); 386 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
372 // This variant of deopt can only be used with stubs. Since we don't 387 // This variant of deopt can only be used with stubs. Since we don't
373 // have a function pointer to install in the stack frame that we're 388 // have a function pointer to install in the stack frame that we're
374 // building, install a special marker there instead. 389 // building, install a special marker there instead.
375 ASSERT(info()->IsStub()); 390 ASSERT(info()->IsStub());
376 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); 391 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
377 __ push(scratch0()); 392 __ push(scratch0());
378 __ add(fp, sp, Operand(2 * kPointerSize)); 393 fp_sp_delta_ = 2 * kPointerSize;
394 __ add(fp, sp, Operand(fp_sp_delta_));
379 __ mov(lr, Operand(pc), LeaveCC, al); 395 __ mov(lr, Operand(pc), LeaveCC, al);
380 __ mov(pc, ip); 396 __ mov(pc, ip);
381 } 397 }
382 } else { 398 } else {
383 __ mov(lr, Operand(pc), LeaveCC, al); 399 __ mov(lr, Operand(pc), LeaveCC, al);
384 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry))); 400 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry)));
385 } 401 }
386 masm()->CheckConstPool(false, false); 402 masm()->CheckConstPool(false, false);
387 } 403 }
388 404
(...skipping 167 matching lines...)
556 // Stack slots not implemented, use ToMemOperand instead. 572 // Stack slots not implemented, use ToMemOperand instead.
557 UNREACHABLE(); 573 UNREACHABLE();
558 return Operand::Zero(); 574 return Operand::Zero();
559 } 575 }
560 576
561 577
562 MemOperand LCodeGen::ToMemOperand(LOperand* op) const { 578 MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
563 ASSERT(!op->IsRegister()); 579 ASSERT(!op->IsRegister());
564 ASSERT(!op->IsDoubleRegister()); 580 ASSERT(!op->IsDoubleRegister());
565 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); 581 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
566 return MemOperand(fp, StackSlotOffset(op->index())); 582 return MemOperand(sp, StackSlotOffset(op->index(), fp_sp_delta_));
567 } 583 }
568 584
569 585
570 MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const { 586 MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
571 ASSERT(op->IsDoubleStackSlot()); 587 ASSERT(op->IsDoubleStackSlot());
572 return MemOperand(fp, StackSlotOffset(op->index()) + kPointerSize); 588 int offset = StackSlotOffset(op->index(), fp_sp_delta_) + kPointerSize;
589 return MemOperand(sp, offset);
573 } 590 }
574 591
575 592
576 void LCodeGen::WriteTranslation(LEnvironment* environment, 593 void LCodeGen::WriteTranslation(LEnvironment* environment,
577 Translation* translation) { 594 Translation* translation) {
578 if (environment == NULL) return; 595 if (environment == NULL) return;
579 596
580 // The translation includes one command per value in the environment. 597 // The translation includes one command per value in the environment.
581 int translation_size = environment->translation_size(); 598 int translation_size = environment->translation_size();
582 // The output frame height does not include the parameters. 599 // The output frame height does not include the parameters.
(...skipping 111 matching lines...)
694 } else if (op->IsConstantOperand()) { 711 } else if (op->IsConstantOperand()) {
695 HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op)); 712 HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
696 int src_index = DefineDeoptimizationLiteral(constant->handle()); 713 int src_index = DefineDeoptimizationLiteral(constant->handle());
697 translation->StoreLiteral(src_index); 714 translation->StoreLiteral(src_index);
698 } else { 715 } else {
699 UNREACHABLE(); 716 UNREACHABLE();
700 } 717 }
701 } 718 }
702 719
703 720
721 void LCodeGen::SetFpSpDelta(int argument_count) {
722 int slots_size = GetStackSlotCount() * kPointerSize;
723 int args_size = argument_count * kPointerSize;
724 int fixed_size = 2 * kPointerSize;
725 fp_sp_delta_ = slots_size + args_size + fixed_size;
726 }
727
728
729 void LCodeGen::Push(Register src1) {
730 fp_sp_delta_ += kPointerSize;
731 __ push(src1);
732 }
733
734
735 void LCodeGen::Push(Register src1, Register src2) {
736 fp_sp_delta_ += 2 * kPointerSize;
737 __ Push(src1, src2);
738 }
739
740
741 void LCodeGen::Push(Register src1, Register src2, Register src3) {
742 fp_sp_delta_ += 3 * kPointerSize;
743 __ Push(src1, src2, src3);
744 }
745
746
747 void LCodeGen::Push(Register src1,
748 Register src2,
749 Register src3,
750 Register src4) {
751 fp_sp_delta_ += 4 * kPointerSize;
752 __ Push(src1, src2, src3, src4);
753 }
754
755
756 #ifdef DEBUG
757 void LCodeGen::CheckFpSpDelta(LInstruction* instr) {
758 if (fp_sp_delta_ == -1 || !instr->HasEnvironment())
759 return;
760 int slots_size = GetStackSlotCount() * kPointerSize;
761 int args_size = instr->environment()->arguments_stack_height() * kPointerSize;
762 int fixed_size = 2 * kPointerSize;
763 int expected_delta = slots_size + args_size + fixed_size;
764 ASSERT_EQ(expected_delta, fp_sp_delta_);
765 }
766 #endif // DEBUG
767
768
704 void LCodeGen::CallCode(Handle<Code> code, 769 void LCodeGen::CallCode(Handle<Code> code,
705 RelocInfo::Mode mode, 770 RelocInfo::Mode mode,
771 int num_arguments,
706 LInstruction* instr, 772 LInstruction* instr,
707 TargetAddressStorageMode storage_mode) { 773 TargetAddressStorageMode storage_mode) {
708 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode); 774 CallCodeGeneric(code, mode, num_arguments, instr, RECORD_SIMPLE_SAFEPOINT,
775 storage_mode);
709 } 776 }
710 777
711 778
712 void LCodeGen::CallCodeGeneric(Handle<Code> code, 779 void LCodeGen::CallCodeGeneric(Handle<Code> code,
713 RelocInfo::Mode mode, 780 RelocInfo::Mode mode,
781 int num_arguments,
714 LInstruction* instr, 782 LInstruction* instr,
715 SafepointMode safepoint_mode, 783 SafepointMode safepoint_mode,
716 TargetAddressStorageMode storage_mode) { 784 TargetAddressStorageMode storage_mode) {
717 EnsureSpaceForLazyDeopt(); 785 EnsureSpaceForLazyDeopt();
718 ASSERT(instr != NULL); 786 ASSERT(instr != NULL);
719 // Block literal pool emission to ensure nop indicating no inlined smi code 787 // Block literal pool emission to ensure nop indicating no inlined smi code
720 // is in the correct position. 788 // is in the correct position.
721 Assembler::BlockConstPoolScope block_const_pool(masm()); 789 Assembler::BlockConstPoolScope block_const_pool(masm());
722 LPointerMap* pointers = instr->pointer_map(); 790 LPointerMap* pointers = instr->pointer_map();
723 RecordPosition(pointers->position()); 791 RecordPosition(pointers->position());
724 __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode); 792 __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode);
793 fp_sp_delta_ -= num_arguments * kPointerSize;
725 RecordSafepointWithLazyDeopt(instr, safepoint_mode); 794 RecordSafepointWithLazyDeopt(instr, safepoint_mode);
726 795
727 // Signal that we don't inline smi code before these stubs in the 796 // Signal that we don't inline smi code before these stubs in the
728 // optimizing code generator. 797 // optimizing code generator.
729 if (code->kind() == Code::BINARY_OP_IC || 798 if (code->kind() == Code::BINARY_OP_IC ||
730 code->kind() == Code::COMPARE_IC) { 799 code->kind() == Code::COMPARE_IC) {
731 __ nop(); 800 __ nop();
732 } 801 }
733 } 802 }
734 803
735 804
736 void LCodeGen::CallRuntime(const Runtime::Function* function, 805 void LCodeGen::CallRuntime(const Runtime::Function* function,
737 int num_arguments, 806 int num_arguments,
738 LInstruction* instr) { 807 LInstruction* instr) {
739 ASSERT(instr != NULL); 808 ASSERT(instr != NULL);
740 LPointerMap* pointers = instr->pointer_map(); 809 LPointerMap* pointers = instr->pointer_map();
741 ASSERT(pointers != NULL); 810 ASSERT(pointers != NULL);
742 RecordPosition(pointers->position()); 811 RecordPosition(pointers->position());
743 812
744 __ CallRuntime(function, num_arguments); 813 __ CallRuntime(function, num_arguments);
814 fp_sp_delta_ -= num_arguments * kPointerSize;
745 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); 815 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
746 } 816 }
747 817
748 818
749 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, 819 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
750 int argc, 820 int argc,
751 LInstruction* instr) { 821 LInstruction* instr) {
752 __ CallRuntimeSaveDoubles(id); 822 __ CallRuntimeSaveDoubles(id);
823 fp_sp_delta_ -= argc * kPointerSize;
753 RecordSafepointWithRegisters( 824 RecordSafepointWithRegisters(
754 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); 825 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
755 } 826 }
756 827
757 828
758 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, 829 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
759 Safepoint::DeoptMode mode) { 830 Safepoint::DeoptMode mode) {
760 if (!environment->HasBeenRegistered()) { 831 if (!environment->HasBeenRegistered()) {
761 // Physical stack frame layout: 832 // Physical stack frame layout:
762 // -x ............. -4 0 ..................................... y 833 // -x ............. -4 0 ..................................... y
(...skipping 299 matching lines...)
1062 void LCodeGen::DoParameter(LParameter* instr) { 1133 void LCodeGen::DoParameter(LParameter* instr) {
1063 // Nothing to do. 1134 // Nothing to do.
1064 } 1135 }
1065 1136
1066 1137
1067 void LCodeGen::DoCallStub(LCallStub* instr) { 1138 void LCodeGen::DoCallStub(LCallStub* instr) {
1068 ASSERT(ToRegister(instr->result()).is(r0)); 1139 ASSERT(ToRegister(instr->result()).is(r0));
1069 switch (instr->hydrogen()->major_key()) { 1140 switch (instr->hydrogen()->major_key()) {
1070 case CodeStub::RegExpConstructResult: { 1141 case CodeStub::RegExpConstructResult: {
1071 RegExpConstructResultStub stub; 1142 RegExpConstructResultStub stub;
1072 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 1143 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 3, instr);
1073 break; 1144 break;
1074 } 1145 }
1075 case CodeStub::RegExpExec: { 1146 case CodeStub::RegExpExec: {
1076 RegExpExecStub stub; 1147 RegExpExecStub stub;
1077 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 1148 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 4, instr);
1078 break; 1149 break;
1079 } 1150 }
1080 case CodeStub::SubString: { 1151 case CodeStub::SubString: {
1081 SubStringStub stub; 1152 SubStringStub stub;
1082 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 1153 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 3, instr);
1083 break; 1154 break;
1084 } 1155 }
1085 case CodeStub::NumberToString: { 1156 case CodeStub::NumberToString: {
1086 NumberToStringStub stub; 1157 NumberToStringStub stub;
1087 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 1158 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 1, instr);
1088 break; 1159 break;
1089 } 1160 }
1090 case CodeStub::StringCompare: { 1161 case CodeStub::StringCompare: {
1091 StringCompareStub stub; 1162 StringCompareStub stub;
1092 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 1163 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 2, instr);
1093 break; 1164 break;
1094 } 1165 }
1095 case CodeStub::TranscendentalCache: { 1166 case CodeStub::TranscendentalCache: {
1096 __ ldr(r0, MemOperand(sp, 0)); 1167 __ ldr(r0, MemOperand(sp, 0));
1097 TranscendentalCacheStub stub(instr->transcendental_type(), 1168 TranscendentalCacheStub stub(instr->transcendental_type(),
1098 TranscendentalCacheStub::TAGGED); 1169 TranscendentalCacheStub::TAGGED);
1099 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 1170 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 1, instr);
1100 break; 1171 break;
1101 } 1172 }
1102 default: 1173 default:
1103 UNREACHABLE(); 1174 UNREACHABLE();
1104 } 1175 }
1105 } 1176 }
1106 1177
1107 1178
1108 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { 1179 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
1109 // Record the address of the first unknown OSR value as the place to enter. 1180 // Record the address of the first unknown OSR value as the place to enter.
(...skipping 885 matching lines...)
1995 // MemOperand with ip as the base register is not allowed for strh, so 2066 // MemOperand with ip as the base register is not allowed for strh, so
1996 // we do the address calculation explicitly. 2067 // we do the address calculation explicitly.
1997 __ add(ip, ip, Operand(index, LSL, 1)); 2068 __ add(ip, ip, Operand(index, LSL, 1));
1998 __ strh(value, MemOperand(ip)); 2069 __ strh(value, MemOperand(ip));
1999 } 2070 }
2000 } 2071 }
2001 2072
2002 2073
2003 void LCodeGen::DoThrow(LThrow* instr) { 2074 void LCodeGen::DoThrow(LThrow* instr) {
2004 Register input_reg = EmitLoadRegister(instr->value(), ip); 2075 Register input_reg = EmitLoadRegister(instr->value(), ip);
2005 __ push(input_reg); 2076 Push(input_reg);
2006 CallRuntime(Runtime::kThrow, 1, instr); 2077 CallRuntime(Runtime::kThrow, 1, instr);
2007 2078
2008 if (FLAG_debug_code) { 2079 if (FLAG_debug_code) {
2009 __ stop("Unreachable code."); 2080 __ stop("Unreachable code.");
2010 } 2081 }
2011 } 2082 }
2012 2083
2013 2084
2014 void LCodeGen::DoAddI(LAddI* instr) { 2085 void LCodeGen::DoAddI(LAddI* instr) {
2015 LOperand* left = instr->left(); 2086 LOperand* left = instr->left();
(...skipping 96 matching lines...)
2112 break; 2183 break;
2113 case Token::MUL: 2184 case Token::MUL:
2114 __ vmul(result, left, right); 2185 __ vmul(result, left, right);
2115 break; 2186 break;
2116 case Token::DIV: 2187 case Token::DIV:
2117 __ vdiv(result, left, right); 2188 __ vdiv(result, left, right);
2118 break; 2189 break;
2119 case Token::MOD: { 2190 case Token::MOD: {
2120 // Save r0-r3 on the stack. 2191 // Save r0-r3 on the stack.
2121 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit()); 2192 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
2122 2193 fp_sp_delta_ += 4 * kPointerSize;
2123 __ PrepareCallCFunction(0, 2, scratch0()); 2194 __ PrepareCallCFunction(0, 2, scratch0());
2124 __ SetCallCDoubleArguments(left, right); 2195 __ SetCallCDoubleArguments(left, right);
2125 __ CallCFunction( 2196 __ CallCFunction(
2126 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2197 ExternalReference::double_fp_operation(Token::MOD, isolate()),
2127 0, 2); 2198 0, 2);
2128 // Move the result in the double result register. 2199 // Move the result in the double result register.
2129 __ GetCFunctionDoubleResult(result); 2200 __ GetCFunctionDoubleResult(result);
2130 2201
2131 // Restore r0-r3. 2202 // Restore r0-r3.
2132 __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit()); 2203 __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
2204 fp_sp_delta_ -= 4 * kPointerSize;
2133 break; 2205 break;
2134 } 2206 }
2135 default: 2207 default:
2136 UNREACHABLE(); 2208 UNREACHABLE();
2137 break; 2209 break;
2138 } 2210 }
2139 } 2211 }
2140 2212
2141 2213
2142 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { 2214 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
2143 ASSERT(ToRegister(instr->left()).is(r1)); 2215 ASSERT(ToRegister(instr->left()).is(r1));
2144 ASSERT(ToRegister(instr->right()).is(r0)); 2216 ASSERT(ToRegister(instr->right()).is(r0));
2145 ASSERT(ToRegister(instr->result()).is(r0)); 2217 ASSERT(ToRegister(instr->result()).is(r0));
2146 2218
2147 BinaryOpStub stub(instr->op(), NO_OVERWRITE); 2219 BinaryOpStub stub(instr->op(), NO_OVERWRITE);
2148 // Block literal pool emission to ensure nop indicating no inlined smi code 2220 // Block literal pool emission to ensure nop indicating no inlined smi code
2149 // is in the correct position. 2221 // is in the correct position.
2150 Assembler::BlockConstPoolScope block_const_pool(masm()); 2222 Assembler::BlockConstPoolScope block_const_pool(masm());
2151 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 2223 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 0, instr);
2152 __ nop(); // Signals no inlined code. 2224 __ nop(); // Signals no inlined code.
2153 } 2225 }
2154 2226
2155 2227
2156 int LCodeGen::GetNextEmittedBlock() const { 2228 int LCodeGen::GetNextEmittedBlock() const {
2157 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) { 2229 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) {
2158 if (!chunk_->GetLabel(i)->HasReplacement()) return i; 2230 if (!chunk_->GetLabel(i)->HasReplacement()) return i;
2159 } 2231 }
2160 return -1; 2232 return -1;
2161 } 2233 }
(...skipping 404 matching lines...)
2566 UNREACHABLE(); 2638 UNREACHABLE();
2567 return kNoCondition; 2639 return kNoCondition;
2568 } 2640 }
2569 } 2641 }
2570 2642
2571 2643
2572 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { 2644 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
2573 Token::Value op = instr->op(); 2645 Token::Value op = instr->op();
2574 2646
2575 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 2647 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
2576 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2648 CallCode(ic, RelocInfo::CODE_TARGET, 0, instr);
2577 // This instruction also signals no smi code inlined. 2649 // This instruction also signals no smi code inlined.
2578 __ cmp(r0, Operand::Zero()); 2650 __ cmp(r0, Operand::Zero());
2579 2651
2580 Condition condition = ComputeCompareCondition(op); 2652 Condition condition = ComputeCompareCondition(op);
2581 2653
2582 EmitBranch(instr, condition); 2654 EmitBranch(instr, condition);
2583 } 2655 }
2584 2656
2585 2657
2586 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) { 2658 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
(...skipping 140 matching lines...)
2727 __ cmp(temp, Operand(instr->map())); 2799 __ cmp(temp, Operand(instr->map()));
2728 EmitBranch(instr, eq); 2800 EmitBranch(instr, eq);
2729 } 2801 }
2730 2802
2731 2803
2732 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { 2804 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
2733 ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0. 2805 ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0.
2734 ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1. 2806 ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1.
2735 2807
2736 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 2808 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
2737 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 2809 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 0, instr);
2738 2810
2739 __ cmp(r0, Operand::Zero()); 2811 __ cmp(r0, Operand::Zero());
2740 __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne); 2812 __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
2741 __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq); 2813 __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
2742 } 2814 }
2743 2815
2744 2816
2745 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { 2817 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
2746 class DeferredInstanceOfKnownGlobal: public LDeferredCode { 2818 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
2747 public: 2819 public:
(...skipping 121 matching lines...)
2869 if (masm_->SizeOfCodeGeneratedSince(&before_push_delta) != 8) { 2941 if (masm_->SizeOfCodeGeneratedSince(&before_push_delta) != 8) {
2870 ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta)); 2942 ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta));
2871 __ nop(); 2943 __ nop();
2872 if (masm_->is_thumb_mode()) { 2944 if (masm_->is_thumb_mode()) {
2873 __ nop(); 2945 __ nop();
2874 } 2946 }
2875 } 2947 }
2876 __ StoreToSafepointRegisterSlot(temp, temp); 2948 __ StoreToSafepointRegisterSlot(temp, temp);
2877 CallCodeGeneric(stub.GetCode(isolate()), 2949 CallCodeGeneric(stub.GetCode(isolate()),
2878 RelocInfo::CODE_TARGET, 2950 RelocInfo::CODE_TARGET,
2951 0,
2879 instr, 2952 instr,
2880 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 2953 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
2881 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); 2954 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2882 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 2955 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2883 // Put the result value into the result register slot and 2956 // Put the result value into the result register slot and
2884 // restore all registers. 2957 // restore all registers.
2885 __ StoreToSafepointRegisterSlot(result, result); 2958 __ StoreToSafepointRegisterSlot(result, result);
2886 } 2959 }
2887 2960
2888 2961
2889 void LCodeGen::DoInstanceSize(LInstanceSize* instr) { 2962 void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
2890 Register object = ToRegister(instr->object()); 2963 Register object = ToRegister(instr->object());
2891 Register result = ToRegister(instr->result()); 2964 Register result = ToRegister(instr->result());
2892 __ ldr(result, FieldMemOperand(object, HeapObject::kMapOffset)); 2965 __ ldr(result, FieldMemOperand(object, HeapObject::kMapOffset));
2893 __ ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset)); 2966 __ ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset));
2894 } 2967 }
2895 2968
2896 2969
2897 void LCodeGen::DoCmpT(LCmpT* instr) { 2970 void LCodeGen::DoCmpT(LCmpT* instr) {
2898 Token::Value op = instr->op(); 2971 Token::Value op = instr->op();
2899 2972
2900 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 2973 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
2901 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2974 CallCode(ic, RelocInfo::CODE_TARGET, 0, instr);
2902 // This instruction also signals no smi code inlined. 2975 // This instruction also signals no smi code inlined.
2903 __ cmp(r0, Operand::Zero()); 2976 __ cmp(r0, Operand::Zero());
2904 2977
2905 Condition condition = ComputeCompareCondition(op); 2978 Condition condition = ComputeCompareCondition(op);
2906 __ LoadRoot(ToRegister(instr->result()), 2979 __ LoadRoot(ToRegister(instr->result()),
2907 Heap::kTrueValueRootIndex, 2980 Heap::kTrueValueRootIndex,
2908 condition); 2981 condition);
2909 __ LoadRoot(ToRegister(instr->result()), 2982 __ LoadRoot(ToRegister(instr->result()),
2910 Heap::kFalseValueRootIndex, 2983 Heap::kFalseValueRootIndex,
2911 NegateCondition(condition)); 2984 NegateCondition(condition));
(...skipping 59 matching lines...)
2971 3044
2972 3045
2973 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { 3046 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2974 ASSERT(ToRegister(instr->global_object()).is(r0)); 3047 ASSERT(ToRegister(instr->global_object()).is(r0));
2975 ASSERT(ToRegister(instr->result()).is(r0)); 3048 ASSERT(ToRegister(instr->result()).is(r0));
2976 3049
2977 __ mov(r2, Operand(instr->name())); 3050 __ mov(r2, Operand(instr->name()));
2978 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET 3051 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
2979 : RelocInfo::CODE_TARGET_CONTEXT; 3052 : RelocInfo::CODE_TARGET_CONTEXT;
2980 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 3053 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2981 CallCode(ic, mode, instr); 3054 CallCode(ic, mode, 0, instr);
2982 } 3055 }
2983 3056
2984 3057
2985 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { 3058 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
2986 Register value = ToRegister(instr->value()); 3059 Register value = ToRegister(instr->value());
2987 Register cell = scratch0(); 3060 Register cell = scratch0();
2988 3061
2989 // Load the cell. 3062 // Load the cell.
2990 __ mov(cell, Operand(instr->hydrogen()->cell())); 3063 __ mov(cell, Operand(instr->hydrogen()->cell()));
2991 3064
(...skipping 16 matching lines...)
3008 3081
3009 3082
3010 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { 3083 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
3011 ASSERT(ToRegister(instr->global_object()).is(r1)); 3084 ASSERT(ToRegister(instr->global_object()).is(r1));
3012 ASSERT(ToRegister(instr->value()).is(r0)); 3085 ASSERT(ToRegister(instr->value()).is(r0));
3013 3086
3014 __ mov(r2, Operand(instr->name())); 3087 __ mov(r2, Operand(instr->name()));
3015 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 3088 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
3016 ? isolate()->builtins()->StoreIC_Initialize_Strict() 3089 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3017 : isolate()->builtins()->StoreIC_Initialize(); 3090 : isolate()->builtins()->StoreIC_Initialize();
3018 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); 3091 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, 0, instr);
3019 } 3092 }
3020 3093
3021 3094
3022 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { 3095 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
3023 Register context = ToRegister(instr->context()); 3096 Register context = ToRegister(instr->context());
3024 Register result = ToRegister(instr->result()); 3097 Register result = ToRegister(instr->result());
3025 __ ldr(result, ContextOperand(context, instr->slot_index())); 3098 __ ldr(result, ContextOperand(context, instr->slot_index()));
3026 if (instr->hydrogen()->RequiresHoleCheck()) { 3099 if (instr->hydrogen()->RequiresHoleCheck()) {
3027 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 3100 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3028 __ cmp(result, ip); 3101 __ cmp(result, ip);
(...skipping 71 matching lines...)
3100 } 3173 }
3101 3174
3102 3175
3103 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { 3176 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
3104 ASSERT(ToRegister(instr->object()).is(r0)); 3177 ASSERT(ToRegister(instr->object()).is(r0));
3105 ASSERT(ToRegister(instr->result()).is(r0)); 3178 ASSERT(ToRegister(instr->result()).is(r0));
3106 3179
3107 // Name is always in r2. 3180 // Name is always in r2.
3108 __ mov(r2, Operand(instr->name())); 3181 __ mov(r2, Operand(instr->name()));
3109 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 3182 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3110 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 3183 CallCode(ic, RelocInfo::CODE_TARGET, 0, instr, NEVER_INLINE_TARGET_ADDRESS);
3111 } 3184 }
3112 3185
3113 3186
3114 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { 3187 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
3115 Register scratch = scratch0(); 3188 Register scratch = scratch0();
3116 Register function = ToRegister(instr->function()); 3189 Register function = ToRegister(instr->function());
3117 Register result = ToRegister(instr->result()); 3190 Register result = ToRegister(instr->result());
3118 3191
3119 // Check that the function really is a function. Load map into the 3192 // Check that the function really is a function. Load map into the
3120 // result register. 3193 // result register.
(...skipping 264 matching lines...)
3385 return MemOperand(base, scratch0(), LSR, 1); 3458 return MemOperand(base, scratch0(), LSR, 1);
3386 } 3459 }
3387 } 3460 }
3388 3461
3389 3462
3390 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { 3463 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
3391 ASSERT(ToRegister(instr->object()).is(r1)); 3464 ASSERT(ToRegister(instr->object()).is(r1));
3392 ASSERT(ToRegister(instr->key()).is(r0)); 3465 ASSERT(ToRegister(instr->key()).is(r0));
3393 3466
3394 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 3467 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
3395 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 3468 CallCode(ic, RelocInfo::CODE_TARGET, 0, instr, NEVER_INLINE_TARGET_ADDRESS);
3396 } 3469 }
3397 3470
3398 3471
3399 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 3472 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
3400 Register scratch = scratch0(); 3473 Register scratch = scratch0();
3401 Register result = ToRegister(instr->result()); 3474 Register result = ToRegister(instr->result());
3402 3475
3403 if (instr->hydrogen()->from_inlined()) { 3476 if (instr->hydrogen()->from_inlined()) {
3404 __ sub(result, sp, Operand(2 * kPointerSize)); 3477 __ sub(result, sp, Operand(2 * kPointerSize));
3405 } else { 3478 } else {
3406 // Check if the calling frame is an arguments adaptor frame. 3479 // Check if the calling frame is an arguments adaptor frame.
3407 Label done, adapted; 3480 Label done, adapted;
3408 __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3481 int fp_offset = StandardFrameConstants::kCallerFPOffset + fp_sp_delta_;
3482 __ ldr(scratch, MemOperand(sp, fp_offset));
3409 __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset)); 3483 __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
3410 __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3484 __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3411 3485
3412 // Result is the frame pointer for the frame if not adapted and for the real 3486 // Result is the frame pointer for the frame if not adapted and for the real
3413 // frame below the adaptor frame if adapted. 3487 // frame below the adaptor frame if adapted.
3414 __ mov(result, fp, LeaveCC, ne); 3488 __ mov(result, fp, LeaveCC, ne);
3415 __ mov(result, scratch, LeaveCC, eq); 3489 __ mov(result, scratch, LeaveCC, eq);
3416 } 3490 }
3417 } 3491 }
3418 3492
3419 3493
3420 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { 3494 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
3421 Register elem = ToRegister(instr->elements()); 3495 Register elem = ToRegister(instr->elements());
3422 Register result = ToRegister(instr->result()); 3496 Register result = ToRegister(instr->result());
3423 3497
3424 Label done; 3498 Label done;
3425 3499
3426 // If no arguments adaptor frame the number of arguments is fixed. 3500 // If no arguments adaptor frame the number of arguments is fixed.
3427 __ cmp(fp, elem); 3501 __ cmp(fp, elem);
3428 __ mov(result, Operand(scope()->num_parameters()), LeaveCC); 3502 __ mov(result, Operand(scope()->num_parameters()), LeaveCC);
3429 __ b(eq, &done); 3503 __ b(eq, &done);
3430 3504
3431 // Arguments adaptor frame present. Get argument length from there. 3505 // Arguments adaptor frame present. Get argument length from there.
3432 __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3506 int fp_offset = StandardFrameConstants::kCallerFPOffset + fp_sp_delta_;
3507 __ ldr(result, MemOperand(sp, fp_offset));
3433 __ ldr(result, 3508 __ ldr(result,
3434 MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset)); 3509 MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
3435 __ SmiUntag(result); 3510 __ SmiUntag(result);
3436 3511
3437 // Argument length is in result register. 3512 // Argument length is in result register.
3438 __ bind(&done); 3513 __ bind(&done);
3439 } 3514 }
3440 3515
3441 3516
3442 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { 3517 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
(...skipping 82 matching lines...)
3525 ASSERT(instr->HasPointerMap()); 3600 ASSERT(instr->HasPointerMap());
3526 LPointerMap* pointers = instr->pointer_map(); 3601 LPointerMap* pointers = instr->pointer_map();
3527 RecordPosition(pointers->position()); 3602 RecordPosition(pointers->position());
3528 SafepointGenerator safepoint_generator( 3603 SafepointGenerator safepoint_generator(
3529 this, pointers, Safepoint::kLazyDeopt); 3604 this, pointers, Safepoint::kLazyDeopt);
3530 // The number of arguments is stored in receiver which is r0, as expected 3605 // The number of arguments is stored in receiver which is r0, as expected
3531 // by InvokeFunction. 3606 // by InvokeFunction.
3532 ParameterCount actual(receiver); 3607 ParameterCount actual(receiver);
3533 __ InvokeFunction(function, actual, CALL_FUNCTION, 3608 __ InvokeFunction(function, actual, CALL_FUNCTION,
3534 safepoint_generator, CALL_AS_METHOD); 3609 safepoint_generator, CALL_AS_METHOD);
3535 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3610 int cp_offset = StandardFrameConstants::kContextOffset + fp_sp_delta_;
3611 __ ldr(cp, MemOperand(sp, cp_offset));
3536 } 3612 }
3537 3613
3538 3614
3539 void LCodeGen::DoPushArgument(LPushArgument* instr) { 3615 void LCodeGen::DoPushArgument(LPushArgument* instr) {
3540 LOperand* argument = instr->value(); 3616 LOperand* argument = instr->value();
3541 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { 3617 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
3542 Abort(kDoPushArgumentNotImplementedForDoubleType); 3618 Abort(kDoPushArgumentNotImplementedForDoubleType);
3543 } else { 3619 } else {
3544 Register argument_reg = EmitLoadRegister(argument, ip); 3620 Register argument_reg = EmitLoadRegister(argument, ip);
3545 __ push(argument_reg); 3621 Push(argument_reg);
3546 } 3622 }
3547 } 3623 }
3548 3624
3549 3625
3550 void LCodeGen::DoDrop(LDrop* instr) { 3626 void LCodeGen::DoDrop(LDrop* instr) {
3551 __ Drop(instr->count()); 3627 __ Drop(instr->count());
3628 fp_sp_delta_ -= instr->count() * kPointerSize;
3552 } 3629 }
3553 3630
3554 3631
3555 void LCodeGen::DoThisFunction(LThisFunction* instr) { 3632 void LCodeGen::DoThisFunction(LThisFunction* instr) {
3556 Register result = ToRegister(instr->result()); 3633 Register result = ToRegister(instr->result());
3557 __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 3634 int offset = JavaScriptFrameConstants::kFunctionOffset + fp_sp_delta_;
3635 __ ldr(result, MemOperand(sp, offset));
3558 } 3636 }
3559 3637
3560 3638
3561 void LCodeGen::DoContext(LContext* instr) { 3639 void LCodeGen::DoContext(LContext* instr) {
3562 // If there is a non-return use, the context must be moved to a register. 3640 // If there is a non-return use, the context must be moved to a register.
3563 Register result = ToRegister(instr->result()); 3641 Register result = ToRegister(instr->result());
3564 for (HUseIterator it(instr->hydrogen()->uses()); !it.Done(); it.Advance()) { 3642 for (HUseIterator it(instr->hydrogen()->uses()); !it.Done(); it.Advance()) {
3565 if (!it.value()->IsReturn()) { 3643 if (!it.value()->IsReturn()) {
3566 __ mov(result, cp); 3644 __ mov(result, cp);
3567 return; 3645 return;
3568 } 3646 }
3569 } 3647 }
3570 } 3648 }
3571 3649
3572 3650
3573 void LCodeGen::DoOuterContext(LOuterContext* instr) { 3651 void LCodeGen::DoOuterContext(LOuterContext* instr) {
3574 Register context = ToRegister(instr->context()); 3652 Register context = ToRegister(instr->context());
3575 Register result = ToRegister(instr->result()); 3653 Register result = ToRegister(instr->result());
3576 __ ldr(result, 3654 __ ldr(result,
3577 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); 3655 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
3578 } 3656 }
3579 3657
3580 3658
3581 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { 3659 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
3582 __ push(cp); // The context is the first argument. 3660 Push(cp); // The context is the first argument.
3583 __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs()); 3661 __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs());
3584 __ push(scratch0()); 3662 Push(scratch0());
3585 __ mov(scratch0(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); 3663 __ mov(scratch0(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
3586 __ push(scratch0()); 3664 Push(scratch0());
3587 CallRuntime(Runtime::kDeclareGlobals, 3, instr); 3665 CallRuntime(Runtime::kDeclareGlobals, 3, instr);
3588 } 3666 }
3589 3667
3590 3668
3591 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { 3669 void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
3592 Register result = ToRegister(instr->result()); 3670 Register result = ToRegister(instr->result());
3593 __ ldr(result, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3671 __ ldr(result, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3594 } 3672 }
3595 3673
3596 3674
(...skipping 39 matching lines...)
3636 3714
3637 // Set up deoptimization. 3715 // Set up deoptimization.
3638 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); 3716 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
3639 } else { 3717 } else {
3640 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 3718 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3641 ParameterCount count(arity); 3719 ParameterCount count(arity);
3642 ParameterCount expected(formal_parameter_count); 3720 ParameterCount expected(formal_parameter_count);
3643 __ InvokeFunction( 3721 __ InvokeFunction(
3644 function, expected, count, CALL_FUNCTION, generator, call_kind); 3722 function, expected, count, CALL_FUNCTION, generator, call_kind);
3645 } 3723 }
3724 fp_sp_delta_ -= (arity + 1) * kPointerSize;
3646 3725
3647 // Restore context. 3726 // Restore context.
3648 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3727 int cp_offset = StandardFrameConstants::kContextOffset + fp_sp_delta_;
3728 __ ldr(cp, MemOperand(sp, cp_offset));
3649 } 3729 }
3650 3730
3651 3731
3652 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 3732 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
3653 ASSERT(ToRegister(instr->result()).is(r0)); 3733 ASSERT(ToRegister(instr->result()).is(r0));
3654 CallKnownFunction(instr->hydrogen()->function(), 3734 CallKnownFunction(instr->hydrogen()->function(),
3655 instr->hydrogen()->formal_parameter_count(), 3735 instr->hydrogen()->formal_parameter_count(),
3656 instr->arity(), 3736 instr->arity(),
3657 instr, 3737 instr,
3658 CALL_AS_METHOD, 3738 CALL_AS_METHOD,
(...skipping 327 matching lines...)
3986 MathExpGenerator::EmitMathExp( 4066 MathExpGenerator::EmitMathExp(
3987 masm(), input, result, double_scratch1, double_scratch2, 4067 masm(), input, result, double_scratch1, double_scratch2,
3988 temp1, temp2, scratch0()); 4068 temp1, temp2, scratch0());
3989 } 4069 }
3990 4070
3991 4071
3992 void LCodeGen::DoMathLog(LMathLog* instr) { 4072 void LCodeGen::DoMathLog(LMathLog* instr) {
3993 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 4073 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3994 TranscendentalCacheStub stub(TranscendentalCache::LOG, 4074 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3995 TranscendentalCacheStub::UNTAGGED); 4075 TranscendentalCacheStub::UNTAGGED);
3996 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4076 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 0, instr);
3997 } 4077 }
3998 4078
3999 4079
4000 void LCodeGen::DoMathTan(LMathTan* instr) { 4080 void LCodeGen::DoMathTan(LMathTan* instr) {
4001 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 4081 ASSERT(ToDoubleRegister(instr->result()).is(d2));
4002 TranscendentalCacheStub stub(TranscendentalCache::TAN, 4082 TranscendentalCacheStub stub(TranscendentalCache::TAN,
4003 TranscendentalCacheStub::UNTAGGED); 4083 TranscendentalCacheStub::UNTAGGED);
4004 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4084 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 0, instr);
4005 } 4085 }
4006 4086
4007 4087
4008 void LCodeGen::DoMathCos(LMathCos* instr) { 4088 void LCodeGen::DoMathCos(LMathCos* instr) {
4009 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 4089 ASSERT(ToDoubleRegister(instr->result()).is(d2));
4010 TranscendentalCacheStub stub(TranscendentalCache::COS, 4090 TranscendentalCacheStub stub(TranscendentalCache::COS,
4011 TranscendentalCacheStub::UNTAGGED); 4091 TranscendentalCacheStub::UNTAGGED);
4012 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4092 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 0, instr);
4013 } 4093 }
4014 4094
4015 4095
4016 void LCodeGen::DoMathSin(LMathSin* instr) { 4096 void LCodeGen::DoMathSin(LMathSin* instr) {
4017 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 4097 ASSERT(ToDoubleRegister(instr->result()).is(d2));
4018 TranscendentalCacheStub stub(TranscendentalCache::SIN, 4098 TranscendentalCacheStub stub(TranscendentalCache::SIN,
4019 TranscendentalCacheStub::UNTAGGED); 4099 TranscendentalCacheStub::UNTAGGED);
4020 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4100 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 0, instr);
4021 } 4101 }
4022 4102
4023 4103
4024 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { 4104 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
4025 ASSERT(ToRegister(instr->function()).is(r1)); 4105 ASSERT(ToRegister(instr->function()).is(r1));
4026 ASSERT(instr->HasPointerMap()); 4106 ASSERT(instr->HasPointerMap());
4027 4107
4028 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); 4108 Handle<JSFunction> known_function = instr->hydrogen()->known_function();
4029 if (known_function.is_null()) { 4109 if (known_function.is_null()) {
4030 LPointerMap* pointers = instr->pointer_map(); 4110 LPointerMap* pointers = instr->pointer_map();
4031 RecordPosition(pointers->position()); 4111 RecordPosition(pointers->position());
4032 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 4112 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
4033 ParameterCount count(instr->arity()); 4113 ParameterCount count(instr->arity());
4034 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); 4114 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
4035 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4115 fp_sp_delta_ -= (instr->arity() + 1) * kPointerSize;
4116 int cp_offset = StandardFrameConstants::kContextOffset + fp_sp_delta_;
4117 __ ldr(cp, MemOperand(sp, cp_offset));
4036 } else { 4118 } else {
4037 CallKnownFunction(known_function, 4119 CallKnownFunction(known_function,
4038 instr->hydrogen()->formal_parameter_count(), 4120 instr->hydrogen()->formal_parameter_count(),
4039 instr->arity(), 4121 instr->arity(),
4040 instr, 4122 instr,
4041 CALL_AS_METHOD, 4123 CALL_AS_METHOD,
4042 R1_CONTAINS_TARGET); 4124 R1_CONTAINS_TARGET);
4043 } 4125 }
4044 } 4126 }
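
Note: the pattern introduced above replaces the old fp-relative context reload with an sp-relative one. A minimal sketch of the arithmetic, assuming fp_sp_delta_ always holds the current distance fp - sp in bytes (all names below are the ones already used in this patch; nothing new is introduced):

    // After a call that consumes its arguments, sp has moved up by
    // (arity + 1) slots (arguments plus receiver), so the fp-to-sp
    // distance shrinks by the same amount.
    fp_sp_delta_ -= (instr->arity() + 1) * kPointerSize;
    // The context slot still sits at a fixed offset from fp, and
    //   sp + (kContextOffset + fp_sp_delta_)
    //     == sp + kContextOffset + (fp - sp)
    //     == fp + kContextOffset,
    // so the sp-relative load below reads exactly the slot the old
    // fp-relative load used.
    int cp_offset = StandardFrameConstants::kContextOffset + fp_sp_delta_;
    __ ldr(cp, MemOperand(sp, cp_offset));

In the call sites that go through CallCode (DoCallKeyed, DoCallNamed, DoCallFunction, DoCallGlobal), the extra argc argument passed to CallCode presumably performs the same fp_sp_delta_ adjustment internally; only DoInvokeFunction adjusts it by hand because it invokes through the macro assembler directly.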
4045 4127
4046 4128
4047 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 4129 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
4048 ASSERT(ToRegister(instr->result()).is(r0)); 4130 ASSERT(ToRegister(instr->result()).is(r0));
4049 4131
4050 int arity = instr->arity(); 4132 int arity = instr->arity();
4051 Handle<Code> ic = 4133 Handle<Code> ic =
4052 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity); 4134 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
4053 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 4135 CallCode(ic, RelocInfo::CODE_TARGET, arity + 1, instr,
4054 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4136 NEVER_INLINE_TARGET_ADDRESS);
4137 int cp_offset = StandardFrameConstants::kContextOffset + fp_sp_delta_;
4138 __ ldr(cp, MemOperand(sp, cp_offset));
4055 } 4139 }
4056 4140
4057 4141
4058 void LCodeGen::DoCallNamed(LCallNamed* instr) { 4142 void LCodeGen::DoCallNamed(LCallNamed* instr) {
4059 ASSERT(ToRegister(instr->result()).is(r0)); 4143 ASSERT(ToRegister(instr->result()).is(r0));
4060 4144
4061 int arity = instr->arity(); 4145 int arity = instr->arity();
4062 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; 4146 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
4063 Handle<Code> ic = 4147 Handle<Code> ic =
4064 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); 4148 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
4065 __ mov(r2, Operand(instr->name())); 4149 __ mov(r2, Operand(instr->name()));
4066 CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS); 4150 CallCode(ic, mode, arity + 1, instr, NEVER_INLINE_TARGET_ADDRESS);
4067 // Restore context register. 4151 // Restore context register.
4068 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4152 int cp_offset = StandardFrameConstants::kContextOffset + fp_sp_delta_;
4153 __ ldr(cp, MemOperand(sp, cp_offset));
4069 } 4154 }
4070 4155
4071 4156
4072 void LCodeGen::DoCallFunction(LCallFunction* instr) { 4157 void LCodeGen::DoCallFunction(LCallFunction* instr) {
4073 ASSERT(ToRegister(instr->function()).is(r1)); 4158 ASSERT(ToRegister(instr->function()).is(r1));
4074 ASSERT(ToRegister(instr->result()).is(r0)); 4159 ASSERT(ToRegister(instr->result()).is(r0));
4075 4160
4076 int arity = instr->arity(); 4161 int arity = instr->arity();
4077 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); 4162 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
4078 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4163 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, arity + 1, instr);
4079 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4164 int cp_offset = StandardFrameConstants::kContextOffset + fp_sp_delta_;
4165 __ ldr(cp, MemOperand(sp, cp_offset));
4080 } 4166 }
4081 4167
4082 4168
4083 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 4169 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
4084 ASSERT(ToRegister(instr->result()).is(r0)); 4170 ASSERT(ToRegister(instr->result()).is(r0));
4085 4171
4086 int arity = instr->arity(); 4172 int arity = instr->arity();
4087 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; 4173 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
4088 Handle<Code> ic = 4174 Handle<Code> ic =
4089 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); 4175 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
4090 __ mov(r2, Operand(instr->name())); 4176 __ mov(r2, Operand(instr->name()));
4091 CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS); 4177 CallCode(ic, mode, arity + 1, instr, NEVER_INLINE_TARGET_ADDRESS);
4092 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4178 int cp_offset = StandardFrameConstants::kContextOffset + fp_sp_delta_;
4179 __ ldr(cp, MemOperand(sp, cp_offset));
4093 } 4180 }
4094 4181
4095 4182
4096 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { 4183 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
4097 ASSERT(ToRegister(instr->result()).is(r0)); 4184 ASSERT(ToRegister(instr->result()).is(r0));
4098 CallKnownFunction(instr->hydrogen()->target(), 4185 CallKnownFunction(instr->hydrogen()->target(),
4099 instr->hydrogen()->formal_parameter_count(), 4186 instr->hydrogen()->formal_parameter_count(),
4100 instr->arity(), 4187 instr->arity(),
4101 instr, 4188 instr,
4102 CALL_AS_FUNCTION, 4189 CALL_AS_FUNCTION,
4103 R1_UNINITIALIZED); 4190 R1_UNINITIALIZED);
4104 } 4191 }
4105 4192
4106 4193
4107 void LCodeGen::DoCallNew(LCallNew* instr) { 4194 void LCodeGen::DoCallNew(LCallNew* instr) {
4108 ASSERT(ToRegister(instr->constructor()).is(r1)); 4195 ASSERT(ToRegister(instr->constructor()).is(r1));
4109 ASSERT(ToRegister(instr->result()).is(r0)); 4196 ASSERT(ToRegister(instr->result()).is(r0));
4110 4197
4111 __ mov(r0, Operand(instr->arity())); 4198 __ mov(r0, Operand(instr->arity()));
4112 // No cell in r2 for construct type feedback in optimized code 4199 // No cell in r2 for construct type feedback in optimized code
4113 Handle<Object> undefined_value(isolate()->factory()->undefined_value()); 4200 Handle<Object> undefined_value(isolate()->factory()->undefined_value());
4114 __ mov(r2, Operand(undefined_value)); 4201 __ mov(r2, Operand(undefined_value));
4115 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); 4202 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
4116 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 4203 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL,
4204 instr->arity() + 1, instr);
4117 } 4205 }
4118 4206
4119 4207
4120 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { 4208 void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
4121 ASSERT(ToRegister(instr->constructor()).is(r1)); 4209 ASSERT(ToRegister(instr->constructor()).is(r1));
4122 ASSERT(ToRegister(instr->result()).is(r0)); 4210 ASSERT(ToRegister(instr->result()).is(r0));
4123 4211
4124 __ mov(r0, Operand(instr->arity())); 4212 __ mov(r0, Operand(instr->arity()));
4125 __ mov(r2, Operand(instr->hydrogen()->property_cell())); 4213 __ mov(r2, Operand(instr->hydrogen()->property_cell()));
4126 ElementsKind kind = instr->hydrogen()->elements_kind(); 4214 ElementsKind kind = instr->hydrogen()->elements_kind();
4127 AllocationSiteOverrideMode override_mode = 4215 AllocationSiteOverrideMode override_mode =
4128 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) 4216 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE)
4129 ? DISABLE_ALLOCATION_SITES 4217 ? DISABLE_ALLOCATION_SITES
4130 : DONT_OVERRIDE; 4218 : DONT_OVERRIDE;
4131 ContextCheckMode context_mode = CONTEXT_CHECK_NOT_REQUIRED; 4219 ContextCheckMode context_mode = CONTEXT_CHECK_NOT_REQUIRED;
4132 4220
4133 if (instr->arity() == 0) { 4221 if (instr->arity() == 0) {
4134 ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode); 4222 ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode);
4135 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 4223 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL,
4224 instr->arity() + 1, instr);
4136 } else if (instr->arity() == 1) { 4225 } else if (instr->arity() == 1) {
4137 Label done; 4226 Label done;
4138 if (IsFastPackedElementsKind(kind)) { 4227 if (IsFastPackedElementsKind(kind)) {
4139 Label packed_case; 4228 Label packed_case;
4140 // We might need a change here 4229 // We might need a change here
4141 // look at the first argument 4230 // look at the first argument
4142 __ ldr(r5, MemOperand(sp, 0)); 4231 __ ldr(r5, MemOperand(sp, 0));
4143 __ cmp(r5, Operand::Zero()); 4232 __ cmp(r5, Operand::Zero());
4144 __ b(eq, &packed_case); 4233 __ b(eq, &packed_case);
4145 4234
4146 ElementsKind holey_kind = GetHoleyElementsKind(kind); 4235 ElementsKind holey_kind = GetHoleyElementsKind(kind);
4147 ArraySingleArgumentConstructorStub stub(holey_kind, context_mode, 4236 ArraySingleArgumentConstructorStub stub(holey_kind, context_mode,
4148 override_mode); 4237 override_mode);
4149 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 4238 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL,
4239 instr->arity() + 1, instr);
4150 __ jmp(&done); 4240 __ jmp(&done);
4151 __ bind(&packed_case); 4241 __ bind(&packed_case);
4242 fp_sp_delta_ += (instr->arity() + 1) * kPointerSize;
4152 } 4243 }
4153 4244
4154 ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode); 4245 ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode);
4155 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 4246 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL,
4247 instr->arity() + 1, instr);
4156 __ bind(&done); 4248 __ bind(&done);
4157 } else { 4249 } else {
4158 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); 4250 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode);
4159 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 4251 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL,
4252 instr->arity() + 1, instr);
4160 } 4253 }
4161 } 4254 }
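
Note on the branch bookkeeping in DoCallNewArray above: fp_sp_delta_ is a compile-time counter, so it advances with the order in which code is emitted, not the order in which it executes. Reading the diff, the explicit increment at the packed_case label appears to undo the decrement made while emitting the holey-case call, so both paths reach &done with the same delta:

    // Emission order (left) vs. the counter (right), as read from the diff:
    //   CallCode(holey stub, arity + 1, ...)         // delta -= (arity + 1) * kPointerSize
    //   jmp(&done)
    //   bind(&packed_case)
    //   fp_sp_delta_ += (instr->arity() + 1) * kPointerSize;  // undo: this path has
    //                                                         // not popped anything yet
    //   CallCode(packed stub, arity + 1, ...)        // delta -= (arity + 1) * kPointerSize
    //   bind(&done)                                  // both paths agree on the delta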
4162 4255
4163 4256
4164 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { 4257 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
4165 CallRuntime(instr->function(), instr->arity(), instr); 4258 CallRuntime(instr->function(), instr->arity(), instr);
4166 } 4259 }
4167 4260
4168 4261
4169 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) { 4262 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
(...skipping 91 matching lines...)
4261 4354
4262 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 4355 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
4263 ASSERT(ToRegister(instr->object()).is(r1)); 4356 ASSERT(ToRegister(instr->object()).is(r1));
4264 ASSERT(ToRegister(instr->value()).is(r0)); 4357 ASSERT(ToRegister(instr->value()).is(r0));
4265 4358
4266 // Name is always in r2. 4359 // Name is always in r2.
4267 __ mov(r2, Operand(instr->name())); 4360 __ mov(r2, Operand(instr->name()));
4268 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 4361 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
4269 ? isolate()->builtins()->StoreIC_Initialize_Strict() 4362 ? isolate()->builtins()->StoreIC_Initialize_Strict()
4270 : isolate()->builtins()->StoreIC_Initialize(); 4363 : isolate()->builtins()->StoreIC_Initialize();
4271 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 4364 CallCode(ic, RelocInfo::CODE_TARGET, 0, instr, NEVER_INLINE_TARGET_ADDRESS);
4272 } 4365 }
4273 4366
4274 4367
4275 void LCodeGen::ApplyCheckIf(Condition condition, LBoundsCheck* check) { 4368 void LCodeGen::ApplyCheckIf(Condition condition, LBoundsCheck* check) {
4276 if (FLAG_debug_code && check->hydrogen()->skip_check()) { 4369 if (FLAG_debug_code && check->hydrogen()->skip_check()) {
4277 Label done; 4370 Label done;
4278 __ b(NegateCondition(condition), &done); 4371 __ b(NegateCondition(condition), &done);
4279 __ stop("eliminated bounds check failed"); 4372 __ stop("eliminated bounds check failed");
4280 __ bind(&done); 4373 __ bind(&done);
4281 } else { 4374 } else {
(...skipping 195 matching lines...)
4477 4570
4478 4571
4479 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 4572 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
4480 ASSERT(ToRegister(instr->object()).is(r2)); 4573 ASSERT(ToRegister(instr->object()).is(r2));
4481 ASSERT(ToRegister(instr->key()).is(r1)); 4574 ASSERT(ToRegister(instr->key()).is(r1));
4482 ASSERT(ToRegister(instr->value()).is(r0)); 4575 ASSERT(ToRegister(instr->value()).is(r0));
4483 4576
4484 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 4577 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
4485 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() 4578 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
4486 : isolate()->builtins()->KeyedStoreIC_Initialize(); 4579 : isolate()->builtins()->KeyedStoreIC_Initialize();
4487 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 4580 CallCode(ic, RelocInfo::CODE_TARGET, 0, instr, NEVER_INLINE_TARGET_ADDRESS);
4488 } 4581 }
4489 4582
4490 4583
4491 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { 4584 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
4492 Register object_reg = ToRegister(instr->object()); 4585 Register object_reg = ToRegister(instr->object());
4493 Register scratch = scratch0(); 4586 Register scratch = scratch0();
4494 4587
4495 Handle<Map> from_map = instr->original_map(); 4588 Handle<Map> from_map = instr->original_map();
4496 Handle<Map> to_map = instr->transitioned_map(); 4589 Handle<Map> to_map = instr->transitioned_map();
4497 ElementsKind from_kind = instr->from_kind(); 4590 ElementsKind from_kind = instr->from_kind();
(...skipping 27 matching lines...)
4525 4618
4526 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { 4619 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4527 Register object = ToRegister(instr->object()); 4620 Register object = ToRegister(instr->object());
4528 Register temp = ToRegister(instr->temp()); 4621 Register temp = ToRegister(instr->temp());
4529 __ TestJSArrayForAllocationMemento(object, temp); 4622 __ TestJSArrayForAllocationMemento(object, temp);
4530 DeoptimizeIf(eq, instr->environment()); 4623 DeoptimizeIf(eq, instr->environment());
4531 } 4624 }
4532 4625
4533 4626
4534 void LCodeGen::DoStringAdd(LStringAdd* instr) { 4627 void LCodeGen::DoStringAdd(LStringAdd* instr) {
4535 __ push(ToRegister(instr->left())); 4628 Push(ToRegister(instr->left()));
4536 __ push(ToRegister(instr->right())); 4629 Push(ToRegister(instr->right()));
4537 StringAddStub stub(instr->hydrogen()->flags()); 4630 StringAddStub stub(instr->hydrogen()->flags());
4538 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4631 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 2, instr);
4539 } 4632 }
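
Note: the patch also routes pushes through a Push(...) wrapper instead of calling __ push / __ Push on the macro assembler directly. The wrapper is declared in src/arm/lithium-codegen-arm.h, which is not shown in this file, so the following is only a hypothetical sketch of what it would have to do to keep the sp-relative offsets consistent; the body is an assumption, not the patch's code:

    // Hypothetical sketch: keep fp_sp_delta_ in sync with every push,
    // since pushing moves sp down and therefore grows fp - sp.
    void LCodeGen::Push(Register reg) {
      __ push(reg);
      fp_sp_delta_ += kPointerSize;
    }

Raw pops still need manual compensation, as in DoRegExpLiteral further down, where __ pop(r1) is followed by an explicit fp_sp_delta_ -= kPointerSize.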
4540 4633
4541 4634
4542 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { 4635 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
4543 class DeferredStringCharCodeAt: public LDeferredCode { 4636 class DeferredStringCharCodeAt: public LDeferredCode {
4544 public: 4637 public:
4545 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) 4638 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
4546 : LDeferredCode(codegen), instr_(instr) { } 4639 : LDeferredCode(codegen), instr_(instr) { }
4547 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } 4640 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
4548 virtual LInstruction* instr() { return instr_; } 4641 virtual LInstruction* instr() { return instr_; }
(...skipping 17 matching lines...)
4566 Register string = ToRegister(instr->string()); 4659 Register string = ToRegister(instr->string());
4567 Register result = ToRegister(instr->result()); 4660 Register result = ToRegister(instr->result());
4568 Register scratch = scratch0(); 4661 Register scratch = scratch0();
4569 4662
4570 // TODO(3095996): Get rid of this. For now, we need to make the 4663 // TODO(3095996): Get rid of this. For now, we need to make the
4571 // result register contain a valid pointer because it is already 4664 // result register contain a valid pointer because it is already
4572 // contained in the register pointer map. 4665 // contained in the register pointer map.
4573 __ mov(result, Operand::Zero()); 4666 __ mov(result, Operand::Zero());
4574 4667
4575 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4668 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4576 __ push(string); 4669 Push(string);
4577 // Push the index as a smi. This is safe because of the checks in 4670 // Push the index as a smi. This is safe because of the checks in
4578 // DoStringCharCodeAt above. 4671 // DoStringCharCodeAt above.
4579 if (instr->index()->IsConstantOperand()) { 4672 if (instr->index()->IsConstantOperand()) {
4580 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); 4673 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
4581 __ mov(scratch, Operand(Smi::FromInt(const_index))); 4674 __ mov(scratch, Operand(Smi::FromInt(const_index)));
4582 __ push(scratch); 4675 Push(scratch);
4583 } else { 4676 } else {
4584 Register index = ToRegister(instr->index()); 4677 Register index = ToRegister(instr->index());
4585 __ SmiTag(index); 4678 __ SmiTag(index);
4586 __ push(index); 4679 Push(index);
4587 } 4680 }
4588 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr); 4681 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
4589 __ AssertSmi(r0); 4682 __ AssertSmi(r0);
4590 __ SmiUntag(r0); 4683 __ SmiUntag(r0);
4591 __ StoreToSafepointRegisterSlot(r0, result); 4684 __ StoreToSafepointRegisterSlot(r0, result);
4592 } 4685 }
4593 4686
4594 4687
4595 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { 4688 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
4596 class DeferredStringCharFromCode: public LDeferredCode { 4689 class DeferredStringCharFromCode: public LDeferredCode {
(...skipping 30 matching lines...)
4627 Register char_code = ToRegister(instr->char_code()); 4720 Register char_code = ToRegister(instr->char_code());
4628 Register result = ToRegister(instr->result()); 4721 Register result = ToRegister(instr->result());
4629 4722
4630 // TODO(3095996): Get rid of this. For now, we need to make the 4723 // TODO(3095996): Get rid of this. For now, we need to make the
4631 // result register contain a valid pointer because it is already 4724 // result register contain a valid pointer because it is already
4632 // contained in the register pointer map. 4725 // contained in the register pointer map.
4633 __ mov(result, Operand::Zero()); 4726 __ mov(result, Operand::Zero());
4634 4727
4635 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4728 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4636 __ SmiTag(char_code); 4729 __ SmiTag(char_code);
4637 __ push(char_code); 4730 Push(char_code);
4638 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); 4731 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
4639 __ StoreToSafepointRegisterSlot(r0, result); 4732 __ StoreToSafepointRegisterSlot(r0, result);
4640 } 4733 }
4641 4734
4642 4735
4643 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { 4736 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4644 LOperand* input = instr->value(); 4737 LOperand* input = instr->value();
4645 ASSERT(input->IsRegister() || input->IsStackSlot()); 4738 ASSERT(input->IsRegister() || input->IsStackSlot());
4646 LOperand* output = instr->result(); 4739 LOperand* output = instr->result();
4647 ASSERT(output->IsDoubleRegister()); 4740 ASSERT(output->IsDoubleRegister());
(...skipping 710 matching lines...)
5358 // TODO(3095996): Get rid of this. For now, we need to make the 5451 // TODO(3095996): Get rid of this. For now, we need to make the
5359 // result register contain a valid pointer because it is already 5452 // result register contain a valid pointer because it is already
5360 // contained in the register pointer map. 5453 // contained in the register pointer map.
5361 __ mov(result, Operand(Smi::FromInt(0))); 5454 __ mov(result, Operand(Smi::FromInt(0)));
5362 5455
5363 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 5456 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
5364 if (instr->size()->IsRegister()) { 5457 if (instr->size()->IsRegister()) {
5365 Register size = ToRegister(instr->size()); 5458 Register size = ToRegister(instr->size());
5366 ASSERT(!size.is(result)); 5459 ASSERT(!size.is(result));
5367 __ SmiTag(size); 5460 __ SmiTag(size);
5368 __ push(size); 5461 Push(size);
5369 } else { 5462 } else {
5370 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); 5463 int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5371 __ Push(Smi::FromInt(size)); 5464 __ Push(Smi::FromInt(size));
5372 } 5465 }
5373 5466
5374 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { 5467 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5375 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); 5468 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
5376 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); 5469 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
5377 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr); 5470 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
5378 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { 5471 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5379 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); 5472 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
5380 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr); 5473 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
5381 } else { 5474 } else {
5382 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); 5475 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
5383 } 5476 }
5384 __ StoreToSafepointRegisterSlot(r0, result); 5477 __ StoreToSafepointRegisterSlot(r0, result);
5385 } 5478 }
5386 5479
5387 5480
5388 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { 5481 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
5389 ASSERT(ToRegister(instr->value()).is(r0)); 5482 ASSERT(ToRegister(instr->value()).is(r0));
5390 __ push(r0); 5483 Push(r0);
5391 CallRuntime(Runtime::kToFastProperties, 1, instr); 5484 CallRuntime(Runtime::kToFastProperties, 1, instr);
5392 } 5485 }
5393 5486
5394 5487
5395 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 5488 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
5396 Label materialized; 5489 Label materialized;
5397 // Registers will be used as follows: 5490 // Registers will be used as follows:
5398 // r7 = literals array. 5491 // r7 = literals array.
5399 // r1 = regexp literal. 5492 // r1 = regexp literal.
5400 // r0 = regexp literal clone. 5493 // r0 = regexp literal clone.
5401 // r2 and r4-r6 are used as temporaries. 5494 // r2 and r4-r6 are used as temporaries.
5402 int literal_offset = 5495 int literal_offset =
5403 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); 5496 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
5404 __ LoadHeapObject(r7, instr->hydrogen()->literals()); 5497 __ LoadHeapObject(r7, instr->hydrogen()->literals());
5405 __ ldr(r1, FieldMemOperand(r7, literal_offset)); 5498 __ ldr(r1, FieldMemOperand(r7, literal_offset));
5406 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 5499 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
5407 __ cmp(r1, ip); 5500 __ cmp(r1, ip);
5408 __ b(ne, &materialized); 5501 __ b(ne, &materialized);
5409 5502
5410 // Create regexp literal using runtime function 5503 // Create regexp literal using runtime function
5411 // Result will be in r0. 5504 // Result will be in r0.
5412 __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); 5505 __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
5413 __ mov(r5, Operand(instr->hydrogen()->pattern())); 5506 __ mov(r5, Operand(instr->hydrogen()->pattern()));
5414 __ mov(r4, Operand(instr->hydrogen()->flags())); 5507 __ mov(r4, Operand(instr->hydrogen()->flags()));
5415 __ Push(r7, r6, r5, r4); 5508 Push(r7, r6, r5, r4);
5416 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); 5509 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
5417 __ mov(r1, r0); 5510 __ mov(r1, r0);
5418 5511
5419 __ bind(&materialized); 5512 __ bind(&materialized);
5420 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 5513 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
5421 Label allocated, runtime_allocate; 5514 Label allocated, runtime_allocate;
5422 5515
5423 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); 5516 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
5424 __ jmp(&allocated); 5517 __ jmp(&allocated);
5425 5518
5426 __ bind(&runtime_allocate); 5519 __ bind(&runtime_allocate);
5427 __ mov(r0, Operand(Smi::FromInt(size))); 5520 __ mov(r0, Operand(Smi::FromInt(size)));
5428 __ Push(r1, r0); 5521 Push(r1, r0);
5429 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); 5522 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5430 __ pop(r1); 5523 __ pop(r1);
5524 fp_sp_delta_ -= kPointerSize;
5431 5525
5432 __ bind(&allocated); 5526 __ bind(&allocated);
5433 // Copy the content into the newly allocated memory. 5527 // Copy the content into the newly allocated memory.
5434 __ CopyFields(r0, r1, double_scratch0(), size / kPointerSize); 5528 __ CopyFields(r0, r1, double_scratch0(), size / kPointerSize);
5435 } 5529 }
5436 5530
5437 5531
5438 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 5532 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
5439 // Use the fast case closure allocation code that allocates in new 5533 // Use the fast case closure allocation code that allocates in new
5440 // space for nested functions that don't need literals cloning. 5534 // space for nested functions that don't need literals cloning.
5441 bool pretenure = instr->hydrogen()->pretenure(); 5535 bool pretenure = instr->hydrogen()->pretenure();
5442 if (!pretenure && instr->hydrogen()->has_no_literals()) { 5536 if (!pretenure && instr->hydrogen()->has_no_literals()) {
5443 FastNewClosureStub stub(instr->hydrogen()->language_mode(), 5537 FastNewClosureStub stub(instr->hydrogen()->language_mode(),
5444 instr->hydrogen()->is_generator()); 5538 instr->hydrogen()->is_generator());
5445 __ mov(r1, Operand(instr->hydrogen()->shared_info())); 5539 __ mov(r1, Operand(instr->hydrogen()->shared_info()));
5446 __ push(r1); 5540 Push(r1);
5447 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 5541 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 1, instr);
5448 } else { 5542 } else {
5449 __ mov(r2, Operand(instr->hydrogen()->shared_info())); 5543 __ mov(r2, Operand(instr->hydrogen()->shared_info()));
5450 __ mov(r1, Operand(pretenure ? factory()->true_value() 5544 __ mov(r1, Operand(pretenure ? factory()->true_value()
5451 : factory()->false_value())); 5545 : factory()->false_value()));
5452 __ Push(cp, r2, r1); 5546 Push(cp, r2, r1);
5453 CallRuntime(Runtime::kNewClosure, 3, instr); 5547 CallRuntime(Runtime::kNewClosure, 3, instr);
5454 } 5548 }
5455 } 5549 }
5456 5550
5457 5551
5458 void LCodeGen::DoTypeof(LTypeof* instr) { 5552 void LCodeGen::DoTypeof(LTypeof* instr) {
5459 Register input = ToRegister(instr->value()); 5553 Register input = ToRegister(instr->value());
5460 __ push(input); 5554 Push(input);
5461 CallRuntime(Runtime::kTypeof, 1, instr); 5555 CallRuntime(Runtime::kTypeof, 1, instr);
5462 } 5556 }
5463 5557
5464 5558
5465 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { 5559 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
5466 Register input = ToRegister(instr->value()); 5560 Register input = ToRegister(instr->value());
5467 5561
5468 Condition final_branch_condition = EmitTypeofIs(instr->TrueLabel(chunk_), 5562 Condition final_branch_condition = EmitTypeofIs(instr->TrueLabel(chunk_),
5469 instr->FalseLabel(chunk_), 5563 instr->FalseLabel(chunk_),
5470 input, 5564 input,
(...skipping 184 matching lines...)
5655 // There is no LLazyBailout instruction for stack-checks. We have to 5749 // There is no LLazyBailout instruction for stack-checks. We have to
5656 // prepare for lazy deoptimization explicitly here. 5750 // prepare for lazy deoptimization explicitly here.
5657 if (instr->hydrogen()->is_function_entry()) { 5751 if (instr->hydrogen()->is_function_entry()) {
5658 // Perform stack overflow check. 5752 // Perform stack overflow check.
5659 Label done; 5753 Label done;
5660 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 5754 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
5661 __ cmp(sp, Operand(ip)); 5755 __ cmp(sp, Operand(ip));
5662 __ b(hs, &done); 5756 __ b(hs, &done);
5663 StackCheckStub stub; 5757 StackCheckStub stub;
5664 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); 5758 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
5665 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 5759 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 0, instr);
5666 EnsureSpaceForLazyDeopt(); 5760 EnsureSpaceForLazyDeopt();
5667 last_lazy_deopt_pc_ = masm()->pc_offset(); 5761 last_lazy_deopt_pc_ = masm()->pc_offset();
5668 __ bind(&done); 5762 __ bind(&done);
5669 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 5763 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5670 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 5764 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5671 } else { 5765 } else {
5672 ASSERT(instr->hydrogen()->is_backwards_branch()); 5766 ASSERT(instr->hydrogen()->is_backwards_branch());
5673 // Perform stack overflow check if this goto needs it before jumping. 5767 // Perform stack overflow check if this goto needs it before jumping.
5674 DeferredStackCheck* deferred_stack_check = 5768 DeferredStackCheck* deferred_stack_check =
5675 new(zone()) DeferredStackCheck(this, instr); 5769 new(zone()) DeferredStackCheck(this, instr);
(...skipping 47 matching lines...)
5723 DeoptimizeIf(le, instr->environment()); 5817 DeoptimizeIf(le, instr->environment());
5724 5818
5725 Label use_cache, call_runtime; 5819 Label use_cache, call_runtime;
5726 __ CheckEnumCache(null_value, &call_runtime); 5820 __ CheckEnumCache(null_value, &call_runtime);
5727 5821
5728 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 5822 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
5729 __ b(&use_cache); 5823 __ b(&use_cache);
5730 5824
5731 // Get the set of properties to enumerate. 5825 // Get the set of properties to enumerate.
5732 __ bind(&call_runtime); 5826 __ bind(&call_runtime);
5733 __ push(r0); 5827 Push(r0);
5734 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); 5828 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
5735 5829
5736 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 5830 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
5737 __ LoadRoot(ip, Heap::kMetaMapRootIndex); 5831 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
5738 __ cmp(r1, ip); 5832 __ cmp(r1, ip);
5739 DeoptimizeIf(ne, instr->environment()); 5833 DeoptimizeIf(ne, instr->environment());
5740 __ bind(&use_cache); 5834 __ bind(&use_cache);
5741 } 5835 }
5742 5836
5743 5837
(...skipping 51 matching lines...)
5795 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); 5889 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index));
5796 __ ldr(result, FieldMemOperand(scratch, 5890 __ ldr(result, FieldMemOperand(scratch,
5797 FixedArray::kHeaderSize - kPointerSize)); 5891 FixedArray::kHeaderSize - kPointerSize));
5798 __ bind(&done); 5892 __ bind(&done);
5799 } 5893 }
5800 5894
5801 5895
5802 #undef __ 5896 #undef __
5803 5897
5804 } } // namespace v8::internal 5898 } } // namespace v8::internal
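
Note: the bookkeeping this patch relies on can be modelled in isolation. The snippet below is a standalone illustration only; FrameTracker, AfterCall and the constant values are invented for the example and do not appear in the patch:

    // Standalone model of the fp/sp delta tracking used for sp-relative
    // stack slot addressing. Builds with any C++ compiler.
    #include <cassert>

    namespace {

    const int kPointerSize = 4;                    // ARM word size
    const int kContextOffset = -1 * kPointerSize;  // context slot just below fp
                                                   // (value assumed for the example)

    struct FrameTracker {
      int fp_sp_delta;  // fp - sp, in bytes

      // A call that consumes `arity` arguments plus the receiver raises sp,
      // shrinking the fp-to-sp distance by (arity + 1) slots.
      void AfterCall(int arity) { fp_sp_delta -= (arity + 1) * kPointerSize; }

      // sp + (kContextOffset + (fp - sp)) == fp + kContextOffset, so this
      // sp-relative offset addresses the same slot as an fp-relative load.
      int ContextOffsetFromSp() const { return kContextOffset + fp_sp_delta; }
    };

    }  // namespace

    int main() {
      FrameTracker frame = { 6 * kPointerSize };  // frame header plus pushed args
      frame.AfterCall(2);                         // a call with 2 args + receiver
      assert(frame.ContextOffsetFromSp() == kContextOffset + 3 * kPointerSize);
      return 0;
    }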