Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(660)

Side by Side Diff: src/arm/codegen-arm.cc

Issue 2828004: ARM: Remove a bunch of spilled scopes. Still a lot to go. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 10 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | src/arm/jump-target-arm.cc » ('j') | src/arm/virtual-frame-arm.h » ('J')
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after
150 CodeGenerator::CodeGenerator(MacroAssembler* masm) 150 CodeGenerator::CodeGenerator(MacroAssembler* masm)
151 : deferred_(8), 151 : deferred_(8),
152 masm_(masm), 152 masm_(masm),
153 info_(NULL), 153 info_(NULL),
154 frame_(NULL), 154 frame_(NULL),
155 allocator_(NULL), 155 allocator_(NULL),
156 cc_reg_(al), 156 cc_reg_(al),
157 state_(NULL), 157 state_(NULL),
158 loop_nesting_(0), 158 loop_nesting_(0),
159 type_info_(NULL), 159 type_info_(NULL),
160 function_return_(JumpTarget::BIDIRECTIONAL),
160 function_return_is_shadowed_(false) { 161 function_return_is_shadowed_(false) {
161 } 162 }
162 163
163 164
164 // Calling conventions: 165 // Calling conventions:
165 // fp: caller's frame pointer 166 // fp: caller's frame pointer
166 // sp: stack pointer 167 // sp: stack pointer
167 // r1: called JS function 168 // r1: called JS function
168 // cp: callee's context 169 // cp: callee's context
169 170
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
211 #endif 212 #endif
212 213
213 if (info->mode() == CompilationInfo::PRIMARY) { 214 if (info->mode() == CompilationInfo::PRIMARY) {
214 frame_->Enter(); 215 frame_->Enter();
215 // tos: code slot 216 // tos: code slot
216 217
217 // Allocate space for locals and initialize them. This also checks 218 // Allocate space for locals and initialize them. This also checks
218 // for stack overflow. 219 // for stack overflow.
219 frame_->AllocateStackSlots(); 220 frame_->AllocateStackSlots();
220 221
221 VirtualFrame::SpilledScope spilled_scope(frame_); 222 frame_->AssertIsSpilled();
222 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 223 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
223 if (heap_slots > 0) { 224 if (heap_slots > 0) {
224 // Allocate local context. 225 // Allocate local context.
225 // Get outer context and create a new context based on it. 226 // Get outer context and create a new context based on it.
226 __ ldr(r0, frame_->Function()); 227 __ ldr(r0, frame_->Function());
227 frame_->EmitPush(r0); 228 frame_->EmitPush(r0);
228 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 229 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
229 FastNewContextStub stub(heap_slots); 230 FastNewContextStub stub(heap_slots);
230 frame_->CallStub(&stub, 1); 231 frame_->CallStub(&stub, 1);
231 } else { 232 } else {
(...skipping 18 matching lines...) Expand all
250 { 251 {
251 Comment cmnt2(masm_, "[ copy context parameters into .context"); 252 Comment cmnt2(masm_, "[ copy context parameters into .context");
252 // Note that iteration order is relevant here! If we have the same 253 // Note that iteration order is relevant here! If we have the same
253 // parameter twice (e.g., function (x, y, x)), and that parameter 254 // parameter twice (e.g., function (x, y, x)), and that parameter
254 // needs to be copied into the context, it must be the last argument 255 // needs to be copied into the context, it must be the last argument
255 // passed to the parameter that needs to be copied. This is a rare 256 // passed to the parameter that needs to be copied. This is a rare
256 // case so we don't check for it, instead we rely on the copying 257 // case so we don't check for it, instead we rely on the copying
257 // order: such a parameter is copied repeatedly into the same 258 // order: such a parameter is copied repeatedly into the same
258 // context location and thus the last value is what is seen inside 259 // context location and thus the last value is what is seen inside
259 // the function. 260 // the function.
261 frame_->AssertIsSpilled();
260 for (int i = 0; i < scope()->num_parameters(); i++) { 262 for (int i = 0; i < scope()->num_parameters(); i++) {
261 Variable* par = scope()->parameter(i); 263 Variable* par = scope()->parameter(i);
262 Slot* slot = par->slot(); 264 Slot* slot = par->slot();
263 if (slot != NULL && slot->type() == Slot::CONTEXT) { 265 if (slot != NULL && slot->type() == Slot::CONTEXT) {
264 ASSERT(!scope()->is_global_scope()); // No params in global scope. 266 ASSERT(!scope()->is_global_scope()); // No params in global scope.
265 __ ldr(r1, frame_->ParameterAt(i)); 267 __ ldr(r1, frame_->ParameterAt(i));
266 // Loads r2 with context; used below in RecordWrite. 268 // Loads r2 with context; used below in RecordWrite.
267 __ str(r1, SlotOperand(slot, r2)); 269 __ str(r1, SlotOperand(slot, r2));
268 // Load the offset into r3. 270 // Load the offset into r3.
269 int slot_offset = 271 int slot_offset =
270 FixedArray::kHeaderSize + slot->index() * kPointerSize; 272 FixedArray::kHeaderSize + slot->index() * kPointerSize;
271 __ mov(r3, Operand(slot_offset)); 273 __ mov(r3, Operand(slot_offset));
272 __ RecordWrite(r2, r3, r1); 274 __ RecordWrite(r2, r3, r1);
273 } 275 }
274 } 276 }
275 } 277 }
276 278
277 // Store the arguments object. This must happen after context 279 // Store the arguments object. This must happen after context
278 // initialization because the arguments object may be stored in 280 // initialization because the arguments object may be stored in
279 // the context. 281 // the context.
280 if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) { 282 if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
281 StoreArgumentsObject(true); 283 StoreArgumentsObject(true);
282 } 284 }
283 285
284 // Initialize ThisFunction reference if present. 286 // Initialize ThisFunction reference if present.
285 if (scope()->is_function_scope() && scope()->function() != NULL) { 287 if (scope()->is_function_scope() && scope()->function() != NULL) {
286 __ mov(ip, Operand(Factory::the_hole_value())); 288 frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
287 frame_->EmitPush(ip);
288 StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT); 289 StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
289 } 290 }
290 } else { 291 } else {
291 // When used as the secondary compiler for splitting, r1, cp, 292 // When used as the secondary compiler for splitting, r1, cp,
292 // fp, and lr have been pushed on the stack. Adjust the virtual 293 // fp, and lr have been pushed on the stack. Adjust the virtual
293 // frame to match this state. 294 // frame to match this state.
294 frame_->Adjust(4); 295 frame_->Adjust(4);
295 296
296 // Bind all the bailout labels to the beginning of the function. 297 // Bind all the bailout labels to the beginning of the function.
297 List<CompilationInfo::Bailout*>* bailouts = info->bailouts(); 298 List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
(...skipping 206 matching lines...) Expand 10 before | Expand all | Expand 10 after
504 // code as we unwind the C++ stack. 505 // code as we unwind the C++ stack.
505 // 506 //
506 // It's possible to have both a stack overflow and a valid frame 507 // It's possible to have both a stack overflow and a valid frame
507 // state (eg, a subexpression overflowed, visiting it returned 508 // state (eg, a subexpression overflowed, visiting it returned
508 // with a dummied frame state, and visiting this expression 509 // with a dummied frame state, and visiting this expression
509 // returned with a normal-looking state). 510 // returned with a normal-looking state).
510 if (HasStackOverflow() && 511 if (HasStackOverflow() &&
511 has_valid_frame() && 512 has_valid_frame() &&
512 !has_cc() && 513 !has_cc() &&
513 frame_->height() == original_height) { 514 frame_->height() == original_height) {
514 frame_->SpillAll();
515 true_target->Jump(); 515 true_target->Jump();
516 } 516 }
517 } 517 }
518 if (force_cc && frame_ != NULL && !has_cc()) { 518 if (force_cc && frame_ != NULL && !has_cc()) {
519 // Convert the TOS value to a boolean in the condition code register. 519 // Convert the TOS value to a boolean in the condition code register.
520 ToBoolean(true_target, false_target); 520 ToBoolean(true_target, false_target);
521 } 521 }
522 ASSERT(!force_cc || !has_valid_frame() || has_cc()); 522 ASSERT(!force_cc || !has_valid_frame() || has_cc());
523 ASSERT(!has_valid_frame() || 523 ASSERT(!has_valid_frame() ||
524 (has_cc() && frame_->height() == original_height) || 524 (has_cc() && frame_->height() == original_height) ||
525 (!has_cc() && frame_->height() == original_height + 1)); 525 (!has_cc() && frame_->height() == original_height + 1));
526 } 526 }
527 527
528 528
529 void CodeGenerator::Load(Expression* expr) { 529 void CodeGenerator::Load(Expression* expr) {
530 #ifdef DEBUG 530 #ifdef DEBUG
531 int original_height = frame_->height(); 531 int original_height = frame_->height();
532 #endif 532 #endif
533 JumpTarget true_target; 533 JumpTarget true_target;
534 JumpTarget false_target; 534 JumpTarget false_target;
535 LoadCondition(expr, &true_target, &false_target, false); 535 LoadCondition(expr, &true_target, &false_target, false);
536 536
537 if (has_cc()) { 537 if (has_cc()) {
538 // Convert cc_reg_ into a boolean value. 538 // Convert cc_reg_ into a boolean value.
539 VirtualFrame::SpilledScope scope(frame_);
540 JumpTarget loaded; 539 JumpTarget loaded;
541 JumpTarget materialize_true; 540 JumpTarget materialize_true;
542 materialize_true.Branch(cc_reg_); 541 materialize_true.Branch(cc_reg_);
543 __ LoadRoot(r0, Heap::kFalseValueRootIndex); 542 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
544 frame_->EmitPush(r0); 543 frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
545 loaded.Jump(); 544 loaded.Jump();
546 materialize_true.Bind(); 545 materialize_true.Bind();
547 __ LoadRoot(r0, Heap::kTrueValueRootIndex); 546 frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
548 frame_->EmitPush(r0);
549 loaded.Bind(); 547 loaded.Bind();
550 cc_reg_ = al; 548 cc_reg_ = al;
551 } 549 }
552 550
553 if (true_target.is_linked() || false_target.is_linked()) { 551 if (true_target.is_linked() || false_target.is_linked()) {
554 VirtualFrame::SpilledScope scope(frame_);
555 // We have at least one condition value that has been "translated" 552 // We have at least one condition value that has been "translated"
556 // into a branch, thus it needs to be loaded explicitly. 553 // into a branch, thus it needs to be loaded explicitly.
557 JumpTarget loaded; 554 JumpTarget loaded;
558 if (frame_ != NULL) { 555 if (frame_ != NULL) {
559 loaded.Jump(); // Don't lose the current TOS. 556 loaded.Jump(); // Don't lose the current TOS.
560 } 557 }
561 bool both = true_target.is_linked() && false_target.is_linked(); 558 bool both = true_target.is_linked() && false_target.is_linked();
562 // Load "true" if necessary. 559 // Load "true" if necessary.
563 if (true_target.is_linked()) { 560 if (true_target.is_linked()) {
564 true_target.Bind(); 561 true_target.Bind();
565 __ LoadRoot(r0, Heap::kTrueValueRootIndex); 562 frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
566 frame_->EmitPush(r0);
567 } 563 }
568 // If both "true" and "false" need to be loaded jump across the code for 564 // If both "true" and "false" need to be loaded jump across the code for
569 // "false". 565 // "false".
570 if (both) { 566 if (both) {
571 loaded.Jump(); 567 loaded.Jump();
572 } 568 }
573 // Load "false" if necessary. 569 // Load "false" if necessary.
574 if (false_target.is_linked()) { 570 if (false_target.is_linked()) {
575 false_target.Bind(); 571 false_target.Bind();
576 __ LoadRoot(r0, Heap::kFalseValueRootIndex); 572 frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
577 frame_->EmitPush(r0);
578 } 573 }
579 // A value is loaded on all paths reaching this point. 574 // A value is loaded on all paths reaching this point.
580 loaded.Bind(); 575 loaded.Bind();
581 } 576 }
582 ASSERT(has_valid_frame()); 577 ASSERT(has_valid_frame());
583 ASSERT(!has_cc()); 578 ASSERT(!has_cc());
584 ASSERT_EQ(original_height + 1, frame_->height()); 579 ASSERT_EQ(original_height + 1, frame_->height());
585 } 580 }
586 581
587 582
588 void CodeGenerator::LoadGlobal() { 583 void CodeGenerator::LoadGlobal() {
589 Register reg = frame_->GetTOSRegister(); 584 Register reg = frame_->GetTOSRegister();
590 __ ldr(reg, GlobalObject()); 585 __ ldr(reg, GlobalObject());
591 frame_->EmitPush(reg); 586 frame_->EmitPush(reg);
592 } 587 }
593 588
594 589
595 void CodeGenerator::LoadGlobalReceiver(Register scratch) { 590 void CodeGenerator::LoadGlobalReceiver(Register scratch) {
Søren Thygesen Gjesse 2010/06/15 10:29:56 scratch is not used anymore.
596 VirtualFrame::SpilledScope spilled_scope(frame_); 591 Register reg = frame_->GetTOSRegister();
597 __ ldr(scratch, ContextOperand(cp, Context::GLOBAL_INDEX)); 592 __ ldr(reg, ContextOperand(cp, Context::GLOBAL_INDEX));
598 __ ldr(scratch, 593 __ ldr(reg,
599 FieldMemOperand(scratch, GlobalObject::kGlobalReceiverOffset)); 594 FieldMemOperand(reg, GlobalObject::kGlobalReceiverOffset));
600 frame_->EmitPush(scratch); 595 frame_->EmitPush(reg);
601 } 596 }
602 597
603 598
604 ArgumentsAllocationMode CodeGenerator::ArgumentsMode() { 599 ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
605 if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION; 600 if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
606 ASSERT(scope()->arguments_shadow() != NULL); 601 ASSERT(scope()->arguments_shadow() != NULL);
607 // We don't want to do lazy arguments allocation for functions that 602 // We don't want to do lazy arguments allocation for functions that
608 // have heap-allocated contexts, because it interferes with the 603 // have heap-allocated contexts, because it interferes with the
609 // uninitialized const tracking in the context objects. 604 // uninitialized const tracking in the context objects.
610 return (scope()->num_heap_slots() > 0) 605 return (scope()->num_heap_slots() > 0)
611 ? EAGER_ARGUMENTS_ALLOCATION 606 ? EAGER_ARGUMENTS_ALLOCATION
612 : LAZY_ARGUMENTS_ALLOCATION; 607 : LAZY_ARGUMENTS_ALLOCATION;
613 } 608 }
614 609
615 610
616 void CodeGenerator::StoreArgumentsObject(bool initial) { 611 void CodeGenerator::StoreArgumentsObject(bool initial) {
617 VirtualFrame::SpilledScope spilled_scope(frame_);
618
619 ArgumentsAllocationMode mode = ArgumentsMode(); 612 ArgumentsAllocationMode mode = ArgumentsMode();
620 ASSERT(mode != NO_ARGUMENTS_ALLOCATION); 613 ASSERT(mode != NO_ARGUMENTS_ALLOCATION);
621 614
622 Comment cmnt(masm_, "[ store arguments object"); 615 Comment cmnt(masm_, "[ store arguments object");
623 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) { 616 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
624 // When using lazy arguments allocation, we store the hole value 617 // When using lazy arguments allocation, we store the hole value
625 // as a sentinel indicating that the arguments object hasn't been 618 // as a sentinel indicating that the arguments object hasn't been
626 // allocated yet. 619 // allocated yet.
627 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 620 frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
628 frame_->EmitPush(ip);
629 } else { 621 } else {
622 frame_->SpillAll();
630 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); 623 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
631 __ ldr(r2, frame_->Function()); 624 __ ldr(r2, frame_->Function());
632 // The receiver is below the arguments, the return address, and the 625 // The receiver is below the arguments, the return address, and the
633 // frame pointer on the stack. 626 // frame pointer on the stack.
634 const int kReceiverDisplacement = 2 + scope()->num_parameters(); 627 const int kReceiverDisplacement = 2 + scope()->num_parameters();
635 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize)); 628 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
636 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters()))); 629 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
637 frame_->Adjust(3); 630 frame_->Adjust(3);
638 __ Push(r2, r1, r0); 631 __ Push(r2, r1, r0);
639 frame_->CallStub(&stub, 3); 632 frame_->CallStub(&stub, 3);
640 frame_->EmitPush(r0); 633 frame_->EmitPush(r0);
641 } 634 }
642 635
643 Variable* arguments = scope()->arguments()->var(); 636 Variable* arguments = scope()->arguments()->var();
644 Variable* shadow = scope()->arguments_shadow()->var(); 637 Variable* shadow = scope()->arguments_shadow()->var();
645 ASSERT(arguments != NULL && arguments->slot() != NULL); 638 ASSERT(arguments != NULL && arguments->slot() != NULL);
646 ASSERT(shadow != NULL && shadow->slot() != NULL); 639 ASSERT(shadow != NULL && shadow->slot() != NULL);
647 JumpTarget done; 640 JumpTarget done;
648 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) { 641 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
649 // We have to skip storing into the arguments slot if it has 642 // We have to skip storing into the arguments slot if it has
650 // already been written to. This can happen if a function 643 // has a local variable named 'arguments'.
651 // has a local variable named 'arguments'. 644 // has a local variable named 'arguments'.
652 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); 645 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
653 frame_->EmitPop(r0); 646 Register arguments = frame_->PopToRegister();
654 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 647 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
655 __ cmp(r0, ip); 648 __ cmp(arguments, ip);
656 done.Branch(ne); 649 done.Branch(ne);
657 } 650 }
658 StoreToSlot(arguments->slot(), NOT_CONST_INIT); 651 StoreToSlot(arguments->slot(), NOT_CONST_INIT);
659 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind(); 652 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
660 StoreToSlot(shadow->slot(), NOT_CONST_INIT); 653 StoreToSlot(shadow->slot(), NOT_CONST_INIT);
661 } 654 }
662 655
663 656
664 void CodeGenerator::LoadTypeofExpression(Expression* expr) { 657 void CodeGenerator::LoadTypeofExpression(Expression* expr) {
665 // Special handling of identifiers as subexpressions of typeof. 658 // Special handling of identifiers as subexpressions of typeof.
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after
748 frame_->EmitPush(tos); 741 frame_->EmitPush(tos);
749 } 742 }
750 } 743 }
751 744
752 745
753 // ECMA-262, section 9.2, page 30: ToBoolean(). Convert the given 746 // ECMA-262, section 9.2, page 30: ToBoolean(). Convert the given
754 // register to a boolean in the condition code register. The code 747 // register to a boolean in the condition code register. The code
755 // may jump to 'false_target' in case the register converts to 'false'. 748 // may jump to 'false_target' in case the register converts to 'false'.
756 void CodeGenerator::ToBoolean(JumpTarget* true_target, 749 void CodeGenerator::ToBoolean(JumpTarget* true_target,
757 JumpTarget* false_target) { 750 JumpTarget* false_target) {
758 VirtualFrame::SpilledScope spilled_scope(frame_);
759 // Note: The generated code snippet does not change stack variables. 751 // Note: The generated code snippet does not change stack variables.
760 // Only the condition code should be set. 752 // Only the condition code should be set.
761 frame_->EmitPop(r0); 753 Register tos = frame_->PopToRegister();
762 754
763 // Fast case checks 755 // Fast case checks
764 756
765 // Check if the value is 'false'. 757 // Check if the value is 'false'.
766 __ LoadRoot(ip, Heap::kFalseValueRootIndex); 758 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
767 __ cmp(r0, ip); 759 __ cmp(tos, ip);
768 false_target->Branch(eq); 760 false_target->Branch(eq);
769 761
770 // Check if the value is 'true'. 762 // Check if the value is 'true'.
771 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 763 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
772 __ cmp(r0, ip); 764 __ cmp(tos, ip);
773 true_target->Branch(eq); 765 true_target->Branch(eq);
774 766
775 // Check if the value is 'undefined'. 767 // Check if the value is 'undefined'.
776 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 768 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
777 __ cmp(r0, ip); 769 __ cmp(tos, ip);
778 false_target->Branch(eq); 770 false_target->Branch(eq);
779 771
780 // Check if the value is a smi. 772 // Check if the value is a smi.
781 __ cmp(r0, Operand(Smi::FromInt(0))); 773 __ cmp(tos, Operand(Smi::FromInt(0)));
782 false_target->Branch(eq); 774 false_target->Branch(eq);
783 __ tst(r0, Operand(kSmiTagMask)); 775 __ tst(tos, Operand(kSmiTagMask));
784 true_target->Branch(eq); 776 true_target->Branch(eq);
785 777
786 // Slow case: call the runtime. 778 // Slow case: call the runtime.
787 frame_->EmitPush(r0); 779 frame_->EmitPush(tos);
788 frame_->CallRuntime(Runtime::kToBool, 1); 780 frame_->CallRuntime(Runtime::kToBool, 1);
789 // Convert the result (r0) to a condition code. 781 // Convert the result (r0) to a condition code.
790 __ LoadRoot(ip, Heap::kFalseValueRootIndex); 782 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
791 __ cmp(r0, ip); 783 __ cmp(r0, ip);
792 784
793 cc_reg_ = ne; 785 cc_reg_ = ne;
794 } 786 }
795 787
796 788
797 void CodeGenerator::GenericBinaryOperation(Token::Value op, 789 void CodeGenerator::GenericBinaryOperation(Token::Value op,
(...skipping 131 matching lines...) Expand 10 before | Expand all | Expand 10 after
929 921
930 private: 922 private:
931 Token::Value op_; 923 Token::Value op_;
932 int value_; 924 int value_;
933 bool reversed_; 925 bool reversed_;
934 OverwriteMode overwrite_mode_; 926 OverwriteMode overwrite_mode_;
935 Register tos_register_; 927 Register tos_register_;
936 }; 928 };
937 929
938 930
931
932 // On entry the non-constant side of the binary operation is in tos_register_
Søren Thygesen Gjesse 2010/06/15 10:29:56 Doesn't tos contain the result of performing the o
933 // and the constant smi side is nowhere. The tos_register_ is not used by the
934 // virtual frame. On exit the answer is in the tos_register_ and the virtual
935 // frame is unchanged.
939 void DeferredInlineSmiOperation::Generate() { 936 void DeferredInlineSmiOperation::Generate() {
937 VirtualFrame copied_frame(*frame_state()->frame());
938 copied_frame.SpillAll();
939
940 Register lhs = r1; 940 Register lhs = r1;
941 Register rhs = r0; 941 Register rhs = r0;
942 switch (op_) { 942 switch (op_) {
943 case Token::ADD: { 943 case Token::ADD: {
944 // Revert optimistic add. 944 // Revert optimistic add.
945 if (reversed_) { 945 if (reversed_) {
946 __ sub(r0, tos_register_, Operand(Smi::FromInt(value_))); 946 __ sub(r0, tos_register_, Operand(Smi::FromInt(value_)));
947 __ mov(r1, Operand(Smi::FromInt(value_))); 947 __ mov(r1, Operand(Smi::FromInt(value_)));
948 } else { 948 } else {
949 __ sub(r1, tos_register_, Operand(Smi::FromInt(value_))); 949 __ sub(r1, tos_register_, Operand(Smi::FromInt(value_)));
Søren Thygesen Gjesse 2010/06/15 10:29:56 Why does r0 not need to be loaded with value_?
950 __ mov(r0, Operand(Smi::FromInt(value_))); 950 __ mov(r0, Operand(Smi::FromInt(value_)));
951 } 951 }
952 break; 952 break;
953 } 953 }
954 954
955 case Token::SUB: { 955 case Token::SUB: {
956 // Revert optimistic sub. 956 // Revert optimistic sub.
957 if (reversed_) { 957 if (reversed_) {
958 __ rsb(r0, tos_register_, Operand(Smi::FromInt(value_))); 958 __ rsb(r0, tos_register_, Operand(Smi::FromInt(value_)));
959 __ mov(r1, Operand(Smi::FromInt(value_))); 959 __ mov(r1, Operand(Smi::FromInt(value_)));
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
1013 } 1013 }
1014 1014
1015 default: 1015 default:
1016 // Other cases should have been handled before this point. 1016 // Other cases should have been handled before this point.
1017 UNREACHABLE(); 1017 UNREACHABLE();
1018 break; 1018 break;
1019 } 1019 }
1020 1020
1021 GenericBinaryOpStub stub(op_, overwrite_mode_, lhs, rhs, value_); 1021 GenericBinaryOpStub stub(op_, overwrite_mode_, lhs, rhs, value_);
1022 __ CallStub(&stub); 1022 __ CallStub(&stub);
1023
1023 // The generic stub returns its value in r0, but that's not 1024 // The generic stub returns its value in r0, but that's not
1024 // necessarily what we want. We want whatever the inlined code 1025 // necessarily what we want. We want whatever the inlined code
1025 // expected, which is that the answer is in the same register as 1026 // expected, which is that the answer is in the same register as
1026 // the operand was. 1027 // the operand was.
1027 __ Move(tos_register_, r0); 1028 __ Move(tos_register_, r0);
1029
1030 // The tos register was not in use for the virtual frame that we
1031 // came into this function with, so we can merge back to that frame
1032 // without trashing it.
1033 copied_frame.MergeTo(frame_state()->frame());
1028 } 1034 }
1029 1035
1030 1036
1031 static bool PopCountLessThanEqual2(unsigned int x) { 1037 static bool PopCountLessThanEqual2(unsigned int x) {
1032 x &= x - 1; 1038 x &= x - 1;
1033 return (x & (x - 1)) == 0; 1039 return (x & (x - 1)) == 0;
1034 } 1040 }
1035 1041
1036 1042
1037 // Returns the index of the lowest bit set. 1043 // Returns the index of the lowest bit set.
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after
1118 frame_->EmitPush(lhs, TypeInfo::Smi()); 1124 frame_->EmitPush(lhs, TypeInfo::Smi());
1119 TypeInfo t = both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Unknown(); 1125 TypeInfo t = both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Unknown();
1120 frame_->EmitPush(rhs, t); 1126 frame_->EmitPush(rhs, t);
1121 GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, kUnknownIntValue); 1127 GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, kUnknownIntValue);
1122 } 1128 }
1123 return; 1129 return;
1124 } 1130 }
1125 1131
1126 // We move the top of stack to a register (normally no move is involved). 1132 // We move the top of stack to a register (normally no move is involved).
1127 Register tos = frame_->PopToRegister(); 1133 Register tos = frame_->PopToRegister();
1128 // All other registers are spilled. The deferred code expects one argument
1129 // in a register and all other values are flushed to the stack. The
1130 // answer is returned in the same register that the top of stack argument was
1131 // in.
1132 frame_->SpillAll();
1133
1134 switch (op) { 1134 switch (op) {
1135 case Token::ADD: { 1135 case Token::ADD: {
1136 DeferredCode* deferred = 1136 DeferredCode* deferred =
1137 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos); 1137 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
1138 1138
1139 __ add(tos, tos, Operand(value), SetCC); 1139 __ add(tos, tos, Operand(value), SetCC);
1140 deferred->Branch(vs); 1140 deferred->Branch(vs);
1141 if (!both_sides_are_smi) { 1141 if (!both_sides_are_smi) {
1142 __ tst(tos, Operand(kSmiTagMask)); 1142 __ tst(tos, Operand(kSmiTagMask));
1143 deferred->Branch(ne); 1143 deferred->Branch(ne);
(...skipping 298 matching lines...) Expand 10 before | Expand all | Expand 10 after
1442 1442
1443 exit.Bind(); 1443 exit.Bind();
1444 cc_reg_ = cc; 1444 cc_reg_ = cc;
1445 } 1445 }
1446 1446
1447 1447
1448 // Call the function on the stack with the given arguments. 1448 // Call the function on the stack with the given arguments.
1449 void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args, 1449 void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
1450 CallFunctionFlags flags, 1450 CallFunctionFlags flags,
1451 int position) { 1451 int position) {
1452 frame_->AssertIsSpilled();
1453
1454 // Push the arguments ("left-to-right") on the stack. 1452 // Push the arguments ("left-to-right") on the stack.
1455 int arg_count = args->length(); 1453 int arg_count = args->length();
1456 for (int i = 0; i < arg_count; i++) { 1454 for (int i = 0; i < arg_count; i++) {
1457 Load(args->at(i)); 1455 Load(args->at(i));
1458 } 1456 }
1459 1457
1460 // Record the position for debugging purposes. 1458 // Record the position for debugging purposes.
1461 CodeForSourcePosition(position); 1459 CodeForSourcePosition(position);
1462 1460
1463 // Use the shared code stub to call the function. 1461 // Use the shared code stub to call the function.
(...skipping 12 matching lines...) Expand all
1476 VariableProxy* arguments, 1474 VariableProxy* arguments,
1477 int position) { 1475 int position) {
1478 // An optimized implementation of expressions of the form 1476 // An optimized implementation of expressions of the form
1479 // x.apply(y, arguments). 1477 // x.apply(y, arguments).
1480 // If the arguments object of the scope has not been allocated, 1478 // If the arguments object of the scope has not been allocated,
1481 // and x.apply is Function.prototype.apply, this optimization 1479 // and x.apply is Function.prototype.apply, this optimization
1482 // just copies y and the arguments of the current function on the 1480 // just copies y and the arguments of the current function on the
1483 // stack, as receiver and arguments, and calls x. 1481 // stack, as receiver and arguments, and calls x.
1484 // In the implementation comments, we call x the applicand 1482 // In the implementation comments, we call x the applicand
1485 // and y the receiver. 1483 // and y the receiver.
1486 VirtualFrame::SpilledScope spilled_scope(frame_);
1487 1484
1488 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION); 1485 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
1489 ASSERT(arguments->IsArguments()); 1486 ASSERT(arguments->IsArguments());
1490 1487
1491 // Load applicand.apply onto the stack. This will usually 1488 // Load applicand.apply onto the stack. This will usually
1492 // give us a megamorphic load site. Not super, but it works. 1489 // give us a megamorphic load site. Not super, but it works.
1493 Load(applicand); 1490 Load(applicand);
1494 Handle<String> name = Factory::LookupAsciiSymbol("apply"); 1491 Handle<String> name = Factory::LookupAsciiSymbol("apply");
1495 frame_->Dup(); 1492 frame_->Dup();
1496 frame_->CallLoadIC(name, RelocInfo::CODE_TARGET); 1493 frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
1497 frame_->EmitPush(r0); 1494 frame_->EmitPush(r0);
1498 1495
1499 // Load the receiver and the existing arguments object onto the 1496 // Load the receiver and the existing arguments object onto the
1500 // expression stack. Avoid allocating the arguments object here. 1497 // expression stack. Avoid allocating the arguments object here.
1501 Load(receiver); 1498 Load(receiver);
1502 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); 1499 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
1503 1500
1501 // At this point the top two stack elements are probably in registers
1502 // since they were just loaded. Ensure they are in regs and get the
1503 // regs.
1504 Register receiver_reg = frame_->Peek2();
1505 Register arguments_reg = frame_->Peek();
1506
1507 // From now on the frame is spilled.
1508 frame_->SpillAll();
1509
1504 // Emit the source position information after having loaded the 1510 // Emit the source position information after having loaded the
1505 // receiver and the arguments. 1511 // receiver and the arguments.
1506 CodeForSourcePosition(position); 1512 CodeForSourcePosition(position);
1507 // Contents of the stack at this point: 1513 // Contents of the stack at this point:
1508 // sp[0]: arguments object of the current function or the hole. 1514 // sp[0]: arguments object of the current function or the hole.
1509 // sp[1]: receiver 1515 // sp[1]: receiver
1510 // sp[2]: applicand.apply 1516 // sp[2]: applicand.apply
1511 // sp[3]: applicand. 1517 // sp[3]: applicand.
1512 1518
1513 // Check if the arguments object has been lazily allocated 1519 // Check if the arguments object has been lazily allocated
1514 // already. If so, just use that instead of copying the arguments 1520 // already. If so, just use that instead of copying the arguments
1515 // from the stack. This also deals with cases where a local variable 1521 // from the stack. This also deals with cases where a local variable
1516 // named 'arguments' has been introduced. 1522 // named 'arguments' has been introduced.
1517 __ ldr(r0, MemOperand(sp, 0)); 1523 JumpTarget slow;
1518 1524 Label done;
1519 Label slow, done;
1520 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 1525 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1521 __ cmp(ip, r0); 1526 __ cmp(ip, arguments_reg);
1522 __ b(ne, &slow); 1527 slow.Branch(ne);
1523 1528
1524 Label build_args; 1529 Label build_args;
1525 // Get rid of the arguments object probe. 1530 // Get rid of the arguments object probe.
1526 frame_->Drop(); 1531 frame_->Drop();
1527 // Stack now has 3 elements on it. 1532 // Stack now has 3 elements on it.
1528 // Contents of stack at this point: 1533 // Contents of stack at this point:
1529 // sp[0]: receiver 1534 // sp[0]: receiver - in the receiver_reg register.
1530 // sp[1]: applicand.apply 1535 // sp[1]: applicand.apply
1531 // sp[2]: applicand. 1536 // sp[2]: applicand.
1532 1537
1533 // Check that the receiver really is a JavaScript object. 1538 // Check that the receiver really is a JavaScript object.
1534 __ ldr(r0, MemOperand(sp, 0)); 1539 __ BranchOnSmi(receiver_reg, &build_args);
1535 __ BranchOnSmi(r0, &build_args);
1536 // We allow all JSObjects including JSFunctions. As long as 1540 // We allow all JSObjects including JSFunctions. As long as
1537 // JS_FUNCTION_TYPE is the last instance type and it is right 1541 // JS_FUNCTION_TYPE is the last instance type and it is right
1538 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper 1542 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
1539 // bound. 1543 // bound.
1540 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 1544 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1541 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); 1545 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1542 __ CompareObjectType(r0, r1, r2, FIRST_JS_OBJECT_TYPE); 1546 __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
1543 __ b(lt, &build_args); 1547 __ b(lt, &build_args);
1544 1548
1545 // Check that applicand.apply is Function.prototype.apply. 1549 // Check that applicand.apply is Function.prototype.apply.
1546 __ ldr(r0, MemOperand(sp, kPointerSize)); 1550 __ ldr(r0, MemOperand(sp, kPointerSize));
1547 __ BranchOnSmi(r0, &build_args); 1551 __ BranchOnSmi(r0, &build_args);
1548 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); 1552 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
1549 __ b(ne, &build_args); 1553 __ b(ne, &build_args);
1550 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); 1554 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
1551 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); 1555 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
1552 __ ldr(r1, FieldMemOperand(r0, SharedFunctionInfo::kCodeOffset)); 1556 __ ldr(r1, FieldMemOperand(r0, SharedFunctionInfo::kCodeOffset));
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
1621 // there, and fall-through to the slow-case where we call 1625 // there, and fall-through to the slow-case where we call
1622 // applicand.apply. 1626 // applicand.apply.
1623 __ bind(&build_args); 1627 __ bind(&build_args);
1624 // Stack now has 3 elements, because we have jumped from where: 1628 // Stack now has 3 elements, because we have jumped from where:
1625 // sp[0]: receiver 1629 // sp[0]: receiver
1626 // sp[1]: applicand.apply 1630 // sp[1]: applicand.apply
1627 // sp[2]: applicand. 1631 // sp[2]: applicand.
1628 StoreArgumentsObject(false); 1632 StoreArgumentsObject(false);
1629 1633
1630 // Stack and frame now have 4 elements. 1634 // Stack and frame now have 4 elements.
1631 __ bind(&slow); 1635 slow.Bind();
1632 1636
1633 // Generic computation of x.apply(y, args) with no special optimization. 1637 // Generic computation of x.apply(y, args) with no special optimization.
1634 // Flip applicand.apply and applicand on the stack, so 1638 // Flip applicand.apply and applicand on the stack, so
1635 // applicand looks like the receiver of the applicand.apply call. 1639 // applicand looks like the receiver of the applicand.apply call.
1636 // Then process it as a normal function call. 1640 // Then process it as a normal function call.
1637 __ ldr(r0, MemOperand(sp, 3 * kPointerSize)); 1641 __ ldr(r0, MemOperand(sp, 3 * kPointerSize));
1638 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); 1642 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
1639 __ Strd(r0, r1, MemOperand(sp, 2 * kPointerSize)); 1643 __ Strd(r0, r1, MemOperand(sp, 2 * kPointerSize));
1640 1644
1641 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS); 1645 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
1642 frame_->CallStub(&call_function, 3); 1646 frame_->CallStub(&call_function, 3);
1643 // The function and its two arguments have been dropped. 1647 // The function and its two arguments have been dropped.
1644 frame_->Drop(); // Drop the receiver as well. 1648 frame_->Drop(); // Drop the receiver as well.
1645 frame_->EmitPush(r0); 1649 frame_->EmitPush(r0);
1646 // Stack now has 1 element: 1650 // Stack now has 1 element:
1647 // sp[0]: result 1651 // sp[0]: result
1648 __ bind(&done); 1652 __ bind(&done);
1649 1653
1650 // Restore the context register after a call. 1654 // Restore the context register after a call.
1651 __ ldr(cp, frame_->Context()); 1655 __ ldr(cp, frame_->Context());
1652 } 1656 }
1653 1657
1654 1658
1655 void CodeGenerator::Branch(bool if_true, JumpTarget* target) { 1659 void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
1656 VirtualFrame::SpilledScope spilled_scope(frame_);
1657 ASSERT(has_cc()); 1660 ASSERT(has_cc());
1658 Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_); 1661 Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_);
1659 target->Branch(cc); 1662 target->Branch(cc);
1660 cc_reg_ = al; 1663 cc_reg_ = al;
1661 } 1664 }
1662 1665
1663 1666
1664 void CodeGenerator::CheckStack() { 1667 void CodeGenerator::CheckStack() {
1665 VirtualFrame::SpilledScope spilled_scope(frame_); 1668 frame_->SpillAll();
1666 Comment cmnt(masm_, "[ check stack"); 1669 Comment cmnt(masm_, "[ check stack");
1667 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 1670 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1668 // Put the lr setup instruction in the delay slot. kInstrSize is added to 1671 // Put the lr setup instruction in the delay slot. kInstrSize is added to
1669 // the implicit 8 byte offset that always applies to operations with pc and 1672 // the implicit 8 byte offset that always applies to operations with pc and
1670 // gives a return address 12 bytes down. 1673 // gives a return address 12 bytes down.
1671 masm_->add(lr, pc, Operand(Assembler::kInstrSize)); 1674 masm_->add(lr, pc, Operand(Assembler::kInstrSize));
1672 masm_->cmp(sp, Operand(ip)); 1675 masm_->cmp(sp, Operand(ip));
1673 StackCheckStub stub; 1676 StackCheckStub stub;
1674 // Call the stub if lower. 1677 // Call the stub if lower.
1675 masm_->mov(pc, 1678 masm_->mov(pc,
1676 Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), 1679 Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
1677 RelocInfo::CODE_TARGET), 1680 RelocInfo::CODE_TARGET),
1678 LeaveCC, 1681 LeaveCC,
1679 lo); 1682 lo);
1680 } 1683 }
1681 1684
1682 1685
1683 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { 1686 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
1684 #ifdef DEBUG 1687 #ifdef DEBUG
1685 int original_height = frame_->height(); 1688 int original_height = frame_->height();
1686 #endif 1689 #endif
1687 VirtualFrame::SpilledScope spilled_scope(frame_);
1688 for (int i = 0; frame_ != NULL && i < statements->length(); i++) { 1690 for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
1689 Visit(statements->at(i)); 1691 Visit(statements->at(i));
1690 } 1692 }
1691 ASSERT(!has_valid_frame() || frame_->height() == original_height); 1693 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1692 } 1694 }
1693 1695
1694 1696
1695 void CodeGenerator::VisitBlock(Block* node) { 1697 void CodeGenerator::VisitBlock(Block* node) {
1696 #ifdef DEBUG 1698 #ifdef DEBUG
1697 int original_height = frame_->height(); 1699 int original_height = frame_->height();
1698 #endif 1700 #endif
1699 VirtualFrame::SpilledScope spilled_scope(frame_);
1700 Comment cmnt(masm_, "[ Block"); 1701 Comment cmnt(masm_, "[ Block");
1701 CodeForStatementPosition(node); 1702 CodeForStatementPosition(node);
1702 node->break_target()->SetExpectedHeight(); 1703 node->break_target()->SetExpectedHeight();
1703 VisitStatements(node->statements()); 1704 VisitStatements(node->statements());
1704 if (node->break_target()->is_linked()) { 1705 if (node->break_target()->is_linked()) {
1705 node->break_target()->Bind(); 1706 node->break_target()->Bind();
1706 } 1707 }
1707 node->break_target()->Unuse(); 1708 node->break_target()->Unuse();
1708 ASSERT(!has_valid_frame() || frame_->height() == original_height); 1709 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1709 } 1710 }
1710 1711
1711 1712
1712 void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 1713 void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
1713 frame_->EmitPush(cp); 1714 frame_->EmitPush(cp);
1714 frame_->EmitPush(Operand(pairs)); 1715 frame_->EmitPush(Operand(pairs));
1715 frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0))); 1716 frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0)));
1716 1717
1717 VirtualFrame::SpilledScope spilled_scope(frame_);
1718 frame_->CallRuntime(Runtime::kDeclareGlobals, 3); 1718 frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
1719 // The result is discarded. 1719 // The result is discarded.
1720 } 1720 }
1721 1721
1722 1722
1723 void CodeGenerator::VisitDeclaration(Declaration* node) { 1723 void CodeGenerator::VisitDeclaration(Declaration* node) {
1724 #ifdef DEBUG 1724 #ifdef DEBUG
1725 int original_height = frame_->height(); 1725 int original_height = frame_->height();
1726 #endif 1726 #endif
1727 Comment cmnt(masm_, "[ Declaration"); 1727 Comment cmnt(masm_, "[ Declaration");
(...skipping 20 matching lines...) Expand all
1748 // 'undefined') because we may have a (legal) redeclaration and we 1748 // 'undefined') because we may have a (legal) redeclaration and we
1749 // must not destroy the current value. 1749 // must not destroy the current value.
1750 if (node->mode() == Variable::CONST) { 1750 if (node->mode() == Variable::CONST) {
1751 frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex); 1751 frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
1752 } else if (node->fun() != NULL) { 1752 } else if (node->fun() != NULL) {
1753 Load(node->fun()); 1753 Load(node->fun());
1754 } else { 1754 } else {
1755 frame_->EmitPush(Operand(0)); 1755 frame_->EmitPush(Operand(0));
1756 } 1756 }
1757 1757
1758 VirtualFrame::SpilledScope spilled_scope(frame_);
1759 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4); 1758 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
1760 // Ignore the return value (declarations are statements). 1759 // Ignore the return value (declarations are statements).
1761 1760
1762 ASSERT(frame_->height() == original_height); 1761 ASSERT(frame_->height() == original_height);
1763 return; 1762 return;
1764 } 1763 }
1765 1764
1766 ASSERT(!var->is_global()); 1765 ASSERT(!var->is_global());
1767 1766
1768 // If we have a function or a constant, we need to initialize the variable. 1767 // If we have a function or a constant, we need to initialize the variable.
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after
1893 1892
1894 // end 1893 // end
1895 if (exit.is_linked()) { 1894 if (exit.is_linked()) {
1896 exit.Bind(); 1895 exit.Bind();
1897 } 1896 }
1898 ASSERT(!has_valid_frame() || frame_->height() == original_height); 1897 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1899 } 1898 }
1900 1899
1901 1900
1902 void CodeGenerator::VisitContinueStatement(ContinueStatement* node) { 1901 void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
1903 VirtualFrame::SpilledScope spilled_scope(frame_);
1904 Comment cmnt(masm_, "[ ContinueStatement"); 1902 Comment cmnt(masm_, "[ ContinueStatement");
1905 CodeForStatementPosition(node); 1903 CodeForStatementPosition(node);
1906 node->target()->continue_target()->Jump(); 1904 node->target()->continue_target()->Jump();
1907 } 1905 }
1908 1906
1909 1907
1910 void CodeGenerator::VisitBreakStatement(BreakStatement* node) { 1908 void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
1911 VirtualFrame::SpilledScope spilled_scope(frame_);
1912 Comment cmnt(masm_, "[ BreakStatement"); 1909 Comment cmnt(masm_, "[ BreakStatement");
1913 CodeForStatementPosition(node); 1910 CodeForStatementPosition(node);
1914 node->target()->break_target()->Jump(); 1911 node->target()->break_target()->Jump();
1915 } 1912 }
1916 1913
1917 1914
1918 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { 1915 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
1919 VirtualFrame::SpilledScope spilled_scope(frame_); 1916 frame_->SpillAll();
1920 Comment cmnt(masm_, "[ ReturnStatement"); 1917 Comment cmnt(masm_, "[ ReturnStatement");
1921 1918
1922 CodeForStatementPosition(node); 1919 CodeForStatementPosition(node);
1923 Load(node->expression()); 1920 Load(node->expression());
1924 if (function_return_is_shadowed_) { 1921 if (function_return_is_shadowed_) {
1925 frame_->EmitPop(r0); 1922 frame_->EmitPop(r0);
1926 function_return_.Jump(); 1923 function_return_.Jump();
1927 } else { 1924 } else {
1928 // Pop the result from the frame and prepare the frame for 1925 // Pop the result from the frame and prepare the frame for
1929 // returning thus making it easier to merge. 1926 // returning thus making it easier to merge.
1930 frame_->EmitPop(r0); 1927 frame_->PopToR0();
1931 frame_->PrepareForReturn(); 1928 frame_->PrepareForReturn();
1932 if (function_return_.is_bound()) { 1929 if (function_return_.is_bound()) {
1933 // If the function return label is already bound we reuse the 1930 // If the function return label is already bound we reuse the
1934 // code by jumping to the return site. 1931 // code by jumping to the return site.
1935 function_return_.Jump(); 1932 function_return_.Jump();
1936 } else { 1933 } else {
1937 function_return_.Bind(); 1934 function_return_.Bind();
1938 GenerateReturnSequence(); 1935 GenerateReturnSequence();
1939 } 1936 }
1940 } 1937 }
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
1980 Assembler::kJSReturnSequenceInstructions + 1); 1977 Assembler::kJSReturnSequenceInstructions + 1);
1981 #endif 1978 #endif
1982 } 1979 }
1983 } 1980 }
1984 1981
1985 1982
1986 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) { 1983 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
1987 #ifdef DEBUG 1984 #ifdef DEBUG
1988 int original_height = frame_->height(); 1985 int original_height = frame_->height();
1989 #endif 1986 #endif
1990 VirtualFrame::SpilledScope spilled_scope(frame_);
1991 Comment cmnt(masm_, "[ WithEnterStatement"); 1987 Comment cmnt(masm_, "[ WithEnterStatement");
1992 CodeForStatementPosition(node); 1988 CodeForStatementPosition(node);
1993 Load(node->expression()); 1989 Load(node->expression());
1994 if (node->is_catch_block()) { 1990 if (node->is_catch_block()) {
1995 frame_->CallRuntime(Runtime::kPushCatchContext, 1); 1991 frame_->CallRuntime(Runtime::kPushCatchContext, 1);
1996 } else { 1992 } else {
1997 frame_->CallRuntime(Runtime::kPushContext, 1); 1993 frame_->CallRuntime(Runtime::kPushContext, 1);
1998 } 1994 }
1999 #ifdef DEBUG 1995 #ifdef DEBUG
2000 JumpTarget verified_true; 1996 JumpTarget verified_true;
2001 __ cmp(r0, cp); 1997 __ cmp(r0, cp);
2002 verified_true.Branch(eq); 1998 verified_true.Branch(eq);
2003 __ stop("PushContext: r0 is expected to be the same as cp"); 1999 __ stop("PushContext: r0 is expected to be the same as cp");
2004 verified_true.Bind(); 2000 verified_true.Bind();
2005 #endif 2001 #endif
2006 // Update context local. 2002 // Update context local.
2007 __ str(cp, frame_->Context()); 2003 __ str(cp, frame_->Context());
2008 ASSERT(frame_->height() == original_height); 2004 ASSERT(frame_->height() == original_height);
2009 } 2005 }
2010 2006
2011 2007
2012 void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) { 2008 void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
2013 #ifdef DEBUG 2009 #ifdef DEBUG
2014 int original_height = frame_->height(); 2010 int original_height = frame_->height();
2015 #endif 2011 #endif
2016 VirtualFrame::SpilledScope spilled_scope(frame_);
2017 Comment cmnt(masm_, "[ WithExitStatement"); 2012 Comment cmnt(masm_, "[ WithExitStatement");
2018 CodeForStatementPosition(node); 2013 CodeForStatementPosition(node);
2019 // Pop context. 2014 // Pop context.
2020 __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX)); 2015 __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX));
2021 // Update context local. 2016 // Update context local.
2022 __ str(cp, frame_->Context()); 2017 __ str(cp, frame_->Context());
2023 ASSERT(frame_->height() == original_height); 2018 ASSERT(frame_->height() == original_height);
2024 } 2019 }
2025 2020
2026 2021
2027 void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) { 2022 void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
2028 #ifdef DEBUG 2023 #ifdef DEBUG
2029 int original_height = frame_->height(); 2024 int original_height = frame_->height();
2030 #endif 2025 #endif
2031 VirtualFrame::SpilledScope spilled_scope(frame_);
2032 Comment cmnt(masm_, "[ SwitchStatement"); 2026 Comment cmnt(masm_, "[ SwitchStatement");
2033 CodeForStatementPosition(node); 2027 CodeForStatementPosition(node);
2034 node->break_target()->SetExpectedHeight(); 2028 node->break_target()->SetExpectedHeight();
2035 2029
2036 Load(node->tag()); 2030 Load(node->tag());
2037 2031
2038 JumpTarget next_test; 2032 JumpTarget next_test;
2039 JumpTarget fall_through; 2033 JumpTarget fall_through;
2040 JumpTarget default_entry; 2034 JumpTarget default_entry;
2041 JumpTarget default_exit(JumpTarget::BIDIRECTIONAL); 2035 JumpTarget default_exit(JumpTarget::BIDIRECTIONAL);
2042 ZoneList<CaseClause*>* cases = node->cases(); 2036 ZoneList<CaseClause*>* cases = node->cases();
2043 int length = cases->length(); 2037 int length = cases->length();
2044 CaseClause* default_clause = NULL; 2038 CaseClause* default_clause = NULL;
2045 2039
2046 for (int i = 0; i < length; i++) { 2040 for (int i = 0; i < length; i++) {
2047 CaseClause* clause = cases->at(i); 2041 CaseClause* clause = cases->at(i);
2048 if (clause->is_default()) { 2042 if (clause->is_default()) {
2049 // Remember the default clause and compile it at the end. 2043 // Remember the default clause and compile it at the end.
2050 default_clause = clause; 2044 default_clause = clause;
2051 continue; 2045 continue;
2052 } 2046 }
2053 2047
2054 Comment cmnt(masm_, "[ Case clause"); 2048 Comment cmnt(masm_, "[ Case clause");
2055 // Compile the test. 2049 // Compile the test.
2056 next_test.Bind(); 2050 next_test.Bind();
2057 next_test.Unuse(); 2051 next_test.Unuse();
2058 // Duplicate TOS. 2052 // Duplicate TOS.
2059 __ ldr(r0, frame_->Top()); 2053 frame_->Dup();
2060 frame_->EmitPush(r0);
2061 Comparison(eq, NULL, clause->label(), true); 2054 Comparison(eq, NULL, clause->label(), true);
2062 Branch(false, &next_test); 2055 Branch(false, &next_test);
2063 2056
2064 // Before entering the body from the test, remove the switch value from 2057 // Before entering the body from the test, remove the switch value from
2065 // the stack. 2058 // the stack.
2066 frame_->Drop(); 2059 frame_->Drop();
2067 2060
2068 // Label the body so that fall through is enabled. 2061 // Label the body so that fall through is enabled.
2069 if (i > 0 && cases->at(i - 1)->is_default()) { 2062 if (i > 0 && cases->at(i - 1)->is_default()) {
2070 default_exit.Bind(); 2063 default_exit.Bind();
(...skipping 17 matching lines...) Expand all
2088 // The final "test" removes the switch value. 2081 // The final "test" removes the switch value.
2089 next_test.Bind(); 2082 next_test.Bind();
2090 frame_->Drop(); 2083 frame_->Drop();
2091 2084
2092 // If there is a default clause, compile it. 2085 // If there is a default clause, compile it.
2093 if (default_clause != NULL) { 2086 if (default_clause != NULL) {
2094 Comment cmnt(masm_, "[ Default clause"); 2087 Comment cmnt(masm_, "[ Default clause");
2095 default_entry.Bind(); 2088 default_entry.Bind();
2096 VisitStatements(default_clause->statements()); 2089 VisitStatements(default_clause->statements());
2097 // If control flow can fall out of the default and there is a case after 2090 // If control flow can fall out of the default and there is a case after
2098 // it, jup to that case's body. 2091 // it, jump to that case's body.
2099 if (frame_ != NULL && default_exit.is_bound()) { 2092 if (frame_ != NULL && default_exit.is_bound()) {
2100 default_exit.Jump(); 2093 default_exit.Jump();
2101 } 2094 }
2102 } 2095 }
2103 2096
2104 if (fall_through.is_linked()) { 2097 if (fall_through.is_linked()) {
2105 fall_through.Bind(); 2098 fall_through.Bind();
2106 } 2099 }
2107 2100
2108 if (node->break_target()->is_linked()) { 2101 if (node->break_target()->is_linked()) {
2109 node->break_target()->Bind(); 2102 node->break_target()->Bind();
2110 } 2103 }
2111 node->break_target()->Unuse(); 2104 node->break_target()->Unuse();
2112 ASSERT(!has_valid_frame() || frame_->height() == original_height); 2105 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2113 } 2106 }
2114 2107
2115 2108
2116 void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) { 2109 void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
2117 #ifdef DEBUG 2110 #ifdef DEBUG
2118 int original_height = frame_->height(); 2111 int original_height = frame_->height();
2119 #endif 2112 #endif
2120 VirtualFrame::SpilledScope spilled_scope(frame_);
2121 Comment cmnt(masm_, "[ DoWhileStatement"); 2113 Comment cmnt(masm_, "[ DoWhileStatement");
2122 CodeForStatementPosition(node); 2114 CodeForStatementPosition(node);
2123 node->break_target()->SetExpectedHeight(); 2115 node->break_target()->SetExpectedHeight();
2124 JumpTarget body(JumpTarget::BIDIRECTIONAL); 2116 JumpTarget body(JumpTarget::BIDIRECTIONAL);
2125 IncrementLoopNesting(); 2117 IncrementLoopNesting();
2126 2118
2127 // Label the top of the loop for the backward CFG edge. If the test 2119 // Label the top of the loop for the backward CFG edge. If the test
2128 // is always true we can use the continue target, and if the test is 2120 // is always true we can use the continue target, and if the test is
2129 // always false there is no need. 2121 // always false there is no need.
2130 ConditionAnalysis info = AnalyzeCondition(node->cond()); 2122 ConditionAnalysis info = AnalyzeCondition(node->cond());
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after
2185 } 2177 }
2186 DecrementLoopNesting(); 2178 DecrementLoopNesting();
2187 ASSERT(!has_valid_frame() || frame_->height() == original_height); 2179 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2188 } 2180 }
2189 2181
2190 2182
2191 void CodeGenerator::VisitWhileStatement(WhileStatement* node) { 2183 void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
2192 #ifdef DEBUG 2184 #ifdef DEBUG
2193 int original_height = frame_->height(); 2185 int original_height = frame_->height();
2194 #endif 2186 #endif
2195 VirtualFrame::SpilledScope spilled_scope(frame_);
2196 Comment cmnt(masm_, "[ WhileStatement"); 2187 Comment cmnt(masm_, "[ WhileStatement");
2197 CodeForStatementPosition(node); 2188 CodeForStatementPosition(node);
2198 2189
2199 // If the test is never true and has no side effects there is no need 2190 // If the test is never true and has no side effects there is no need
2200 // to compile the test or body. 2191 // to compile the test or body.
2201 ConditionAnalysis info = AnalyzeCondition(node->cond()); 2192 ConditionAnalysis info = AnalyzeCondition(node->cond());
2202 if (info == ALWAYS_FALSE) return; 2193 if (info == ALWAYS_FALSE) return;
2203 2194
2204 node->break_target()->SetExpectedHeight(); 2195 node->break_target()->SetExpectedHeight();
2205 IncrementLoopNesting(); 2196 IncrementLoopNesting();
2206 2197
2207 // Label the top of the loop with the continue target for the backward 2198 // Label the top of the loop with the continue target for the backward
2208 // CFG edge. 2199 // CFG edge.
2209 node->continue_target()->SetExpectedHeight(); 2200 node->continue_target()->SetExpectedHeight();
2210 node->continue_target()->Bind(); 2201 node->continue_target()->Bind();
2211 2202
2212 if (info == DONT_KNOW) { 2203 if (info == DONT_KNOW) {
2213 JumpTarget body; 2204 JumpTarget body(JumpTarget::BIDIRECTIONAL);
2214 LoadCondition(node->cond(), &body, node->break_target(), true); 2205 LoadCondition(node->cond(), &body, node->break_target(), true);
2215 if (has_valid_frame()) { 2206 if (has_valid_frame()) {
2216 // A NULL frame indicates that control did not fall out of the 2207 // A NULL frame indicates that control did not fall out of the
2217 // test expression. 2208 // test expression.
2218 Branch(false, node->break_target()); 2209 Branch(false, node->break_target());
2219 } 2210 }
2220 if (has_valid_frame() || body.is_linked()) { 2211 if (has_valid_frame() || body.is_linked()) {
2221 body.Bind(); 2212 body.Bind();
2222 } 2213 }
2223 } 2214 }
(...skipping 12 matching lines...) Expand all
2236 } 2227 }
2237 DecrementLoopNesting(); 2228 DecrementLoopNesting();
2238 ASSERT(!has_valid_frame() || frame_->height() == original_height); 2229 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2239 } 2230 }
2240 2231
2241 2232
2242 void CodeGenerator::VisitForStatement(ForStatement* node) { 2233 void CodeGenerator::VisitForStatement(ForStatement* node) {
2243 #ifdef DEBUG 2234 #ifdef DEBUG
2244 int original_height = frame_->height(); 2235 int original_height = frame_->height();
2245 #endif 2236 #endif
2246 VirtualFrame::SpilledScope spilled_scope(frame_);
2247 Comment cmnt(masm_, "[ ForStatement"); 2237 Comment cmnt(masm_, "[ ForStatement");
2248 CodeForStatementPosition(node); 2238 CodeForStatementPosition(node);
2249 if (node->init() != NULL) { 2239 if (node->init() != NULL) {
2250 Visit(node->init()); 2240 Visit(node->init());
2251 } 2241 }
2252 2242
2253 // If the test is never true there is no need to compile the test or 2243 // If the test is never true there is no need to compile the test or
2254 // body. 2244 // body.
2255 ConditionAnalysis info = AnalyzeCondition(node->cond()); 2245 ConditionAnalysis info = AnalyzeCondition(node->cond());
2256 if (info == ALWAYS_FALSE) return; 2246 if (info == ALWAYS_FALSE) return;
(...skipping 668 matching lines...) Expand 10 before | Expand all | Expand 10 after
2925 Comment cmnt(masm_, "[ SharedFunctionInfoLiteral"); 2915 Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
2926 InstantiateFunction(node->shared_function_info()); 2916 InstantiateFunction(node->shared_function_info());
2927 ASSERT_EQ(original_height + 1, frame_->height()); 2917 ASSERT_EQ(original_height + 1, frame_->height());
2928 } 2918 }
2929 2919
2930 2920
2931 void CodeGenerator::VisitConditional(Conditional* node) { 2921 void CodeGenerator::VisitConditional(Conditional* node) {
2932 #ifdef DEBUG 2922 #ifdef DEBUG
2933 int original_height = frame_->height(); 2923 int original_height = frame_->height();
2934 #endif 2924 #endif
2935 VirtualFrame::SpilledScope spilled_scope(frame_);
2936 Comment cmnt(masm_, "[ Conditional"); 2925 Comment cmnt(masm_, "[ Conditional");
2937 JumpTarget then; 2926 JumpTarget then;
2938 JumpTarget else_; 2927 JumpTarget else_;
2939 LoadCondition(node->condition(), &then, &else_, true); 2928 LoadCondition(node->condition(), &then, &else_, true);
2940 if (has_valid_frame()) { 2929 if (has_valid_frame()) {
2941 Branch(false, &else_); 2930 Branch(false, &else_);
2942 } 2931 }
2943 if (has_valid_frame() || then.is_linked()) { 2932 if (has_valid_frame() || then.is_linked()) {
2944 then.Bind(); 2933 then.Bind();
2945 Load(node->then_expression()); 2934 Load(node->then_expression());
(...skipping 20 matching lines...) Expand all
2966 2955
2967 // Generate fast case for loading from slots that correspond to 2956 // Generate fast case for loading from slots that correspond to
2968 // local/global variables or arguments unless they are shadowed by 2957 // local/global variables or arguments unless they are shadowed by
2969 // eval-introduced bindings. 2958 // eval-introduced bindings.
2970 EmitDynamicLoadFromSlotFastCase(slot, 2959 EmitDynamicLoadFromSlotFastCase(slot,
2971 typeof_state, 2960 typeof_state,
2972 &slow, 2961 &slow,
2973 &done); 2962 &done);
2974 2963
2975 slow.Bind(); 2964 slow.Bind();
2976 VirtualFrame::SpilledScope spilled_scope(frame_);
2977 frame_->EmitPush(cp); 2965 frame_->EmitPush(cp);
2978 __ mov(r0, Operand(slot->var()->name())); 2966 frame_->EmitPush(Operand(slot->var()->name()));
2979 frame_->EmitPush(r0);
2980 2967
2981 if (typeof_state == INSIDE_TYPEOF) { 2968 if (typeof_state == INSIDE_TYPEOF) {
2982 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 2969 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
2983 } else { 2970 } else {
2984 frame_->CallRuntime(Runtime::kLoadContextSlot, 2); 2971 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
2985 } 2972 }
2986 2973
2987 done.Bind(); 2974 done.Bind();
2988 frame_->EmitPush(r0); 2975 frame_->EmitPush(r0);
2989 2976
2990 } else { 2977 } else {
2991 Register scratch = VirtualFrame::scratch0(); 2978 Register scratch = VirtualFrame::scratch0();
2992 TypeInfo info = type_info(slot); 2979 TypeInfo info = type_info(slot);
2993 frame_->EmitPush(SlotOperand(slot, scratch), info); 2980 frame_->EmitPush(SlotOperand(slot, scratch), info);
2981
2994 if (slot->var()->mode() == Variable::CONST) { 2982 if (slot->var()->mode() == Variable::CONST) {
2995 // Const slots may contain 'the hole' value (the constant hasn't been 2983 // Const slots may contain 'the hole' value (the constant hasn't been
2996 // initialized yet) which needs to be converted into the 'undefined' 2984 // initialized yet) which needs to be converted into the 'undefined'
2997 // value. 2985 // value.
2998 Comment cmnt(masm_, "[ Unhole const"); 2986 Comment cmnt(masm_, "[ Unhole const");
2999 frame_->EmitPop(scratch); 2987 Register tos = frame_->PopToRegister();
3000 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 2988 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3001 __ cmp(scratch, ip); 2989 __ cmp(tos, ip);
3002 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex, eq); 2990 __ LoadRoot(tos, Heap::kUndefinedValueRootIndex, eq);
3003 frame_->EmitPush(scratch); 2991 frame_->EmitPush(tos);
3004 } 2992 }
3005 } 2993 }
3006 } 2994 }
3007 2995
3008 2996
3009 void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot, 2997 void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
3010 TypeofState state) { 2998 TypeofState state) {
2999 VirtualFrame::RegisterAllocationScope scope(this);
3011 LoadFromSlot(slot, state); 3000 LoadFromSlot(slot, state);
3012 3001
3013 // Bail out quickly if we're not using lazy arguments allocation. 3002 // Bail out quickly if we're not using lazy arguments allocation.
3014 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return; 3003 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
3015 3004
3016 // ... or if the slot isn't a non-parameter arguments slot. 3005 // ... or if the slot isn't a non-parameter arguments slot.
3017 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return; 3006 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
3018 3007
3019 VirtualFrame::SpilledScope spilled_scope(frame_); 3008 // Load the loaded value from the stack into a register but leave it on the
3020
3021 // Load the loaded value from the stack into r0 but leave it on the
3022 // stack. 3009 // stack.
3023 __ ldr(r0, MemOperand(sp, 0)); 3010 Register tos = frame_->Peek();
3024 3011
3025 // If the loaded value is the sentinel that indicates that we 3012 // If the loaded value is the sentinel that indicates that we
3026 // haven't loaded the arguments object yet, we need to do it now. 3013 // haven't loaded the arguments object yet, we need to do it now.
3027 JumpTarget exit; 3014 JumpTarget exit;
3028 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 3015 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3029 __ cmp(r0, ip); 3016 __ cmp(tos, ip);
3030 exit.Branch(ne); 3017 exit.Branch(ne);
3031 frame_->Drop(); 3018 frame_->Drop();
3032 StoreArgumentsObject(false); 3019 StoreArgumentsObject(false);
3033 exit.Bind(); 3020 exit.Bind();
3034 } 3021 }
3035 3022
3036 3023
3037 void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) { 3024 void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
3038 ASSERT(slot != NULL); 3025 ASSERT(slot != NULL);
3026 VirtualFrame::RegisterAllocationScope scope(this);
3039 if (slot->type() == Slot::LOOKUP) { 3027 if (slot->type() == Slot::LOOKUP) {
3040 VirtualFrame::SpilledScope spilled_scope(frame_);
3041 ASSERT(slot->var()->is_dynamic()); 3028 ASSERT(slot->var()->is_dynamic());
3042 3029
3043 // For now, just do a runtime call. 3030 // For now, just do a runtime call.
3044 frame_->EmitPush(cp); 3031 frame_->EmitPush(cp);
3045 __ mov(r0, Operand(slot->var()->name())); 3032 frame_->EmitPush(Operand(slot->var()->name()));
3046 frame_->EmitPush(r0);
3047 3033
3048 if (init_state == CONST_INIT) { 3034 if (init_state == CONST_INIT) {
3049 // Same as the case for a normal store, but ignores attribute 3035 // Same as the case for a normal store, but ignores attribute
3050 // (e.g. READ_ONLY) of context slot so that we can initialize 3036 // (e.g. READ_ONLY) of context slot so that we can initialize
3051 // const properties (introduced via eval("const foo = (some 3037 // const properties (introduced via eval("const foo = (some
3052 // expr);")). Also, uses the current function context instead of 3038 // expr);")). Also, uses the current function context instead of
3053 // the top context. 3039 // the top context.
3054 // 3040 //
3055 // Note that we must declare the foo upon entry of eval(), via a 3041 // Note that we must declare the foo upon entry of eval(), via a
3056 // context slot declaration, but we cannot initialize it at the 3042 // context slot declaration, but we cannot initialize it at the
3057 // same time, because the const declaration may be at the end of 3043 // same time, because the const declaration may be at the end of
3058 // the eval code (sigh...) and the const variable may have been 3044 // the eval code (sigh...) and the const variable may have been
3059 // used before (where its value is 'undefined'). Thus, we can only 3045 // used before (where its value is 'undefined'). Thus, we can only
3060 // do the initialization when we actually encounter the expression 3046 // do the initialization when we actually encounter the expression
3061 // and when the expression operands are defined and valid, and 3047 // and when the expression operands are defined and valid, and
3062 // thus we need the split into 2 operations: declaration of the 3048 // thus we need the split into 2 operations: declaration of the
3063 // context slot followed by initialization. 3049 // context slot followed by initialization.
3064 frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3); 3050 frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
3065 } else { 3051 } else {
3066 frame_->CallRuntime(Runtime::kStoreContextSlot, 3); 3052 frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
3067 } 3053 }
3068 // Storing a variable must keep the (new) value on the expression 3054 // Storing a variable must keep the (new) value on the expression
3069 // stack. This is necessary for compiling assignment expressions. 3055 // stack. This is necessary for compiling assignment expressions.
3070 frame_->EmitPush(r0); 3056 frame_->EmitPush(r0);
3071 3057
3072 } else { 3058 } else {
3073 ASSERT(!slot->var()->is_dynamic()); 3059 ASSERT(!slot->var()->is_dynamic());
3074 Register scratch = VirtualFrame::scratch0(); 3060 Register scratch = VirtualFrame::scratch0();
3075 VirtualFrame::RegisterAllocationScope scope(this); 3061 Register scratch2 = VirtualFrame::scratch1();
3076 3062
3077 // The frame must be spilled when branching to this target. 3063 // The frame must be spilled when branching to this target.
3078 JumpTarget exit; 3064 JumpTarget exit;
3079 3065
3080 if (init_state == CONST_INIT) { 3066 if (init_state == CONST_INIT) {
3081 ASSERT(slot->var()->mode() == Variable::CONST); 3067 ASSERT(slot->var()->mode() == Variable::CONST);
3082 // Only the first const initialization must be executed (the slot 3068 // Only the first const initialization must be executed (the slot
3083 // still contains 'the hole' value). When the assignment is 3069 // still contains 'the hole' value). When the assignment is
3084 // executed, the code is identical to a normal store (see below). 3070 // executed, the code is identical to a normal store (see below).
3085 Comment cmnt(masm_, "[ Init const"); 3071 Comment cmnt(masm_, "[ Init const");
3086 __ ldr(scratch, SlotOperand(slot, scratch)); 3072 __ ldr(scratch, SlotOperand(slot, scratch));
3087 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 3073 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3088 __ cmp(scratch, ip); 3074 __ cmp(scratch, ip);
3089 frame_->SpillAll();
3090 exit.Branch(ne); 3075 exit.Branch(ne);
3091 } 3076 }
3092 3077
3093 // We must execute the store. Storing a variable must keep the 3078 // We must execute the store. Storing a variable must keep the
3094 // (new) value on the stack. This is necessary for compiling 3079 // (new) value on the stack. This is necessary for compiling
3095 // assignment expressions. 3080 // assignment expressions.
3096 // 3081 //
3097 // Note: We will reach here even with slot->var()->mode() == 3082 // Note: We will reach here even with slot->var()->mode() ==
3098 // Variable::CONST because of const declarations which will 3083 // Variable::CONST because of const declarations which will
3099 // initialize consts to 'the hole' value and by doing so, end up 3084 // initialize consts to 'the hole' value and by doing so, end up
3100 // calling this code. r2 may be loaded with context; used below in 3085 // calling this code. r2 may be loaded with context; used below in
3101 // RecordWrite. 3086 // RecordWrite.
3102 Register tos = frame_->Peek(); 3087 Register tos = frame_->Peek();
3103 __ str(tos, SlotOperand(slot, scratch)); 3088 __ str(tos, SlotOperand(slot, scratch));
3104 if (slot->type() == Slot::CONTEXT) { 3089 if (slot->type() == Slot::CONTEXT) {
3105 // Skip write barrier if the written value is a smi. 3090 // Skip write barrier if the written value is a smi.
3106 __ tst(tos, Operand(kSmiTagMask)); 3091 __ tst(tos, Operand(kSmiTagMask));
3107 // We don't use tos any more after here. 3092 // We don't use tos any more after here.
3108 VirtualFrame::SpilledScope spilled_scope(frame_);
3109 exit.Branch(eq); 3093 exit.Branch(eq);
3110 // scratch is loaded with context when calling SlotOperand above. 3094 // scratch is loaded with context when calling SlotOperand above.
3111 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; 3095 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
3112 __ mov(r3, Operand(offset)); 3096 __ mov(scratch2, Operand(offset));
3113 // r1 could be identical with tos, but that doesn't matter. 3097 // We need an extra register. Until we have a way to do that in the
3114 __ RecordWrite(scratch, r3, r1); 3098 // virtual frame we will cheat and ask for a free TOS register.
3099 Register scratch3 = frame_->GetTOSRegister();
3100 __ RecordWrite(scratch, scratch2, scratch3);
3115 } 3101 }
3116 // If we definitely did not jump over the assignment, we do not need 3102 // If we definitely did not jump over the assignment, we do not need
3117 // to bind the exit label. Doing so can defeat peephole 3103 // to bind the exit label. Doing so can defeat peephole
3118 // optimization. 3104 // optimization.
3119 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) { 3105 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
3120 frame_->SpillAll();
3121 exit.Bind(); 3106 exit.Bind();
3122 } 3107 }
3123 } 3108 }
3124 } 3109 }
3125 3110
3126 3111
3127 void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot, 3112 void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
3128 TypeofState typeof_state, 3113 TypeofState typeof_state,
3129 JumpTarget* slow) { 3114 JumpTarget* slow) {
3130 // Check that no extension objects have been created by calls to 3115 // Check that no extension objects have been created by calls to
(...skipping 153 matching lines...) Expand 10 before | Expand all | Expand 10 after
3284 __ mov(reg, Operand(node->handle())); 3269 __ mov(reg, Operand(node->handle()));
3285 frame_->EmitPush(reg, is_smi ? TypeInfo::Smi() : TypeInfo::Unknown()); 3270 frame_->EmitPush(reg, is_smi ? TypeInfo::Smi() : TypeInfo::Unknown());
3286 ASSERT_EQ(original_height + 1, frame_->height()); 3271 ASSERT_EQ(original_height + 1, frame_->height());
3287 } 3272 }
3288 3273
3289 3274
3290 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) { 3275 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
3291 #ifdef DEBUG 3276 #ifdef DEBUG
3292 int original_height = frame_->height(); 3277 int original_height = frame_->height();
3293 #endif 3278 #endif
3294 VirtualFrame::SpilledScope spilled_scope(frame_);
3295 Comment cmnt(masm_, "[ RexExp Literal"); 3279 Comment cmnt(masm_, "[ RexExp Literal");
3296 3280
3281 Register tmp = VirtualFrame::scratch0();
3282 // Free up a TOS register that can be used to push the literal.
3283 Register literal = frame_->GetTOSRegister();
3284
3297 // Retrieve the literal array and check the allocated entry. 3285 // Retrieve the literal array and check the allocated entry.
3298 3286
3299 // Load the function of this activation. 3287 // Load the function of this activation.
3300 __ ldr(r1, frame_->Function()); 3288 __ ldr(tmp, frame_->Function());
3301 3289
3302 // Load the literals array of the function. 3290 // Load the literals array of the function.
3303 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); 3291 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kLiteralsOffset));
3304 3292
3305 // Load the literal at the ast saved index. 3293 // Load the literal at the ast saved index.
3306 int literal_offset = 3294 int literal_offset =
3307 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; 3295 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
3308 __ ldr(r2, FieldMemOperand(r1, literal_offset)); 3296 __ ldr(literal, FieldMemOperand(tmp, literal_offset));
3309 3297
3310 JumpTarget done; 3298 JumpTarget done;
3311 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 3299 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3312 __ cmp(r2, ip); 3300 __ cmp(literal, ip);
3301 // This branch locks the virtual frame at the done label to match the
3302 // one we have here, where the literal register is not on the stack and
3303 // nothing is spilled.
3313 done.Branch(ne); 3304 done.Branch(ne);
3314 3305
3315 // If the entry is undefined we call the runtime system to computed 3306 // If the entry is undefined we call the runtime system to compute
3316 // the literal. 3307 // the literal.
3317 frame_->EmitPush(r1); // literal array (0) 3308 // literal array (0)
3318 __ mov(r0, Operand(Smi::FromInt(node->literal_index()))); 3309 frame_->EmitPush(tmp);
3319 frame_->EmitPush(r0); // literal index (1) 3310 // literal index (1)
3320 __ mov(r0, Operand(node->pattern())); // RegExp pattern (2) 3311 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
3321 frame_->EmitPush(r0); 3312 // RegExp pattern (2)
3322 __ mov(r0, Operand(node->flags())); // RegExp flags (3) 3313 frame_->EmitPush(Operand(node->pattern()));
3323 frame_->EmitPush(r0); 3314 // RegExp flags (3)
3315 frame_->EmitPush(Operand(node->flags()));
3324 frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 3316 frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
3325 __ mov(r2, Operand(r0)); 3317 __ Move(literal, r0);
3326 3318
3319 // This call to bind will get us back to the virtual frame we had before
3320 // where things are not spilled and the literal register is not on the stack.
3327 done.Bind(); 3321 done.Bind();
3328 // Push the literal. 3322 // Push the literal.
3329 frame_->EmitPush(r2); 3323 frame_->EmitPush(literal);
3330 ASSERT_EQ(original_height + 1, frame_->height()); 3324 ASSERT_EQ(original_height + 1, frame_->height());
3331 } 3325 }
3332 3326
3333 3327
3334 void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) { 3328 void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
3335 #ifdef DEBUG 3329 #ifdef DEBUG
3336 int original_height = frame_->height(); 3330 int original_height = frame_->height();
3337 #endif 3331 #endif
3338 VirtualFrame::SpilledScope spilled_scope(frame_);
3339 Comment cmnt(masm_, "[ ObjectLiteral"); 3332 Comment cmnt(masm_, "[ ObjectLiteral");
3340 3333
3334 Register literal = frame_->GetTOSRegister();
3341 // Load the function of this activation. 3335 // Load the function of this activation.
3342 __ ldr(r3, frame_->Function()); 3336 __ ldr(literal, frame_->Function());
3343 // Literal array. 3337 // Literal array.
3344 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 3338 __ ldr(literal, FieldMemOperand(literal, JSFunction::kLiteralsOffset));
3339 frame_->EmitPush(literal);
3345 // Literal index. 3340 // Literal index.
3346 __ mov(r2, Operand(Smi::FromInt(node->literal_index()))); 3341 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
3347 // Constant properties. 3342 // Constant properties.
3348 __ mov(r1, Operand(node->constant_properties())); 3343 frame_->EmitPush(Operand(node->constant_properties()));
3349 // Should the object literal have fast elements? 3344 // Should the object literal have fast elements?
3350 __ mov(r0, Operand(Smi::FromInt(node->fast_elements() ? 1 : 0))); 3345 frame_->EmitPush(Operand(Smi::FromInt(node->fast_elements() ? 1 : 0)));
3351 frame_->EmitPushMultiple(4, r3.bit() | r2.bit() | r1.bit() | r0.bit());
3352 if (node->depth() > 1) { 3346 if (node->depth() > 1) {
3353 frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4); 3347 frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
3354 } else { 3348 } else {
3355 frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4); 3349 frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
3356 } 3350 }
3357 frame_->EmitPush(r0); // save the result 3351 frame_->EmitPush(r0); // save the result
3358 for (int i = 0; i < node->properties()->length(); i++) { 3352 for (int i = 0; i < node->properties()->length(); i++) {
3359 // At the start of each iteration, the top of stack contains 3353 // At the start of each iteration, the top of stack contains
3360 // the newly created object literal. 3354 // the newly created object literal.
3361 ObjectLiteral::Property* property = node->properties()->at(i); 3355 ObjectLiteral::Property* property = node->properties()->at(i);
3362 Literal* key = property->key(); 3356 Literal* key = property->key();
3363 Expression* value = property->value(); 3357 Expression* value = property->value();
3364 switch (property->kind()) { 3358 switch (property->kind()) {
3365 case ObjectLiteral::Property::CONSTANT: 3359 case ObjectLiteral::Property::CONSTANT:
3366 break; 3360 break;
3367 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 3361 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
3368 if (CompileTimeValue::IsCompileTimeValue(property->value())) break; 3362 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
3369 // else fall through 3363 // else fall through
3370 case ObjectLiteral::Property::COMPUTED: 3364 case ObjectLiteral::Property::COMPUTED:
3371 if (key->handle()->IsSymbol()) { 3365 if (key->handle()->IsSymbol()) {
3372 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); 3366 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
3373 Load(value); 3367 Load(value);
3374 frame_->EmitPop(r0); 3368 frame_->PopToR0();
3369 // Fetch the object literal.
3370 frame_->SpillAllButCopyTOSToR1();
3375 __ mov(r2, Operand(key->handle())); 3371 __ mov(r2, Operand(key->handle()));
3376 __ ldr(r1, frame_->Top()); // Load the receiver.
3377 frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0); 3372 frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
3378 break; 3373 break;
3379 } 3374 }
3380 // else fall through 3375 // else fall through
3381 case ObjectLiteral::Property::PROTOTYPE: { 3376 case ObjectLiteral::Property::PROTOTYPE: {
3382 __ ldr(r0, frame_->Top()); 3377 frame_->Dup();
3383 frame_->EmitPush(r0); // dup the result
3384 Load(key); 3378 Load(key);
3385 Load(value); 3379 Load(value);
3386 frame_->CallRuntime(Runtime::kSetProperty, 3); 3380 frame_->CallRuntime(Runtime::kSetProperty, 3);
3387 break; 3381 break;
3388 } 3382 }
3389 case ObjectLiteral::Property::SETTER: { 3383 case ObjectLiteral::Property::SETTER: {
3390 __ ldr(r0, frame_->Top()); 3384 frame_->Dup();
3391 frame_->EmitPush(r0);
3392 Load(key); 3385 Load(key);
3393 __ mov(r0, Operand(Smi::FromInt(1))); 3386 frame_->EmitPush(Operand(Smi::FromInt(1)));
3394 frame_->EmitPush(r0);
3395 Load(value); 3387 Load(value);
3396 frame_->CallRuntime(Runtime::kDefineAccessor, 4); 3388 frame_->CallRuntime(Runtime::kDefineAccessor, 4);
3397 break; 3389 break;
3398 } 3390 }
3399 case ObjectLiteral::Property::GETTER: { 3391 case ObjectLiteral::Property::GETTER: {
3400 __ ldr(r0, frame_->Top()); 3392 frame_->Dup();
3401 frame_->EmitPush(r0);
3402 Load(key); 3393 Load(key);
3403 __ mov(r0, Operand(Smi::FromInt(0))); 3394 frame_->EmitPush(Operand(Smi::FromInt(0)));
3404 frame_->EmitPush(r0);
3405 Load(value); 3395 Load(value);
3406 frame_->CallRuntime(Runtime::kDefineAccessor, 4); 3396 frame_->CallRuntime(Runtime::kDefineAccessor, 4);
3407 break; 3397 break;
3408 } 3398 }
3409 } 3399 }
3410 } 3400 }
3411 ASSERT_EQ(original_height + 1, frame_->height()); 3401 ASSERT_EQ(original_height + 1, frame_->height());
3412 } 3402 }
3413 3403
3414 3404
3415 void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) { 3405 void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
3416 #ifdef DEBUG 3406 #ifdef DEBUG
3417 int original_height = frame_->height(); 3407 int original_height = frame_->height();
3418 #endif 3408 #endif
3419 VirtualFrame::SpilledScope spilled_scope(frame_);
3420 Comment cmnt(masm_, "[ ArrayLiteral"); 3409 Comment cmnt(masm_, "[ ArrayLiteral");
3421 3410
3411 Register tos = frame_->GetTOSRegister();
3422 // Load the function of this activation. 3412 // Load the function of this activation.
3423 __ ldr(r2, frame_->Function()); 3413 __ ldr(tos, frame_->Function());
3424 // Load the literals array of the function. 3414 // Load the literals array of the function.
3425 __ ldr(r2, FieldMemOperand(r2, JSFunction::kLiteralsOffset)); 3415 __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset));
3426 __ mov(r1, Operand(Smi::FromInt(node->literal_index()))); 3416 frame_->EmitPush(tos);
3427 __ mov(r0, Operand(node->constant_elements())); 3417 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
3428 frame_->EmitPushMultiple(3, r2.bit() | r1.bit() | r0.bit()); 3418 frame_->EmitPush(Operand(node->constant_elements()));
3429 int length = node->values()->length(); 3419 int length = node->values()->length();
3430 if (node->depth() > 1) { 3420 if (node->depth() > 1) {
3431 frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); 3421 frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
3432 } else if (length > FastCloneShallowArrayStub::kMaximumLength) { 3422 } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
3433 frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); 3423 frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
3434 } else { 3424 } else {
3435 FastCloneShallowArrayStub stub(length); 3425 FastCloneShallowArrayStub stub(length);
3436 frame_->CallStub(&stub, 3); 3426 frame_->CallStub(&stub, 3);
3437 } 3427 }
3438 frame_->EmitPush(r0); // save the result 3428 frame_->EmitPush(r0); // save the result
3439 // r0: created object literal 3429 // r0: created object literal
3440 3430
3441 // Generate code to set the elements in the array that are not 3431 // Generate code to set the elements in the array that are not
3442 // literals. 3432 // literals.
3443 for (int i = 0; i < node->values()->length(); i++) { 3433 for (int i = 0; i < node->values()->length(); i++) {
3444 Expression* value = node->values()->at(i); 3434 Expression* value = node->values()->at(i);
3445 3435
3446 // If value is a literal the property value is already set in the 3436 // If value is a literal the property value is already set in the
3447 // boilerplate object. 3437 // boilerplate object.
3448 if (value->AsLiteral() != NULL) continue; 3438 if (value->AsLiteral() != NULL) continue;
3449 // If value is a materialized literal the property value is already set 3439 // If value is a materialized literal the property value is already set
3450 // in the boilerplate object if it is simple. 3440 // in the boilerplate object if it is simple.
3451 if (CompileTimeValue::IsCompileTimeValue(value)) continue; 3441 if (CompileTimeValue::IsCompileTimeValue(value)) continue;
3452 3442
3453 // The property must be set by generated code. 3443 // The property must be set by generated code.
3454 Load(value); 3444 Load(value);
3455 frame_->EmitPop(r0); 3445 frame_->PopToR0();
3446 // Fetch the object literal.
3447 frame_->SpillAllButCopyTOSToR1();
3456 3448
3457 // Fetch the object literal.
3458 __ ldr(r1, frame_->Top());
3459 // Get the elements array. 3449 // Get the elements array.
3460 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset)); 3450 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
3461 3451
3462 // Write to the indexed properties array. 3452 // Write to the indexed properties array.
3463 int offset = i * kPointerSize + FixedArray::kHeaderSize; 3453 int offset = i * kPointerSize + FixedArray::kHeaderSize;
3464 __ str(r0, FieldMemOperand(r1, offset)); 3454 __ str(r0, FieldMemOperand(r1, offset));
3465 3455
3466 // Update the write barrier for the array address. 3456 // Update the write barrier for the array address.
3467 __ mov(r3, Operand(offset)); 3457 __ mov(r3, Operand(offset));
3468 __ RecordWrite(r1, r3, r2); 3458 __ RecordWrite(r1, r3, r2);
(...skipping 390 matching lines...) Expand 10 before | Expand all | Expand 10 after
3859 // ------------------------------------------------------------------------ 3849 // ------------------------------------------------------------------------
3860 // Fast-case: Use inline caching. 3850 // Fast-case: Use inline caching.
3861 // --- 3851 // ---
3862 // According to ECMA-262, section 11.2.3, page 44, the function to call 3852 // According to ECMA-262, section 11.2.3, page 44, the function to call
3863 // must be resolved after the arguments have been evaluated. The IC code 3853 // must be resolved after the arguments have been evaluated. The IC code
3864 // automatically handles this by loading the arguments before the function 3854 // automatically handles this by loading the arguments before the function
3865 // is resolved in cache misses (this also holds for megamorphic calls). 3855 // is resolved in cache misses (this also holds for megamorphic calls).
3866 // ------------------------------------------------------------------------ 3856 // ------------------------------------------------------------------------
3867 3857
3868 if (var != NULL && var->is_possibly_eval()) { 3858 if (var != NULL && var->is_possibly_eval()) {
3869 VirtualFrame::SpilledScope spilled_scope(frame_);
3870 // ---------------------------------- 3859 // ----------------------------------
3871 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed 3860 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
3872 // ---------------------------------- 3861 // ----------------------------------
3873 3862
3874 // In a call to eval, we first call %ResolvePossiblyDirectEval to 3863 // In a call to eval, we first call %ResolvePossiblyDirectEval to
3875 // resolve the function we need to call and the receiver of the 3864 // resolve the function we need to call and the receiver of the
3876 // call. Then we call the resolved function using the given 3865 // call. Then we call the resolved function using the given
3877 // arguments. 3866 // arguments.
3878 3867
3879 // Prepare stack for call to resolved function. 3868 // Prepare stack for call to resolved function.
3880 Load(function); 3869 Load(function);
3881 3870
3882 // Allocate a frame slot for the receiver. 3871 // Allocate a frame slot for the receiver.
3883 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 3872 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
3884 frame_->EmitPush(r2);
3885 3873
3886 // Load the arguments. 3874 // Load the arguments.
3887 int arg_count = args->length(); 3875 int arg_count = args->length();
3888 for (int i = 0; i < arg_count; i++) { 3876 for (int i = 0; i < arg_count; i++) {
3889 Load(args->at(i)); 3877 Load(args->at(i));
3890 } 3878 }
3891 3879
3880 VirtualFrame::SpilledScope spilled_scope(frame_);
3881
3892 // If we know that eval can only be shadowed by eval-introduced 3882 // If we know that eval can only be shadowed by eval-introduced
3893 // variables we attempt to load the global eval function directly 3883 // variables we attempt to load the global eval function directly
3894 // in generated code. If we succeed, there is no need to perform a 3884 // in generated code. If we succeed, there is no need to perform a
3895 // context lookup in the runtime system. 3885 // context lookup in the runtime system.
3896 JumpTarget done; 3886 JumpTarget done;
3897 if (var->slot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) { 3887 if (var->slot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
3898 ASSERT(var->slot()->type() == Slot::LOOKUP); 3888 ASSERT(var->slot()->type() == Slot::LOOKUP);
3899 JumpTarget slow; 3889 JumpTarget slow;
3900 // Prepare the stack for the call to 3890 // Prepare the stack for the call to
3901 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded 3891 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
(...skipping 6766 matching lines...) Expand 10 before | Expand all | Expand 10 after
10668 __ bind(&string_add_runtime); 10658 __ bind(&string_add_runtime);
10669 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); 10659 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
10670 } 10660 }
10671 10661
10672 10662
10673 #undef __ 10663 #undef __
10674 10664
10675 } } // namespace v8::internal 10665 } } // namespace v8::internal
10676 10666
10677 #endif // V8_TARGET_ARCH_ARM 10667 #endif // V8_TARGET_ARCH_ARM
OLDNEW
« no previous file with comments | « no previous file | src/arm/jump-target-arm.cc » ('j') | src/arm/virtual-frame-arm.h » ('J')

Powered by Google App Engine
This is Rietveld 408576698