OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 175 matching lines...) | |
186 } else { | 186 } else { |
187 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 187 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
188 } | 188 } |
189 function_in_register = false; | 189 function_in_register = false; |
190 // Context is returned in both r0 and cp. It replaces the context | 190 // Context is returned in both r0 and cp. It replaces the context |
191 // passed to us. It's saved in the stack and kept live in cp. | 191 // passed to us. It's saved in the stack and kept live in cp. |
192 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 192 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
193 // Copy any necessary parameters into the context. | 193 // Copy any necessary parameters into the context. |
194 int num_parameters = info->scope()->num_parameters(); | 194 int num_parameters = info->scope()->num_parameters(); |
195 for (int i = 0; i < num_parameters; i++) { | 195 for (int i = 0; i < num_parameters; i++) { |
196 Slot* slot = scope()->parameter(i)->AsSlot(); | 196 Variable* var = scope()->parameter(i); |
197 if (slot != NULL && slot->type() == Slot::CONTEXT) { | 197 if (var->IsContextSlot()) { |
198 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 198 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
199 (num_parameters - 1 - i) * kPointerSize; | 199 (num_parameters - 1 - i) * kPointerSize; |
200 // Load parameter from stack. | 200 // Load parameter from stack. |
201 __ ldr(r0, MemOperand(fp, parameter_offset)); | 201 __ ldr(r0, MemOperand(fp, parameter_offset)); |
202 // Store it in the context. | 202 // Store it in the context. |
203 __ mov(r1, Operand(Context::SlotOffset(slot->index()))); | 203 __ mov(r1, Operand(Context::SlotOffset(var->index()))); |
204 __ str(r0, MemOperand(cp, r1)); | 204 __ str(r0, MemOperand(cp, r1)); |
205 // Update the write barrier. This clobbers all involved | 205 // Update the write barrier. This clobbers all involved |
206 // registers, so we have to use two more registers to avoid | 206 // registers, so we have to use two more registers to avoid |
207 // clobbering cp. | 207 // clobbering cp. |
208 __ mov(r2, Operand(cp)); | 208 __ mov(r2, Operand(cp)); |
209 __ RecordWrite(r2, Operand(r1), r3, r0); | 209 __ RecordWrite(r2, Operand(r1), r3, r0); |
210 } | 210 } |
211 } | 211 } |
212 } | 212 } |
213 | 213 |
(...skipping 23 matching lines...) | |
237 if (is_strict_mode()) { | 237 if (is_strict_mode()) { |
238 type = ArgumentsAccessStub::NEW_STRICT; | 238 type = ArgumentsAccessStub::NEW_STRICT; |
239 } else if (function()->has_duplicate_parameters()) { | 239 } else if (function()->has_duplicate_parameters()) { |
240 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; | 240 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; |
241 } else { | 241 } else { |
242 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; | 242 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; |
243 } | 243 } |
244 ArgumentsAccessStub stub(type); | 244 ArgumentsAccessStub stub(type); |
245 __ CallStub(&stub); | 245 __ CallStub(&stub); |
246 | 246 |
247 Move(arguments->AsSlot(), r0, r1, r2); | 247 SetVar(arguments, r0, r1, r2); |
248 } | 248 } |
249 | 249 |
250 if (FLAG_trace) { | 250 if (FLAG_trace) { |
251 __ CallRuntime(Runtime::kTraceEnter, 0); | 251 __ CallRuntime(Runtime::kTraceEnter, 0); |
252 } | 252 } |
253 | 253 |
254 // Visit the declarations and body unless there is an illegal | 254 // Visit the declarations and body unless there is an illegal |
255 // redeclaration. | 255 // redeclaration. |
256 if (scope()->HasIllegalRedeclaration()) { | 256 if (scope()->HasIllegalRedeclaration()) { |
257 Comment cmnt(masm_, "[ Declarations"); | 257 Comment cmnt(masm_, "[ Declarations"); |
258 scope()->VisitIllegalRedeclaration(this); | 258 scope()->VisitIllegalRedeclaration(this); |
259 | 259 |
260 } else { | 260 } else { |
261 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); | 261 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); |
262 { Comment cmnt(masm_, "[ Declarations"); | 262 { Comment cmnt(masm_, "[ Declarations"); |
263 // For named function expressions, declare the function name as a | 263 // For named function expressions, declare the function name as a |
264 // constant. | 264 // constant. |
265 if (scope()->is_function_scope() && scope()->function() != NULL) { | 265 if (scope()->is_function_scope() && scope()->function() != NULL) { |
266 EmitDeclaration(scope()->function(), Variable::CONST, NULL); | 266 int ignored = 0; |
267 EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored); | |
267 } | 268 } |
268 VisitDeclarations(scope()->declarations()); | 269 VisitDeclarations(scope()->declarations()); |
269 } | 270 } |
270 | 271 |
271 { Comment cmnt(masm_, "[ Stack check"); | 272 { Comment cmnt(masm_, "[ Stack check"); |
272 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS); | 273 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS); |
273 Label ok; | 274 Label ok; |
274 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 275 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
275 __ cmp(sp, Operand(ip)); | 276 __ cmp(sp, Operand(ip)); |
276 __ b(hs, &ok); | 277 __ b(hs, &ok); |
(...skipping 84 matching lines...) | |
361 #ifdef DEBUG | 362 #ifdef DEBUG |
362 // Check that the size of the code used for returning is large enough | 363 // Check that the size of the code used for returning is large enough |
363 // for the debugger's requirements. | 364 // for the debugger's requirements. |
364 ASSERT(Assembler::kJSReturnSequenceInstructions <= | 365 ASSERT(Assembler::kJSReturnSequenceInstructions <= |
365 masm_->InstructionsGeneratedSince(&check_exit_codesize)); | 366 masm_->InstructionsGeneratedSince(&check_exit_codesize)); |
366 #endif | 367 #endif |
367 } | 368 } |
368 } | 369 } |
369 | 370 |
370 | 371 |
371 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { | 372 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { |
373 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | |
372 } | 374 } |
373 | 375 |
374 | 376 |
375 void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const { | 377 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { |
376 codegen()->Move(result_register(), slot); | 378 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
379 codegen()->GetVar(result_register(), var); | |
377 } | 380 } |
378 | 381 |
379 | 382 |
380 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const { | 383 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { |
381 codegen()->Move(result_register(), slot); | 384 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
385 codegen()->GetVar(result_register(), var); | |
382 __ push(result_register()); | 386 __ push(result_register()); |
383 } | 387 } |
384 | 388 |
385 | 389 |
386 void FullCodeGenerator::TestContext::Plug(Slot* slot) const { | 390 void FullCodeGenerator::TestContext::Plug(Variable* var) const { |
391 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | |
387 // For simplicity we always test the accumulator register. | 392 // For simplicity we always test the accumulator register. |
388 codegen()->Move(result_register(), slot); | 393 codegen()->GetVar(result_register(), var); |
389 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); | 394 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
390 codegen()->DoTest(this); | 395 codegen()->DoTest(this); |
391 } | 396 } |
392 | 397 |
393 | 398 |
394 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { | 399 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { |
395 } | 400 } |
396 | 401 |
397 | 402 |
398 void FullCodeGenerator::AccumulatorValueContext::Plug( | 403 void FullCodeGenerator::AccumulatorValueContext::Plug( |
(...skipping 211 matching lines...) | |
610 __ b(cond, if_true); | 615 __ b(cond, if_true); |
611 } else if (if_true == fall_through) { | 616 } else if (if_true == fall_through) { |
612 __ b(NegateCondition(cond), if_false); | 617 __ b(NegateCondition(cond), if_false); |
613 } else { | 618 } else { |
614 __ b(cond, if_true); | 619 __ b(cond, if_true); |
615 __ b(if_false); | 620 __ b(if_false); |
616 } | 621 } |
617 } | 622 } |
618 | 623 |
619 | 624 |
620 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) { | 625 MemOperand FullCodeGenerator::StackOperand(Variable* var) { |
621 switch (slot->type()) { | 626 ASSERT(var->IsStackAllocated()); |
622 case Slot::PARAMETER: | 627 // Offset is negative because higher indexes are at lower addresses. |
623 case Slot::LOCAL: | 628 int offset = -var->index() * kPointerSize; |
624 return MemOperand(fp, SlotOffset(slot)); | 629 // Adjust by a (parameter or local) base offset. |
625 case Slot::CONTEXT: { | 630 if (var->IsParameter()) { |
626 int context_chain_length = | 631 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; |
627 scope()->ContextChainLength(slot->var()->scope()); | 632 } else { |
628 __ LoadContext(scratch, context_chain_length); | 633 offset += JavaScriptFrameConstants::kLocal0Offset; |
629 return ContextOperand(scratch, slot->index()); | |
630 } | |
631 case Slot::LOOKUP: | |
632 UNREACHABLE(); | |
633 } | 634 } |
634 UNREACHABLE(); | 635 return MemOperand(fp, offset); |
635 return MemOperand(r0, 0); | |
636 } | 636 } |
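The new StackOperand helper (the PARAMETER/LOCAL cases of the old EmitSlotSearch folded into one function) is plain fp-relative arithmetic. As a quick illustration, here is a standalone sketch of that arithmetic; this is an editor's illustration, not part of the patch, and the frame constants are passed in as plain integers rather than read from the V8 headers:

```cpp
// Mirrors the offset computation in StackOperand above.
int StackSlotOffset(bool is_parameter, int index, int num_parameters,
                    int local0_offset, int pointer_size) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -index * pointer_size;
  if (is_parameter) {
    // Adjust by the parameter base offset (the +1 presumably covers the
    // receiver slot above the parameters).
    offset += (num_parameters + 1) * pointer_size;
  } else {
    // Locals are based at a fixed offset from fp (kLocal0Offset in V8).
    offset += local0_offset;
  }
  return offset;  // byte offset relative to the frame pointer
}
```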
637 | 637 |
638 | 638 |
639 void FullCodeGenerator::Move(Register destination, Slot* source) { | 639 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { |
640 // Use destination as scratch. | 640 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
641 MemOperand slot_operand = EmitSlotSearch(source, destination); | 641 if (var->IsContextSlot()) { |
642 __ ldr(destination, slot_operand); | 642 int context_chain_length = scope()->ContextChainLength(var->scope()); |
643 __ LoadContext(scratch, context_chain_length); | |
644 return ContextOperand(scratch, var->index()); | |
645 } else { | |
646 return StackOperand(var); | |
647 } | |
643 } | 648 } |
644 | 649 |
645 | 650 |
646 void FullCodeGenerator::Move(Slot* dst, | 651 void FullCodeGenerator::GetVar(Register dest, Variable* var) { |
647 Register src, | 652 // Use destination as scratch. |
648 Register scratch1, | 653 MemOperand location = VarOperand(var, dest); |
649 Register scratch2) { | 654 __ ldr(dest, location); |
650 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented. | 655 } |
651 ASSERT(!scratch1.is(src) && !scratch2.is(src)); | 656 |
652 MemOperand location = EmitSlotSearch(dst, scratch1); | 657 |
658 void FullCodeGenerator::SetVar(Variable* var, | |
659 Register src, | |
660 Register scratch0, | |
661 Register scratch1) { | |
662 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); | |
663 ASSERT(!scratch0.is(src)); | |
664 ASSERT(!scratch0.is(scratch1)); | |
665 ASSERT(!scratch1.is(src)); | |
666 MemOperand location = VarOperand(var, scratch0); | |
653 __ str(src, location); | 667 __ str(src, location); |
654 // Emit the write barrier code if the location is in the heap. | 668 // Emit the write barrier code if the location is in the heap. |
655 if (dst->type() == Slot::CONTEXT) { | 669 if (var->IsContextSlot()) { |
656 __ RecordWrite(scratch1, | 670 __ RecordWrite(scratch0, |
657 Operand(Context::SlotOffset(dst->index())), | 671 Operand(Context::SlotOffset(var->index())), |
658 scratch2, | 672 scratch1, |
659 src); | 673 src); |
660 } | 674 } |
661 } | 675 } |
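Throughout the right column, the old switch on Slot::Type is replaced by predicate checks on Variable. As a rough guide to how those predicates line up with the old cases (a simplified sketch assuming the Variable::Location enum the new switches dispatch on, not the actual V8 declarations):

```cpp
// Simplified view of how the old Slot::Type cases map onto the new
// Variable::Location predicates used throughout this file.
enum Location { UNALLOCATED, PARAMETER, LOCAL, CONTEXT, LOOKUP };

struct Var {
  Location location;
  bool IsUnallocated() const { return location == UNALLOCATED; }  // globals
  bool IsParameter() const { return location == PARAMETER; }
  bool IsStackLocal() const { return location == LOCAL; }
  bool IsStackAllocated() const { return IsParameter() || IsStackLocal(); }
  bool IsContextSlot() const { return location == CONTEXT; }  // old Slot::CONTEXT
  bool IsLookupSlot() const { return location == LOOKUP; }    // old Slot::LOOKUP
};
```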
662 | 676 |
663 | 677 |
664 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, | 678 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, |
665 bool should_normalize, | 679 bool should_normalize, |
666 Label* if_true, | 680 Label* if_true, |
667 Label* if_false) { | 681 Label* if_false) { |
668 // Only prepare for bailouts before splits if we're in a test | 682 // Only prepare for bailouts before splits if we're in a test |
(...skipping 14 matching lines...) | |
683 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 697 __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
684 __ cmp(r0, ip); | 698 __ cmp(r0, ip); |
685 Split(eq, if_true, if_false, NULL); | 699 Split(eq, if_true, if_false, NULL); |
686 __ bind(&skip); | 700 __ bind(&skip); |
687 } | 701 } |
688 } | 702 } |
689 | 703 |
690 | 704 |
691 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, | 705 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, |
692 Variable::Mode mode, | 706 Variable::Mode mode, |
693 FunctionLiteral* function) { | 707 FunctionLiteral* function, |
694 Comment cmnt(masm_, "[ Declaration"); | 708 int* global_count) { |
709 // If it was not possible to allocate the variable at compile time, we | |
710 // need to "declare" it at runtime to make sure it actually exists in the | |
711 // local context. | |
695 Variable* variable = proxy->var(); | 712 Variable* variable = proxy->var(); |
696 ASSERT(variable != NULL); // Must have been resolved. | 713 switch (variable->location()) { |
697 Slot* slot = variable->AsSlot(); | 714 case Variable::UNALLOCATED: |
698 ASSERT(slot != NULL); | 715 ++(*global_count); |
699 switch (slot->type()) { | 716 break; |
700 case Slot::PARAMETER: | 717 |
701 case Slot::LOCAL: | 718 case Variable::PARAMETER: |
719 case Variable::LOCAL: | |
702 if (function != NULL) { | 720 if (function != NULL) { |
721 Comment cmnt(masm_, "[ Declaration"); | |
703 VisitForAccumulatorValue(function); | 722 VisitForAccumulatorValue(function); |
704 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); | 723 __ str(result_register(), StackOperand(variable)); |
705 } else if (mode == Variable::CONST || mode == Variable::LET) { | 724 } else if (mode == Variable::CONST || mode == Variable::LET) { |
725 Comment cmnt(masm_, "[ Declaration"); | |
706 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 726 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
707 __ str(ip, MemOperand(fp, SlotOffset(slot))); | 727 __ str(ip, StackOperand(variable)); |
708 } | 728 } |
709 break; | 729 break; |
710 | 730 |
711 case Slot::CONTEXT: | 731 case Variable::CONTEXT: |
712 // We bypass the general EmitSlotSearch because we know more about | |
713 // this specific context. | |
714 | |
715 // The variable in the decl always resides in the current function | 732 // The variable in the decl always resides in the current function |
716 // context. | 733 // context. |
717 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); | 734 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); |
718 if (FLAG_debug_code) { | 735 if (FLAG_debug_code) { |
719 // Check that we're not inside a with or catch context. | 736 // Check that we're not inside a with or catch context. |
720 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); | 737 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); |
721 __ CompareRoot(r1, Heap::kWithContextMapRootIndex); | 738 __ CompareRoot(r1, Heap::kWithContextMapRootIndex); |
722 __ Check(ne, "Declaration in with context."); | 739 __ Check(ne, "Declaration in with context."); |
723 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); | 740 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); |
724 __ Check(ne, "Declaration in catch context."); | 741 __ Check(ne, "Declaration in catch context."); |
725 } | 742 } |
726 if (function != NULL) { | 743 if (function != NULL) { |
744 Comment cmnt(masm_, "[ Declaration"); | |
727 VisitForAccumulatorValue(function); | 745 VisitForAccumulatorValue(function); |
728 __ str(result_register(), ContextOperand(cp, slot->index())); | 746 __ str(result_register(), ContextOperand(cp, variable->index())); |
729 int offset = Context::SlotOffset(slot->index()); | 747 int offset = Context::SlotOffset(variable->index()); |
730 // We know that we have written a function, which is not a smi. | 748 // We know that we have written a function, which is not a smi. |
731 __ mov(r1, Operand(cp)); | 749 __ mov(r1, Operand(cp)); |
732 __ RecordWrite(r1, Operand(offset), r2, result_register()); | 750 __ RecordWrite(r1, Operand(offset), r2, result_register()); |
733 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 751 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
734 } else if (mode == Variable::CONST || mode == Variable::LET) { | 752 } else if (mode == Variable::CONST || mode == Variable::LET) { |
753 Comment cmnt(masm_, "[ Declaration"); | |
735 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 754 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
736 __ str(ip, ContextOperand(cp, slot->index())); | 755 __ str(ip, ContextOperand(cp, variable->index())); |
737 // No write barrier since the_hole_value is in old space. | 756 // No write barrier since the_hole_value is in old space. |
738 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 757 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
739 } | 758 } |
740 break; | 759 break; |
741 | 760 |
742 case Slot::LOOKUP: { | 761 case Variable::LOOKUP: { |
762 Comment cmnt(masm_, "[ Declaration"); | |
743 __ mov(r2, Operand(variable->name())); | 763 __ mov(r2, Operand(variable->name())); |
744 // Declaration nodes are always introduced in one of two modes. | 764 // Declaration nodes are always introduced in one of three modes. |
745 ASSERT(mode == Variable::VAR || | 765 ASSERT(mode == Variable::VAR || |
746 mode == Variable::CONST || | 766 mode == Variable::CONST || |
747 mode == Variable::LET); | 767 mode == Variable::LET); |
748 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE; | 768 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE; |
749 __ mov(r1, Operand(Smi::FromInt(attr))); | 769 __ mov(r1, Operand(Smi::FromInt(attr))); |
750 // Push initial value, if any. | 770 // Push initial value, if any. |
751 // Note: For variables we must not push an initial value (such as | 771 // Note: For variables we must not push an initial value (such as |
752 // 'undefined') because we may have a (legal) redeclaration and we | 772 // 'undefined') because we may have a (legal) redeclaration and we |
753 // must not destroy the current value. | 773 // must not destroy the current value. |
754 if (function != NULL) { | 774 if (function != NULL) { |
755 __ Push(cp, r2, r1); | 775 __ Push(cp, r2, r1); |
756 // Push initial value for function declaration. | 776 // Push initial value for function declaration. |
757 VisitForStackValue(function); | 777 VisitForStackValue(function); |
758 } else if (mode == Variable::CONST || mode == Variable::LET) { | 778 } else if (mode == Variable::CONST || mode == Variable::LET) { |
759 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); | 779 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); |
760 __ Push(cp, r2, r1, r0); | 780 __ Push(cp, r2, r1, r0); |
761 } else { | 781 } else { |
762 __ mov(r0, Operand(Smi::FromInt(0))); // No initial value! | 782 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value. |
763 __ Push(cp, r2, r1, r0); | 783 __ Push(cp, r2, r1, r0); |
764 } | 784 } |
765 __ CallRuntime(Runtime::kDeclareContextSlot, 4); | 785 __ CallRuntime(Runtime::kDeclareContextSlot, 4); |
766 break; | 786 break; |
767 } | 787 } |
768 } | 788 } |
769 } | 789 } |
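Note that EmitDeclaration no longer emits anything for UNALLOCATED (global) declarations; it only increments *global_count, and VisitDeclaration below becomes empty. Presumably the platform-independent driver now walks the declarations, accumulates the count, and declares all globals with a single runtime call. A minimal sketch of that calling pattern, under those assumptions (hypothetical helper, editor's illustration, not code from this patch):

```cpp
// Hypothetical driver: emit code for non-global declarations while counting
// the global ones, then declare all globals in one batch at the end.
void EmitAllDeclarations(FullCodeGenerator* cgen,
                         ZoneList<Declaration*>* decls) {
  int global_count = 0;
  for (int i = 0; i < decls->length(); ++i) {
    Declaration* decl = decls->at(i);
    cgen->EmitDeclaration(decl->proxy(), decl->mode(), decl->fun(),
                          &global_count);
  }
  if (global_count > 0) {
    // Build the (name, function or flags) pairs for the globals and call
    // DeclareGlobals(pairs), which pushes them and invokes
    // Runtime::kDeclareGlobals -- see DeclareGlobals further down the diff.
  }
}
```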
770 | 790 |
771 | 791 |
772 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { | 792 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { } |
773 EmitDeclaration(decl->proxy(), decl->mode(), decl->fun()); | |
774 } | |
775 | 793 |
776 | 794 |
777 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 795 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
778 // Call the runtime to declare the globals. | 796 // Call the runtime to declare the globals. |
779 // The context is the first argument. | 797 // The context is the first argument. |
780 __ mov(r1, Operand(pairs)); | 798 __ mov(r1, Operand(pairs)); |
781 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); | 799 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); |
782 __ Push(cp, r1, r0); | 800 __ Push(cp, r1, r0); |
783 __ CallRuntime(Runtime::kDeclareGlobals, 3); | 801 __ CallRuntime(Runtime::kDeclareGlobals, 3); |
784 // Return value is ignored. | 802 // Return value is ignored. |
(...skipping 296 matching lines...) | |
1081 context()->Plug(r0); | 1099 context()->Plug(r0); |
1082 } | 1100 } |
1083 | 1101 |
1084 | 1102 |
1085 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { | 1103 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { |
1086 Comment cmnt(masm_, "[ VariableProxy"); | 1104 Comment cmnt(masm_, "[ VariableProxy"); |
1087 EmitVariableLoad(expr); | 1105 EmitVariableLoad(expr); |
1088 } | 1106 } |
1089 | 1107 |
1090 | 1108 |
1091 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( | 1109 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, |
1092 Slot* slot, | 1110 TypeofState typeof_state, |
1093 TypeofState typeof_state, | 1111 Label* slow) { |
1094 Label* slow) { | |
1095 Register current = cp; | 1112 Register current = cp; |
1096 Register next = r1; | 1113 Register next = r1; |
1097 Register temp = r2; | 1114 Register temp = r2; |
1098 | 1115 |
1099 Scope* s = scope(); | 1116 Scope* s = scope(); |
1100 while (s != NULL) { | 1117 while (s != NULL) { |
1101 if (s->num_heap_slots() > 0) { | 1118 if (s->num_heap_slots() > 0) { |
1102 if (s->calls_eval()) { | 1119 if (s->calls_eval()) { |
1103 // Check that extension is NULL. | 1120 // Check that extension is NULL. |
1104 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); | 1121 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); |
(...skipping 26 matching lines...) | |
1131 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); | 1148 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); |
1132 __ tst(temp, temp); | 1149 __ tst(temp, temp); |
1133 __ b(ne, slow); | 1150 __ b(ne, slow); |
1134 // Load next context in chain. | 1151 // Load next context in chain. |
1135 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); | 1152 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); |
1136 __ b(&loop); | 1153 __ b(&loop); |
1137 __ bind(&fast); | 1154 __ bind(&fast); |
1138 } | 1155 } |
1139 | 1156 |
1140 __ ldr(r0, GlobalObjectOperand()); | 1157 __ ldr(r0, GlobalObjectOperand()); |
1141 __ mov(r2, Operand(slot->var()->name())); | 1158 __ mov(r2, Operand(var->name())); |
1142 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) | 1159 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) |
1143 ? RelocInfo::CODE_TARGET | 1160 ? RelocInfo::CODE_TARGET |
1144 : RelocInfo::CODE_TARGET_CONTEXT; | 1161 : RelocInfo::CODE_TARGET_CONTEXT; |
1145 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 1162 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
1146 __ Call(ic, mode); | 1163 __ Call(ic, mode); |
1147 } | 1164 } |
1148 | 1165 |
1149 | 1166 |
1150 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( | 1167 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, |
1151 Slot* slot, | 1168 Label* slow) { |
1152 Label* slow) { | 1169 ASSERT(var->IsContextSlot()); |
1153 ASSERT(slot->type() == Slot::CONTEXT); | |
1154 Register context = cp; | 1170 Register context = cp; |
1155 Register next = r3; | 1171 Register next = r3; |
1156 Register temp = r4; | 1172 Register temp = r4; |
1157 | 1173 |
1158 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { | 1174 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { |
1159 if (s->num_heap_slots() > 0) { | 1175 if (s->num_heap_slots() > 0) { |
1160 if (s->calls_eval()) { | 1176 if (s->calls_eval()) { |
1161 // Check that extension is NULL. | 1177 // Check that extension is NULL. |
1162 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); | 1178 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
1163 __ tst(temp, temp); | 1179 __ tst(temp, temp); |
1164 __ b(ne, slow); | 1180 __ b(ne, slow); |
1165 } | 1181 } |
1166 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); | 1182 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); |
1167 // Walk the rest of the chain without clobbering cp. | 1183 // Walk the rest of the chain without clobbering cp. |
1168 context = next; | 1184 context = next; |
1169 } | 1185 } |
1170 } | 1186 } |
1171 // Check that last extension is NULL. | 1187 // Check that last extension is NULL. |
1172 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); | 1188 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
1173 __ tst(temp, temp); | 1189 __ tst(temp, temp); |
1174 __ b(ne, slow); | 1190 __ b(ne, slow); |
1175 | 1191 |
1176 // This function is used only for loads, not stores, so it's safe to | 1192 // This function is used only for loads, not stores, so it's safe to |
1177 // return an cp-based operand (the write barrier cannot be allowed to | 1193 // return an cp-based operand (the write barrier cannot be allowed to |
1178 // destroy the cp register). | 1194 // destroy the cp register). |
1179 return ContextOperand(context, slot->index()); | 1195 return ContextOperand(context, var->index()); |
1180 } | 1196 } |
1181 | 1197 |
1182 | 1198 |
1183 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( | 1199 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, |
1184 Slot* slot, | 1200 TypeofState typeof_state, |
1185 TypeofState typeof_state, | 1201 Label* slow, |
1186 Label* slow, | 1202 Label* done) { |
1187 Label* done) { | |
1188 // Generate fast-case code for variables that might be shadowed by | 1203 // Generate fast-case code for variables that might be shadowed by |
1189 // eval-introduced variables. Eval is used a lot without | 1204 // eval-introduced variables. Eval is used a lot without |
1190 // introducing variables. In those cases, we do not want to | 1205 // introducing variables. In those cases, we do not want to |
1191 // perform a runtime call for all variables in the scope | 1206 // perform a runtime call for all variables in the scope |
1192 // containing the eval. | 1207 // containing the eval. |
1193 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { | 1208 if (var->mode() == Variable::DYNAMIC_GLOBAL) { |
1194 EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow); | 1209 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); |
1195 __ jmp(done); | 1210 __ jmp(done); |
1196 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { | 1211 } else if (var->mode() == Variable::DYNAMIC_LOCAL) { |
1197 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot(); | 1212 Variable* local = var->local_if_not_shadowed(); |
1198 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite(); | 1213 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); |
1199 if (potential_slot != NULL) { | 1214 if (local->mode() == Variable::CONST) { |
1200 // Generate fast case for locals that rewrite to slots. | 1215 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); |
1201 __ ldr(r0, ContextSlotOperandCheckExtensions(potential_slot, slow)); | 1216 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); |
1202 if (potential_slot->var()->mode() == Variable::CONST) { | |
1203 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | |
1204 __ cmp(r0, ip); | |
1205 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | |
1206 } | |
1207 __ jmp(done); | |
1208 } else if (rewrite != NULL) { | |
1209 // Generate fast case for calls of an argument function. | |
1210 Property* property = rewrite->AsProperty(); | |
1211 if (property != NULL) { | |
1212 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); | |
1213 Literal* key_literal = property->key()->AsLiteral(); | |
1214 if (obj_proxy != NULL && | |
1215 key_literal != NULL && | |
1216 obj_proxy->IsArguments() && | |
1217 key_literal->handle()->IsSmi()) { | |
1218 // Load arguments object if there are no eval-introduced | |
1219 // variables. Then load the argument from the arguments | |
1220 // object using keyed load. | |
1221 __ ldr(r1, | |
1222 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(), | |
1223 slow)); | |
1224 __ mov(r0, Operand(key_literal->handle())); | |
1225 Handle<Code> ic = | |
1226 isolate()->builtins()->KeyedLoadIC_Initialize(); | |
1227 __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property)); | |
1228 __ jmp(done); | |
1229 } | |
1230 } | |
1231 } | 1217 } |
1218 __ jmp(done); | |
1232 } | 1219 } |
1233 } | 1220 } |
1234 | 1221 |
1235 | 1222 |
1236 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { | 1223 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { |
1237 // Record position before possible IC call. | 1224 // Record position before possible IC call. |
1238 SetSourcePosition(proxy->position()); | 1225 SetSourcePosition(proxy->position()); |
1239 Variable* var = proxy->var(); | 1226 Variable* var = proxy->var(); |
1240 | 1227 |
1241 // Three cases: non-this global variables, lookup slots, and all other | 1228 // Three cases: global variables, lookup variables, and all other types of |
1242 // types of slots. | 1229 // variables. |
1243 Slot* slot = var->AsSlot(); | 1230 switch (var->location()) { |
1244 ASSERT((var->is_global() && !var->is_this()) == (slot == NULL)); | 1231 case Variable::UNALLOCATED: { |
1232 Comment cmnt(masm_, "Global variable"); | |
1233 // Use inline caching. Variable name is passed in r2 and the global | |
1234 // object (receiver) in r0. | |
1235 __ ldr(r0, GlobalObjectOperand()); | |
1236 __ mov(r2, Operand(var->name())); | |
1237 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | |
1238 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); | |
1239 context()->Plug(r0); | |
1240 break; | |
1241 } | |
1245 | 1242 |
1246 if (slot == NULL) { | 1243 case Variable::PARAMETER: |
1247 Comment cmnt(masm_, "Global variable"); | 1244 case Variable::LOCAL: |
1248 // Use inline caching. Variable name is passed in r2 and the global | 1245 case Variable::CONTEXT: { |
1249 // object (receiver) in r0. | 1246 Comment cmnt(masm_, var->IsContextSlot() |
1250 __ ldr(r0, GlobalObjectOperand()); | 1247 ? "Context variable" |
1251 __ mov(r2, Operand(var->name())); | 1248 : "Stack variable"); |
1252 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 1249 if (var->mode() != Variable::LET && var->mode() != Variable::CONST) { |
1253 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); | 1250 context()->Plug(var); |
1254 context()->Plug(r0); | 1251 } else { |
1252 // Let and const need a read barrier. | |
1253 GetVar(r0, var); | |
1254 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); | |
1255 if (var->mode() == Variable::LET) { | |
1256 Label done; | |
1257 __ b(ne, &done); | |
1258 __ mov(r0, Operand(var->name())); | |
1259 __ push(r0); | |
1260 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
1261 __ bind(&done); | |
1262 } else { | |
1263 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | |
1264 } | |
1265 context()->Plug(r0); | |
1266 } | |
1267 break; | |
1268 } | |
1255 | 1269 |
1256 } else if (slot->type() == Slot::LOOKUP) { | 1270 case Variable::LOOKUP: { |
1257 Label done, slow; | 1271 Label done, slow; |
1258 | 1272 // Generate code for loading from variables potentially shadowed |
1259 // Generate code for loading from variables potentially shadowed | 1273 // by eval-introduced variables. |
1260 // by eval-introduced variables. | 1274 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done); |
1261 EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done); | 1275 __ bind(&slow); |
1262 | 1276 Comment cmnt(masm_, "Lookup variable"); |
1263 __ bind(&slow); | 1277 __ mov(r1, Operand(var->name())); |
1264 Comment cmnt(masm_, "Lookup slot"); | 1278 __ Push(cp, r1); // Context and name. |
1265 __ mov(r1, Operand(var->name())); | 1279 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
1266 __ Push(cp, r1); // Context and name. | |
1267 __ CallRuntime(Runtime::kLoadContextSlot, 2); | |
1268 __ bind(&done); | |
1269 | |
1270 context()->Plug(r0); | |
1271 | |
1272 } else { | |
1273 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT) | |
1274 ? "Context slot" | |
1275 : "Stack slot"); | |
1276 if (var->mode() == Variable::CONST) { | |
1277 // Constants may be the hole value if they have not been initialized. | |
1278 // Unhole them. | |
1279 MemOperand slot_operand = EmitSlotSearch(slot, r0); | |
1280 __ ldr(r0, slot_operand); | |
1281 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | |
1282 __ cmp(r0, ip); | |
1283 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | |
1284 context()->Plug(r0); | |
1285 } else if (var->mode() == Variable::LET) { | |
1286 // Let bindings may be the hole value if they have not been initialized. | |
1287 // Throw a type error in this case. | |
1288 Label done; | |
1289 MemOperand slot_operand = EmitSlotSearch(slot, r0); | |
1290 __ ldr(r0, slot_operand); | |
1291 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | |
1292 __ cmp(r0, ip); | |
1293 __ b(ne, &done); | |
1294 __ mov(r0, Operand(var->name())); | |
1295 __ push(r0); | |
1296 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
1297 __ bind(&done); | 1280 __ bind(&done); |
1298 context()->Plug(r0); | 1281 context()->Plug(r0); |
1299 } else { | |
1300 context()->Plug(slot); | |
1301 } | 1282 } |
1302 } | 1283 } |
1303 } | 1284 } |
1304 | 1285 |
1305 | 1286 |
1306 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { | 1287 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { |
1307 Comment cmnt(masm_, "[ RegExpLiteral"); | 1288 Comment cmnt(masm_, "[ RegExpLiteral"); |
1308 Label materialized; | 1289 Label materialized; |
1309 // Registers will be used as follows: | 1290 // Registers will be used as follows: |
1310 // r5 = materialized value (RegExp literal) | 1291 // r5 = materialized value (RegExp literal) |
(...skipping 513 matching lines...) | |
1824 break; | 1805 break; |
1825 } | 1806 } |
1826 } | 1807 } |
1827 PrepareForBailoutForId(bailout_ast_id, TOS_REG); | 1808 PrepareForBailoutForId(bailout_ast_id, TOS_REG); |
1828 context()->Plug(r0); | 1809 context()->Plug(r0); |
1829 } | 1810 } |
1830 | 1811 |
1831 | 1812 |
1832 void FullCodeGenerator::EmitVariableAssignment(Variable* var, | 1813 void FullCodeGenerator::EmitVariableAssignment(Variable* var, |
1833 Token::Value op) { | 1814 Token::Value op) { |
1834 ASSERT(var != NULL); | 1815 if (var->IsUnallocated()) { |
1835 ASSERT(var->is_global() || var->AsSlot() != NULL); | 1816 // Global var, const, or let. |
fschneider 2011/09/06 09:34:22: Comment should be a sentence and outside the if-st
Kevin Millikin (Chromium) 2011/09/06 10:44:06: I thought it was pretty clear that there was an im
1836 | |
1837 if (var->is_global()) { | |
1838 ASSERT(!var->is_this()); | |
1839 // Assignment to a global variable. Use inline caching for the | |
1840 // assignment. Right-hand-side value is passed in r0, variable name in | |
1841 // r2, and the global object in r1. | |
1842 __ mov(r2, Operand(var->name())); | 1817 __ mov(r2, Operand(var->name())); |
1843 __ ldr(r1, GlobalObjectOperand()); | 1818 __ ldr(r1, GlobalObjectOperand()); |
1844 Handle<Code> ic = is_strict_mode() | 1819 Handle<Code> ic = is_strict_mode() |
1845 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 1820 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
1846 : isolate()->builtins()->StoreIC_Initialize(); | 1821 : isolate()->builtins()->StoreIC_Initialize(); |
1847 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); | 1822 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); |
1848 | 1823 |
1849 } else if (op == Token::INIT_CONST) { | 1824 } else if (op == Token::INIT_CONST) { |
1850 // Like var declarations, const declarations are hoisted to function | 1825 // Const initializers need a write barrier. |
1851 // scope. However, unlike var initializers, const initializers are able | 1826 ASSERT(!var->IsParameter()); // No const parameters. |
1852 // to drill a hole to that function context, even from inside a 'with' | 1827 if (var->IsStackLocal()) { |
1853 // context. We thus bypass the normal static scope lookup. | 1828 Label skip; |
1854 Slot* slot = var->AsSlot(); | 1829 __ ldr(r1, StackOperand(var)); |
1855 Label skip; | 1830 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex); |
1856 switch (slot->type()) { | 1831 __ b(ne, &skip); |
1857 case Slot::PARAMETER: | 1832 __ str(result_register(), StackOperand(var)); |
1858 // No const parameters. | 1833 __ bind(&skip); |
1859 UNREACHABLE(); | 1834 } else { |
1860 break; | 1835 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); |
1861 case Slot::LOCAL: | 1836 // Like var declarations, const declarations are hoisted to function |
1862 // Detect const reinitialization by checking for the hole value. | 1837 // scope. However, unlike var initializers, const initializers are |
1863 __ ldr(r1, MemOperand(fp, SlotOffset(slot))); | 1838 // able to drill a hole to that function context, even from inside a |
1864 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 1839 // 'with' context. We thus bypass the normal static scope lookup for |
1865 __ cmp(r1, ip); | 1840 // var->IsContextSlot(). |
1866 __ b(ne, &skip); | 1841 __ push(r0); |
1867 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); | 1842 __ mov(r0, Operand(var->name())); |
1868 break; | 1843 __ Push(cp, r0); // Context and name. |
1869 case Slot::CONTEXT: | 1844 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
1870 case Slot::LOOKUP: | |
1871 __ push(r0); | |
1872 __ mov(r0, Operand(slot->var()->name())); | |
1873 __ Push(cp, r0); // Context and name. | |
1874 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); | |
1875 break; | |
1876 } | 1845 } |
1877 __ bind(&skip); | |
1878 | 1846 |
1879 } else if (var->mode() == Variable::LET && op != Token::INIT_LET) { | 1847 } else if (var->mode() == Variable::LET && op != Token::INIT_LET) { |
1880 // Perform the assignment for non-const variables. Const assignments | 1848 // Non-initializing assignment to let variable needs a write barrier. |
1881 // are simply skipped. | 1849 if (var->IsLookupSlot()) { |
1882 Slot* slot = var->AsSlot(); | 1850 __ push(r0); // Value. |
1883 switch (slot->type()) { | 1851 __ mov(r1, Operand(var->name())); |
1884 case Slot::PARAMETER: | 1852 __ mov(r0, Operand(Smi::FromInt(strict_mode_flag()))); |
1885 case Slot::LOCAL: { | 1853 __ Push(cp, r1, r0); // Context, name, strict mode. |
1886 Label assign; | 1854 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
1887 // Check for an initialized let binding. | 1855 } else { |
1888 __ ldr(r1, MemOperand(fp, SlotOffset(slot))); | 1856 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
1889 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 1857 Label assign; |
1890 __ cmp(r1, ip); | 1858 MemOperand location = VarOperand(var, r1); |
1891 __ b(ne, &assign); | 1859 __ ldr(r3, location); |
1892 __ mov(r1, Operand(var->name())); | 1860 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); |
1893 __ push(r1); | 1861 __ b(ne, &assign); |
1894 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1862 __ mov(r3, Operand(var->name())); |
1895 // Perform the assignment. | 1863 __ push(r3); |
1896 __ bind(&assign); | 1864 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
1897 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); | 1865 // Perform the assignment. |
1898 break; | 1866 __ bind(&assign); |
1899 } | 1867 __ str(result_register(), location); |
1900 case Slot::CONTEXT: { | 1868 if (var->IsContextSlot()) { |
1901 // Let variables may be the hole value if they have not been | |
1902 // initialized. Throw a type error in this case. | |
1903 Label assign; | |
1904 MemOperand target = EmitSlotSearch(slot, r1); | |
1905 // Check for an initialized let binding. | |
1906 __ ldr(r3, target); | |
1907 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | |
1908 __ cmp(r3, ip); | |
1909 __ b(ne, &assign); | |
1910 __ mov(r3, Operand(var->name())); | |
1911 __ push(r3); | |
1912 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
1913 // Perform the assignment. | |
1914 __ bind(&assign); | |
1915 __ str(result_register(), target); | |
1916 // RecordWrite may destroy all its register arguments. | 1869 // RecordWrite may destroy all its register arguments. |
1917 __ mov(r3, result_register()); | 1870 __ mov(r3, result_register()); |
1918 int offset = Context::SlotOffset(slot->index()); | 1871 int offset = Context::SlotOffset(var->index()); |
1919 __ RecordWrite(r1, Operand(offset), r2, r3); | 1872 __ RecordWrite(r1, Operand(offset), r2, r3); |
1920 break; | |
1921 } | 1873 } |
1922 case Slot::LOOKUP: | |
1923 // Call the runtime for the assignment. | |
1924 __ push(r0); // Value. | |
1925 __ mov(r1, Operand(slot->var()->name())); | |
1926 __ mov(r0, Operand(Smi::FromInt(strict_mode_flag()))); | |
1927 __ Push(cp, r1, r0); // Context, name, strict mode. | |
1928 __ CallRuntime(Runtime::kStoreContextSlot, 4); | |
1929 break; | |
1930 } | 1874 } |
1931 | 1875 |
1932 } else if (var->mode() != Variable::CONST) { | 1876 } else if (var->mode() != Variable::CONST) { |
1933 // Perform the assignment for non-const variables. Const assignments | 1877 // Assignment to var or initializing assignment to let. |
1934 // are simply skipped. | 1878 if (var->IsStackAllocated()) { |
1935 Slot* slot = var->AsSlot(); | 1879 __ str(result_register(), StackOperand(var)); |
1936 switch (slot->type()) { | 1880 } else if (var->IsContextSlot()) { |
1937 case Slot::PARAMETER: | 1881 // Preserve the value in r0 against the write barrier. |
1938 case Slot::LOCAL: | 1882 __ mov(r3, result_register()); |
1939 // Perform the assignment. | 1883 SetVar(var, r3, r1, r2); |
1940 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); | 1884 } else { |
1941 break; | 1885 ASSERT(var->IsLookupSlot()); |
1942 | 1886 __ push(r0); // Value. |
1943 case Slot::CONTEXT: { | 1887 __ mov(r1, Operand(var->name())); |
1944 MemOperand target = EmitSlotSearch(slot, r1); | 1888 __ mov(r0, Operand(Smi::FromInt(strict_mode_flag()))); |
1945 // Perform the assignment and issue the write barrier. | 1889 __ Push(cp, r1, r0); // Context, name, strict mode. |
1946 __ str(result_register(), target); | 1890 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
1947 // RecordWrite may destroy all its register arguments. | |
1948 __ mov(r3, result_register()); | |
1949 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; | |
1950 __ RecordWrite(r1, Operand(offset), r2, r3); | |
1951 break; | |
1952 } | |
1953 | |
1954 case Slot::LOOKUP: | |
1955 // Call the runtime for the assignment. | |
1956 __ push(r0); // Value. | |
1957 __ mov(r1, Operand(slot->var()->name())); | |
1958 __ mov(r0, Operand(Smi::FromInt(strict_mode_flag()))); | |
1959 __ Push(cp, r1, r0); // Context, name, strict mode. | |
1960 __ CallRuntime(Runtime::kStoreContextSlot, 4); | |
1961 break; | |
1962 } | 1891 } |
1963 } | 1892 } |
1893 // Non-initializing assignments to consts are ignored. | |
1964 } | 1894 } |
1965 | 1895 |
1966 | 1896 |
1967 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | 1897 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
1968 // Assignment to a property, using a named store IC. | 1898 // Assignment to a property, using a named store IC. |
1969 Property* prop = expr->target()->AsProperty(); | 1899 Property* prop = expr->target()->AsProperty(); |
1970 ASSERT(prop != NULL); | 1900 ASSERT(prop != NULL); |
1971 ASSERT(prop->key()->AsLiteral() != NULL); | 1901 ASSERT(prop->key()->AsLiteral() != NULL); |
1972 | 1902 |
1973 // If the assignment starts a block of assignments to the same object, | 1903 // If the assignment starts a block of assignments to the same object, |
(...skipping 212 matching lines...) | |
2186 | 2116 |
2187 | 2117 |
2188 void FullCodeGenerator::VisitCall(Call* expr) { | 2118 void FullCodeGenerator::VisitCall(Call* expr) { |
2189 #ifdef DEBUG | 2119 #ifdef DEBUG |
2190 // We want to verify that RecordJSReturnSite gets called on all paths | 2120 // We want to verify that RecordJSReturnSite gets called on all paths |
2191 // through this function. Avoid early returns. | 2121 // through this function. Avoid early returns. |
2192 expr->return_is_recorded_ = false; | 2122 expr->return_is_recorded_ = false; |
2193 #endif | 2123 #endif |
2194 | 2124 |
2195 Comment cmnt(masm_, "[ Call"); | 2125 Comment cmnt(masm_, "[ Call"); |
2196 Expression* fun = expr->expression(); | 2126 Expression* callee = expr->expression(); |
2197 Variable* var = fun->AsVariableProxy()->AsVariable(); | 2127 VariableProxy* proxy = callee->AsVariableProxy(); |
2128 Property* property = callee->AsProperty(); | |
2198 | 2129 |
2199 if (var != NULL && var->is_possibly_eval()) { | 2130 if (proxy != NULL && proxy->var()->is_possibly_eval()) { |
2200 // In a call to eval, we first call %ResolvePossiblyDirectEval to | 2131 // In a call to eval, we first call %ResolvePossiblyDirectEval to |
2201 // resolve the function we need to call and the receiver of the | 2132 // resolve the function we need to call and the receiver of the |
2202 // call. Then we call the resolved function using the given | 2133 // call. Then we call the resolved function using the given |
2203 // arguments. | 2134 // arguments. |
2204 ZoneList<Expression*>* args = expr->arguments(); | 2135 ZoneList<Expression*>* args = expr->arguments(); |
2205 int arg_count = args->length(); | 2136 int arg_count = args->length(); |
2206 | 2137 |
2207 { PreservePositionScope pos_scope(masm()->positions_recorder()); | 2138 { PreservePositionScope pos_scope(masm()->positions_recorder()); |
2208 VisitForStackValue(fun); | 2139 VisitForStackValue(callee); |
2209 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 2140 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
2210 __ push(r2); // Reserved receiver slot. | 2141 __ push(r2); // Reserved receiver slot. |
2211 | 2142 |
2212 // Push the arguments. | 2143 // Push the arguments. |
2213 for (int i = 0; i < arg_count; i++) { | 2144 for (int i = 0; i < arg_count; i++) { |
2214 VisitForStackValue(args->at(i)); | 2145 VisitForStackValue(args->at(i)); |
2215 } | 2146 } |
2216 | 2147 |
2217 // If we know that eval can only be shadowed by eval-introduced | 2148 // If we know that eval can only be shadowed by eval-introduced |
2218 // variables we attempt to load the global eval function directly | 2149 // variables we attempt to load the global eval function directly |
2219 // in generated code. If we succeed, there is no need to perform a | 2150 // in generated code. If we succeed, there is no need to perform a |
2220 // context lookup in the runtime system. | 2151 // context lookup in the runtime system. |
2221 Label done; | 2152 Label done; |
2222 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) { | 2153 Variable* var = proxy->var(); |
2154 if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) { | |
2223 Label slow; | 2155 Label slow; |
2224 EmitLoadGlobalSlotCheckExtensions(var->AsSlot(), | 2156 EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow); |
2225 NOT_INSIDE_TYPEOF, | |
2226 &slow); | |
2227 // Push the function and resolve eval. | 2157 // Push the function and resolve eval. |
2228 __ push(r0); | 2158 __ push(r0); |
2229 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); | 2159 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); |
2230 __ jmp(&done); | 2160 __ jmp(&done); |
2231 __ bind(&slow); | 2161 __ bind(&slow); |
2232 } | 2162 } |
2233 | 2163 |
2234 // Push copy of the function (found below the arguments) and | 2164 // Push a copy of the function (found below the arguments) and |
2235 // resolve eval. | 2165 // resolve eval. |
2236 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 2166 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
2237 __ push(r1); | 2167 __ push(r1); |
2238 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); | 2168 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); |
2239 if (done.is_linked()) { | 2169 __ bind(&done); |
2240 __ bind(&done); | |
2241 } | |
2242 | 2170 |
2243 // The runtime call returns a pair of values in r0 (function) and | 2171 // The runtime call returns a pair of values in r0 (function) and |
2244 // r1 (receiver). Touch up the stack with the right values. | 2172 // r1 (receiver). Touch up the stack with the right values. |
2245 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 2173 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
2246 __ str(r1, MemOperand(sp, arg_count * kPointerSize)); | 2174 __ str(r1, MemOperand(sp, arg_count * kPointerSize)); |
2247 } | 2175 } |
2248 | 2176 |
2249 // Record source position for debugger. | 2177 // Record source position for debugger. |
2250 SetSourcePosition(expr->position()); | 2178 SetSourcePosition(expr->position()); |
2251 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; | 2179 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
2252 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT); | 2180 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT); |
2253 __ CallStub(&stub); | 2181 __ CallStub(&stub); |
2254 RecordJSReturnSite(expr); | 2182 RecordJSReturnSite(expr); |
2255 // Restore context register. | 2183 // Restore context register. |
2256 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2184 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2257 context()->DropAndPlug(1, r0); | 2185 context()->DropAndPlug(1, r0); |
2258 } else if (var != NULL && !var->is_this() && var->is_global()) { | 2186 } else if (proxy != NULL && proxy->var()->IsUnallocated()) { |
2259 // Push global object as receiver for the call IC. | 2187 // Push global object as receiver for the call IC. |
2260 __ ldr(r0, GlobalObjectOperand()); | 2188 __ ldr(r0, GlobalObjectOperand()); |
2261 __ push(r0); | 2189 __ push(r0); |
2262 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); | 2190 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT); |
2263 } else if (var != NULL && var->AsSlot() != NULL && | 2191 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { |
2264 var->AsSlot()->type() == Slot::LOOKUP) { | |
2265 // Call to a lookup slot (dynamically introduced variable). | 2192 // Call to a lookup slot (dynamically introduced variable). |
2266 Label slow, done; | 2193 Label slow, done; |
2267 | 2194 |
2268 { PreservePositionScope scope(masm()->positions_recorder()); | 2195 { PreservePositionScope scope(masm()->positions_recorder()); |
2269 // Generate code for loading from variables potentially shadowed | 2196 // Generate code for loading from variables potentially shadowed |
2270 // by eval-introduced variables. | 2197 // by eval-introduced variables. |
2271 EmitDynamicLoadFromSlotFastCase(var->AsSlot(), | 2198 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); |
2272 NOT_INSIDE_TYPEOF, | |
2273 &slow, | |
2274 &done); | |
2275 } | 2199 } |
2276 | 2200 |
2277 __ bind(&slow); | 2201 __ bind(&slow); |
2278 // Call the runtime to find the function to call (returned in r0) | 2202 // Call the runtime to find the function to call (returned in r0) |
2279 // and the object holding it (returned in edx). | 2203 // and the object holding it (returned in edx). |
2280 __ push(context_register()); | 2204 __ push(context_register()); |
2281 __ mov(r2, Operand(var->name())); | 2205 __ mov(r2, Operand(proxy->name())); |
2282 __ push(r2); | 2206 __ push(r2); |
2283 __ CallRuntime(Runtime::kLoadContextSlot, 2); | 2207 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
2284 __ Push(r0, r1); // Function, receiver. | 2208 __ Push(r0, r1); // Function, receiver. |
2285 | 2209 |
2286 // If fast case code has been generated, emit code to push the | 2210 // If fast case code has been generated, emit code to push the |
2287 // function and receiver and have the slow path jump around this | 2211 // function and receiver and have the slow path jump around this |
2288 // code. | 2212 // code. |
2289 if (done.is_linked()) { | 2213 if (done.is_linked()) { |
2290 Label call; | 2214 Label call; |
2291 __ b(&call); | 2215 __ b(&call); |
2292 __ bind(&done); | 2216 __ bind(&done); |
2293 // Push function. | 2217 // Push function. |
2294 __ push(r0); | 2218 __ push(r0); |
2295 // The receiver is implicitly the global receiver. Indicate this | 2219 // The receiver is implicitly the global receiver. Indicate this |
2296 // by passing the hole to the call function stub. | 2220 // by passing the hole to the call function stub. |
2297 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex); | 2221 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex); |
2298 __ push(r1); | 2222 __ push(r1); |
2299 __ bind(&call); | 2223 __ bind(&call); |
2300 } | 2224 } |
2301 | 2225 |
2302 // The receiver is either the global receiver or an object found | 2226 // The receiver is either the global receiver or an object found |
2303 // by LoadContextSlot. That object could be the hole if the | 2227 // by LoadContextSlot. That object could be the hole if the |
2304 // receiver is implicitly the global object. | 2228 // receiver is implicitly the global object. |
2305 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); | 2229 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); |
2306 } else if (fun->AsProperty() != NULL) { | 2230 } else if (property != NULL) { |
2307 // Call to an object property. | 2231 { PreservePositionScope scope(masm()->positions_recorder()); |
2308 Property* prop = fun->AsProperty(); | 2232 VisitForStackValue(property->obj()); |
2309 Literal* key = prop->key()->AsLiteral(); | 2233 } |
2310 if (key != NULL && key->handle()->IsSymbol()) { | 2234 if (property->key()->IsPropertyName()) { |
2311 // Call to a named property, use call IC. | 2235 EmitCallWithIC(expr, |
2312 { PreservePositionScope scope(masm()->positions_recorder()); | 2236 property->key()->AsLiteral()->handle(), |
2313 VisitForStackValue(prop->obj()); | 2237 RelocInfo::CODE_TARGET); |
2314 } | |
2315 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); | |
2316 } else { | 2238 } else { |
2317 // Call to a keyed property. | 2239 EmitKeyedCallWithIC(expr, property->key()); |
2318 { PreservePositionScope scope(masm()->positions_recorder()); | |
2319 VisitForStackValue(prop->obj()); | |
2320 } | |
2321 EmitKeyedCallWithIC(expr, prop->key()); | |
2322 } | 2240 } |
2323 } else { | 2241 } else { |
2242 // Call to an arbitrary expression not handled specially above. | |
2324 { PreservePositionScope scope(masm()->positions_recorder()); | 2243 { PreservePositionScope scope(masm()->positions_recorder()); |
2325 VisitForStackValue(fun); | 2244 VisitForStackValue(callee); |
2326 } | 2245 } |
2327 // Load global receiver object. | 2246 // Load global receiver object. |
2328 __ ldr(r1, GlobalObjectOperand()); | 2247 __ ldr(r1, GlobalObjectOperand()); |
2329 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); | 2248 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); |
2330 __ push(r1); | 2249 __ push(r1); |
2331 // Emit function call. | 2250 // Emit function call. |
2332 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); | 2251 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); |
2333 } | 2252 } |
2334 | 2253 |
2335 #ifdef DEBUG | 2254 #ifdef DEBUG |
(...skipping 1301 matching lines...)
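In the RECEIVER_MIGHT_BE_IMPLICIT path of the hunk above, the fast case pushes the hole value in place of a receiver, and the comments rely on the call function stub swapping that hole for the global receiver at call time. The standalone C++ sketch below models only that substitution rule; the Object type, the kTheHole/kGlobalReceiver sentinels, and ResolveReceiver are illustrative assumptions, not V8 declarations.

  // Illustrative only: "pass the hole to the call function stub" modelled as a
  // receiver-resolution step. None of these names come from V8.
  #include <cassert>

  struct Object { int id; };

  static Object kTheHole{-1};        // stand-in for the hole value
  static Object kGlobalReceiver{0};  // stand-in for the global receiver object

  // What RECEIVER_MIGHT_BE_IMPLICIT implies on the callee side: a hole receiver
  // means "substitute the global receiver before invoking the function".
  const Object* ResolveReceiver(const Object* pushed_receiver) {
    return pushed_receiver == &kTheHole ? &kGlobalReceiver : pushed_receiver;
  }

  int main() {
    Object explicit_receiver{42};
    assert(ResolveReceiver(&kTheHole) == &kGlobalReceiver);            // implicit case
    assert(ResolveReceiver(&explicit_receiver) == &explicit_receiver);
    return 0;
  }

The sentinel presumably lets the generated call site stay identical whether the receiver was found by LoadContextSlot or is implicitly the global object; only the stub needs to tell the two apart.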
3637 __ CallRuntime(expr->function(), arg_count); | 3556 __ CallRuntime(expr->function(), arg_count); |
3638 } | 3557 } |
3639 context()->Plug(r0); | 3558 context()->Plug(r0); |
3640 } | 3559 } |
3641 | 3560 |
3642 | 3561 |
3643 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 3562 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
3644 switch (expr->op()) { | 3563 switch (expr->op()) { |
3645 case Token::DELETE: { | 3564 case Token::DELETE: { |
3646 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); | 3565 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
3647 Property* prop = expr->expression()->AsProperty(); | 3566 Property* property = expr->expression()->AsProperty(); |
3648 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); | 3567 VariableProxy* proxy = expr->expression()->AsVariableProxy(); |
3649 | 3568 |
3650 if (prop != NULL) { | 3569 if (property != NULL) { |
3651 VisitForStackValue(prop->obj()); | 3570 VisitForStackValue(property->obj()); |
3652 VisitForStackValue(prop->key()); | 3571 VisitForStackValue(property->key()); |
3653 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag()))); | 3572 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag()))); |
3654 __ push(r1); | 3573 __ push(r1); |
3655 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 3574 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
3656 context()->Plug(r0); | 3575 context()->Plug(r0); |
3657 } else if (var != NULL) { | 3576 } else if (proxy != NULL) { |
3577 Variable* var = proxy->var(); | |
3658 // Delete of an unqualified identifier is disallowed in strict mode | 3578 // Delete of an unqualified identifier is disallowed in strict mode |
3659 // but "delete this" is. | 3579 // but "delete this" is allowed. |
3660 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); | 3580 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); |
3661 if (var->is_global()) { | 3581 if (var->IsUnallocated()) { |
3662 __ ldr(r2, GlobalObjectOperand()); | 3582 __ ldr(r2, GlobalObjectOperand()); |
3663 __ mov(r1, Operand(var->name())); | 3583 __ mov(r1, Operand(var->name())); |
3664 __ mov(r0, Operand(Smi::FromInt(kNonStrictMode))); | 3584 __ mov(r0, Operand(Smi::FromInt(kNonStrictMode))); |
3665 __ Push(r2, r1, r0); | 3585 __ Push(r2, r1, r0); |
3666 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 3586 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
3667 context()->Plug(r0); | 3587 context()->Plug(r0); |
3668 } else if (var->AsSlot() != NULL && | 3588 } else if (var->IsStackAllocated() || var->IsContextSlot()) { |
3669 var->AsSlot()->type() != Slot::LOOKUP) { | |
3670 // Result of deleting non-global, non-dynamic variables is false. | 3589 // Result of deleting non-global, non-dynamic variables is false, but |
3671 // The subexpression does not have side effects. | 3590 // "delete this" is true. The subexpression does not have side effects. |
3672 context()->Plug(false); | 3591 context()->Plug(var->is_this()); |
3673 } else { | 3592 } else { |
3674 // Non-global variable. Call the runtime to try to delete from the | 3593 // Non-global variable. Call the runtime to try to delete from the |
3675 // context where the variable was introduced. | 3594 // context where the variable was introduced. |
3676 __ push(context_register()); | 3595 __ push(context_register()); |
3677 __ mov(r2, Operand(var->name())); | 3596 __ mov(r2, Operand(var->name())); |
3678 __ push(r2); | 3597 __ push(r2); |
3679 __ CallRuntime(Runtime::kDeleteContextSlot, 2); | 3598 __ CallRuntime(Runtime::kDeleteContextSlot, 2); |
3680 context()->Plug(r0); | 3599 context()->Plug(r0); |
3681 } | 3600 } |
3682 } else { | 3601 } else { |
(...skipping 254 matching lines...)
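The rewritten DELETE case above now branches on the variable's resolved location instead of the old Slot type: unallocated (global) names go through the DELETE builtin, stack- and context-allocated names produce a constant result (true only for "delete this", matching context()->Plug(var->is_this())), and everything else falls back to Runtime::kDeleteContextSlot. A minimal compilable model of that decision tree follows; the Location enum, DeleteLowering names, and LowerDelete are invented for illustration and are not V8 API.

  // Sketch, not V8 source: the lowering chosen by the rewritten DELETE case,
  // keyed on an assumed variable-location classification. All names invented.
  #include <cassert>

  enum class Location { UNALLOCATED, STACK, CONTEXT, LOOKUP };

  enum class DeleteLowering {
    CALL_DELETE_BUILTIN,      // global: Builtins::DELETE on the global object
    CONSTANT_RESULT,          // stack/context slot: no side effects, fixed value
    CALL_DELETE_CONTEXT_SLOT  // dynamic: Runtime::kDeleteContextSlot
  };

  struct Outcome {
    DeleteLowering lowering;
    bool constant_value;      // only meaningful for CONSTANT_RESULT
  };

  Outcome LowerDelete(Location location, bool is_this) {
    if (location == Location::UNALLOCATED) {
      return {DeleteLowering::CALL_DELETE_BUILTIN, false};
    }
    if (location == Location::STACK || location == Location::CONTEXT) {
      // Mirrors context()->Plug(var->is_this()): only "delete this" is true.
      return {DeleteLowering::CONSTANT_RESULT, is_this};
    }
    return {DeleteLowering::CALL_DELETE_CONTEXT_SLOT, false};
  }

  int main() {
    assert(LowerDelete(Location::UNALLOCATED, false).lowering ==
           DeleteLowering::CALL_DELETE_BUILTIN);
    assert(LowerDelete(Location::STACK, false).constant_value == false);
    assert(LowerDelete(Location::STACK, true).constant_value == true);  // delete this
    assert(LowerDelete(Location::LOOKUP, false).lowering ==
           DeleteLowering::CALL_DELETE_CONTEXT_SLOT);
    return 0;
  }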
3937 break; | 3856 break; |
3938 } | 3857 } |
3939 } | 3858 } |
3940 } | 3859 } |
3941 | 3860 |
3942 | 3861 |
3943 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { | 3862 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { |
3944 ASSERT(!context()->IsEffect()); | 3863 ASSERT(!context()->IsEffect()); |
3945 ASSERT(!context()->IsTest()); | 3864 ASSERT(!context()->IsTest()); |
3946 VariableProxy* proxy = expr->AsVariableProxy(); | 3865 VariableProxy* proxy = expr->AsVariableProxy(); |
3947 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { | 3866 if (proxy != NULL && proxy->var()->IsUnallocated()) { |
3948 Comment cmnt(masm_, "Global variable"); | 3867 Comment cmnt(masm_, "Global variable"); |
3949 __ ldr(r0, GlobalObjectOperand()); | 3868 __ ldr(r0, GlobalObjectOperand()); |
3950 __ mov(r2, Operand(proxy->name())); | 3869 __ mov(r2, Operand(proxy->name())); |
3951 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 3870 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
3952 // Use a regular load, not a contextual load, to avoid a reference | 3871 // Use a regular load, not a contextual load, to avoid a reference |
3953 // error. | 3872 // error. |
3954 __ Call(ic); | 3873 __ Call(ic); |
3955 PrepareForBailout(expr, TOS_REG); | 3874 PrepareForBailout(expr, TOS_REG); |
3956 context()->Plug(r0); | 3875 context()->Plug(r0); |
3957 } else if (proxy != NULL && | 3876 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { |
3958 proxy->var()->AsSlot() != NULL && | |
3959 proxy->var()->AsSlot()->type() == Slot::LOOKUP) { | |
3960 Label done, slow; | 3877 Label done, slow; |
3961 | 3878 |
3962 // Generate code for loading from variables potentially shadowed | 3879 // Generate code for loading from variables potentially shadowed |
3963 // by eval-introduced variables. | 3880 // by eval-introduced variables. |
3964 Slot* slot = proxy->var()->AsSlot(); | 3881 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); |
3965 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done); | |
3966 | 3882 |
3967 __ bind(&slow); | 3883 __ bind(&slow); |
3968 __ mov(r0, Operand(proxy->name())); | 3884 __ mov(r0, Operand(proxy->name())); |
3969 __ Push(cp, r0); | 3885 __ Push(cp, r0); |
3970 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); | 3886 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); |
3971 PrepareForBailout(expr, TOS_REG); | 3887 PrepareForBailout(expr, TOS_REG); |
3972 __ bind(&done); | 3888 __ bind(&done); |
3973 | 3889 |
3974 context()->Plug(r0); | 3890 context()->Plug(r0); |
3975 } else { | 3891 } else { |
(...skipping 297 matching lines...)
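For typeof, the hunk above loads globals with a regular (non-contextual) LoadIC and routes lookup-slot names through Runtime::kLoadContextSlotNoReferenceError, so an unresolvable name yields undefined instead of throwing. The toy model below captures only that contract; the container, Value type, and function names are assumptions chosen to make the example runnable, not V8 API.

  // Toy model of "a load for typeof must not throw a reference error"; the
  // container, Value type, and function names are assumptions, not V8 API.
  #include <cassert>
  #include <map>
  #include <string>

  struct Value {
    bool defined;         // false plays the role of `undefined`
    std::string payload;
  };

  // Ordinary load: an unresolvable name would be a reference error.
  bool LoadChecked(const std::map<std::string, Value>& scope,
                   const std::string& name, Value* out) {
    auto it = scope.find(name);
    if (it == scope.end()) return false;  // caller would throw ReferenceError
    *out = it->second;
    return true;
  }

  // Load used under typeof: a missing name quietly yields undefined.
  Value LoadForTypeof(const std::map<std::string, Value>& scope,
                      const std::string& name) {
    Value v{false, ""};
    LoadChecked(scope, name, &v);
    return v;
  }

  int main() {
    std::map<std::string, Value> scope{{"x", {true, "42"}}};
    Value out{false, ""};
    assert(LoadChecked(scope, "x", &out));                // normal load succeeds
    assert(!LoadChecked(scope, "y", &out));               // plain load would throw
    assert(LoadForTypeof(scope, "y").defined == false);   // typeof sees undefined
    return 0;
  }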
4273 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value. | 4189 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value. |
4274 __ add(pc, r1, Operand(masm_->CodeObject())); | 4190 __ add(pc, r1, Operand(masm_->CodeObject())); |
4275 } | 4191 } |
4276 | 4192 |
4277 | 4193 |
4278 #undef __ | 4194 #undef __ |
4279 | 4195 |
4280 } } // namespace v8::internal | 4196 } } // namespace v8::internal |
4281 | 4197 |
4282 #endif // V8_TARGET_ARCH_ARM | 4198 #endif // V8_TARGET_ARCH_ARM |
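Taken together, the hunks in this file replace queries on Variable::AsSlot() and slot types with location predicates on Variable itself (IsUnallocated, IsStackAllocated, IsContextSlot, IsLookupSlot). The standalone mock below shows the shape of that interface as inferred from the diff; the VariableLocation enum and its members are assumptions and may not match V8's actual declarations.

  // Mock of the predicate-style Variable interface this patch migrates to.
  // The enum and its members are inferred from the predicate names in the
  // diff and are assumptions, not V8's real declarations.
  #include <cassert>

  enum class VariableLocation { UNALLOCATED, PARAMETER, LOCAL, CONTEXT, LOOKUP };

  class Variable {
   public:
    explicit Variable(VariableLocation location) : location_(location) {}
    // New form of the old var->is_global() test.
    bool IsUnallocated() const { return location_ == VariableLocation::UNALLOCATED; }
    // Covers parameters and locals (the non-context, non-lookup slots).
    bool IsStackAllocated() const {
      return location_ == VariableLocation::PARAMETER ||
             location_ == VariableLocation::LOCAL;
    }
    // Heap-allocated in a function context.
    bool IsContextSlot() const { return location_ == VariableLocation::CONTEXT; }
    // Dynamically resolved; the old code tested Slot::LOOKUP for this.
    bool IsLookupSlot() const { return location_ == VariableLocation::LOOKUP; }

   private:
    VariableLocation location_;
  };

  int main() {
    assert(Variable(VariableLocation::UNALLOCATED).IsUnallocated());
    assert(Variable(VariableLocation::LOCAL).IsStackAllocated());
    assert(Variable(VariableLocation::CONTEXT).IsContextSlot());
    assert(Variable(VariableLocation::LOOKUP).IsLookupSlot());
    return 0;
  }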