Chromium Code Reviews

Side by Side Diff: src/arm/full-codegen-arm.cc

Issue 7860035: Merge bleeding edge up to 9192 into the GC branch. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 3 months ago
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 30 matching lines...)
41 #include "arm/code-stubs-arm.h" 41 #include "arm/code-stubs-arm.h"
42 #include "arm/macro-assembler-arm.h" 42 #include "arm/macro-assembler-arm.h"
43 43
44 namespace v8 { 44 namespace v8 {
45 namespace internal { 45 namespace internal {
46 46
47 #define __ ACCESS_MASM(masm_) 47 #define __ ACCESS_MASM(masm_)
48 48
49 49
50 static unsigned GetPropertyId(Property* property) { 50 static unsigned GetPropertyId(Property* property) {
51 if (property->is_synthetic()) return AstNode::kNoNumber;
52 return property->id(); 51 return property->id();
53 } 52 }
54 53
55 54
56 // A patch site is a location in the code that can be patched. This 55 // A patch site is a location in the code that can be patched. This
57 // class has a number of methods to emit the code which is patchable and the 56 // class has a number of methods to emit the code which is patchable and the
58 // method EmitPatchInfo to record a marker back to the patchable code. This 57 // method EmitPatchInfo to record a marker back to the patchable code. This
59 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (the raw 12-bit 58 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (the raw 12-bit
60 // immediate value is used) is the delta from the pc to the first instruction of 59 // immediate value is used) is the delta from the pc to the first instruction of
61 // the patchable code. 60 // the patchable code.
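A quick arithmetic sketch of the marker encoding described above, with illustrative numbers (EncodeDelta is a hypothetical helper for exposition, not part of this patch; the 0xfff factor is taken from the comment):

    // delta = x * 0xfff + yyy, where x is the register number of rx and
    // yyy is the raw 12-bit immediate of the cmp instruction.
    static int EncodeDelta(int delta, int* reg_code, int* imm12) {
      *reg_code = delta / 0xfff;          // becomes rx in "cmp rx, #yyy"
      *imm12 = delta % 0xfff;             // becomes the raw immediate yyy
      return *reg_code * 0xfff + *imm12;  // round-trips back to delta
    }
    // Example: a delta of 0x1802 is emitted as "cmp r1, #0x803",
    // since 1 * 0xfff + 0x803 == 0x1802.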
(...skipping 126 matching lines...)
188 } else { 187 } else {
189 __ CallRuntime(Runtime::kNewFunctionContext, 1); 188 __ CallRuntime(Runtime::kNewFunctionContext, 1);
190 } 189 }
191 function_in_register = false; 190 function_in_register = false;
192 // Context is returned in both r0 and cp. It replaces the context 191 // Context is returned in both r0 and cp. It replaces the context
193 // passed to us. It's saved in the stack and kept live in cp. 192 // passed to us. It's saved in the stack and kept live in cp.
194 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 193 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
195 // Copy any necessary parameters into the context. 194 // Copy any necessary parameters into the context.
196 int num_parameters = info->scope()->num_parameters(); 195 int num_parameters = info->scope()->num_parameters();
197 for (int i = 0; i < num_parameters; i++) { 196 for (int i = 0; i < num_parameters; i++) {
198 Slot* slot = scope()->parameter(i)->AsSlot(); 197 Variable* var = scope()->parameter(i);
199 if (slot != NULL && slot->type() == Slot::CONTEXT) { 198 if (var->IsContextSlot()) {
200 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 199 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
201 (num_parameters - 1 - i) * kPointerSize; 200 (num_parameters - 1 - i) * kPointerSize;
202 // Load parameter from stack. 201 // Load parameter from stack.
203 __ ldr(r0, MemOperand(fp, parameter_offset)); 202 __ ldr(r0, MemOperand(fp, parameter_offset));
204 // Store it in the context. 203 // Store it in the context.
205 MemOperand target = ContextOperand(cp, slot->index()); 204 MemOperand target = ContextOperand(cp, var->index());
206 __ str(r0, target); 205 __ str(r0, target);
207 206
208 // Update the write barrier. 207 // Update the write barrier.
209 __ RecordWriteContextSlot( 208 __ RecordWriteContextSlot(
210 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs); 209 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
211 } 210 }
212 } 211 }
213 } 212 }
214 213
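The parameter_offset expression in the loop above can be read as follows, assuming kPointerSize == 4 on ARM (the helper is hypothetical and only restates the expression from the patch):

    static int ParameterOffset(int num_parameters, int i) {
      return StandardFrameConstants::kCallerSPOffset +
             (num_parameters - 1 - i) * kPointerSize;
    }
    // For num_parameters == 3, parameter 0 lies farthest above the
    // caller's stack pointer:
    //   i == 0: kCallerSPOffset + 8
    //   i == 1: kCallerSPOffset + 4
    //   i == 2: kCallerSPOffset + 0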
215 Variable* arguments = scope()->arguments(); 214 Variable* arguments = scope()->arguments();
(...skipping 22 matching lines...)
238 if (is_strict_mode()) { 237 if (is_strict_mode()) {
239 type = ArgumentsAccessStub::NEW_STRICT; 238 type = ArgumentsAccessStub::NEW_STRICT;
240 } else if (function()->has_duplicate_parameters()) { 239 } else if (function()->has_duplicate_parameters()) {
241 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; 240 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
242 } else { 241 } else {
243 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; 242 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
244 } 243 }
245 ArgumentsAccessStub stub(type); 244 ArgumentsAccessStub stub(type);
246 __ CallStub(&stub); 245 __ CallStub(&stub);
247 246
248 Move(arguments->AsSlot(), r0, r1, r2); 247 SetVar(arguments, r0, r1, r2);
249 } 248 }
250 249
251 if (FLAG_trace) { 250 if (FLAG_trace) {
252 __ CallRuntime(Runtime::kTraceEnter, 0); 251 __ CallRuntime(Runtime::kTraceEnter, 0);
253 } 252 }
254 253
255 // Visit the declarations and body unless there is an illegal 254 // Visit the declarations and body unless there is an illegal
256 // redeclaration. 255 // redeclaration.
257 if (scope()->HasIllegalRedeclaration()) { 256 if (scope()->HasIllegalRedeclaration()) {
258 Comment cmnt(masm_, "[ Declarations"); 257 Comment cmnt(masm_, "[ Declarations");
259 scope()->VisitIllegalRedeclaration(this); 258 scope()->VisitIllegalRedeclaration(this);
260 259
261 } else { 260 } else {
261 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
262 { Comment cmnt(masm_, "[ Declarations"); 262 { Comment cmnt(masm_, "[ Declarations");
263 // For named function expressions, declare the function name as a 263 // For named function expressions, declare the function name as a
264 // constant. 264 // constant.
265 if (scope()->is_function_scope() && scope()->function() != NULL) { 265 if (scope()->is_function_scope() && scope()->function() != NULL) {
266 EmitDeclaration(scope()->function(), Variable::CONST, NULL); 266 int ignored = 0;
267 EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored);
267 } 268 }
268 VisitDeclarations(scope()->declarations()); 269 VisitDeclarations(scope()->declarations());
269 } 270 }
270 271
271 { Comment cmnt(masm_, "[ Stack check"); 272 { Comment cmnt(masm_, "[ Stack check");
272 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); 273 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
273 Label ok; 274 Label ok;
274 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 275 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
275 __ cmp(sp, Operand(ip)); 276 __ cmp(sp, Operand(ip));
276 __ b(hs, &ok); 277 __ b(hs, &ok);
277 StackCheckStub stub; 278 StackCheckStub stub;
278 __ CallStub(&stub); 279 __ CallStub(&stub);
279 __ bind(&ok); 280 __ bind(&ok);
280 } 281 }
281 282
282 { Comment cmnt(masm_, "[ Body"); 283 { Comment cmnt(masm_, "[ Body");
(...skipping 78 matching lines...)
361 #ifdef DEBUG 362 #ifdef DEBUG
362 // Check that the size of the code used for returning is large enough 363 // Check that the size of the code used for returning is large enough
363 // for the debugger's requirements. 364 // for the debugger's requirements.
364 ASSERT(Assembler::kJSReturnSequenceInstructions <= 365 ASSERT(Assembler::kJSReturnSequenceInstructions <=
365 masm_->InstructionsGeneratedSince(&check_exit_codesize)); 366 masm_->InstructionsGeneratedSince(&check_exit_codesize));
366 #endif 367 #endif
367 } 368 }
368 } 369 }
369 370
370 371
371 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { 372 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
373 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
372 } 374 }
373 375
374 376
375 void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const { 377 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
376 codegen()->Move(result_register(), slot); 378 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
379 codegen()->GetVar(result_register(), var);
377 } 380 }
378 381
379 382
380 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const { 383 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
381 codegen()->Move(result_register(), slot); 384 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
385 codegen()->GetVar(result_register(), var);
382 __ push(result_register()); 386 __ push(result_register());
383 } 387 }
384 388
385 389
386 void FullCodeGenerator::TestContext::Plug(Slot* slot) const { 390 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
391 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
387 // For simplicity we always test the accumulator register. 392 // For simplicity we always test the accumulator register.
388 codegen()->Move(result_register(), slot); 393 codegen()->GetVar(result_register(), var);
389 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); 394 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
390 codegen()->DoTest(this); 395 codegen()->DoTest(this);
391 } 396 }
392 397
393 398
394 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 399 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
395 } 400 }
396 401
397 402
398 void FullCodeGenerator::AccumulatorValueContext::Plug( 403 void FullCodeGenerator::AccumulatorValueContext::Plug(
(...skipping 211 matching lines...)
610 __ b(cond, if_true); 615 __ b(cond, if_true);
611 } else if (if_true == fall_through) { 616 } else if (if_true == fall_through) {
612 __ b(NegateCondition(cond), if_false); 617 __ b(NegateCondition(cond), if_false);
613 } else { 618 } else {
614 __ b(cond, if_true); 619 __ b(cond, if_true);
615 __ b(if_false); 620 __ b(if_false);
616 } 621 }
617 } 622 }
618 623
619 624
620 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) { 625 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
621 switch (slot->type()) { 626 ASSERT(var->IsStackAllocated());
622 case Slot::PARAMETER: 627 // Offset is negative because higher indexes are at lower addresses.
623 case Slot::LOCAL: 628 int offset = -var->index() * kPointerSize;
624 return MemOperand(fp, SlotOffset(slot)); 629 // Adjust by a (parameter or local) base offset.
625 case Slot::CONTEXT: { 630 if (var->IsParameter()) {
626 int context_chain_length = 631 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
627 scope()->ContextChainLength(slot->var()->scope()); 632 } else {
628 __ LoadContext(scratch, context_chain_length); 633 offset += JavaScriptFrameConstants::kLocal0Offset;
629 return ContextOperand(scratch, slot->index());
630 }
631 case Slot::LOOKUP:
632 UNREACHABLE();
633 } 634 }
634 UNREACHABLE(); 635 return MemOperand(fp, offset);
635 return MemOperand(r0, 0);
636 } 636 }
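To make the new StackOperand arithmetic concrete, a hedged example assuming kPointerSize == 4, zero-based variable indices, and an illustrative kLocal0Offset of -12 (frames-arm.h has the authoritative constants):

    //   stack local, index 0:  -0 * 4 + (-12)        -> MemOperand(fp, -12)
    //   stack local, index 2:  -2 * 4 + (-12)        -> MemOperand(fp, -20)
    //   parameter 1 of 3:      -1 * 4 + (3 + 1) * 4  -> MemOperand(fp, +12)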
637 637
638 638
639 void FullCodeGenerator::Move(Register destination, Slot* source) { 639 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
640 // Use destination as scratch. 640 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
641 MemOperand slot_operand = EmitSlotSearch(source, destination); 641 if (var->IsContextSlot()) {
642 __ ldr(destination, slot_operand); 642 int context_chain_length = scope()->ContextChainLength(var->scope());
643 __ LoadContext(scratch, context_chain_length);
644 return ContextOperand(scratch, var->index());
645 } else {
646 return StackOperand(var);
647 }
643 } 648 }
644 649
645 650
646 void FullCodeGenerator::Move(Slot* dst, 651 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
647 Register src, 652 // Use destination as scratch.
648 Register scratch1, 653 MemOperand location = VarOperand(var, dest);
649 Register scratch2) { 654 __ ldr(dest, location);
650 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented. 655 }
651 ASSERT(!scratch1.is(src) && !scratch2.is(src)); 656
652 MemOperand location = EmitSlotSearch(dst, scratch1); 657
658 void FullCodeGenerator::SetVar(Variable* var,
659 Register src,
660 Register scratch0,
661 Register scratch1) {
662 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
663 ASSERT(!scratch0.is(src));
664 ASSERT(!scratch0.is(scratch1));
665 ASSERT(!scratch1.is(src));
666 MemOperand location = VarOperand(var, scratch0);
653 __ str(src, location); 667 __ str(src, location);
654 668
655 // Emit the write barrier code if the location is in the heap. 669 // Emit the write barrier code if the location is in the heap.
656 if (dst->type() == Slot::CONTEXT) { 670 if (var->IsContextSlot()) {
657 __ RecordWriteContextSlot(scratch1, 671 __ RecordWriteContextSlot(scratch0,
658 location.offset(), 672 location.offset(),
659 src, 673 src,
660 scratch2, 674 scratch1,
661 kLRHasBeenSaved, 675 kLRHasBeenSaved,
662 kDontSaveFPRegs); 676 kDontSaveFPRegs);
663 } 677 }
664 } 678 }
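Usage note: the arguments-object store earlier in this file is a typical call site. The scratch registers must differ from src and from each other (hence the new ASSERTs), because RecordWriteContextSlot may clobber its register arguments:

    SetVar(arguments, r0, r1, r2);  // value in r0; r1 and r2 are scratch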
665 679
666 680
667 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, 681 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
668 bool should_normalize, 682 bool should_normalize,
669 Label* if_true, 683 Label* if_true,
670 Label* if_false) { 684 Label* if_false) {
(...skipping 13 matching lines...)
684 698
685 if (should_normalize) { 699 if (should_normalize) {
686 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 700 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
687 __ cmp(r0, ip); 701 __ cmp(r0, ip);
688 Split(eq, if_true, if_false, NULL); 702 Split(eq, if_true, if_false, NULL);
689 __ bind(&skip); 703 __ bind(&skip);
690 } 704 }
691 } 705 }
692 706
693 707
694 void FullCodeGenerator::EmitDeclaration(Variable* variable, 708 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
695 Variable::Mode mode, 709 Variable::Mode mode,
696 FunctionLiteral* function) { 710 FunctionLiteral* function,
697 Comment cmnt(masm_, "[ Declaration"); 711 int* global_count) {
698 ASSERT(variable != NULL); // Must have been resolved. 712 // If it was not possible to allocate the variable at compile time, we
699 Slot* slot = variable->AsSlot(); 713 // need to "declare" it at runtime to make sure it actually exists in the
700 Property* prop = variable->AsProperty(); 714 // local context.
715 Variable* variable = proxy->var();
716 switch (variable->location()) {
717 case Variable::UNALLOCATED:
718 ++(*global_count);
719 break;
701 720
702 if (slot != NULL) { 721 case Variable::PARAMETER:
703 switch (slot->type()) { 722 case Variable::LOCAL:
704 case Slot::PARAMETER: 723 if (function != NULL) {
705 case Slot::LOCAL: 724 Comment cmnt(masm_, "[ Declaration");
706 if (mode == Variable::CONST) { 725 VisitForAccumulatorValue(function);
707 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 726 __ str(result_register(), StackOperand(variable));
708 __ str(ip, MemOperand(fp, SlotOffset(slot))); 727 } else if (mode == Variable::CONST || mode == Variable::LET) {
709 } else if (function != NULL) { 728 Comment cmnt(masm_, "[ Declaration");
710 VisitForAccumulatorValue(function); 729 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
711 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); 730 __ str(ip, StackOperand(variable));
712 } 731 }
713 break; 732 break;
714 733
715 case Slot::CONTEXT: 734 case Variable::CONTEXT:
716 // We bypass the general EmitSlotSearch because we know more about 735 // The variable in the decl always resides in the current function
717 // this specific context. 736 // context.
737 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
738 if (FLAG_debug_code) {
739 // Check that we're not inside a with or catch context.
740 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
741 __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
742 __ Check(ne, "Declaration in with context.");
743 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
744 __ Check(ne, "Declaration in catch context.");
745 }
746 if (function != NULL) {
747 Comment cmnt(masm_, "[ Declaration");
748 VisitForAccumulatorValue(function);
749 __ str(result_register(), ContextOperand(cp, variable->index()));
750 int offset = Context::SlotOffset(variable->index());
751 // We know that we have written a function, which is not a smi.
752 __ RecordWriteContextSlot(cp,
753 offset,
754 result_register(),
755 r2,
756 kLRHasBeenSaved,
757 kDontSaveFPRegs,
758 EMIT_REMEMBERED_SET,
759 OMIT_SMI_CHECK);
760 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
761 } else if (mode == Variable::CONST || mode == Variable::LET) {
762 Comment cmnt(masm_, "[ Declaration");
763 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
764 __ str(ip, ContextOperand(cp, variable->index()));
765 // No write barrier since the_hole_value is in old space.
766 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
767 }
768 break;
718 769
719 // The variable in the decl always resides in the current function 770 case Variable::LOOKUP: {
720 // context. 771 Comment cmnt(masm_, "[ Declaration");
721 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); 772 __ mov(r2, Operand(variable->name()));
722 if (FLAG_debug_code) { 773 // Declaration nodes are always introduced in one of three modes.
723 // Check that we're not inside a with or catch context. 774 ASSERT(mode == Variable::VAR ||
724 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); 775 mode == Variable::CONST ||
725 __ CompareRoot(r1, Heap::kWithContextMapRootIndex); 776 mode == Variable::LET);
726 __ Check(ne, "Declaration in with context."); 777 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
727 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); 778 __ mov(r1, Operand(Smi::FromInt(attr)));
728 __ Check(ne, "Declaration in catch context."); 779 // Push initial value, if any.
729 } 780 // Note: For variables we must not push an initial value (such as
730 if (mode == Variable::CONST) { 781 // 'undefined') because we may have a (legal) redeclaration and we
731 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 782 // must not destroy the current value.
732 __ str(ip, ContextOperand(cp, slot->index())); 783 if (function != NULL) {
733 // No write barrier since the_hole_value is in old space. 784 __ Push(cp, r2, r1);
734 } else if (function != NULL) { 785 // Push initial value for function declaration.
735 VisitForAccumulatorValue(function); 786 VisitForStackValue(function);
736 MemOperand target = ContextOperand(cp, slot->index()); 787 } else if (mode == Variable::CONST || mode == Variable::LET) {
737 __ str(result_register(), target); 788 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
738 789 __ Push(cp, r2, r1, r0);
739 // We know that we have written a function, which is not a smi. 790 } else {
740 __ RecordWriteContextSlot(cp, 791 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
741 target.offset(), 792 __ Push(cp, r2, r1, r0);
742 result_register(),
743 r2,
744 kLRHasBeenSaved,
745 kDontSaveFPRegs,
746 EMIT_REMEMBERED_SET,
747 OMIT_SMI_CHECK);
748 }
749 break;
750
751 case Slot::LOOKUP: {
752 __ mov(r2, Operand(variable->name()));
753 // Declaration nodes are always introduced in one of two modes.
754 ASSERT(mode == Variable::VAR ||
755 mode == Variable::CONST ||
756 mode == Variable::LET);
757 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
758 __ mov(r1, Operand(Smi::FromInt(attr)));
759 // Push initial value, if any.
760 // Note: For variables we must not push an initial value (such as
761 // 'undefined') because we may have a (legal) redeclaration and we
762 // must not destroy the current value.
763 if (mode == Variable::CONST) {
764 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
765 __ Push(cp, r2, r1, r0);
766 } else if (function != NULL) {
767 __ Push(cp, r2, r1);
768 // Push initial value for function declaration.
769 VisitForStackValue(function);
770 } else {
771 __ mov(r0, Operand(Smi::FromInt(0))); // No initial value!
772 __ Push(cp, r2, r1, r0);
773 }
774 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
775 break;
776 } 793 }
777 } 794 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
778 795 break;
779 } else if (prop != NULL) {
780 // A const declaration aliasing a parameter is an illegal redeclaration.
781 ASSERT(mode != Variable::CONST);
782 if (function != NULL) {
783 // We are declaring a function that rewrites to a property.
784 // Use (keyed) IC to set the initial value. We cannot visit the
785 // rewrite because it's shared and we risk recording duplicate AST
786 // IDs for bailouts from optimized code.
787 ASSERT(prop->obj()->AsVariableProxy() != NULL);
788 { AccumulatorValueContext for_object(this);
789 EmitVariableLoad(prop->obj()->AsVariableProxy());
790 }
791
792 __ push(r0);
793 VisitForAccumulatorValue(function);
794 __ pop(r2);
795
796 ASSERT(prop->key()->AsLiteral() != NULL &&
797 prop->key()->AsLiteral()->handle()->IsSmi());
798 __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
799
800 Handle<Code> ic = is_strict_mode()
801 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
802 : isolate()->builtins()->KeyedStoreIC_Initialize();
803 __ Call(ic);
804 // Value in r0 is ignored (declarations are statements).
805 } 796 }
806 } 797 }
807 } 798 }
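For orientation, a hedged mapping from source-level declarations to the Variable::location() cases handled above (the JavaScript in the comments is illustrative only, not part of this patch):

    //   var g;                           // UNALLOCATED: counted via
    //                                    // *global_count, declared in bulk
    //                                    // by DeclareGlobals below
    //   function f(p) {                  // p: PARAMETER (stack slot)
    //     var l;                         // l: LOCAL (stack slot)
    //     var c;
    //     function inner() { return c; } // c: CONTEXT (heap slot; needs
    //                                    // the write barrier above)
    //     eval("var e = 1;");            // declarations made by eval code
    //   }                                // use LOOKUP (kDeclareContextSlot)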
808 799
809 800
810 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { 801 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
811 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
812 }
813 802
814 803
815 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 804 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
816 // Call the runtime to declare the globals. 805 // Call the runtime to declare the globals.
817 // The context is the first argument. 806 // The context is the first argument.
818 __ mov(r2, Operand(pairs)); 807 __ mov(r1, Operand(pairs));
819 __ mov(r1, Operand(Smi::FromInt(is_eval() ? 1 : 0))); 808 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
820 __ mov(r0, Operand(Smi::FromInt(strict_mode_flag()))); 809 __ Push(cp, r1, r0);
821 __ Push(cp, r2, r1, r0); 810 __ CallRuntime(Runtime::kDeclareGlobals, 3);
822 __ CallRuntime(Runtime::kDeclareGlobals, 4);
823 // Return value is ignored. 811 // Return value is ignored.
824 } 812 }
825 813
826 814
827 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 815 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
828 Comment cmnt(masm_, "[ SwitchStatement"); 816 Comment cmnt(masm_, "[ SwitchStatement");
829 Breakable nested_statement(this, stmt); 817 Breakable nested_statement(this, stmt);
830 SetStatementPosition(stmt); 818 SetStatementPosition(stmt);
831 819
832 // Keep the switch value on the stack until a case matches. 820 // Keep the switch value on the stack until a case matches.
(...skipping 287 matching lines...)
1120 context()->Plug(r0); 1108 context()->Plug(r0);
1121 } 1109 }
1122 1110
1123 1111
1124 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1112 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1125 Comment cmnt(masm_, "[ VariableProxy"); 1113 Comment cmnt(masm_, "[ VariableProxy");
1126 EmitVariableLoad(expr); 1114 EmitVariableLoad(expr);
1127 } 1115 }
1128 1116
1129 1117
1130 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( 1118 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1131 Slot* slot, 1119 TypeofState typeof_state,
1132 TypeofState typeof_state, 1120 Label* slow) {
1133 Label* slow) {
1134 Register current = cp; 1121 Register current = cp;
1135 Register next = r1; 1122 Register next = r1;
1136 Register temp = r2; 1123 Register temp = r2;
1137 1124
1138 Scope* s = scope(); 1125 Scope* s = scope();
1139 while (s != NULL) { 1126 while (s != NULL) {
1140 if (s->num_heap_slots() > 0) { 1127 if (s->num_heap_slots() > 0) {
1141 if (s->calls_eval()) { 1128 if (s->calls_eval()) {
1142 // Check that extension is NULL. 1129 // Check that extension is NULL.
1143 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); 1130 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
(...skipping 26 matching lines...)
1170 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); 1157 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1171 __ tst(temp, temp); 1158 __ tst(temp, temp);
1172 __ b(ne, slow); 1159 __ b(ne, slow);
1173 // Load next context in chain. 1160 // Load next context in chain.
1174 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); 1161 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1175 __ b(&loop); 1162 __ b(&loop);
1176 __ bind(&fast); 1163 __ bind(&fast);
1177 } 1164 }
1178 1165
1179 __ ldr(r0, GlobalObjectOperand()); 1166 __ ldr(r0, GlobalObjectOperand());
1180 __ mov(r2, Operand(slot->var()->name())); 1167 __ mov(r2, Operand(var->name()));
1181 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) 1168 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1182 ? RelocInfo::CODE_TARGET 1169 ? RelocInfo::CODE_TARGET
1183 : RelocInfo::CODE_TARGET_CONTEXT; 1170 : RelocInfo::CODE_TARGET_CONTEXT;
1184 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1171 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1185 __ Call(ic, mode); 1172 __ Call(ic, mode);
1186 } 1173 }
1187 1174
1188 1175
1189 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( 1176 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1190 Slot* slot, 1177 Label* slow) {
1191 Label* slow) { 1178 ASSERT(var->IsContextSlot());
1192 ASSERT(slot->type() == Slot::CONTEXT);
1193 Register context = cp; 1179 Register context = cp;
1194 Register next = r3; 1180 Register next = r3;
1195 Register temp = r4; 1181 Register temp = r4;
1196 1182
1197 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { 1183 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1198 if (s->num_heap_slots() > 0) { 1184 if (s->num_heap_slots() > 0) {
1199 if (s->calls_eval()) { 1185 if (s->calls_eval()) {
1200 // Check that extension is NULL. 1186 // Check that extension is NULL.
1201 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1187 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1202 __ tst(temp, temp); 1188 __ tst(temp, temp);
1203 __ b(ne, slow); 1189 __ b(ne, slow);
1204 } 1190 }
1205 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); 1191 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1206 // Walk the rest of the chain without clobbering cp. 1192 // Walk the rest of the chain without clobbering cp.
1207 context = next; 1193 context = next;
1208 } 1194 }
1209 } 1195 }
1210 // Check that last extension is NULL. 1196 // Check that last extension is NULL.
1211 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1197 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1212 __ tst(temp, temp); 1198 __ tst(temp, temp);
1213 __ b(ne, slow); 1199 __ b(ne, slow);
1214 1200
1215 // This function is used only for loads, not stores, so it's safe to 1201 // This function is used only for loads, not stores, so it's safe to
1216 // return a cp-based operand (the write barrier cannot be allowed to 1202 // return a cp-based operand (the write barrier cannot be allowed to
1217 // destroy the cp register). 1203 // destroy the cp register).
1218 return ContextOperand(context, slot->index()); 1204 return ContextOperand(context, var->index());
1219 } 1205 }
1220 1206
1221 1207
1222 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( 1208 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1223 Slot* slot, 1209 TypeofState typeof_state,
1224 TypeofState typeof_state, 1210 Label* slow,
1225 Label* slow, 1211 Label* done) {
1226 Label* done) {
1227 // Generate fast-case code for variables that might be shadowed by 1212 // Generate fast-case code for variables that might be shadowed by
1228 // eval-introduced variables. Eval is used a lot without 1213 // eval-introduced variables. Eval is used a lot without
1229 // introducing variables. In those cases, we do not want to 1214 // introducing variables. In those cases, we do not want to
1230 // perform a runtime call for all variables in the scope 1215 // perform a runtime call for all variables in the scope
1231 // containing the eval. 1216 // containing the eval.
1232 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { 1217 if (var->mode() == Variable::DYNAMIC_GLOBAL) {
1233 EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow); 1218 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1234 __ jmp(done); 1219 __ jmp(done);
1235 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { 1220 } else if (var->mode() == Variable::DYNAMIC_LOCAL) {
1236 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot(); 1221 Variable* local = var->local_if_not_shadowed();
1237 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite(); 1222 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1238 if (potential_slot != NULL) { 1223 if (local->mode() == Variable::CONST) {
1239 // Generate fast case for locals that rewrite to slots. 1224 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1240 __ ldr(r0, ContextSlotOperandCheckExtensions(potential_slot, slow)); 1225 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1241 if (potential_slot->var()->mode() == Variable::CONST) {
1242 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1243 __ cmp(r0, ip);
1244 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1245 }
1246 __ jmp(done);
1247 } else if (rewrite != NULL) {
1248 // Generate fast case for calls of an argument function.
1249 Property* property = rewrite->AsProperty();
1250 if (property != NULL) {
1251 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1252 Literal* key_literal = property->key()->AsLiteral();
1253 if (obj_proxy != NULL &&
1254 key_literal != NULL &&
1255 obj_proxy->IsArguments() &&
1256 key_literal->handle()->IsSmi()) {
1257 // Load arguments object if there are no eval-introduced
1258 // variables. Then load the argument from the arguments
1259 // object using keyed load.
1260 __ ldr(r1,
1261 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
1262 slow));
1263 __ mov(r0, Operand(key_literal->handle()));
1264 Handle<Code> ic =
1265 isolate()->builtins()->KeyedLoadIC_Initialize();
1266 __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1267 __ jmp(done);
1268 }
1269 }
1270 } 1226 }
1227 __ jmp(done);
1271 } 1228 }
1272 } 1229 }
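An illustrative case for the fast path above (hedged; the JavaScript only shows where the two dynamic modes arise):

    //   function outer() {
    //     var x = 1;
    //     function inner(s) {
    //       eval(s);     // might introduce a shadowing 'x'
    //       return x;    // x is DYNAMIC_LOCAL: load the context slot via
    //     }              // ContextSlotOperandCheckExtensions, falling
    //   }                // back to the runtime if any extension object
    //                    // is non-NULL
    // A reference to an unresolved global in the same position would be
    // DYNAMIC_GLOBAL and take EmitLoadGlobalCheckExtensions instead.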
1273 1230
1274 1231
1275 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1232 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1276 // Record position before possible IC call. 1233 // Record position before possible IC call.
1277 SetSourcePosition(proxy->position()); 1234 SetSourcePosition(proxy->position());
1278 Variable* var = proxy->var(); 1235 Variable* var = proxy->var();
1279 1236
1280 // Three cases: non-this global variables, lookup slots, and all other 1237 // Three cases: global variables, lookup variables, and all other types of
1281 // types of slots. 1238 // variables.
1282 Slot* slot = var->AsSlot(); 1239 switch (var->location()) {
1283 ASSERT((var->is_global() && !var->is_this()) == (slot == NULL)); 1240 case Variable::UNALLOCATED: {
1241 Comment cmnt(masm_, "Global variable");
1242 // Use inline caching. Variable name is passed in r2 and the global
1243 // object (receiver) in r0.
1244 __ ldr(r0, GlobalObjectOperand());
1245 __ mov(r2, Operand(var->name()));
1246 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1247 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1248 context()->Plug(r0);
1249 break;
1250 }
1284 1251
1285 if (slot == NULL) { 1252 case Variable::PARAMETER:
1286 Comment cmnt(masm_, "Global variable"); 1253 case Variable::LOCAL:
1287 // Use inline caching. Variable name is passed in r2 and the global 1254 case Variable::CONTEXT: {
1288 // object (receiver) in r0. 1255 Comment cmnt(masm_, var->IsContextSlot()
1289 __ ldr(r0, GlobalObjectOperand()); 1256 ? "Context variable"
1290 __ mov(r2, Operand(var->name())); 1257 : "Stack variable");
1291 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1258 if (var->mode() != Variable::LET && var->mode() != Variable::CONST) {
1292 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1259 context()->Plug(var);
1293 context()->Plug(r0); 1260 } else {
1261 // Let and const need a read barrier.
1262 GetVar(r0, var);
1263 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1264 if (var->mode() == Variable::LET) {
1265 Label done;
1266 __ b(ne, &done);
1267 __ mov(r0, Operand(var->name()));
1268 __ push(r0);
1269 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1270 __ bind(&done);
1271 } else {
1272 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1273 }
1274 context()->Plug(r0);
1275 }
1276 break;
1277 }
1294 1278
1295 } else if (slot->type() == Slot::LOOKUP) { 1279 case Variable::LOOKUP: {
1296 Label done, slow; 1280 Label done, slow;
1297 1281 // Generate code for loading from variables potentially shadowed
1298 // Generate code for loading from variables potentially shadowed 1282 // by eval-introduced variables.
1299 // by eval-introduced variables. 1283 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1300 EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done); 1284 __ bind(&slow);
1301 1285 Comment cmnt(masm_, "Lookup variable");
1302 __ bind(&slow); 1286 __ mov(r1, Operand(var->name()));
1303 Comment cmnt(masm_, "Lookup slot"); 1287 __ Push(cp, r1); // Context and name.
1304 __ mov(r1, Operand(var->name())); 1288 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1305 __ Push(cp, r1); // Context and name. 1289 __ bind(&done);
1306 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1307 __ bind(&done);
1308
1309 context()->Plug(r0);
1310
1311 } else {
1312 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1313 ? "Context slot"
1314 : "Stack slot");
1315 if (var->mode() == Variable::CONST) {
1316 // Constants may be the hole value if they have not been initialized.
1317 // Unhole them.
1318 MemOperand slot_operand = EmitSlotSearch(slot, r0);
1319 __ ldr(r0, slot_operand);
1320 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1321 __ cmp(r0, ip);
1322 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1323 context()->Plug(r0); 1290 context()->Plug(r0);
1324 } else {
1325 context()->Plug(slot);
1326 } 1291 }
1327 } 1292 }
1328 } 1293 }
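The let/const read barrier above corresponds to these source-level behaviors (illustrative, reflecting the semantics implemented here):

    //   { print(l); let l = 1; }   // 'l' still holds the hole: the LET
    //                              // branch throws a ReferenceError
    //   print(c); const c = 1;     // 'c' still holds the hole: the CONST
    //                              // branch loads undefined instead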
1329 1294
1330 1295
1331 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1296 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1332 Comment cmnt(masm_, "[ RegExpLiteral"); 1297 Comment cmnt(masm_, "[ RegExpLiteral");
1333 Label materialized; 1298 Label materialized;
1334 // Registers will be used as follows: 1299 // Registers will be used as follows:
1335 // r5 = materialized value (RegExp literal) 1300 // r5 = materialized value (RegExp literal)
(...skipping 514 matching lines...)
1850 break; 1815 break;
1851 } 1816 }
1852 } 1817 }
1853 PrepareForBailoutForId(bailout_ast_id, TOS_REG); 1818 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1854 context()->Plug(r0); 1819 context()->Plug(r0);
1855 } 1820 }
1856 1821
1857 1822
1858 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 1823 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1859 Token::Value op) { 1824 Token::Value op) {
1860 ASSERT(var != NULL); 1825 if (var->IsUnallocated()) {
1861 ASSERT(var->is_global() || var->AsSlot() != NULL); 1826 // Global var, const, or let.
1862
1863 if (var->is_global()) {
1864 ASSERT(!var->is_this());
1865 // Assignment to a global variable. Use inline caching for the
1866 // assignment. Right-hand-side value is passed in r0, variable name in
1867 // r2, and the global object in r1.
1868 __ mov(r2, Operand(var->name())); 1827 __ mov(r2, Operand(var->name()));
1869 __ ldr(r1, GlobalObjectOperand()); 1828 __ ldr(r1, GlobalObjectOperand());
1870 Handle<Code> ic = is_strict_mode() 1829 Handle<Code> ic = is_strict_mode()
1871 ? isolate()->builtins()->StoreIC_Initialize_Strict() 1830 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1872 : isolate()->builtins()->StoreIC_Initialize(); 1831 : isolate()->builtins()->StoreIC_Initialize();
1873 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1832 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1874 1833
1875 } else if (op == Token::INIT_CONST) { 1834 } else if (op == Token::INIT_CONST) {
1876 // Like var declarations, const declarations are hoisted to function 1835 // Const initializers need a write barrier.
1877 // scope. However, unlike var initializers, const initializers are able 1836 ASSERT(!var->IsParameter()); // No const parameters.
1878 // to drill a hole to that function context, even from inside a 'with' 1837 if (var->IsStackLocal()) {
1879 // context. We thus bypass the normal static scope lookup. 1838 Label skip;
1880 Slot* slot = var->AsSlot(); 1839 __ ldr(r1, StackOperand(var));
1881 Label skip; 1840 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
1882 switch (slot->type()) { 1841 __ b(ne, &skip);
1883 case Slot::PARAMETER: 1842 __ str(result_register(), StackOperand(var));
1884 // No const parameters. 1843 __ bind(&skip);
1885 UNREACHABLE(); 1844 } else {
1886 break; 1845 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
1887 case Slot::LOCAL: 1846 // Like var declarations, const declarations are hoisted to function
1888 // Detect const reinitialization by checking for the hole value. 1847 // scope. However, unlike var initializers, const initializers are
1889 __ ldr(r1, MemOperand(fp, SlotOffset(slot))); 1848 // able to drill a hole to that function context, even from inside a
1890 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 1849 // 'with' context. We thus bypass the normal static scope lookup for
1891 __ cmp(r1, ip); 1850 // var->IsContextSlot().
1892 __ b(ne, &skip); 1851 __ push(r0);
1893 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); 1852 __ mov(r0, Operand(var->name()));
1894 break; 1853 __ Push(cp, r0); // Context and name.
1854 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1855 }
1895 1856
1896 case Slot::CONTEXT: 1857 } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
1897 case Slot::LOOKUP: 1858 // Non-initializing assignment to let variable needs a write barrier.
1898 __ push(r0); 1859 if (var->IsLookupSlot()) {
1899 __ mov(r0, Operand(slot->var()->name())); 1860 __ push(r0); // Value.
1900 __ Push(cp, r0); // Context and name. 1861 __ mov(r1, Operand(var->name()));
1901 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); 1862 __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
1902 break; 1863 __ Push(cp, r1, r0); // Context, name, strict mode.
1864 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1865 } else {
1866 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
1867 Label assign;
1868 MemOperand location = VarOperand(var, r1);
1869 __ ldr(r3, location);
1870 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1871 __ b(ne, &assign);
1872 __ mov(r3, Operand(var->name()));
1873 __ push(r3);
1874 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1875 // Perform the assignment.
1876 __ bind(&assign);
1877 __ str(result_register(), location);
1878 if (var->IsContextSlot()) {
1879 // RecordWrite may destroy all its register arguments.
1880 __ mov(r3, result_register());
1881 int offset = Context::SlotOffset(var->index());
1882 __ RecordWriteContextSlot(
1883 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
1884 }
1903 } 1885 }
1904 __ bind(&skip);
1905 1886
1906 } else if (var->mode() != Variable::CONST) { 1887 } else if (var->mode() != Variable::CONST) {
1907 // Perform the assignment for non-const variables. Const assignments 1888 // Assignment to var or initializing assignment to let.
1908 // are simply skipped. 1889 if (var->IsStackAllocated() || var->IsContextSlot()) {
1909 Slot* slot = var->AsSlot(); 1890 MemOperand location = VarOperand(var, r1);
1910 switch (slot->type()) { 1891 if (FLAG_debug_code && op == Token::INIT_LET) {
1911 case Slot::PARAMETER: 1892 // Check for an uninitialized let binding.
1912 case Slot::LOCAL: 1893 __ ldr(r2, location);
1913 // Perform the assignment. 1894 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
1914 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); 1895 __ Check(eq, "Let binding re-initialization.");
1915 break; 1896 }
1916 1897 // Perform the assignment.
1917 case Slot::CONTEXT: { 1898 __ str(r0, location);
1918 MemOperand target = EmitSlotSearch(slot, r1); 1899 if (var->IsContextSlot()) {
1919 // Perform the assignment and issue the write barrier. 1900 __ mov(r3, r0);
1920 __ str(result_register(), target); 1901 int offset = Context::SlotOffset(var->index());
1921 // The value of the assignment is in result_register(). RecordWrite
1922 // clobbers its second and third register arguments.
1923 __ mov(r3, result_register());
1924 __ RecordWriteContextSlot( 1902 __ RecordWriteContextSlot(
1925 r1, target.offset(), r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); 1903 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
1926 break;
1927 } 1904 }
1928 1905 } else {
1929 case Slot::LOOKUP: 1906 ASSERT(var->IsLookupSlot());
1930 // Call the runtime for the assignment. 1907 __ push(r0); // Value.
1931 __ push(r0); // Value. 1908 __ mov(r1, Operand(var->name()));
1932 __ mov(r1, Operand(slot->var()->name())); 1909 __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
1933 __ mov(r0, Operand(Smi::FromInt(strict_mode_flag()))); 1910 __ Push(cp, r1, r0); // Context, name, strict mode.
1934 __ Push(cp, r1, r0); // Context, name, strict mode. 1911 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1935 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1936 break;
1937 } 1912 }
1938 } 1913 }
1914 // Non-initializing assignments to consts are ignored.
1939 } 1915 }
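A summary of the dispatch above, keyed on op and the variable's mode (hedged restatement of the cases in this function):

    //   const c = v;  // Token::INIT_CONST: store only if the slot still
    //                 // holds the hole
    //   l = v;        // LET var, op != Token::INIT_LET: throw a
    //                 // ReferenceError if the slot still holds the hole,
    //                 // otherwise store (write barrier for context slots)
    //   let l = v;    // Token::INIT_LET: plain store; debug code checks
    //                 // the slot still held the hole
    //   var x = v;    // plain store; lookup slots go through the
    //                 // kStoreContextSlot runtime call
    //   c = v;        // non-initializing const assignment: ignored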
1940 1916
1941 1917
1942 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 1918 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1943 // Assignment to a property, using a named store IC. 1919 // Assignment to a property, using a named store IC.
1944 Property* prop = expr->target()->AsProperty(); 1920 Property* prop = expr->target()->AsProperty();
1945 ASSERT(prop != NULL); 1921 ASSERT(prop != NULL);
1946 ASSERT(prop->key()->AsLiteral() != NULL); 1922 ASSERT(prop->key()->AsLiteral() != NULL);
1947 1923
1948 // If the assignment starts a block of assignments to the same object, 1924 // If the assignment starts a block of assignments to the same object,
(...skipping 189 matching lines...)
2138 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); 2114 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2139 } else { 2115 } else {
2140 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 2116 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2141 } 2117 }
2142 __ push(r1); 2118 __ push(r1);
2143 2119
2144 // Push the receiver of the enclosing function and do runtime call. 2120 // Push the receiver of the enclosing function and do runtime call.
2145 int receiver_offset = 2 + info_->scope()->num_parameters(); 2121 int receiver_offset = 2 + info_->scope()->num_parameters();
2146 __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize)); 2122 __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
2147 __ push(r1); 2123 __ push(r1);
2148 // Push the strict mode flag. 2124 // Push the strict mode flag. In harmony mode every eval call
2149 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag()))); 2125 // is a strict mode eval call.
2126 StrictModeFlag strict_mode = strict_mode_flag();
2127 if (FLAG_harmony_block_scoping) {
2128 strict_mode = kStrictMode;
2129 }
2130 __ mov(r1, Operand(Smi::FromInt(strict_mode)));
2150 __ push(r1); 2131 __ push(r1);
2151 2132
2152 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP 2133 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2153 ? Runtime::kResolvePossiblyDirectEvalNoLookup 2134 ? Runtime::kResolvePossiblyDirectEvalNoLookup
2154 : Runtime::kResolvePossiblyDirectEval, 4); 2135 : Runtime::kResolvePossiblyDirectEval, 4);
2155 } 2136 }
2156 2137
2157 2138
2158 void FullCodeGenerator::VisitCall(Call* expr) { 2139 void FullCodeGenerator::VisitCall(Call* expr) {
2159 #ifdef DEBUG 2140 #ifdef DEBUG
2160 // We want to verify that RecordJSReturnSite gets called on all paths 2141 // We want to verify that RecordJSReturnSite gets called on all paths
2161 // through this function. Avoid early returns. 2142 // through this function. Avoid early returns.
2162 expr->return_is_recorded_ = false; 2143 expr->return_is_recorded_ = false;
2163 #endif 2144 #endif
2164 2145
2165 Comment cmnt(masm_, "[ Call"); 2146 Comment cmnt(masm_, "[ Call");
2166 Expression* fun = expr->expression(); 2147 Expression* callee = expr->expression();
2167 Variable* var = fun->AsVariableProxy()->AsVariable(); 2148 VariableProxy* proxy = callee->AsVariableProxy();
2149 Property* property = callee->AsProperty();
2168 2150
2169 if (var != NULL && var->is_possibly_eval()) { 2151 if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2170 // In a call to eval, we first call %ResolvePossiblyDirectEval to 2152 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2171 // resolve the function we need to call and the receiver of the 2153 // resolve the function we need to call and the receiver of the
2172 // call. Then we call the resolved function using the given 2154 // call. Then we call the resolved function using the given
2173 // arguments. 2155 // arguments.
2174 ZoneList<Expression*>* args = expr->arguments(); 2156 ZoneList<Expression*>* args = expr->arguments();
2175 int arg_count = args->length(); 2157 int arg_count = args->length();
2176 2158
2177 { PreservePositionScope pos_scope(masm()->positions_recorder()); 2159 { PreservePositionScope pos_scope(masm()->positions_recorder());
2178 VisitForStackValue(fun); 2160 VisitForStackValue(callee);
2179 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 2161 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2180 __ push(r2); // Reserved receiver slot. 2162 __ push(r2); // Reserved receiver slot.
2181 2163
2182 // Push the arguments. 2164 // Push the arguments.
2183 for (int i = 0; i < arg_count; i++) { 2165 for (int i = 0; i < arg_count; i++) {
2184 VisitForStackValue(args->at(i)); 2166 VisitForStackValue(args->at(i));
2185 } 2167 }
2186 2168
2187 // If we know that eval can only be shadowed by eval-introduced 2169 // If we know that eval can only be shadowed by eval-introduced
2188 // variables we attempt to load the global eval function directly 2170 // variables we attempt to load the global eval function directly
2189 // in generated code. If we succeed, there is no need to perform a 2171 // in generated code. If we succeed, there is no need to perform a
2190 // context lookup in the runtime system. 2172 // context lookup in the runtime system.
2191 Label done; 2173 Label done;
2192 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) { 2174 Variable* var = proxy->var();
2175 if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) {
2193 Label slow; 2176 Label slow;
2194 EmitLoadGlobalSlotCheckExtensions(var->AsSlot(), 2177 EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow);
2195 NOT_INSIDE_TYPEOF,
2196 &slow);
2197 // Push the function and resolve eval. 2178 // Push the function and resolve eval.
2198 __ push(r0); 2179 __ push(r0);
2199 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); 2180 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2200 __ jmp(&done); 2181 __ jmp(&done);
2201 __ bind(&slow); 2182 __ bind(&slow);
2202 } 2183 }
2203 2184
2204 // Push copy of the function (found below the arguments) and 2185 // Push a copy of the function (found below the arguments) and
2205 // resolve eval. 2186 // resolve eval.
2206 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2187 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2207 __ push(r1); 2188 __ push(r1);
2208 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); 2189 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2209 if (done.is_linked()) { 2190 __ bind(&done);
2210 __ bind(&done);
2211 }
2212 2191
2213 // The runtime call returns a pair of values in r0 (function) and 2192 // The runtime call returns a pair of values in r0 (function) and
2214 // r1 (receiver). Touch up the stack with the right values. 2193 // r1 (receiver). Touch up the stack with the right values.
2215 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2194 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2216 __ str(r1, MemOperand(sp, arg_count * kPointerSize)); 2195 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2217 } 2196 }
2218 2197
2219 // Record source position for debugger. 2198 // Record source position for debugger.
2220 SetSourcePosition(expr->position()); 2199 SetSourcePosition(expr->position());
2221 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; 2200 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2222 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT); 2201 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
2223 __ CallStub(&stub); 2202 __ CallStub(&stub);
2224 RecordJSReturnSite(expr); 2203 RecordJSReturnSite(expr);
2225 // Restore context register. 2204 // Restore context register.
2226 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2205 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2227 context()->DropAndPlug(1, r0); 2206 context()->DropAndPlug(1, r0);
2228 } else if (var != NULL && !var->is_this() && var->is_global()) { 2207 } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2229 // Push global object as receiver for the call IC. 2208 // Push global object as receiver for the call IC.
2230 __ ldr(r0, GlobalObjectOperand()); 2209 __ ldr(r0, GlobalObjectOperand());
2231 __ push(r0); 2210 __ push(r0);
2232 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); 2211 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2233 } else if (var != NULL && var->AsSlot() != NULL && 2212 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2234 var->AsSlot()->type() == Slot::LOOKUP) {
2235 // Call to a lookup slot (dynamically introduced variable). 2213 // Call to a lookup slot (dynamically introduced variable).
2236 Label slow, done; 2214 Label slow, done;
2237 2215
2238 { PreservePositionScope scope(masm()->positions_recorder()); 2216 { PreservePositionScope scope(masm()->positions_recorder());
2239 // Generate code for loading from variables potentially shadowed 2217 // Generate code for loading from variables potentially shadowed
2240 // by eval-introduced variables. 2218 // by eval-introduced variables.
2241 EmitDynamicLoadFromSlotFastCase(var->AsSlot(), 2219 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2242 NOT_INSIDE_TYPEOF,
2243 &slow,
2244 &done);
2245 } 2220 }
2246 2221
2247 __ bind(&slow); 2222 __ bind(&slow);
2248 // Call the runtime to find the function to call (returned in r0) 2223 // Call the runtime to find the function to call (returned in r0)
2249 // and the object holding it (returned in r1). 2224 // and the object holding it (returned in r1).
2250 __ push(context_register()); 2225 __ push(context_register());
2251 __ mov(r2, Operand(var->name())); 2226 __ mov(r2, Operand(proxy->name()));
2252 __ push(r2); 2227 __ push(r2);
2253 __ CallRuntime(Runtime::kLoadContextSlot, 2); 2228 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2254 __ Push(r0, r1); // Function, receiver. 2229 __ Push(r0, r1); // Function, receiver.
2255 2230
2256 // If fast case code has been generated, emit code to push the 2231 // If fast case code has been generated, emit code to push the
2257 // function and receiver and have the slow path jump around this 2232 // function and receiver and have the slow path jump around this
2258 // code. 2233 // code.
2259 if (done.is_linked()) { 2234 if (done.is_linked()) {
2260 Label call; 2235 Label call;
2261 __ b(&call); 2236 __ b(&call);
2262 __ bind(&done); 2237 __ bind(&done);
2263 // Push function. 2238 // Push function.
2264 __ push(r0); 2239 __ push(r0);
2265 // The receiver is implicitly the global receiver. Indicate this 2240 // The receiver is implicitly the global receiver. Indicate this
2266 // by passing the hole to the call function stub. 2241 // by passing the hole to the call function stub.
2267 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex); 2242 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
2268 __ push(r1); 2243 __ push(r1);
2269 __ bind(&call); 2244 __ bind(&call);
2270 } 2245 }
2271 2246
2272 // The receiver is either the global receiver or an object found 2247 // The receiver is either the global receiver or an object found
2273 // by LoadContextSlot. That object could be the hole if the 2248 // by LoadContextSlot. That object could be the hole if the
2274 // receiver is implicitly the global object. 2249 // receiver is implicitly the global object.
2275 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); 2250 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2276 } else if (fun->AsProperty() != NULL) { 2251 } else if (property != NULL) {
2277 // Call to an object property. 2252 { PreservePositionScope scope(masm()->positions_recorder());
2278 Property* prop = fun->AsProperty(); 2253 VisitForStackValue(property->obj());
2279 Literal* key = prop->key()->AsLiteral(); 2254 }
2280 if (key != NULL && key->handle()->IsSymbol()) { 2255 if (property->key()->IsPropertyName()) {
2281 // Call to a named property, use call IC. 2256 EmitCallWithIC(expr,
2282 { PreservePositionScope scope(masm()->positions_recorder()); 2257 property->key()->AsLiteral()->handle(),
2283 VisitForStackValue(prop->obj()); 2258 RelocInfo::CODE_TARGET);
2284 }
2285 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2286 } else { 2259 } else {
2287 // Call to a keyed property. 2260 EmitKeyedCallWithIC(expr, property->key());
2288 // For a synthetic property use keyed load IC followed by function call,
2289 // for a regular property use EmitKeyedCallWithIC.
2290 if (prop->is_synthetic()) {
2291 // Do not visit the object and key subexpressions (they are shared
2292 // by all occurrences of the same rewritten parameter).
2293 ASSERT(prop->obj()->AsVariableProxy() != NULL);
2294 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2295 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2296 MemOperand operand = EmitSlotSearch(slot, r1);
2297 __ ldr(r1, operand);
2298
2299 ASSERT(prop->key()->AsLiteral() != NULL);
2300 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2301 __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
2302
2303 // Record source code position for IC call.
2304 SetSourcePosition(prop->position());
2305
2306 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2307 __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
2308 __ ldr(r1, GlobalObjectOperand());
2309 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2310 __ Push(r0, r1); // Function, receiver.
2311 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2312 } else {
2313 { PreservePositionScope scope(masm()->positions_recorder());
2314 VisitForStackValue(prop->obj());
2315 }
2316 EmitKeyedCallWithIC(expr, prop->key());
2317 }
2318 } 2261 }
2319 } else { 2262 } else {
2263 // Call to an arbitrary expression not handled specially above.
2320 { PreservePositionScope scope(masm()->positions_recorder()); 2264 { PreservePositionScope scope(masm()->positions_recorder());
2321 VisitForStackValue(fun); 2265 VisitForStackValue(callee);
2322 } 2266 }
2323 // Load global receiver object. 2267 // Load global receiver object.
2324 __ ldr(r1, GlobalObjectOperand()); 2268 __ ldr(r1, GlobalObjectOperand());
2325 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); 2269 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2326 __ push(r1); 2270 __ push(r1);
2327 // Emit function call. 2271 // Emit function call.
2328 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); 2272 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2329 } 2273 }
2330 2274
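To keep the dispatch above straight: the if/else chain resolves the callee into one of several call strategies, namely a runtime context lookup followed by the call stub, a named-property call IC, a keyed-property call IC, or a plain CallFunction stub with the global receiver. A compilable toy classifier over the branches visible in this hunk (names are illustrative, not the V8 API):

  // Toy classifier for the call paths above (illustrative only).
  enum class CallPath {
    kLookupSlotStub,     // runtime kLoadContextSlot, then CallFunction stub
    kNamedPropertyIC,    // obj.name(...): call IC keyed on the name
    kKeyedPropertyIC,    // obj[expr](...): keyed call IC
    kArbitraryExprStub   // anything else: global receiver + CallFunction stub
  };

  CallPath Classify(bool is_lookup_slot, bool is_property, bool key_is_name) {
    if (is_lookup_slot) return CallPath::kLookupSlotStub;
    if (is_property) {
      return key_is_name ? CallPath::kNamedPropertyIC
                         : CallPath::kKeyedPropertyIC;
    }
    return CallPath::kArbitraryExprStub;
  }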
2331 #ifdef DEBUG 2275 #ifdef DEBUG
(...skipping 937 matching lines...)
3269 Register cache = r1; 3213 Register cache = r1;
3270 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX)); 3214 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
3271 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset)); 3215 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
3272 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 3216 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3273 __ ldr(cache, 3217 __ ldr(cache,
3274 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); 3218 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3275 3219
3276 3220
3277 Label done, not_found; 3221 Label done, not_found;
3278 // Check the entry at the cache finger first. 3222 // Check the entry at the cache finger first.
3279 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 3223 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3280 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); 3224 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3281 // r2 now holds finger offset as a smi. 3225 // r2 now holds finger offset as a smi.
3282 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 3226 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3283 // r3 now points to the start of fixed array elements. 3227 // r3 now points to the start of fixed array elements.
3284 __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex)); 3228 __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
3285 // Note side effect of PreIndex: r3 now points to the key of the pair. 3229 // Note side effect of PreIndex: r3 now points to the key of the pair.
3286 __ cmp(key, r2); 3230 __ cmp(key, r2);
3287 __ b(ne, &not_found); 3231 __ b(ne, &not_found);
3288 3232
3289 __ ldr(r0, MemOperand(r3, kPointerSize)); 3233 __ ldr(r0, MemOperand(r3, kPointerSize));
(...skipping 307 matching lines...)
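As context for the finger check above: a JSFunctionResultCache is laid out as a fixed array of key/value pairs plus a finger field that remembers the most recently hit entry, so a lookup probes the fingered pair before any slower search. A rough host-side model (hypothetical layout, not the exact V8 object):

  // Rough model of the finger-first probe (illustrative only).
  #include <cstddef>
  #include <vector>

  struct ResultCache {
    std::vector<const void*> entries;  // flat pairs: key0, value0, key1, ...
    std::size_t finger;                // index of the key of the last hit

    const void* Lookup(const void* key) const {
      if (entries[finger] == key) return entries[finger + 1];  // fast path
      // Slow path (the not_found code, partly skipped in this diff): scan
      // the pairs, move the finger, or fall back to the runtime.
      return nullptr;
    }
  };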
3597 ASSERT(result.is(r0)); 3541 ASSERT(result.is(r0));
3598 __ b(&done); 3542 __ b(&done);
3599 3543
3600 __ bind(&bailout); 3544 __ bind(&bailout);
3601 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3545 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3602 __ bind(&done); 3546 __ bind(&done);
3603 context()->Plug(r0); 3547 context()->Plug(r0);
3604 } 3548 }
3605 3549
3606 3550
3607 void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList<Expression*>* args) {
3608 ASSERT(args->length() == 1);
3609
3610 // Load the function into r0.
3611 VisitForAccumulatorValue(args->at(0));
3612
3613 // Prepare for the test.
3614 Label materialize_true, materialize_false;
3615 Label* if_true = NULL;
3616 Label* if_false = NULL;
3617 Label* fall_through = NULL;
3618 context()->PrepareTest(&materialize_true, &materialize_false,
3619 &if_true, &if_false, &fall_through);
3620
3621 // Test for strict mode function.
3622 __ ldr(r1, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3623 __ ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCompilerHintsOffset));
3624 __ tst(r1, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
3625 kSmiTagSize)));
3626 __ b(ne, if_true);
3627
3628 // Test for native function.
3629 __ tst(r1, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
3630 __ b(ne, if_true);
3631
3632 // Not native or strict-mode function.
3633 __ b(if_false);
3634
3635 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3636 context()->Plug(if_true, if_false);
3637 }
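One subtlety in the function deleted above: the compiler hints word is loaded as a smi, so every flag index is offset by kSmiTagSize before the tst. A compilable sketch of that bit test, with made-up bit positions:

  // Testing flags inside a smi-encoded hints word (bit indices are
  // illustrative, not the real SharedFunctionInfo layout).
  #include <cstdint>

  const int kSmiTagSize = 1;
  const int kStrictModeFunctionBit = 2;  // hypothetical
  const int kNativeBit = 3;              // hypothetical

  bool IsNativeOrStrict(uint32_t smi_hints) {
    uint32_t strict_mask = 1u << (kStrictModeFunctionBit + kSmiTagSize);
    uint32_t native_mask = 1u << (kNativeBit + kSmiTagSize);
    return (smi_hints & (strict_mask | native_mask)) != 0;
  }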
3638
3639
3640 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 3551 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3641 Handle<String> name = expr->name(); 3552 Handle<String> name = expr->name();
3642 if (name->length() > 0 && name->Get(0) == '_') { 3553 if (name->length() > 0 && name->Get(0) == '_') {
3643 Comment cmnt(masm_, "[ InlineRuntimeCall"); 3554 Comment cmnt(masm_, "[ InlineRuntimeCall");
3644 EmitInlineRuntimeCall(expr); 3555 EmitInlineRuntimeCall(expr);
3645 return; 3556 return;
3646 } 3557 }
3647 3558
3648 Comment cmnt(masm_, "[ CallRuntime"); 3559 Comment cmnt(masm_, "[ CallRuntime");
3649 ZoneList<Expression*>* args = expr->arguments(); 3560 ZoneList<Expression*>* args = expr->arguments();
(...skipping 27 matching lines...)
3677 __ CallRuntime(expr->function(), arg_count); 3588 __ CallRuntime(expr->function(), arg_count);
3678 } 3589 }
3679 context()->Plug(r0); 3590 context()->Plug(r0);
3680 } 3591 }
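For reference, the leading-underscore check at the top of VisitCallRuntime is what separates inlined intrinsics from calls into the C++ runtime; a trivial model of that dispatch:

  // "%_Foo" style names are inlined; "%Foo" goes through the runtime.
  #include <string>

  bool IsInlineIntrinsic(const std::string& name) {
    return !name.empty() && name[0] == '_';
  }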
3681 3592
3682 3593
3683 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3594 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3684 switch (expr->op()) { 3595 switch (expr->op()) {
3685 case Token::DELETE: { 3596 case Token::DELETE: {
3686 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 3597 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3687 Property* prop = expr->expression()->AsProperty(); 3598 Property* property = expr->expression()->AsProperty();
3688 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); 3599 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3689 3600
3690 if (prop != NULL) { 3601 if (property != NULL) {
3691 if (prop->is_synthetic()) { 3602 VisitForStackValue(property->obj());
3692 // Result of deleting parameters is false, even when they rewrite 3603 VisitForStackValue(property->key());
3693 // to accesses on the arguments object. 3604 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
3694 context()->Plug(false); 3605 __ push(r1);
3695 } else { 3606 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3696 VisitForStackValue(prop->obj()); 3607 context()->Plug(r0);
3697 VisitForStackValue(prop->key()); 3608 } else if (proxy != NULL) {
3698 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag()))); 3609 Variable* var = proxy->var();
3699 __ push(r1);
3700 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3701 context()->Plug(r0);
3702 }
3703 } else if (var != NULL) {
3704 // Delete of an unqualified identifier is disallowed in strict mode 3610 // Delete of an unqualified identifier is disallowed in strict mode
3705 // but "delete this" is. 3611 // but "delete this" is allowed.
3706 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); 3612 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
3707 if (var->is_global()) { 3613 if (var->IsUnallocated()) {
3708 __ ldr(r2, GlobalObjectOperand()); 3614 __ ldr(r2, GlobalObjectOperand());
3709 __ mov(r1, Operand(var->name())); 3615 __ mov(r1, Operand(var->name()));
3710 __ mov(r0, Operand(Smi::FromInt(kNonStrictMode))); 3616 __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
3711 __ Push(r2, r1, r0); 3617 __ Push(r2, r1, r0);
3712 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3618 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3713 context()->Plug(r0); 3619 context()->Plug(r0);
3714 } else if (var->AsSlot() != NULL && 3620 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3715 var->AsSlot()->type() != Slot::LOOKUP) {
3716 // Result of deleting non-global, non-dynamic variables is false. 3621 // Result of deleting non-global, non-dynamic variables is false; only 'delete this' yields true.
3717 // The subexpression does not have side effects. 3622 // The subexpression does not have side effects.
3718 context()->Plug(false); 3623 context()->Plug(var->is_this());
3719 } else { 3624 } else {
3720 // Non-global variable. Call the runtime to try to delete from the 3625 // Non-global variable. Call the runtime to try to delete from the
3721 // context where the variable was introduced. 3626 // context where the variable was introduced.
3722 __ push(context_register()); 3627 __ push(context_register());
3723 __ mov(r2, Operand(var->name())); 3628 __ mov(r2, Operand(var->name()));
3724 __ push(r2); 3629 __ push(r2);
3725 __ CallRuntime(Runtime::kDeleteContextSlot, 2); 3630 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3726 context()->Plug(r0); 3631 context()->Plug(r0);
3727 } 3632 }
3728 } else { 3633 } else {
(...skipping 254 matching lines...)
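The branches above track the language rule for delete: only property references and unresolvable (global-like) references can actually be removed; a stack- or context-allocated binding cannot be, so deleting it is a no-op producing false, with 'delete this' as the one permitted case that yields true. A compilable summary of the results computed in this hunk (categories are illustrative):

  // Summary of the delete results in the code above (illustrative).
  enum Target { PROPERTY, UNALLOCATED_GLOBAL, THIS_BINDING,
                STACK_OR_CONTEXT, LOOKUP_SLOT };

  const char* DeleteResult(Target t) {
    switch (t) {
      case PROPERTY:           return "whatever Builtins::DELETE returns";
      case UNALLOCATED_GLOBAL: return "true unless the property is non-deletable";
      case THIS_BINDING:       return "true ('delete this' is always allowed)";
      case STACK_OR_CONTEXT:   return "false (the binding cannot be removed)";
      case LOOKUP_SLOT:        return "decided by Runtime::kDeleteContextSlot";
    }
    return "";
  }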
3983 break; 3888 break;
3984 } 3889 }
3985 } 3890 }
3986 } 3891 }
3987 3892
3988 3893
3989 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 3894 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
3990 ASSERT(!context()->IsEffect()); 3895 ASSERT(!context()->IsEffect());
3991 ASSERT(!context()->IsTest()); 3896 ASSERT(!context()->IsTest());
3992 VariableProxy* proxy = expr->AsVariableProxy(); 3897 VariableProxy* proxy = expr->AsVariableProxy();
3993 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { 3898 if (proxy != NULL && proxy->var()->IsUnallocated()) {
3994 Comment cmnt(masm_, "Global variable"); 3899 Comment cmnt(masm_, "Global variable");
3995 __ ldr(r0, GlobalObjectOperand()); 3900 __ ldr(r0, GlobalObjectOperand());
3996 __ mov(r2, Operand(proxy->name())); 3901 __ mov(r2, Operand(proxy->name()));
3997 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 3902 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3998 // Use a regular load, not a contextual load, to avoid a reference 3903 // Use a regular load, not a contextual load, to avoid a reference
3999 // error. 3904 // error.
4000 __ Call(ic); 3905 __ Call(ic);
4001 PrepareForBailout(expr, TOS_REG); 3906 PrepareForBailout(expr, TOS_REG);
4002 context()->Plug(r0); 3907 context()->Plug(r0);
4003 } else if (proxy != NULL && 3908 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4004 proxy->var()->AsSlot() != NULL &&
4005 proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
4006 Label done, slow; 3909 Label done, slow;
4007 3910
4008 // Generate code for loading from variables potentially shadowed 3911 // Generate code for loading from variables potentially shadowed
4009 // by eval-introduced variables. 3912 // by eval-introduced variables.
4010 Slot* slot = proxy->var()->AsSlot(); 3913 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4011 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
4012 3914
4013 __ bind(&slow); 3915 __ bind(&slow);
4014 __ mov(r0, Operand(proxy->name())); 3916 __ mov(r0, Operand(proxy->name()));
4015 __ Push(cp, r0); 3917 __ Push(cp, r0);
4016 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 3918 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4017 PrepareForBailout(expr, TOS_REG); 3919 PrepareForBailout(expr, TOS_REG);
4018 __ bind(&done); 3920 __ bind(&done);
4019 3921
4020 context()->Plug(r0); 3922 context()->Plug(r0);
4021 } else { 3923 } else {
(...skipping 274 matching lines...)
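The "regular load, not a contextual load" comment above carries the whole point of this path: typeof x must evaluate to the string "undefined" for an undeclared global instead of throwing a ReferenceError, so the generated code performs a plain property load. A small model of the observable difference (hypothetical helpers):

  // Contextual load vs. typeof load of a global name (illustrative).
  #include <optional>
  #include <stdexcept>
  #include <string>

  std::optional<std::string> global_property;  // empty => name is undeclared

  std::string ContextualLoad() {               // models a bare x in source
    if (!global_property) throw std::runtime_error("ReferenceError");
    return *global_property;
  }

  std::string TypeofLoad() {                   // models typeof x
    if (!global_property) return "undefined";  // must not throw
    return *global_property;
  }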
4296 // ---------------------------------------------------------------------------- 4198 // ----------------------------------------------------------------------------
4297 // Non-local control flow support. 4199 // Non-local control flow support.
4298 4200
4299 void FullCodeGenerator::EnterFinallyBlock() { 4201 void FullCodeGenerator::EnterFinallyBlock() {
4300 ASSERT(!result_register().is(r1)); 4202 ASSERT(!result_register().is(r1));
4301 // Store result register while executing finally block. 4203 // Store result register while executing finally block.
4302 __ push(result_register()); 4204 __ push(result_register());
4303 // Cook the return address in lr and push it on the stack as a smi-encoded delta from the code object. 4205 // Cook the return address in lr and push it on the stack as a smi-encoded delta from the code object.
4304 __ sub(r1, lr, Operand(masm_->CodeObject())); 4206 __ sub(r1, lr, Operand(masm_->CodeObject()));
4305 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); 4207 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4306 ASSERT_EQ(0, kSmiTag); 4208 STATIC_ASSERT(kSmiTag == 0);
4307 __ add(r1, r1, Operand(r1)); // Convert to smi. 4209 __ add(r1, r1, Operand(r1)); // Convert to smi.
4308 __ push(r1); 4210 __ push(r1);
4309 } 4211 }
4310 4212
4311 4213
4312 void FullCodeGenerator::ExitFinallyBlock() { 4214 void FullCodeGenerator::ExitFinallyBlock() {
4313 ASSERT(!result_register().is(r1)); 4215 ASSERT(!result_register().is(r1));
4314 // Pop the cooked return address into r1. 4216 // Pop the cooked return address into r1.
4315 __ pop(r1); 4217 __ pop(r1);
4316 // Restore the result register, then uncook the return address and return. 4218 // Restore the result register, then uncook the return address and return.
4317 __ pop(result_register()); 4219 __ pop(result_register());
4318 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); 4220 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4319 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value. 4221 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value.
4320 __ add(pc, r1, Operand(masm_->CodeObject())); 4222 __ add(pc, r1, Operand(masm_->CodeObject()));
4321 } 4223 }
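The cook/uncook pair above stores the return address as a smi-encoded offset from the code object rather than as a raw code pointer, so the slot on the stack is a valid smi the GC will not misread, and it stays correct if the code object moves. The arithmetic, assuming 32-bit smis with a zero tag and a 1-bit tag size (matching the asserts above):

  // Smi cooking arithmetic from Enter/ExitFinallyBlock (32-bit sketch).
  #include <cstdint>

  uint32_t Cook(uint32_t lr, uint32_t code_object) {
    uint32_t delta = lr - code_object;  // position-independent offset
    return delta + delta;               // delta << 1, as in "add r1, r1, r1"
  }

  uint32_t Uncook(uint32_t cooked, uint32_t code_object) {
    return (cooked >> 1) + code_object; // "ASR #1", then add CodeObject
  }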
4322 4224
4323 4225
4324 #undef __ 4226 #undef __
4325 4227
4228 #define __ ACCESS_MASM(masm())
4229
4230 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4231 int* stack_depth,
4232 int* context_length) {
4233 // The macros used here must preserve the result register.
4234
4235 // Because the handler block contains the context of the finally
4236 // code, we can restore it directly from there, rather than
4237 // iteratively unwinding contexts via their previous
4238 // links.
4239 __ Drop(*stack_depth); // Down to the handler block.
4240 if (*context_length > 0) {
4241 // Restore the context to its dedicated register and the stack.
4242 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4243 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4244 }
4245 __ PopTryHandler();
4246 __ bl(finally_entry_);
4247
4248 *stack_depth = 0;
4249 *context_length = 0;
4250 return previous_;
4251 }
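As a reading aid for Exit above: each nested try/finally drops its own slice of the operand stack, restores the context from the handler block, runs its finally code via bl finally_entry_, and then reports zero remaining depth so enclosing levels do not drop those slots again. A toy model of that contract (hypothetical types):

  // Toy model of the TryFinally::Exit unwinding contract (illustrative).
  struct TryFinallyNode {
    TryFinallyNode* previous;  // enclosing nested statement, if any

    TryFinallyNode* Exit(int* stack_depth, int* context_length) {
      // ...drop *stack_depth slots, restore context, run the finally...
      *stack_depth = 0;     // handled here; nothing left for outer levels
      *context_length = 0;  // context already restored from the handler
      return previous;      // unwinding continues with the enclosing level
    }
  };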
4252
4253
4254 #undef __
4255
4326 } } // namespace v8::internal 4256 } } // namespace v8::internal
4327 4257
4328 #endif // V8_TARGET_ARCH_ARM 4258 #endif // V8_TARGET_ARCH_ARM