Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(4)

Side by Side Diff: src/ia32/full-codegen-ia32.cc

Issue 7860035: Merge bleeding edge up to 9192 into the GC branch. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/ia32/code-stubs-ia32.cc ('k') | src/ia32/ic-ia32.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 23 matching lines...) Expand all
34 #include "compiler.h" 34 #include "compiler.h"
35 #include "debug.h" 35 #include "debug.h"
36 #include "full-codegen.h" 36 #include "full-codegen.h"
37 #include "parser.h" 37 #include "parser.h"
38 #include "scopes.h" 38 #include "scopes.h"
39 #include "stub-cache.h" 39 #include "stub-cache.h"
40 40
41 namespace v8 { 41 namespace v8 {
42 namespace internal { 42 namespace internal {
43 43
44
45 #define __ ACCESS_MASM(masm_) 44 #define __ ACCESS_MASM(masm_)
46 45
47 46
48 static unsigned GetPropertyId(Property* property) { 47 static unsigned GetPropertyId(Property* property) {
49 if (property->is_synthetic()) return AstNode::kNoNumber;
50 return property->id(); 48 return property->id();
51 } 49 }
52 50
53 51
54 class JumpPatchSite BASE_EMBEDDED { 52 class JumpPatchSite BASE_EMBEDDED {
55 public: 53 public:
56 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { 54 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
57 #ifdef DEBUG 55 #ifdef DEBUG
58 info_emitted_ = false; 56 info_emitted_ = false;
59 #endif 57 #endif
(...skipping 126 matching lines...) Expand 10 before | Expand all | Expand 10 after
186 __ CallRuntime(Runtime::kNewFunctionContext, 1); 184 __ CallRuntime(Runtime::kNewFunctionContext, 1);
187 } 185 }
188 function_in_register = false; 186 function_in_register = false;
189 // Context is returned in both eax and esi. It replaces the context 187 // Context is returned in both eax and esi. It replaces the context
190 // passed to us. It's saved in the stack and kept live in esi. 188 // passed to us. It's saved in the stack and kept live in esi.
191 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); 189 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
192 190
193 // Copy parameters into context if necessary. 191 // Copy parameters into context if necessary.
194 int num_parameters = info->scope()->num_parameters(); 192 int num_parameters = info->scope()->num_parameters();
195 for (int i = 0; i < num_parameters; i++) { 193 for (int i = 0; i < num_parameters; i++) {
196 Slot* slot = scope()->parameter(i)->AsSlot(); 194 Variable* var = scope()->parameter(i);
197 if (slot != NULL && slot->type() == Slot::CONTEXT) { 195 if (var->IsContextSlot()) {
198 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 196 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
199 (num_parameters - 1 - i) * kPointerSize; 197 (num_parameters - 1 - i) * kPointerSize;
200 // Load parameter from stack. 198 // Load parameter from stack.
201 __ mov(eax, Operand(ebp, parameter_offset)); 199 __ mov(eax, Operand(ebp, parameter_offset));
202 // Store it in the context. 200 // Store it in the context.
203 int context_offset = Context::SlotOffset(slot->index()); 201 int context_offset = Context::SlotOffset(var->index());
204 __ mov(Operand(esi, context_offset), eax); 202 __ mov(Operand(esi, context_offset), eax);
205 // Update the write barrier. This clobbers eax and ebx. 203 // Update the write barrier. This clobbers eax and ebx.
206 __ RecordWriteContextSlot(esi, 204 __ RecordWriteContextSlot(esi,
207 context_offset, 205 context_offset,
208 eax, 206 eax,
209 ebx, 207 ebx,
210 kDontSaveFPRegs); 208 kDontSaveFPRegs);
211 } 209 }
212 } 210 }
213 } 211 }
(...skipping 22 matching lines...) Expand all
236 if (is_strict_mode()) { 234 if (is_strict_mode()) {
237 type = ArgumentsAccessStub::NEW_STRICT; 235 type = ArgumentsAccessStub::NEW_STRICT;
238 } else if (function()->has_duplicate_parameters()) { 236 } else if (function()->has_duplicate_parameters()) {
239 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; 237 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
240 } else { 238 } else {
241 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; 239 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
242 } 240 }
243 ArgumentsAccessStub stub(type); 241 ArgumentsAccessStub stub(type);
244 __ CallStub(&stub); 242 __ CallStub(&stub);
245 243
246 Move(arguments->AsSlot(), eax, ebx, edx); 244 SetVar(arguments, eax, ebx, edx);
247 } 245 }
248 246
249 if (FLAG_trace) { 247 if (FLAG_trace) {
250 __ CallRuntime(Runtime::kTraceEnter, 0); 248 __ CallRuntime(Runtime::kTraceEnter, 0);
251 } 249 }
252 250
253 // Visit the declarations and body unless there is an illegal 251 // Visit the declarations and body unless there is an illegal
254 // redeclaration. 252 // redeclaration.
255 if (scope()->HasIllegalRedeclaration()) { 253 if (scope()->HasIllegalRedeclaration()) {
256 Comment cmnt(masm_, "[ Declarations"); 254 Comment cmnt(masm_, "[ Declarations");
257 scope()->VisitIllegalRedeclaration(this); 255 scope()->VisitIllegalRedeclaration(this);
258 256
259 } else { 257 } else {
258 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
260 { Comment cmnt(masm_, "[ Declarations"); 259 { Comment cmnt(masm_, "[ Declarations");
261 // For named function expressions, declare the function name as a 260 // For named function expressions, declare the function name as a
262 // constant. 261 // constant.
263 if (scope()->is_function_scope() && scope()->function() != NULL) { 262 if (scope()->is_function_scope() && scope()->function() != NULL) {
264 EmitDeclaration(scope()->function(), Variable::CONST, NULL); 263 int ignored = 0;
264 EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored);
265 } 265 }
266 VisitDeclarations(scope()->declarations()); 266 VisitDeclarations(scope()->declarations());
267 } 267 }
268 268
269 { Comment cmnt(masm_, "[ Stack check"); 269 { Comment cmnt(masm_, "[ Stack check");
270 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); 270 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
271 Label ok; 271 Label ok;
272 ExternalReference stack_limit = 272 ExternalReference stack_limit =
273 ExternalReference::address_of_stack_limit(isolate()); 273 ExternalReference::address_of_stack_limit(isolate());
274 __ cmp(esp, Operand::StaticVariable(stack_limit)); 274 __ cmp(esp, Operand::StaticVariable(stack_limit));
275 __ j(above_equal, &ok, Label::kNear); 275 __ j(above_equal, &ok, Label::kNear);
276 StackCheckStub stub; 276 StackCheckStub stub;
277 __ CallStub(&stub); 277 __ CallStub(&stub);
278 __ bind(&ok); 278 __ bind(&ok);
279 } 279 }
280 280
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after
366 366
367 void FullCodeGenerator::verify_stack_height() { 367 void FullCodeGenerator::verify_stack_height() {
368 ASSERT(FLAG_verify_stack_height); 368 ASSERT(FLAG_verify_stack_height);
369 __ sub(Operand(ebp), Immediate(kPointerSize * stack_height())); 369 __ sub(Operand(ebp), Immediate(kPointerSize * stack_height()));
370 __ cmp(ebp, Operand(esp)); 370 __ cmp(ebp, Operand(esp));
371 __ Assert(equal, "Full codegen stack height not as expected."); 371 __ Assert(equal, "Full codegen stack height not as expected.");
372 __ add(Operand(ebp), Immediate(kPointerSize * stack_height())); 372 __ add(Operand(ebp), Immediate(kPointerSize * stack_height()));
373 } 373 }
374 374
375 375
376 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { 376 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
377 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
377 } 378 }
378 379
379 380
380 void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const { 381 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
381 MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register()); 382 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
382 __ mov(result_register(), slot_operand); 383 codegen()->GetVar(result_register(), var);
383 } 384 }
384 385
385 386
386 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const { 387 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
387 MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register()); 388 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
389 MemOperand operand = codegen()->VarOperand(var, result_register());
388 // Memory operands can be pushed directly. 390 // Memory operands can be pushed directly.
389 __ push(slot_operand); 391 __ push(operand);
390 codegen()->increment_stack_height(); 392 codegen()->increment_stack_height();
391 } 393 }
392 394
393 395
394 void FullCodeGenerator::TestContext::Plug(Slot* slot) const { 396 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
395 // For simplicity we always test the accumulator register. 397 // For simplicity we always test the accumulator register.
396 codegen()->Move(result_register(), slot); 398 codegen()->GetVar(result_register(), var);
397 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); 399 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
398 codegen()->DoTest(this); 400 codegen()->DoTest(this);
399 } 401 }
400 402
401 403
402 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 404 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
403 UNREACHABLE(); // Not used on IA32. 405 UNREACHABLE(); // Not used on IA32.
404 } 406 }
405 407
406 408
(...skipping 203 matching lines...) Expand 10 before | Expand all | Expand 10 after
610 __ j(cc, if_true); 612 __ j(cc, if_true);
611 } else if (if_true == fall_through) { 613 } else if (if_true == fall_through) {
612 __ j(NegateCondition(cc), if_false); 614 __ j(NegateCondition(cc), if_false);
613 } else { 615 } else {
614 __ j(cc, if_true); 616 __ j(cc, if_true);
615 __ jmp(if_false); 617 __ jmp(if_false);
616 } 618 }
617 } 619 }
618 620
619 621
620 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) { 622 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
621 switch (slot->type()) { 623 ASSERT(var->IsStackAllocated());
622 case Slot::PARAMETER: 624 // Offset is negative because higher indexes are at lower addresses.
623 case Slot::LOCAL: 625 int offset = -var->index() * kPointerSize;
624 return Operand(ebp, SlotOffset(slot)); 626 // Adjust by a (parameter or local) base offset.
625 case Slot::CONTEXT: { 627 if (var->IsParameter()) {
626 int context_chain_length = 628 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
627 scope()->ContextChainLength(slot->var()->scope()); 629 } else {
628 __ LoadContext(scratch, context_chain_length); 630 offset += JavaScriptFrameConstants::kLocal0Offset;
629 return ContextOperand(scratch, slot->index());
630 }
631 case Slot::LOOKUP:
632 UNREACHABLE();
633 } 631 }
634 UNREACHABLE(); 632 return Operand(ebp, offset);
635 return Operand(eax, 0);
636 } 633 }
637 634
638 635
639 void FullCodeGenerator::Move(Register destination, Slot* source) { 636 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
640 MemOperand location = EmitSlotSearch(source, destination); 637 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
641 __ mov(destination, location); 638 if (var->IsContextSlot()) {
639 int context_chain_length = scope()->ContextChainLength(var->scope());
640 __ LoadContext(scratch, context_chain_length);
641 return ContextOperand(scratch, var->index());
642 } else {
643 return StackOperand(var);
644 }
642 } 645 }
643 646
644 647
645 void FullCodeGenerator::Move(Slot* dst, 648 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
646 Register src, 649 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
647 Register scratch1, 650 MemOperand location = VarOperand(var, dest);
648 Register scratch2) { 651 __ mov(dest, location);
649 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented. 652 }
650 ASSERT(!scratch1.is(src) && !scratch2.is(src)); 653
651 MemOperand location = EmitSlotSearch(dst, scratch1); 654
655 void FullCodeGenerator::SetVar(Variable* var,
656 Register src,
657 Register scratch0,
658 Register scratch1) {
659 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
660 ASSERT(!scratch0.is(src));
661 ASSERT(!scratch0.is(scratch1));
662 ASSERT(!scratch1.is(src));
663 MemOperand location = VarOperand(var, scratch0);
652 __ mov(location, src); 664 __ mov(location, src);
653 665
654 // Emit the write barrier code if the location is in the heap. 666 // Emit the write barrier code if the location is in the heap.
655 if (dst->type() == Slot::CONTEXT) { 667 if (var->IsContextSlot()) {
656 int offset = Context::SlotOffset(dst->index()); 668 int offset = Context::SlotOffset(var->index());
657 ASSERT(!scratch1.is(esi) && !src.is(esi) && !scratch2.is(esi)); 669 ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
658 __ RecordWriteContextSlot(scratch1, offset, src, scratch2, kDontSaveFPRegs); 670 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
659 } 671 }
660 } 672 }
661 673
662 674
663 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, 675 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
664 bool should_normalize, 676 bool should_normalize,
665 Label* if_true, 677 Label* if_true,
666 Label* if_false) { 678 Label* if_false) {
667 // Only prepare for bailouts before splits if we're in a test 679 // Only prepare for bailouts before splits if we're in a test
668 // context. Otherwise, we let the Visit function deal with the 680 // context. Otherwise, we let the Visit function deal with the
(...skipping 10 matching lines...) Expand all
679 } 691 }
680 692
681 if (should_normalize) { 693 if (should_normalize) {
682 __ cmp(eax, isolate()->factory()->true_value()); 694 __ cmp(eax, isolate()->factory()->true_value());
683 Split(equal, if_true, if_false, NULL); 695 Split(equal, if_true, if_false, NULL);
684 __ bind(&skip); 696 __ bind(&skip);
685 } 697 }
686 } 698 }
687 699
688 700
689 void FullCodeGenerator::EmitDeclaration(Variable* variable, 701 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
690 Variable::Mode mode, 702 Variable::Mode mode,
691 FunctionLiteral* function) { 703 FunctionLiteral* function,
692 Comment cmnt(masm_, "[ Declaration"); 704 int* global_count) {
693 ASSERT(variable != NULL); // Must have been resolved. 705 // If it was not possible to allocate the variable at compile time, we
694 Slot* slot = variable->AsSlot(); 706 // need to "declare" it at runtime to make sure it actually exists in the
695 Property* prop = variable->AsProperty(); 707 // local context.
708 Variable* variable = proxy->var();
709 switch (variable->location()) {
710 case Variable::UNALLOCATED:
711 ++(*global_count);
712 break;
696 713
697 if (slot != NULL) { 714 case Variable::PARAMETER:
698 switch (slot->type()) { 715 case Variable::LOCAL:
699 case Slot::PARAMETER: 716 if (function != NULL) {
700 case Slot::LOCAL: 717 Comment cmnt(masm_, "[ Declaration");
701 if (mode == Variable::CONST) { 718 VisitForAccumulatorValue(function);
702 __ mov(Operand(ebp, SlotOffset(slot)), 719 __ mov(StackOperand(variable), result_register());
703 Immediate(isolate()->factory()->the_hole_value())); 720 } else if (mode == Variable::CONST || mode == Variable::LET) {
704 } else if (function != NULL) { 721 Comment cmnt(masm_, "[ Declaration");
705 VisitForAccumulatorValue(function); 722 __ mov(StackOperand(variable),
706 __ mov(Operand(ebp, SlotOffset(slot)), result_register()); 723 Immediate(isolate()->factory()->the_hole_value()));
707 } 724 }
708 break; 725 break;
709 726
710 case Slot::CONTEXT: 727 case Variable::CONTEXT:
711 // We bypass the general EmitSlotSearch because we know more about 728 // The variable in the decl always resides in the current function
712 // this specific context. 729 // context.
730 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
731 if (FLAG_debug_code) {
732 // Check that we're not inside a with or catch context.
733 __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
734 __ cmp(ebx, isolate()->factory()->with_context_map());
735 __ Check(not_equal, "Declaration in with context.");
736 __ cmp(ebx, isolate()->factory()->catch_context_map());
737 __ Check(not_equal, "Declaration in catch context.");
738 }
739 if (function != NULL) {
740 Comment cmnt(masm_, "[ Declaration");
741 VisitForAccumulatorValue(function);
742 __ mov(ContextOperand(esi, variable->index()), result_register());
743 // We know that we have written a function, which is not a smi.
744 __ RecordWriteContextSlot(esi,
745 Context::SlotOffset(variable->index()),
746 result_register(),
747 ecx,
748 kDontSaveFPRegs,
749 EMIT_REMEMBERED_SET,
750 OMIT_SMI_CHECK);
751 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
752 } else if (mode == Variable::CONST || mode == Variable::LET) {
753 Comment cmnt(masm_, "[ Declaration");
754 __ mov(ContextOperand(esi, variable->index()),
755 Immediate(isolate()->factory()->the_hole_value()));
756 // No write barrier since the hole value is in old space.
757 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
758 }
759 break;
713 760
714 // The variable in the decl always resides in the current function 761 case Variable::LOOKUP: {
715 // context. 762 Comment cmnt(masm_, "[ Declaration");
716 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); 763 __ push(esi);
717 if (FLAG_debug_code) { 764 __ push(Immediate(variable->name()));
718 // Check that we're not inside a with or catch context. 765 // Declaration nodes are always introduced in one of three modes.
719 __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset)); 766 ASSERT(mode == Variable::VAR ||
720 __ cmp(ebx, isolate()->factory()->with_context_map()); 767 mode == Variable::CONST ||
721 __ Check(not_equal, "Declaration in with context."); 768 mode == Variable::LET);
722 __ cmp(ebx, isolate()->factory()->catch_context_map()); 769 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
723 __ Check(not_equal, "Declaration in catch context."); 770 __ push(Immediate(Smi::FromInt(attr)));
724 } 771 // Push initial value, if any.
725 if (mode == Variable::CONST) { 772 // Note: For variables we must not push an initial value (such as
726 __ mov(ContextOperand(esi, slot->index()), 773 // 'undefined') because we may have a (legal) redeclaration and we
727 Immediate(isolate()->factory()->the_hole_value())); 774 // must not destroy the current value.
728 // No write barrier since the hole value is in old space, root and is 775 increment_stack_height(3);
729 // never evacuated from the page. 776 if (function != NULL) {
730 } else if (function != NULL) { 777 VisitForStackValue(function);
731 VisitForAccumulatorValue(function); 778 } else if (mode == Variable::CONST || mode == Variable::LET) {
732 __ mov(ContextOperand(esi, slot->index()), result_register()); 779 __ push(Immediate(isolate()->factory()->the_hole_value()));
733 int offset = Context::SlotOffset(slot->index()); 780 increment_stack_height();
734 // We know that we have written a function, which is not a smi. 781 } else {
735 __ RecordWriteContextSlot(esi, 782 __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
736 offset, 783 increment_stack_height();
737 result_register(),
738 ecx,
739 kDontSaveFPRegs,
740 EMIT_REMEMBERED_SET,
741 OMIT_SMI_CHECK);
742 }
743 break;
744
745 case Slot::LOOKUP: {
746 __ push(esi);
747 __ push(Immediate(variable->name()));
748 // Declaration nodes are always introduced in one of two modes.
749 ASSERT(mode == Variable::VAR ||
750 mode == Variable::CONST ||
751 mode == Variable::LET);
752 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
753 __ push(Immediate(Smi::FromInt(attr)));
754 // Push initial value, if any.
755 // Note: For variables we must not push an initial value (such as
756 // 'undefined') because we may have a (legal) redeclaration and we
757 // must not destroy the current value.
758 increment_stack_height(3);
759 if (mode == Variable::CONST) {
760 __ push(Immediate(isolate()->factory()->the_hole_value()));
761 increment_stack_height();
762 } else if (function != NULL) {
763 VisitForStackValue(function);
764 } else {
765 __ push(Immediate(Smi::FromInt(0))); // No initial value!
766 increment_stack_height();
767 }
768 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
769 decrement_stack_height(4);
770 break;
771 } 784 }
772 } 785 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
773 786 decrement_stack_height(4);
774 } else if (prop != NULL) { 787 break;
775 // A const declaration aliasing a parameter is an illegal redeclaration.
776 ASSERT(mode != Variable::CONST);
777 if (function != NULL) {
778 // We are declaring a function that rewrites to a property.
779 // Use (keyed) IC to set the initial value. We cannot visit the
780 // rewrite because it's shared and we risk recording duplicate AST
781 // IDs for bailouts from optimized code.
782 ASSERT(prop->obj()->AsVariableProxy() != NULL);
783 { AccumulatorValueContext for_object(this);
784 EmitVariableLoad(prop->obj()->AsVariableProxy());
785 }
786
787 __ push(eax);
788 increment_stack_height();
789 VisitForAccumulatorValue(function);
790 __ pop(edx);
791 decrement_stack_height();
792
793 ASSERT(prop->key()->AsLiteral() != NULL &&
794 prop->key()->AsLiteral()->handle()->IsSmi());
795 __ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle()));
796
797 Handle<Code> ic = is_strict_mode()
798 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
799 : isolate()->builtins()->KeyedStoreIC_Initialize();
800 __ call(ic);
801 } 788 }
802 } 789 }
803 } 790 }
804 791
805 792
806 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { 793 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
807 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
808 }
809 794
810 795
811 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 796 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
812 // Call the runtime to declare the globals. 797 // Call the runtime to declare the globals.
813 __ push(esi); // The context is the first argument. 798 __ push(esi); // The context is the first argument.
814 __ push(Immediate(pairs)); 799 __ push(Immediate(pairs));
815 __ push(Immediate(Smi::FromInt(is_eval() ? 1 : 0))); 800 __ push(Immediate(Smi::FromInt(DeclareGlobalsFlags())));
816 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); 801 __ CallRuntime(Runtime::kDeclareGlobals, 3);
817 __ CallRuntime(Runtime::kDeclareGlobals, 4);
818 // Return value is ignored. 802 // Return value is ignored.
819 } 803 }
820 804
821 805
822 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 806 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
823 Comment cmnt(masm_, "[ SwitchStatement"); 807 Comment cmnt(masm_, "[ SwitchStatement");
824 Breakable nested_statement(this, stmt); 808 Breakable nested_statement(this, stmt);
825 SetStatementPosition(stmt); 809 SetStatementPosition(stmt);
826 810
827 int switch_clause_stack_height = stack_height(); 811 int switch_clause_stack_height = stack_height();
(...skipping 278 matching lines...) Expand 10 before | Expand all | Expand 10 after
1106 context()->Plug(eax); 1090 context()->Plug(eax);
1107 } 1091 }
1108 1092
1109 1093
1110 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1094 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1111 Comment cmnt(masm_, "[ VariableProxy"); 1095 Comment cmnt(masm_, "[ VariableProxy");
1112 EmitVariableLoad(expr); 1096 EmitVariableLoad(expr);
1113 } 1097 }
1114 1098
1115 1099
1116 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( 1100 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1117 Slot* slot, 1101 TypeofState typeof_state,
1118 TypeofState typeof_state, 1102 Label* slow) {
1119 Label* slow) {
1120 Register context = esi; 1103 Register context = esi;
1121 Register temp = edx; 1104 Register temp = edx;
1122 1105
1123 Scope* s = scope(); 1106 Scope* s = scope();
1124 while (s != NULL) { 1107 while (s != NULL) {
1125 if (s->num_heap_slots() > 0) { 1108 if (s->num_heap_slots() > 0) {
1126 if (s->calls_eval()) { 1109 if (s->calls_eval()) {
1127 // Check that extension is NULL. 1110 // Check that extension is NULL.
1128 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), 1111 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1129 Immediate(0)); 1112 Immediate(0));
(...skipping 28 matching lines...) Expand all
1158 __ j(not_equal, slow); 1141 __ j(not_equal, slow);
1159 // Load next context in chain. 1142 // Load next context in chain.
1160 __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX)); 1143 __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1161 __ jmp(&next); 1144 __ jmp(&next);
1162 __ bind(&fast); 1145 __ bind(&fast);
1163 } 1146 }
1164 1147
1165 // All extension objects were empty and it is safe to use a global 1148 // All extension objects were empty and it is safe to use a global
1166 // load IC call. 1149 // load IC call.
1167 __ mov(eax, GlobalObjectOperand()); 1150 __ mov(eax, GlobalObjectOperand());
1168 __ mov(ecx, slot->var()->name()); 1151 __ mov(ecx, var->name());
1169 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1152 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1170 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) 1153 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1171 ? RelocInfo::CODE_TARGET 1154 ? RelocInfo::CODE_TARGET
1172 : RelocInfo::CODE_TARGET_CONTEXT; 1155 : RelocInfo::CODE_TARGET_CONTEXT;
1173 __ call(ic, mode); 1156 __ call(ic, mode);
1174 } 1157 }
1175 1158
1176 1159
1177 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( 1160 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1178 Slot* slot, 1161 Label* slow) {
1179 Label* slow) { 1162 ASSERT(var->IsContextSlot());
1180 ASSERT(slot->type() == Slot::CONTEXT);
1181 Register context = esi; 1163 Register context = esi;
1182 Register temp = ebx; 1164 Register temp = ebx;
1183 1165
1184 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { 1166 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1185 if (s->num_heap_slots() > 0) { 1167 if (s->num_heap_slots() > 0) {
1186 if (s->calls_eval()) { 1168 if (s->calls_eval()) {
1187 // Check that extension is NULL. 1169 // Check that extension is NULL.
1188 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), 1170 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1189 Immediate(0)); 1171 Immediate(0));
1190 __ j(not_equal, slow); 1172 __ j(not_equal, slow);
1191 } 1173 }
1192 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX)); 1174 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1193 // Walk the rest of the chain without clobbering esi. 1175 // Walk the rest of the chain without clobbering esi.
1194 context = temp; 1176 context = temp;
1195 } 1177 }
1196 } 1178 }
1197 // Check that last extension is NULL. 1179 // Check that last extension is NULL.
1198 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0)); 1180 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1199 __ j(not_equal, slow); 1181 __ j(not_equal, slow);
1200 1182
1201 // This function is used only for loads, not stores, so it's safe to 1183 // This function is used only for loads, not stores, so it's safe to
1202 // return an esi-based operand (the write barrier cannot be allowed to 1184 // return an esi-based operand (the write barrier cannot be allowed to
1203 // destroy the esi register). 1185 // destroy the esi register).
1204 return ContextOperand(context, slot->index()); 1186 return ContextOperand(context, var->index());
1205 } 1187 }
1206 1188
1207 1189
1208 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( 1190 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1209 Slot* slot, 1191 TypeofState typeof_state,
1210 TypeofState typeof_state, 1192 Label* slow,
1211 Label* slow, 1193 Label* done) {
1212 Label* done) {
1213 // Generate fast-case code for variables that might be shadowed by 1194 // Generate fast-case code for variables that might be shadowed by
1214 // eval-introduced variables. Eval is used a lot without 1195 // eval-introduced variables. Eval is used a lot without
1215 // introducing variables. In those cases, we do not want to 1196 // introducing variables. In those cases, we do not want to
1216 // perform a runtime call for all variables in the scope 1197 // perform a runtime call for all variables in the scope
1217 // containing the eval. 1198 // containing the eval.
1218 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { 1199 if (var->mode() == Variable::DYNAMIC_GLOBAL) {
1219 EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow); 1200 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1220 __ jmp(done); 1201 __ jmp(done);
1221 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { 1202 } else if (var->mode() == Variable::DYNAMIC_LOCAL) {
1222 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot(); 1203 Variable* local = var->local_if_not_shadowed();
1223 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite(); 1204 __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1224 if (potential_slot != NULL) { 1205 if (local->mode() == Variable::CONST) {
1225 // Generate fast case for locals that rewrite to slots. 1206 __ cmp(eax, isolate()->factory()->the_hole_value());
1226 __ mov(eax, 1207 __ j(not_equal, done);
1227 ContextSlotOperandCheckExtensions(potential_slot, slow)); 1208 __ mov(eax, isolate()->factory()->undefined_value());
1228 if (potential_slot->var()->mode() == Variable::CONST) {
1229 __ cmp(eax, isolate()->factory()->the_hole_value());
1230 __ j(not_equal, done);
1231 __ mov(eax, isolate()->factory()->undefined_value());
1232 }
1233 __ jmp(done);
1234 } else if (rewrite != NULL) {
1235 // Generate fast case for calls of an argument function.
1236 Property* property = rewrite->AsProperty();
1237 if (property != NULL) {
1238 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1239 Literal* key_literal = property->key()->AsLiteral();
1240 if (obj_proxy != NULL &&
1241 key_literal != NULL &&
1242 obj_proxy->IsArguments() &&
1243 key_literal->handle()->IsSmi()) {
1244 // Load arguments object if there are no eval-introduced
1245 // variables. Then load the argument from the arguments
1246 // object using keyed load.
1247 __ mov(edx,
1248 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
1249 slow));
1250 __ SafeSet(eax, Immediate(key_literal->handle()));
1251 Handle<Code> ic =
1252 isolate()->builtins()->KeyedLoadIC_Initialize();
1253 __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1254 __ jmp(done);
1255 }
1256 }
1257 } 1209 }
1210 __ jmp(done);
1258 } 1211 }
1259 } 1212 }
1260 1213
1261 1214
1262 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1215 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1263 // Record position before possible IC call. 1216 // Record position before possible IC call.
1264 SetSourcePosition(proxy->position()); 1217 SetSourcePosition(proxy->position());
1265 Variable* var = proxy->var(); 1218 Variable* var = proxy->var();
1266 1219
1267 // Three cases: non-this global variables, lookup slots, and all other 1220 // Three cases: global variables, lookup variables, and all other types of
1268 // types of slots. 1221 // variables.
1269 Slot* slot = var->AsSlot(); 1222 switch (var->location()) {
1270 ASSERT((var->is_global() && !var->is_this()) == (slot == NULL)); 1223 case Variable::UNALLOCATED: {
1224 Comment cmnt(masm_, "Global variable");
1225 // Use inline caching. Variable name is passed in ecx and the global
1226 // object in eax.
1227 __ mov(eax, GlobalObjectOperand());
1228 __ mov(ecx, var->name());
1229 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1230 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1231 context()->Plug(eax);
1232 break;
1233 }
1271 1234
1272 if (slot == NULL) { 1235 case Variable::PARAMETER:
1273 Comment cmnt(masm_, "Global variable"); 1236 case Variable::LOCAL:
1274 // Use inline caching. Variable name is passed in ecx and the global 1237 case Variable::CONTEXT: {
1275 // object on the stack. 1238 Comment cmnt(masm_, var->IsContextSlot()
1276 __ mov(eax, GlobalObjectOperand()); 1239 ? "Context variable"
1277 __ mov(ecx, var->name()); 1240 : "Stack variable");
1278 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1241 if (var->mode() != Variable::LET && var->mode() != Variable::CONST) {
1279 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1242 context()->Plug(var);
1280 context()->Plug(eax); 1243 } else {
1244 // Let and const need a read barrier.
1245 Label done;
1246 GetVar(eax, var);
1247 __ cmp(eax, isolate()->factory()->the_hole_value());
1248 __ j(not_equal, &done, Label::kNear);
1249 if (var->mode() == Variable::LET) {
1250 __ push(Immediate(var->name()));
1251 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1252 } else { // Variable::CONST
1253 __ mov(eax, isolate()->factory()->undefined_value());
1254 }
1255 __ bind(&done);
1256 context()->Plug(eax);
1257 }
1258 break;
1259 }
1281 1260
1282 } else if (slot->type() == Slot::LOOKUP) { 1261 case Variable::LOOKUP: {
1283 Label done, slow; 1262 Label done, slow;
1284 1263 // Generate code for loading from variables potentially shadowed
1285 // Generate code for loading from variables potentially shadowed 1264 // by eval-introduced variables.
1286 // by eval-introduced variables. 1265 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1287 EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done); 1266 __ bind(&slow);
1288 1267 Comment cmnt(masm_, "Lookup variable");
1289 __ bind(&slow); 1268 __ push(esi); // Context.
1290 Comment cmnt(masm_, "Lookup slot"); 1269 __ push(Immediate(var->name()));
1291 __ push(esi); // Context. 1270 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1292 __ push(Immediate(var->name()));
1293 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1294 __ bind(&done);
1295
1296 context()->Plug(eax);
1297
1298 } else {
1299 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1300 ? "Context slot"
1301 : "Stack slot");
1302 if (var->mode() == Variable::CONST) {
1303 // Constants may be the hole value if they have not been initialized.
1304 // Unhole them.
1305 Label done;
1306 MemOperand slot_operand = EmitSlotSearch(slot, eax);
1307 __ mov(eax, slot_operand);
1308 __ cmp(eax, isolate()->factory()->the_hole_value());
1309 __ j(not_equal, &done, Label::kNear);
1310 __ mov(eax, isolate()->factory()->undefined_value());
1311 __ bind(&done); 1271 __ bind(&done);
1312 context()->Plug(eax); 1272 context()->Plug(eax);
1313 } else { 1273 break;
1314 context()->Plug(slot);
1315 } 1274 }
1316 } 1275 }
1317 } 1276 }
1318 1277
1319 1278
1320 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1279 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1321 Comment cmnt(masm_, "[ RegExpLiteral"); 1280 Comment cmnt(masm_, "[ RegExpLiteral");
1322 Label materialized; 1281 Label materialized;
1323 // Registers will be used as follows: 1282 // Registers will be used as follows:
1324 // edi = JS function. 1283 // edi = JS function.
(...skipping 501 matching lines...) Expand 10 before | Expand all | Expand 10 after
1826 __ mov(ecx, prop->key()->AsLiteral()->handle()); 1785 __ mov(ecx, prop->key()->AsLiteral()->handle());
1827 Handle<Code> ic = is_strict_mode() 1786 Handle<Code> ic = is_strict_mode()
1828 ? isolate()->builtins()->StoreIC_Initialize_Strict() 1787 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1829 : isolate()->builtins()->StoreIC_Initialize(); 1788 : isolate()->builtins()->StoreIC_Initialize();
1830 __ call(ic); 1789 __ call(ic);
1831 break; 1790 break;
1832 } 1791 }
1833 case KEYED_PROPERTY: { 1792 case KEYED_PROPERTY: {
1834 __ push(eax); // Preserve value. 1793 __ push(eax); // Preserve value.
1835 increment_stack_height(); 1794 increment_stack_height();
1836 if (prop->is_synthetic()) { 1795 VisitForStackValue(prop->obj());
1837 ASSERT(prop->obj()->AsVariableProxy() != NULL); 1796 VisitForAccumulatorValue(prop->key());
1838 ASSERT(prop->key()->AsLiteral() != NULL); 1797 __ mov(ecx, eax);
1839 { AccumulatorValueContext for_object(this); 1798 __ pop(edx);
1840 EmitVariableLoad(prop->obj()->AsVariableProxy()); 1799 decrement_stack_height();
1841 }
1842 __ mov(edx, eax);
1843 __ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle()));
1844 } else {
1845 VisitForStackValue(prop->obj());
1846 VisitForAccumulatorValue(prop->key());
1847 __ mov(ecx, eax);
1848 __ pop(edx);
1849 decrement_stack_height();
1850 }
1851 __ pop(eax); // Restore value. 1800 __ pop(eax); // Restore value.
1852 decrement_stack_height(); 1801 decrement_stack_height();
1853 Handle<Code> ic = is_strict_mode() 1802 Handle<Code> ic = is_strict_mode()
1854 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() 1803 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1855 : isolate()->builtins()->KeyedStoreIC_Initialize(); 1804 : isolate()->builtins()->KeyedStoreIC_Initialize();
1856 __ call(ic); 1805 __ call(ic);
1857 break; 1806 break;
1858 } 1807 }
1859 } 1808 }
1860 PrepareForBailoutForId(bailout_ast_id, TOS_REG); 1809 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1861 context()->Plug(eax); 1810 context()->Plug(eax);
1862 } 1811 }
1863 1812
1864 1813
1865 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 1814 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1866 Token::Value op) { 1815 Token::Value op) {
1867 ASSERT(var != NULL); 1816 if (var->IsUnallocated()) {
1868 ASSERT(var->is_global() || var->AsSlot() != NULL); 1817 // Global var, const, or let.
1869
1870 if (var->is_global()) {
1871 ASSERT(!var->is_this());
1872 // Assignment to a global variable. Use inline caching for the
1873 // assignment. Right-hand-side value is passed in eax, variable name in
1874 // ecx, and the global object on the stack.
1875 __ mov(ecx, var->name()); 1818 __ mov(ecx, var->name());
1876 __ mov(edx, GlobalObjectOperand()); 1819 __ mov(edx, GlobalObjectOperand());
1877 Handle<Code> ic = is_strict_mode() 1820 Handle<Code> ic = is_strict_mode()
1878 ? isolate()->builtins()->StoreIC_Initialize_Strict() 1821 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1879 : isolate()->builtins()->StoreIC_Initialize(); 1822 : isolate()->builtins()->StoreIC_Initialize();
1880 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1823 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1881 1824
1882 } else if (op == Token::INIT_CONST) { 1825 } else if (op == Token::INIT_CONST) {
1883 // Like var declarations, const declarations are hoisted to function 1826 // Const initializers need a write barrier.
1884 // scope. However, unlike var initializers, const initializers are able 1827 ASSERT(!var->IsParameter()); // No const parameters.
1885 // to drill a hole to that function context, even from inside a 'with' 1828 if (var->IsStackLocal()) {
1886 // context. We thus bypass the normal static scope lookup. 1829 Label skip;
1887 Slot* slot = var->AsSlot(); 1830 __ mov(edx, StackOperand(var));
1888 Label skip; 1831 __ cmp(edx, isolate()->factory()->the_hole_value());
1889 switch (slot->type()) { 1832 __ j(not_equal, &skip);
1890 case Slot::PARAMETER: 1833 __ mov(StackOperand(var), eax);
1891 // No const parameters. 1834 __ bind(&skip);
1892 UNREACHABLE(); 1835 } else {
1893 break; 1836 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
1894 case Slot::LOCAL: 1837 // Like var declarations, const declarations are hoisted to function
1895 __ mov(edx, Operand(ebp, SlotOffset(slot))); 1838 // scope. However, unlike var initializers, const initializers are
1896 __ cmp(edx, isolate()->factory()->the_hole_value()); 1839 // able to drill a hole to that function context, even from inside a
1897 __ j(not_equal, &skip); 1840 // 'with' context. We thus bypass the normal static scope lookup for
1898 __ mov(Operand(ebp, SlotOffset(slot)), eax); 1841 // var->IsContextSlot().
1899 break; 1842 __ push(eax);
1900 case Slot::CONTEXT: 1843 __ push(esi);
1901 case Slot::LOOKUP: 1844 __ push(Immediate(var->name()));
1902 __ push(eax); 1845 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1903 __ push(esi);
1904 __ push(Immediate(var->name()));
1905 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1906 break;
1907 } 1846 }
1908 __ bind(&skip); 1847
1848 } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
1849 // Non-initializing assignment to let variable needs a write barrier.
1850 if (var->IsLookupSlot()) {
1851 __ push(eax); // Value.
1852 __ push(esi); // Context.
1853 __ push(Immediate(var->name()));
1854 __ push(Immediate(Smi::FromInt(strict_mode_flag())));
1855 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1856 } else {
1857 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
1858 Label assign;
1859 MemOperand location = VarOperand(var, ecx);
1860 __ mov(edx, location);
1861 __ cmp(edx, isolate()->factory()->the_hole_value());
1862 __ j(not_equal, &assign, Label::kNear);
1863 __ push(Immediate(var->name()));
1864 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1865 __ bind(&assign);
1866 __ mov(location, eax);
1867 if (var->IsContextSlot()) {
1868 __ mov(edx, eax);
1869 int offset = Context::SlotOffset(var->index());
1870 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
1871 }
1872 }
1909 1873
1910 } else if (var->mode() != Variable::CONST) { 1874 } else if (var->mode() != Variable::CONST) {
1911 // Perform the assignment for non-const variables. Const assignments 1875 // Assignment to var or initializing assignment to let.
1912 // are simply skipped. 1876 if (var->IsStackAllocated() || var->IsContextSlot()) {
1913 Slot* slot = var->AsSlot(); 1877 MemOperand location = VarOperand(var, ecx);
1914 switch (slot->type()) { 1878 if (FLAG_debug_code && op == Token::INIT_LET) {
1915 case Slot::PARAMETER: 1879 // Check for an uninitialized let binding.
1916 case Slot::LOCAL: 1880 __ mov(edx, location);
1917 // Perform the assignment. 1881 __ cmp(edx, isolate()->factory()->the_hole_value());
1918 __ mov(Operand(ebp, SlotOffset(slot)), eax); 1882 __ Check(equal, "Let binding re-initialization.");
1919 break; 1883 }
1920 1884 // Perform the assignment.
1921 case Slot::CONTEXT: { 1885 __ mov(location, eax);
1922 MemOperand target = EmitSlotSearch(slot, ecx); 1886 if (var->IsContextSlot()) {
1923 // Perform the assignment and issue the write barrier.
1924 __ mov(target, eax);
1925
1926 // The value of the assignment is in eax. RecordWrite clobbers its
1927 // second and third register arguments.
1928 __ mov(edx, eax); 1887 __ mov(edx, eax);
1929 int offset = Context::SlotOffset(slot->index()); 1888 int offset = Context::SlotOffset(var->index());
1930 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs); 1889 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
1931 break;
1932 } 1890 }
1933 1891 } else {
1934 case Slot::LOOKUP: 1892 ASSERT(var->IsLookupSlot());
1935 // Call the runtime for the assignment. 1893 __ push(eax); // Value.
1936 __ push(eax); // Value. 1894 __ push(esi); // Context.
1937 __ push(esi); // Context. 1895 __ push(Immediate(var->name()));
1938 __ push(Immediate(var->name())); 1896 __ push(Immediate(Smi::FromInt(strict_mode_flag())));
1939 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); 1897 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1940 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1941 break;
1942 } 1898 }
1943 } 1899 }
1900 // Non-initializing assignments to consts are ignored.
1944 } 1901 }
1945 1902
1946 1903
1947 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 1904 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1948 // Assignment to a property, using a named store IC. 1905 // Assignment to a property, using a named store IC.
1949 Property* prop = expr->target()->AsProperty(); 1906 Property* prop = expr->target()->AsProperty();
1950 ASSERT(prop != NULL); 1907 ASSERT(prop != NULL);
1951 ASSERT(prop->key()->AsLiteral() != NULL); 1908 ASSERT(prop->key()->AsLiteral() != NULL);
1952 1909
1953 // If the assignment starts a block of assignments to the same object, 1910 // If the assignment starts a block of assignments to the same object,
(...skipping 186 matching lines...) Expand 10 before | Expand all | Expand 10 after
2140 // Push copy of the first argument or undefined if it doesn't exist. 2097 // Push copy of the first argument or undefined if it doesn't exist.
2141 if (arg_count > 0) { 2098 if (arg_count > 0) {
2142 __ push(Operand(esp, arg_count * kPointerSize)); 2099 __ push(Operand(esp, arg_count * kPointerSize));
2143 } else { 2100 } else {
2144 __ push(Immediate(isolate()->factory()->undefined_value())); 2101 __ push(Immediate(isolate()->factory()->undefined_value()));
2145 } 2102 }
2146 2103
2147 // Push the receiver of the enclosing function. 2104 // Push the receiver of the enclosing function.
2148 __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize)); 2105 __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2149 2106
2150 // Push the strict mode flag. 2107 // Push the strict mode flag. In harmony mode every eval call
2151 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); 2108 // is a strict mode eval call.
2109 StrictModeFlag strict_mode = strict_mode_flag();
2110 if (FLAG_harmony_block_scoping) {
2111 strict_mode = kStrictMode;
2112 }
2113 __ push(Immediate(Smi::FromInt(strict_mode)));
2152 2114
2153 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP 2115 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2154 ? Runtime::kResolvePossiblyDirectEvalNoLookup 2116 ? Runtime::kResolvePossiblyDirectEvalNoLookup
2155 : Runtime::kResolvePossiblyDirectEval, 4); 2117 : Runtime::kResolvePossiblyDirectEval, 4);
2156 } 2118 }
2157 2119
2158 2120
2159 void FullCodeGenerator::VisitCall(Call* expr) { 2121 void FullCodeGenerator::VisitCall(Call* expr) {
2160 #ifdef DEBUG 2122 #ifdef DEBUG
2161 // We want to verify that RecordJSReturnSite gets called on all paths 2123 // We want to verify that RecordJSReturnSite gets called on all paths
2162 // through this function. Avoid early returns. 2124 // through this function. Avoid early returns.
2163 expr->return_is_recorded_ = false; 2125 expr->return_is_recorded_ = false;
2164 #endif 2126 #endif
2165 2127
2166 Comment cmnt(masm_, "[ Call"); 2128 Comment cmnt(masm_, "[ Call");
2167 Expression* fun = expr->expression(); 2129 Expression* callee = expr->expression();
2168 Variable* var = fun->AsVariableProxy()->AsVariable(); 2130 VariableProxy* proxy = callee->AsVariableProxy();
2131 Property* property = callee->AsProperty();
2169 2132
2170 if (var != NULL && var->is_possibly_eval()) { 2133 if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2171 // In a call to eval, we first call %ResolvePossiblyDirectEval to 2134 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2172 // resolve the function we need to call and the receiver of the 2135 // resolve the function we need to call and the receiver of the call.
2173 // call. Then we call the resolved function using the given 2136 // Then we call the resolved function using the given arguments.
2174 // arguments.
2175 ZoneList<Expression*>* args = expr->arguments(); 2137 ZoneList<Expression*>* args = expr->arguments();
2176 int arg_count = args->length(); 2138 int arg_count = args->length();
2177 { PreservePositionScope pos_scope(masm()->positions_recorder()); 2139 { PreservePositionScope pos_scope(masm()->positions_recorder());
2178 VisitForStackValue(fun); 2140 VisitForStackValue(callee);
2179 // Reserved receiver slot. 2141 // Reserved receiver slot.
2180 __ push(Immediate(isolate()->factory()->undefined_value())); 2142 __ push(Immediate(isolate()->factory()->undefined_value()));
2181 increment_stack_height(); 2143 increment_stack_height();
2182 // Push the arguments. 2144 // Push the arguments.
2183 for (int i = 0; i < arg_count; i++) { 2145 for (int i = 0; i < arg_count; i++) {
2184 VisitForStackValue(args->at(i)); 2146 VisitForStackValue(args->at(i));
2185 } 2147 }
2186 2148
2187 // If we know that eval can only be shadowed by eval-introduced 2149 // If we know that eval can only be shadowed by eval-introduced
2188 // variables we attempt to load the global eval function directly 2150 // variables we attempt to load the global eval function directly in
2189 // in generated code. If we succeed, there is no need to perform a 2151 // generated code. If we succeed, there is no need to perform a
2190 // context lookup in the runtime system. 2152 // context lookup in the runtime system.
2191 Label done; 2153 Label done;
2192 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) { 2154 Variable* var = proxy->var();
2155 if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) {
2193 Label slow; 2156 Label slow;
2194 EmitLoadGlobalSlotCheckExtensions(var->AsSlot(), 2157 EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow);
2195 NOT_INSIDE_TYPEOF,
2196 &slow);
2197 // Push the function and resolve eval. 2158 // Push the function and resolve eval.
2198 __ push(eax); 2159 __ push(eax);
2199 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); 2160 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2200 __ jmp(&done); 2161 __ jmp(&done);
2201 __ bind(&slow); 2162 __ bind(&slow);
2202 } 2163 }
2203 2164
2204 // Push copy of the function (found below the arguments) and 2165 // Push a copy of the function (found below the arguments) and
2205 // resolve eval. 2166 // resolve eval.
2206 __ push(Operand(esp, (arg_count + 1) * kPointerSize)); 2167 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2207 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); 2168 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2208 if (done.is_linked()) { 2169 __ bind(&done);
2209 __ bind(&done);
2210 }
2211 2170
2212 // The runtime call returns a pair of values in eax (function) and 2171 // The runtime call returns a pair of values in eax (function) and
2213 // edx (receiver). Touch up the stack with the right values. 2172 // edx (receiver). Touch up the stack with the right values.
2214 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx); 2173 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2215 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax); 2174 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2216 } 2175 }
2217 // Record source position for debugger. 2176 // Record source position for debugger.
2218 SetSourcePosition(expr->position()); 2177 SetSourcePosition(expr->position());
2219 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; 2178 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2220 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT); 2179 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
2221 __ CallStub(&stub); 2180 __ CallStub(&stub);
2222 RecordJSReturnSite(expr); 2181 RecordJSReturnSite(expr);
2223 // Restore context register. 2182 // Restore context register.
2224 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 2183 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2225 decrement_stack_height(arg_count + 1); // Function is left on the stack. 2184 decrement_stack_height(arg_count + 1); // Function is left on the stack.
2226 context()->DropAndPlug(1, eax); 2185 context()->DropAndPlug(1, eax);
2227 } else if (var != NULL && !var->is_this() && var->is_global()) { 2186
2187 } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2228 // Push global object as receiver for the call IC. 2188 // Push global object as receiver for the call IC.
2229 __ push(GlobalObjectOperand()); 2189 __ push(GlobalObjectOperand());
2230 increment_stack_height(); 2190 increment_stack_height();
2231 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); 2191 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2232 } else if (var != NULL && var->AsSlot() != NULL && 2192
2233 var->AsSlot()->type() == Slot::LOOKUP) { 2193 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2234 // Call to a lookup slot (dynamically introduced variable). 2194 // Call to a lookup slot (dynamically introduced variable).
2235 Label slow, done; 2195 Label slow, done;
2236
2237 { PreservePositionScope scope(masm()->positions_recorder()); 2196 { PreservePositionScope scope(masm()->positions_recorder());
2238 // Generate code for loading from variables potentially shadowed 2197 // Generate code for loading from variables potentially shadowed by
2239 // by eval-introduced variables. 2198 // eval-introduced variables.
2240 EmitDynamicLoadFromSlotFastCase(var->AsSlot(), 2199 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2241 NOT_INSIDE_TYPEOF,
2242 &slow,
2243 &done);
2244 } 2200 }
2245
2246 __ bind(&slow); 2201 __ bind(&slow);
2247 // Call the runtime to find the function to call (returned in eax) 2202 // Call the runtime to find the function to call (returned in eax) and
2248 // and the object holding it (returned in edx). 2203 // the object holding it (returned in edx).
2249 __ push(context_register()); 2204 __ push(context_register());
2250 __ push(Immediate(var->name())); 2205 __ push(Immediate(proxy->name()));
2251 __ CallRuntime(Runtime::kLoadContextSlot, 2); 2206 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2252 __ push(eax); // Function. 2207 __ push(eax); // Function.
2253 increment_stack_height();
2254 __ push(edx); // Receiver. 2208 __ push(edx); // Receiver.
2255 increment_stack_height(); 2209 increment_stack_height(2);
2256 2210
2257 // If fast case code has been generated, emit code to push the 2211 // If fast case code has been generated, emit code to push the function
2258 // function and receiver and have the slow path jump around this 2212 // and receiver and have the slow path jump around this code.
2259 // code.
2260 if (done.is_linked()) { 2213 if (done.is_linked()) {
2261 Label call; 2214 Label call;
2262 __ jmp(&call); 2215 __ jmp(&call, Label::kNear);
2263 __ bind(&done); 2216 __ bind(&done);
2264 // Push function. Stack height already incremented in slow case above. 2217 // Push function. Stack height already incremented in slow case
2218 // above.
2265 __ push(eax); 2219 __ push(eax);
2266 // The receiver is implicitly the global receiver. Indicate this 2220 // The receiver is implicitly the global receiver. Indicate this by
2267 // by passing the hole to the call function stub. 2221 // passing the hole to the call function stub.
2268 __ push(Immediate(isolate()->factory()->the_hole_value())); 2222 __ push(Immediate(isolate()->factory()->the_hole_value()));
2269 __ bind(&call); 2223 __ bind(&call);
2270 } 2224 }
2271 2225
2272 // The receiver is either the global receiver or an object found 2226 // The receiver is either the global receiver or an object found by
2273 // by LoadContextSlot. That object could be the hole if the 2227 // LoadContextSlot. That object could be the hole if the receiver is
2274 // receiver is implicitly the global object. 2228 // implicitly the global object.
2275 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); 2229 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2276 } else if (fun->AsProperty() != NULL) { 2230
2277 // Call to an object property. 2231 } else if (property != NULL) {
2278 Property* prop = fun->AsProperty(); 2232 { PreservePositionScope scope(masm()->positions_recorder());
2279 Literal* key = prop->key()->AsLiteral(); 2233 VisitForStackValue(property->obj());
2280 if (key != NULL && key->handle()->IsSymbol()) { 2234 }
2281 // Call to a named property, use call IC. 2235 if (property->key()->IsPropertyName()) {
2282 { PreservePositionScope scope(masm()->positions_recorder()); 2236 EmitCallWithIC(expr,
2283 VisitForStackValue(prop->obj()); 2237 property->key()->AsLiteral()->handle(),
2284 } 2238 RelocInfo::CODE_TARGET);
2285 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2286 } else { 2239 } else {
2287 // Call to a keyed property. 2240 EmitKeyedCallWithIC(expr, property->key());
2288 // For a synthetic property use keyed load IC followed by function call, 2241 }
2289 // for a regular property use EmitKeyedCallWithIC.
2290 if (prop->is_synthetic()) {
2291 // Do not visit the object and key subexpressions (they are shared
2292 // by all occurrences of the same rewritten parameter).
2293 ASSERT(prop->obj()->AsVariableProxy() != NULL);
2294 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2295 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2296 MemOperand operand = EmitSlotSearch(slot, edx);
2297 __ mov(edx, operand);
2298 2242
2299 ASSERT(prop->key()->AsLiteral() != NULL);
2300 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2301 __ mov(eax, prop->key()->AsLiteral()->handle());
2302
2303 // Record source code position for IC call.
2304 SetSourcePosition(prop->position());
2305
2306 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2307 __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
2308 // Push result (function).
2309 __ push(eax);
2310 increment_stack_height();
2311 // Push Global receiver.
2312 __ mov(ecx, GlobalObjectOperand());
2313 __ push(FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
2314 increment_stack_height();
2315 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2316 } else {
2317 { PreservePositionScope scope(masm()->positions_recorder());
2318 VisitForStackValue(prop->obj());
2319 }
2320 EmitKeyedCallWithIC(expr, prop->key());
2321 }
2322 }
2323 } else { 2243 } else {
2244 // Call to an arbitrary expression not handled specially above.
2324 { PreservePositionScope scope(masm()->positions_recorder()); 2245 { PreservePositionScope scope(masm()->positions_recorder());
2325 VisitForStackValue(fun); 2246 VisitForStackValue(callee);
2326 } 2247 }
2327 // Load global receiver object. 2248 // Load global receiver object.
2328 __ mov(ebx, GlobalObjectOperand()); 2249 __ mov(ebx, GlobalObjectOperand());
2329 __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset)); 2250 __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
2330 increment_stack_height(); 2251 increment_stack_height();
2331 // Emit function call. 2252 // Emit function call.
2332 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); 2253 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2333 } 2254 }
2334 2255
2335 #ifdef DEBUG 2256 #ifdef DEBUG
(...skipping 953 matching lines...) Expand 10 before | Expand all | Expand 10 after
3289 Register tmp = ecx; 3210 Register tmp = ecx;
3290 __ mov(cache, ContextOperand(esi, Context::GLOBAL_INDEX)); 3211 __ mov(cache, ContextOperand(esi, Context::GLOBAL_INDEX));
3291 __ mov(cache, 3212 __ mov(cache,
3292 FieldOperand(cache, GlobalObject::kGlobalContextOffset)); 3213 FieldOperand(cache, GlobalObject::kGlobalContextOffset));
3293 __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 3214 __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3294 __ mov(cache, 3215 __ mov(cache,
3295 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); 3216 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3296 3217
3297 Label done, not_found; 3218 Label done, not_found;
3298 // tmp now holds finger offset as a smi. 3219 // tmp now holds finger offset as a smi.
3299 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 3220 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3300 __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset)); 3221 __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3301 __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp)); 3222 __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
3302 __ j(not_equal, &not_found); 3223 __ j(not_equal, &not_found);
3303 3224
3304 __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1)); 3225 __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
3305 __ jmp(&done); 3226 __ jmp(&done);
3306 3227
3307 __ bind(&not_found); 3228 __ bind(&not_found);
3308 // Call runtime to perform the lookup. 3229 // Call runtime to perform the lookup.
3309 __ push(cache); 3230 __ push(cache);
(...skipping 340 matching lines...) Expand 10 before | Expand all | Expand 10 after
3650 __ mov(eax, result_operand); 3571 __ mov(eax, result_operand);
3651 // Drop temp values from the stack, and restore context register. 3572 // Drop temp values from the stack, and restore context register.
3652 __ add(Operand(esp), Immediate(3 * kPointerSize)); 3573 __ add(Operand(esp), Immediate(3 * kPointerSize));
3653 3574
3654 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 3575 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3655 decrement_stack_height(); 3576 decrement_stack_height();
3656 context()->Plug(eax); 3577 context()->Plug(eax);
3657 } 3578 }
3658 3579
3659 3580
3660 void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList<Expression*>* args) {
// Inline runtime call (old revision; removed in the new revision of this CL):
// materializes true iff the single argument is a JSFunction whose
// SharedFunctionInfo has the strict-mode bit or the native bit set.
3661 ASSERT(args->length() == 1);
3662
3663 // Load the function into eax.
3664 VisitForAccumulatorValue(args->at(0));
3665
3666 // Prepare for the test.
3667 Label materialize_true, materialize_false;
3668 Label* if_true = NULL;
3669 Label* if_false = NULL;
3670 Label* fall_through = NULL;
3671 context()->PrepareTest(&materialize_true, &materialize_false,
3672 &if_true, &if_false, &fall_through);
3673
3674 // Test for strict mode function.
// ecx <- function's SharedFunctionInfo; a single byte-wide TEST checks the
// strict-mode bit within that byte.
3675 __ mov(ecx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
3676 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
3677 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
3678 __ j(not_equal, if_true);
3679
3680 // Test for native function.
3681 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
3682 1 << SharedFunctionInfo::kNativeBitWithinByte);
3683 __ j(not_equal, if_true);
3684
3685 // Not native or strict-mode function.
3686 __ jmp(if_false);
3687
// NOTE(review): PrepareForBailoutBeforeSplit is emitted AFTER the
// unconditional jmp above; elsewhere in full-codegen this call precedes the
// branch emission — presumably intentional since the function was deleted in
// this CL, but worth confirming if it is ever resurrected.
3688 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3689 context()->Plug(if_true, if_false);
3690 }
3691
3692
3693 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 3581 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3694 Handle<String> name = expr->name(); 3582 Handle<String> name = expr->name();
3695 if (name->length() > 0 && name->Get(0) == '_') { 3583 if (name->length() > 0 && name->Get(0) == '_') {
3696 Comment cmnt(masm_, "[ InlineRuntimeCall"); 3584 Comment cmnt(masm_, "[ InlineRuntimeCall");
3697 EmitInlineRuntimeCall(expr); 3585 EmitInlineRuntimeCall(expr);
3698 return; 3586 return;
3699 } 3587 }
3700 3588
3701 Comment cmnt(masm_, "[ CallRuntime"); 3589 Comment cmnt(masm_, "[ CallRuntime");
3702 ZoneList<Expression*>* args = expr->arguments(); 3590 ZoneList<Expression*>* args = expr->arguments();
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
3734 } 3622 }
3735 3623
3736 context()->Plug(eax); 3624 context()->Plug(eax);
3737 } 3625 }
3738 3626
3739 3627
3740 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3628 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3741 switch (expr->op()) { 3629 switch (expr->op()) {
3742 case Token::DELETE: { 3630 case Token::DELETE: {
3743 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 3631 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3744 Property* prop = expr->expression()->AsProperty(); 3632 Property* property = expr->expression()->AsProperty();
3745 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); 3633 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3746 3634
3747 if (prop != NULL) { 3635 if (property != NULL) {
3748 if (prop->is_synthetic()) { 3636 VisitForStackValue(property->obj());
3749 // Result of deleting parameters is false, even when they rewrite 3637 VisitForStackValue(property->key());
3750 // to accesses on the arguments object. 3638 __ push(Immediate(Smi::FromInt(strict_mode_flag())));
3751 context()->Plug(false); 3639 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3752 } else { 3640 decrement_stack_height(2);
3753 VisitForStackValue(prop->obj()); 3641 context()->Plug(eax);
3754 VisitForStackValue(prop->key()); 3642 } else if (proxy != NULL) {
3755 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); 3643 Variable* var = proxy->var();
3756 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3757 decrement_stack_height(2);
3758 context()->Plug(eax);
3759 }
3760 } else if (var != NULL) {
3761 // Delete of an unqualified identifier is disallowed in strict mode 3644 // Delete of an unqualified identifier is disallowed in strict mode
3762 // but "delete this" is. 3645 // but "delete this" is allowed.
3763 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); 3646 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
3764 if (var->is_global()) { 3647 if (var->IsUnallocated()) {
3765 __ push(GlobalObjectOperand()); 3648 __ push(GlobalObjectOperand());
3766 __ push(Immediate(var->name())); 3649 __ push(Immediate(var->name()));
3767 __ push(Immediate(Smi::FromInt(kNonStrictMode))); 3650 __ push(Immediate(Smi::FromInt(kNonStrictMode)));
3768 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3651 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3769 context()->Plug(eax); 3652 context()->Plug(eax);
3770 } else if (var->AsSlot() != NULL && 3653 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3771 var->AsSlot()->type() != Slot::LOOKUP) { 3654 // Result of deleting non-global variables is false. 'this' is
3772 // Result of deleting non-global, non-dynamic variables is false. 3655 // not really a variable, though we implement it as one. The
3773 // The subexpression does not have side effects. 3656 // subexpression does not have side effects.
3774 context()->Plug(false); 3657 context()->Plug(var->is_this());
3775 } else { 3658 } else {
3776 // Non-global variable. Call the runtime to try to delete from the 3659 // Non-global variable. Call the runtime to try to delete from the
3777 // context where the variable was introduced. 3660 // context where the variable was introduced.
3778 __ push(context_register()); 3661 __ push(context_register());
3779 __ push(Immediate(var->name())); 3662 __ push(Immediate(var->name()));
3780 __ CallRuntime(Runtime::kDeleteContextSlot, 2); 3663 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3781 context()->Plug(eax); 3664 context()->Plug(eax);
3782 } 3665 }
3783 } else { 3666 } else {
3784 // Result of deleting non-property, non-variable reference is true. 3667 // Result of deleting non-property, non-variable reference is true.
(...skipping 276 matching lines...) Expand 10 before | Expand all | Expand 10 after
4061 } 3944 }
4062 } 3945 }
4063 } 3946 }
4064 3947
4065 3948
4066 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 3949 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4067 VariableProxy* proxy = expr->AsVariableProxy(); 3950 VariableProxy* proxy = expr->AsVariableProxy();
4068 ASSERT(!context()->IsEffect()); 3951 ASSERT(!context()->IsEffect());
4069 ASSERT(!context()->IsTest()); 3952 ASSERT(!context()->IsTest());
4070 3953
4071 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { 3954 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4072 Comment cmnt(masm_, "Global variable"); 3955 Comment cmnt(masm_, "Global variable");
4073 __ mov(eax, GlobalObjectOperand()); 3956 __ mov(eax, GlobalObjectOperand());
4074 __ mov(ecx, Immediate(proxy->name())); 3957 __ mov(ecx, Immediate(proxy->name()));
4075 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 3958 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4076 // Use a regular load, not a contextual load, to avoid a reference 3959 // Use a regular load, not a contextual load, to avoid a reference
4077 // error. 3960 // error.
4078 __ call(ic); 3961 __ call(ic);
4079 PrepareForBailout(expr, TOS_REG); 3962 PrepareForBailout(expr, TOS_REG);
4080 context()->Plug(eax); 3963 context()->Plug(eax);
4081 } else if (proxy != NULL && 3964 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4082 proxy->var()->AsSlot() != NULL &&
4083 proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
4084 Label done, slow; 3965 Label done, slow;
4085 3966
4086 // Generate code for loading from variables potentially shadowed 3967 // Generate code for loading from variables potentially shadowed
4087 // by eval-introduced variables. 3968 // by eval-introduced variables.
4088 Slot* slot = proxy->var()->AsSlot(); 3969 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4089 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
4090 3970
4091 __ bind(&slow); 3971 __ bind(&slow);
4092 __ push(esi); 3972 __ push(esi);
4093 __ push(Immediate(proxy->name())); 3973 __ push(Immediate(proxy->name()));
4094 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 3974 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4095 PrepareForBailout(expr, TOS_REG); 3975 PrepareForBailout(expr, TOS_REG);
4096 __ bind(&done); 3976 __ bind(&done);
4097 3977
4098 context()->Plug(eax); 3978 context()->Plug(eax);
4099 } else { 3979 } else {
(...skipping 271 matching lines...) Expand 10 before | Expand all | Expand 10 after
4371 4251
4372 4252
4373 // ---------------------------------------------------------------------------- 4253 // ----------------------------------------------------------------------------
4374 // Non-local control flow support. 4254 // Non-local control flow support.
4375 4255
4376 void FullCodeGenerator::EnterFinallyBlock() { 4256 void FullCodeGenerator::EnterFinallyBlock() {
// "Cooks" the return address: rewrites the absolute address on top of the
// stack as a smi-tagged offset from the start of the code object, so the GC
// does not mistake it for a heap pointer while the finally block runs.
// The matching "uncook" is in ExitFinallyBlock below.
4377 // Cook return address on top of stack (smi encoded Code* delta) 4257 // Cook return address on top of stack (smi encoded Code* delta)
4378 ASSERT(!result_register().is(edx)); 4258 ASSERT(!result_register().is(edx));
4379 __ pop(edx); 4259 __ pop(edx);
4380 __ sub(Operand(edx), Immediate(masm_->CodeObject())); 4260 __ sub(Operand(edx), Immediate(masm_->CodeObject()));
// The smi encoding of the delta only works if tagging is exactly one
// left-shift with tag bit 0; the new revision upgrades the runtime
// ASSERT_EQs to compile-time STATIC_ASSERTs for this invariant.
4381 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); 4261 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
4382 ASSERT_EQ(0, kSmiTag); 4262 STATIC_ASSERT(kSmiTag == 0);
4383 __ SmiTag(edx); 4263 __ SmiTag(edx);
4384 __ push(edx); 4264 __ push(edx);
4385 // Store result register while executing finally block. 4265 // Store result register while executing finally block.
4386 __ push(result_register()); 4266 __ push(result_register());
4387 } 4267 }
4388 4268
4389 4269
4390 void FullCodeGenerator::ExitFinallyBlock() { 4270 void FullCodeGenerator::ExitFinallyBlock() {
// Inverse of EnterFinallyBlock: restores the saved result register, then
// "uncooks" the return address (smi-untag, re-add the code object base)
// and jumps back to it.
4391 ASSERT(!result_register().is(edx)); 4271 ASSERT(!result_register().is(edx));
4392 __ pop(result_register()); 4272 __ pop(result_register());
4393 // Uncook return address. 4273 // Uncook return address.
4394 __ pop(edx); 4274 __ pop(edx);
4395 __ SmiUntag(edx); 4275 __ SmiUntag(edx);
4396 __ add(Operand(edx), Immediate(masm_->CodeObject())); 4276 __ add(Operand(edx), Immediate(masm_->CodeObject()));
// Indirect jump through edx resumes execution at the original return site.
4397 __ jmp(Operand(edx)); 4277 __ jmp(Operand(edx));
4398 } 4278 }
4399 4279
4400 4280
4401 #undef __ 4281 #undef __
4402 4282
4283 #define __ ACCESS_MASM(masm())
4284
// Emits the code needed to unwind out of a try-finally on a non-local exit
// (break/continue/return crossing the finally): drop intermediate stack
// slots, restore the context, pop the try handler, and run the finally code.
// Returns the enclosing NestedStatement so the caller can keep unwinding.
// New in this revision of the CL (new column only).
4285 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4286 int* stack_depth,
4287 int* context_length) {
4288 // The macros used here must preserve the result register.
4289
4290 // Because the handler block contains the context of the finally
4291 // code, we can restore it directly from there for the finally code
4292 // rather than iteratively unwinding contexts via their previous
4293 // links.
4294 __ Drop(*stack_depth); // Down to the handler block.
4295 if (*context_length > 0) {
4296 // Restore the context to its dedicated register and the stack.
4297 __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset))​;
4298 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
4299 }
4300 __ PopTryHandler();
// Call (not jump to) the finally body so it returns here and unwinding can
// continue in the caller's loop.
4301 __ call(finally_entry_);
4302
// Everything up to the handler has been consumed; report zero remaining
// depth/length to the unwinding loop and hand back the outer statement.
4303 *stack_depth = 0;
4304 *context_length = 0;
4305 return previous_;
4306 }
4307
4308
4309 #undef __
4310
4403 } } // namespace v8::internal 4311 } } // namespace v8::internal
4404 4312
4405 #endif // V8_TARGET_ARCH_IA32 4313 #endif // V8_TARGET_ARCH_IA32
OLDNEW
« no previous file with comments | « src/ia32/code-stubs-ia32.cc ('k') | src/ia32/ic-ia32.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698