OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
44 #define __ ACCESS_MASM(masm_) | 44 #define __ ACCESS_MASM(masm_) |
45 | 45 |
46 | 46 |
47 static unsigned GetPropertyId(Property* property) { | 47 static unsigned GetPropertyId(Property* property) { |
48 return property->id(); | 48 return property->id(); |
49 } | 49 } |
50 | 50 |
51 | 51 |
52 class JumpPatchSite BASE_EMBEDDED { | 52 class JumpPatchSite BASE_EMBEDDED { |
53 public: | 53 public: |
54 explicit JumpPatchSite(MacroAssembler* masm) | 54 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { |
55 : masm_(masm) { | |
56 #ifdef DEBUG | 55 #ifdef DEBUG |
57 info_emitted_ = false; | 56 info_emitted_ = false; |
58 #endif | 57 #endif |
59 } | 58 } |
60 | 59 |
61 ~JumpPatchSite() { | 60 ~JumpPatchSite() { |
62 ASSERT(patch_site_.is_bound() == info_emitted_); | 61 ASSERT(patch_site_.is_bound() == info_emitted_); |
63 } | 62 } |
64 | 63 |
65 void EmitJumpIfNotSmi(Register reg, | 64 void EmitJumpIfNotSmi(Register reg, |
(...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
180 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 179 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
181 } | 180 } |
182 function_in_register = false; | 181 function_in_register = false; |
183 // Context is returned in both rax and rsi. It replaces the context | 182 // Context is returned in both rax and rsi. It replaces the context |
184 // passed to us. It's saved in the stack and kept live in rsi. | 183 // passed to us. It's saved in the stack and kept live in rsi. |
185 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); | 184 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); |
186 | 185 |
187 // Copy any necessary parameters into the context. | 186 // Copy any necessary parameters into the context. |
188 int num_parameters = info->scope()->num_parameters(); | 187 int num_parameters = info->scope()->num_parameters(); |
189 for (int i = 0; i < num_parameters; i++) { | 188 for (int i = 0; i < num_parameters; i++) { |
190 Slot* slot = scope()->parameter(i)->AsSlot(); | 189 Variable* var = scope()->parameter(i); |
191 if (slot != NULL && slot->type() == Slot::CONTEXT) { | 190 if (var->IsContextSlot()) { |
192 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 191 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
193 (num_parameters - 1 - i) * kPointerSize; | 192 (num_parameters - 1 - i) * kPointerSize; |
194 // Load parameter from stack. | 193 // Load parameter from stack. |
195 __ movq(rax, Operand(rbp, parameter_offset)); | 194 __ movq(rax, Operand(rbp, parameter_offset)); |
196 // Store it in the context. | 195 // Store it in the context. |
197 int context_offset = Context::SlotOffset(slot->index()); | 196 int context_offset = Context::SlotOffset(var->index()); |
198 __ movq(Operand(rsi, context_offset), rax); | 197 __ movq(Operand(rsi, context_offset), rax); |
199 // Update the write barrier. This clobbers all involved | 198 // Update the write barrier. This clobbers all involved |
200 // registers, so we have to use a third register to avoid | 199 // registers, so we have to use a third register to avoid |
201 // clobbering rsi. | 200 // clobbering rsi. |
202 __ movq(rcx, rsi); | 201 __ movq(rcx, rsi); |
203 __ RecordWrite(rcx, context_offset, rax, rbx); | 202 __ RecordWrite(rcx, context_offset, rax, rbx); |
204 } | 203 } |
205 } | 204 } |
206 } | 205 } |
207 | 206 |
(...skipping 17 matching lines...) Expand all Loading... |
225 __ Push(Smi::FromInt(num_parameters)); | 224 __ Push(Smi::FromInt(num_parameters)); |
226 // Arguments to ArgumentsAccessStub: | 225 // Arguments to ArgumentsAccessStub: |
227 // function, receiver address, parameter count. | 226 // function, receiver address, parameter count. |
228 // The stub will rewrite receiver and parameter count if the previous | 227 // The stub will rewrite receiver and parameter count if the previous |
229 // stack frame was an arguments adapter frame. | 228 // stack frame was an arguments adapter frame. |
230 ArgumentsAccessStub stub( | 229 ArgumentsAccessStub stub( |
231 is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT | 230 is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT |
232 : ArgumentsAccessStub::NEW_NON_STRICT_SLOW); | 231 : ArgumentsAccessStub::NEW_NON_STRICT_SLOW); |
233 __ CallStub(&stub); | 232 __ CallStub(&stub); |
234 | 233 |
235 Move(arguments->AsSlot(), rax, rbx, rdx); | 234 SetVar(arguments, rax, rbx, rdx); |
236 } | 235 } |
237 | 236 |
238 if (FLAG_trace) { | 237 if (FLAG_trace) { |
239 __ CallRuntime(Runtime::kTraceEnter, 0); | 238 __ CallRuntime(Runtime::kTraceEnter, 0); |
240 } | 239 } |
241 | 240 |
242 // Visit the declarations and body unless there is an illegal | 241 // Visit the declarations and body unless there is an illegal |
243 // redeclaration. | 242 // redeclaration. |
244 if (scope()->HasIllegalRedeclaration()) { | 243 if (scope()->HasIllegalRedeclaration()) { |
245 Comment cmnt(masm_, "[ Declarations"); | 244 Comment cmnt(masm_, "[ Declarations"); |
246 scope()->VisitIllegalRedeclaration(this); | 245 scope()->VisitIllegalRedeclaration(this); |
| 246 |
247 } else { | 247 } else { |
248 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); | 248 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); |
249 { Comment cmnt(masm_, "[ Declarations"); | 249 { Comment cmnt(masm_, "[ Declarations"); |
250 // For named function expressions, declare the function name as a | 250 // For named function expressions, declare the function name as a |
251 // constant. | 251 // constant. |
252 if (scope()->is_function_scope() && scope()->function() != NULL) { | 252 if (scope()->is_function_scope() && scope()->function() != NULL) { |
253 EmitDeclaration(scope()->function(), Variable::CONST, NULL); | 253 int ignored = 0; |
| 254 EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored); |
254 } | 255 } |
255 VisitDeclarations(scope()->declarations()); | 256 VisitDeclarations(scope()->declarations()); |
256 } | 257 } |
257 | 258 |
258 { Comment cmnt(masm_, "[ Stack check"); | 259 { Comment cmnt(masm_, "[ Stack check"); |
259 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS); | 260 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS); |
260 Label ok; | 261 Label ok; |
261 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 262 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
262 __ j(above_equal, &ok, Label::kNear); | 263 __ j(above_equal, &ok, Label::kNear); |
263 StackCheckStub stub; | 264 StackCheckStub stub; |
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
349 } | 350 } |
350 // Check that the size of the code used for returning is large enough | 351 // Check that the size of the code used for returning is large enough |
351 // for the debugger's requirements. | 352 // for the debugger's requirements. |
352 ASSERT(Assembler::kJSReturnSequenceLength <= | 353 ASSERT(Assembler::kJSReturnSequenceLength <= |
353 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); | 354 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); |
354 #endif | 355 #endif |
355 } | 356 } |
356 } | 357 } |
357 | 358 |
358 | 359 |
359 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { | 360 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { |
| 361 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
360 } | 362 } |
361 | 363 |
362 | 364 |
363 void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const { | 365 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { |
364 MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register()); | 366 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
365 __ movq(result_register(), slot_operand); | 367 codegen()->GetVar(result_register(), var); |
366 } | 368 } |
367 | 369 |
368 | 370 |
369 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const { | 371 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { |
370 MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register()); | 372 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
371 __ push(slot_operand); | 373 MemOperand operand = codegen()->VarOperand(var, result_register()); |
| 374 __ push(operand); |
372 } | 375 } |
373 | 376 |
374 | 377 |
375 void FullCodeGenerator::TestContext::Plug(Slot* slot) const { | 378 void FullCodeGenerator::TestContext::Plug(Variable* var) const { |
376 codegen()->Move(result_register(), slot); | 379 codegen()->GetVar(result_register(), var); |
377 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); | 380 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
378 codegen()->DoTest(this); | 381 codegen()->DoTest(this); |
379 } | 382 } |
380 | 383 |
381 | 384 |
382 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { | 385 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { |
383 } | 386 } |
384 | 387 |
385 | 388 |
386 void FullCodeGenerator::AccumulatorValueContext::Plug( | 389 void FullCodeGenerator::AccumulatorValueContext::Plug( |
(...skipping 198 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
585 __ j(cc, if_true); | 588 __ j(cc, if_true); |
586 } else if (if_true == fall_through) { | 589 } else if (if_true == fall_through) { |
587 __ j(NegateCondition(cc), if_false); | 590 __ j(NegateCondition(cc), if_false); |
588 } else { | 591 } else { |
589 __ j(cc, if_true); | 592 __ j(cc, if_true); |
590 __ jmp(if_false); | 593 __ jmp(if_false); |
591 } | 594 } |
592 } | 595 } |
593 | 596 |
594 | 597 |
595 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) { | 598 MemOperand FullCodeGenerator::StackOperand(Variable* var) { |
596 switch (slot->type()) { | 599 ASSERT(var->IsStackAllocated()); |
597 case Slot::PARAMETER: | 600 // Offset is negative because higher indexes are at lower addresses. |
598 case Slot::LOCAL: | 601 int offset = -var->index() * kPointerSize; |
599 return Operand(rbp, SlotOffset(slot)); | 602 // Adjust by a (parameter or local) base offset. |
600 case Slot::CONTEXT: { | 603 if (var->IsParameter()) { |
601 int context_chain_length = | 604 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; |
602 scope()->ContextChainLength(slot->var()->scope()); | 605 } else { |
603 __ LoadContext(scratch, context_chain_length); | 606 offset += JavaScriptFrameConstants::kLocal0Offset; |
604 return ContextOperand(scratch, slot->index()); | |
605 } | |
606 case Slot::LOOKUP: | |
607 UNREACHABLE(); | |
608 } | 607 } |
609 UNREACHABLE(); | 608 return Operand(rbp, offset); |
610 return Operand(rax, 0); | |
611 } | 609 } |
612 | 610 |
613 | 611 |
614 void FullCodeGenerator::Move(Register destination, Slot* source) { | 612 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { |
615 MemOperand location = EmitSlotSearch(source, destination); | 613 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
616 __ movq(destination, location); | 614 if (var->IsContextSlot()) { |
| 615 int context_chain_length = scope()->ContextChainLength(var->scope()); |
| 616 __ LoadContext(scratch, context_chain_length); |
| 617 return ContextOperand(scratch, var->index()); |
| 618 } else { |
| 619 return StackOperand(var); |
| 620 } |
617 } | 621 } |
618 | 622 |
619 | 623 |
620 void FullCodeGenerator::Move(Slot* dst, | 624 void FullCodeGenerator::GetVar(Register dest, Variable* var) { |
621 Register src, | 625 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
622 Register scratch1, | 626 MemOperand location = VarOperand(var, dest); |
623 Register scratch2) { | 627 __ movq(dest, location); |
624 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented. | 628 } |
625 ASSERT(!scratch1.is(src) && !scratch2.is(src)); | 629 |
626 MemOperand location = EmitSlotSearch(dst, scratch1); | 630 |
| 631 void FullCodeGenerator::SetVar(Variable* var, |
| 632 Register src, |
| 633 Register scratch0, |
| 634 Register scratch1) { |
| 635 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
| 636 ASSERT(!scratch0.is(src)); |
| 637 ASSERT(!scratch0.is(scratch1)); |
| 638 ASSERT(!scratch1.is(src)); |
| 639 MemOperand location = VarOperand(var, scratch0); |
627 __ movq(location, src); | 640 __ movq(location, src); |
628 // Emit the write barrier code if the location is in the heap. | 641 // Emit the write barrier code if the location is in the heap. |
629 if (dst->type() == Slot::CONTEXT) { | 642 if (var->IsContextSlot()) { |
630 int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize; | 643 int offset = Context::SlotOffset(var->index()); |
631 __ RecordWrite(scratch1, offset, src, scratch2); | 644 __ RecordWrite(scratch0, offset, src, scratch1); |
632 } | 645 } |
633 } | 646 } |
634 | 647 |
635 | 648 |
636 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, | 649 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, |
637 bool should_normalize, | 650 bool should_normalize, |
638 Label* if_true, | 651 Label* if_true, |
639 Label* if_false) { | 652 Label* if_false) { |
640 // Only prepare for bailouts before splits if we're in a test | 653 // Only prepare for bailouts before splits if we're in a test |
641 // context. Otherwise, we let the Visit function deal with the | 654 // context. Otherwise, we let the Visit function deal with the |
(...skipping 12 matching lines...) Expand all Loading... |
654 if (should_normalize) { | 667 if (should_normalize) { |
655 __ CompareRoot(rax, Heap::kTrueValueRootIndex); | 668 __ CompareRoot(rax, Heap::kTrueValueRootIndex); |
656 Split(equal, if_true, if_false, NULL); | 669 Split(equal, if_true, if_false, NULL); |
657 __ bind(&skip); | 670 __ bind(&skip); |
658 } | 671 } |
659 } | 672 } |
660 | 673 |
661 | 674 |
662 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, | 675 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, |
663 Variable::Mode mode, | 676 Variable::Mode mode, |
664 FunctionLiteral* function) { | 677 FunctionLiteral* function, |
665 Comment cmnt(masm_, "[ Declaration"); | 678 int* global_count) { |
| 679 // If it was not possible to allocate the variable at compile time, we |
| 680 // need to "declare" it at runtime to make sure it actually exists in the |
| 681 // local context. |
666 Variable* variable = proxy->var(); | 682 Variable* variable = proxy->var(); |
667 ASSERT(variable != NULL); // Must have been resolved. | 683 switch (variable->location()) { |
668 Slot* slot = variable->AsSlot(); | 684 case Variable::UNALLOCATED: |
669 ASSERT(slot != NULL); | 685 ++(*global_count); |
670 switch (slot->type()) { | 686 break; |
671 case Slot::PARAMETER: | 687 |
672 case Slot::LOCAL: | 688 case Variable::PARAMETER: |
| 689 case Variable::LOCAL: |
673 if (function != NULL) { | 690 if (function != NULL) { |
| 691 Comment cmnt(masm_, "[ Declaration"); |
674 VisitForAccumulatorValue(function); | 692 VisitForAccumulatorValue(function); |
675 __ movq(Operand(rbp, SlotOffset(slot)), result_register()); | 693 __ movq(StackOperand(variable), result_register()); |
676 } else if (mode == Variable::CONST || mode == Variable::LET) { | 694 } else if (mode == Variable::CONST || mode == Variable::LET) { |
| 695 Comment cmnt(masm_, "[ Declaration"); |
677 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); | 696 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); |
678 __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister); | 697 __ movq(StackOperand(variable), kScratchRegister); |
679 } | 698 } |
680 break; | 699 break; |
681 | 700 |
682 case Slot::CONTEXT: | 701 case Variable::CONTEXT: |
683 // We bypass the general EmitSlotSearch because we know more about | |
684 // this specific context. | |
685 | |
686 // The variable in the decl always resides in the current function | 702 // The variable in the decl always resides in the current function |
687 // context. | 703 // context. |
688 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); | 704 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); |
689 if (FLAG_debug_code) { | 705 if (FLAG_debug_code) { |
690 // Check that we're not inside a with or catch context. | 706 // Check that we're not inside a with or catch context. |
691 __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset)); | 707 __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset)); |
692 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex); | 708 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex); |
693 __ Check(not_equal, "Declaration in with context."); | 709 __ Check(not_equal, "Declaration in with context."); |
694 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex); | 710 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex); |
695 __ Check(not_equal, "Declaration in catch context."); | 711 __ Check(not_equal, "Declaration in catch context."); |
696 } | 712 } |
697 if (function != NULL) { | 713 if (function != NULL) { |
| 714 Comment cmnt(masm_, "[ Declaration"); |
698 VisitForAccumulatorValue(function); | 715 VisitForAccumulatorValue(function); |
699 __ movq(ContextOperand(rsi, slot->index()), result_register()); | 716 __ movq(ContextOperand(rsi, variable->index()), result_register()); |
700 int offset = Context::SlotOffset(slot->index()); | 717 int offset = Context::SlotOffset(variable->index()); |
701 __ movq(rbx, rsi); | 718 __ movq(rbx, rsi); |
702 __ RecordWrite(rbx, offset, result_register(), rcx); | 719 __ RecordWrite(rbx, offset, result_register(), rcx); |
703 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 720 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
704 } else if (mode == Variable::CONST || mode == Variable::LET) { | 721 } else if (mode == Variable::CONST || mode == Variable::LET) { |
| 722 Comment cmnt(masm_, "[ Declaration"); |
705 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); | 723 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); |
706 __ movq(ContextOperand(rsi, slot->index()), kScratchRegister); | 724 __ movq(ContextOperand(rsi, variable->index()), kScratchRegister); |
707 // No write barrier since the hole value is in old space. | 725 // No write barrier since the hole value is in old space. |
708 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 726 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
709 } | 727 } |
710 break; | 728 break; |
711 | 729 |
712 case Slot::LOOKUP: { | 730 case Variable::LOOKUP: { |
| 731 Comment cmnt(masm_, "[ Declaration"); |
713 __ push(rsi); | 732 __ push(rsi); |
714 __ Push(variable->name()); | 733 __ Push(variable->name()); |
715 // Declaration nodes are always introduced in one of two modes. | 734 // Declaration nodes are always introduced in one of three modes. |
716 ASSERT(mode == Variable::VAR || | 735 ASSERT(mode == Variable::VAR || |
717 mode == Variable::CONST || | 736 mode == Variable::CONST || |
718 mode == Variable::LET); | 737 mode == Variable::LET); |
719 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE; | 738 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE; |
720 __ Push(Smi::FromInt(attr)); | 739 __ Push(Smi::FromInt(attr)); |
721 // Push initial value, if any. | 740 // Push initial value, if any. |
722 // Note: For variables we must not push an initial value (such as | 741 // Note: For variables we must not push an initial value (such as |
723 // 'undefined') because we may have a (legal) redeclaration and we | 742 // 'undefined') because we may have a (legal) redeclaration and we |
724 // must not destroy the current value. | 743 // must not destroy the current value. |
725 if (function != NULL) { | 744 if (function != NULL) { |
726 VisitForStackValue(function); | 745 VisitForStackValue(function); |
727 } else if (mode == Variable::CONST || mode == Variable::LET) { | 746 } else if (mode == Variable::CONST || mode == Variable::LET) { |
728 __ PushRoot(Heap::kTheHoleValueRootIndex); | 747 __ PushRoot(Heap::kTheHoleValueRootIndex); |
729 } else { | 748 } else { |
730 __ Push(Smi::FromInt(0)); // no initial value! | 749 __ Push(Smi::FromInt(0)); // Indicates no initial value. |
731 } | 750 } |
732 __ CallRuntime(Runtime::kDeclareContextSlot, 4); | 751 __ CallRuntime(Runtime::kDeclareContextSlot, 4); |
733 break; | 752 break; |
734 } | 753 } |
735 } | 754 } |
736 } | 755 } |
737 | 756 |
738 | 757 |
739 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { | 758 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { } |
740 EmitDeclaration(decl->proxy(), decl->mode(), decl->fun()); | |
741 } | |
742 | 759 |
743 | 760 |
744 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 761 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
745 // Call the runtime to declare the globals. | 762 // Call the runtime to declare the globals. |
746 __ push(rsi); // The context is the first argument. | 763 __ push(rsi); // The context is the first argument. |
747 __ Push(pairs); | 764 __ Push(pairs); |
748 __ Push(Smi::FromInt(DeclareGlobalsFlags())); | 765 __ Push(Smi::FromInt(DeclareGlobalsFlags())); |
749 __ CallRuntime(Runtime::kDeclareGlobals, 3); | 766 __ CallRuntime(Runtime::kDeclareGlobals, 3); |
750 // Return value is ignored. | 767 // Return value is ignored. |
751 } | 768 } |
(...skipping 292 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1044 context()->Plug(rax); | 1061 context()->Plug(rax); |
1045 } | 1062 } |
1046 | 1063 |
1047 | 1064 |
1048 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { | 1065 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { |
1049 Comment cmnt(masm_, "[ VariableProxy"); | 1066 Comment cmnt(masm_, "[ VariableProxy"); |
1050 EmitVariableLoad(expr); | 1067 EmitVariableLoad(expr); |
1051 } | 1068 } |
1052 | 1069 |
1053 | 1070 |
1054 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( | 1071 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, |
1055 Slot* slot, | 1072 TypeofState typeof_state, |
1056 TypeofState typeof_state, | 1073 Label* slow) { |
1057 Label* slow) { | |
1058 Register context = rsi; | 1074 Register context = rsi; |
1059 Register temp = rdx; | 1075 Register temp = rdx; |
1060 | 1076 |
1061 Scope* s = scope(); | 1077 Scope* s = scope(); |
1062 while (s != NULL) { | 1078 while (s != NULL) { |
1063 if (s->num_heap_slots() > 0) { | 1079 if (s->num_heap_slots() > 0) { |
1064 if (s->calls_eval()) { | 1080 if (s->calls_eval()) { |
1065 // Check that extension is NULL. | 1081 // Check that extension is NULL. |
1066 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), | 1082 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), |
1067 Immediate(0)); | 1083 Immediate(0)); |
(...skipping 29 matching lines...) Expand all Loading... |
1097 __ j(not_equal, slow); | 1113 __ j(not_equal, slow); |
1098 // Load next context in chain. | 1114 // Load next context in chain. |
1099 __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX)); | 1115 __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX)); |
1100 __ jmp(&next); | 1116 __ jmp(&next); |
1101 __ bind(&fast); | 1117 __ bind(&fast); |
1102 } | 1118 } |
1103 | 1119 |
1104 // All extension objects were empty and it is safe to use a global | 1120 // All extension objects were empty and it is safe to use a global |
1105 // load IC call. | 1121 // load IC call. |
1106 __ movq(rax, GlobalObjectOperand()); | 1122 __ movq(rax, GlobalObjectOperand()); |
1107 __ Move(rcx, slot->var()->name()); | 1123 __ Move(rcx, var->name()); |
1108 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 1124 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
1109 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) | 1125 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) |
1110 ? RelocInfo::CODE_TARGET | 1126 ? RelocInfo::CODE_TARGET |
1111 : RelocInfo::CODE_TARGET_CONTEXT; | 1127 : RelocInfo::CODE_TARGET_CONTEXT; |
1112 __ call(ic, mode); | 1128 __ call(ic, mode); |
1113 } | 1129 } |
1114 | 1130 |
1115 | 1131 |
1116 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( | 1132 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, |
1117 Slot* slot, | 1133 Label* slow) { |
1118 Label* slow) { | 1134 ASSERT(var->IsContextSlot()); |
1119 ASSERT(slot->type() == Slot::CONTEXT); | |
1120 Register context = rsi; | 1135 Register context = rsi; |
1121 Register temp = rbx; | 1136 Register temp = rbx; |
1122 | 1137 |
1123 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { | 1138 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { |
1124 if (s->num_heap_slots() > 0) { | 1139 if (s->num_heap_slots() > 0) { |
1125 if (s->calls_eval()) { | 1140 if (s->calls_eval()) { |
1126 // Check that extension is NULL. | 1141 // Check that extension is NULL. |
1127 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), | 1142 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), |
1128 Immediate(0)); | 1143 Immediate(0)); |
1129 __ j(not_equal, slow); | 1144 __ j(not_equal, slow); |
1130 } | 1145 } |
1131 __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX)); | 1146 __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX)); |
1132 // Walk the rest of the chain without clobbering rsi. | 1147 // Walk the rest of the chain without clobbering rsi. |
1133 context = temp; | 1148 context = temp; |
1134 } | 1149 } |
1135 } | 1150 } |
1136 // Check that last extension is NULL. | 1151 // Check that last extension is NULL. |
1137 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0)); | 1152 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0)); |
1138 __ j(not_equal, slow); | 1153 __ j(not_equal, slow); |
1139 | 1154 |
1140 // This function is used only for loads, not stores, so it's safe to | 1155 // This function is used only for loads, not stores, so it's safe to |
1141 // return an rsi-based operand (the write barrier cannot be allowed to | 1156 // return an rsi-based operand (the write barrier cannot be allowed to |
1142 // destroy the rsi register). | 1157 // destroy the rsi register). |
1143 return ContextOperand(context, slot->index()); | 1158 return ContextOperand(context, var->index()); |
1144 } | 1159 } |
1145 | 1160 |
1146 | 1161 |
1147 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( | 1162 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, |
1148 Slot* slot, | 1163 TypeofState typeof_state, |
1149 TypeofState typeof_state, | 1164 Label* slow, |
1150 Label* slow, | 1165 Label* done) { |
1151 Label* done) { | |
1152 // Generate fast-case code for variables that might be shadowed by | 1166 // Generate fast-case code for variables that might be shadowed by |
1153 // eval-introduced variables. Eval is used a lot without | 1167 // eval-introduced variables. Eval is used a lot without |
1154 // introducing variables. In those cases, we do not want to | 1168 // introducing variables. In those cases, we do not want to |
1155 // perform a runtime call for all variables in the scope | 1169 // perform a runtime call for all variables in the scope |
1156 // containing the eval. | 1170 // containing the eval. |
1157 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { | 1171 if (var->mode() == Variable::DYNAMIC_GLOBAL) { |
1158 EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow); | 1172 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); |
1159 __ jmp(done); | 1173 __ jmp(done); |
1160 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { | 1174 } else if (var->mode() == Variable::DYNAMIC_LOCAL) { |
1161 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot(); | 1175 Variable* local = var->local_if_not_shadowed(); |
1162 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite(); | 1176 __ movq(rax, ContextSlotOperandCheckExtensions(local, slow)); |
1163 if (potential_slot != NULL) { | 1177 if (local->mode() == Variable::CONST) { |
1164 // Generate fast case for locals that rewrite to slots. | 1178 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); |
1165 __ movq(rax, | 1179 __ j(not_equal, done); |
1166 ContextSlotOperandCheckExtensions(potential_slot, slow)); | 1180 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); |
1167 if (potential_slot->var()->mode() == Variable::CONST) { | |
1168 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); | |
1169 __ j(not_equal, done); | |
1170 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); | |
1171 } | |
1172 __ jmp(done); | |
1173 } else if (rewrite != NULL) { | |
1174 // Generate fast case for calls of an argument function. | |
1175 Property* property = rewrite->AsProperty(); | |
1176 if (property != NULL) { | |
1177 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); | |
1178 Literal* key_literal = property->key()->AsLiteral(); | |
1179 if (obj_proxy != NULL && | |
1180 key_literal != NULL && | |
1181 obj_proxy->IsArguments() && | |
1182 key_literal->handle()->IsSmi()) { | |
1183 // Load arguments object if there are no eval-introduced | |
1184 // variables. Then load the argument from the arguments | |
1185 // object using keyed load. | |
1186 __ movq(rdx, | |
1187 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(), | |
1188 slow)); | |
1189 __ Move(rax, key_literal->handle()); | |
1190 Handle<Code> ic = | |
1191 isolate()->builtins()->KeyedLoadIC_Initialize(); | |
1192 __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property)); | |
1193 __ jmp(done); | |
1194 } | |
1195 } | |
1196 } | 1181 } |
| 1182 __ jmp(done); |
1197 } | 1183 } |
1198 } | 1184 } |
1199 | 1185 |
1200 | 1186 |
1201 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { | 1187 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { |
1202 // Record position before possible IC call. | 1188 // Record position before possible IC call. |
1203 SetSourcePosition(proxy->position()); | 1189 SetSourcePosition(proxy->position()); |
1204 Variable* var = proxy->var(); | 1190 Variable* var = proxy->var(); |
1205 | 1191 |
1206 // Three cases: non-this global variables, lookup slots, and all other | 1192 // Three cases: global variables, lookup variables, and all other types of |
1207 // types of slots. | 1193 // variables. |
1208 Slot* slot = var->AsSlot(); | 1194 switch (var->location()) { |
1209 ASSERT((var->is_global() && !var->is_this()) == (slot == NULL)); | 1195 case Variable::UNALLOCATED: { |
| 1196 Comment cmnt(masm_, "Global variable"); |
| 1197 // Use inline caching. Variable name is passed in rcx and the global |
| 1198 // object on the stack. |
| 1199 __ Move(rcx, var->name()); |
| 1200 __ movq(rax, GlobalObjectOperand()); |
| 1201 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 1202 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); |
| 1203 context()->Plug(rax); |
| 1204 break; |
| 1205 } |
1210 | 1206 |
1211 if (slot == NULL) { | 1207 case Variable::PARAMETER: |
1212 Comment cmnt(masm_, "Global variable"); | 1208 case Variable::LOCAL: |
1213 // Use inline caching. Variable name is passed in rcx and the global | 1209 case Variable::CONTEXT: { |
1214 // object on the stack. | 1210 Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot"); |
1215 __ Move(rcx, var->name()); | 1211 if (var->mode() != Variable::LET && var->mode() != Variable::CONST) { |
1216 __ movq(rax, GlobalObjectOperand()); | 1212 context()->Plug(var); |
1217 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 1213 } else { |
1218 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); | 1214 // Let and const need a read barrier. |
1219 context()->Plug(rax); | 1215 Label done; |
| 1216 GetVar(rax, var); |
| 1217 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); |
| 1218 __ j(not_equal, &done, Label::kNear); |
| 1219 if (var->mode() == Variable::LET) { |
| 1220 __ Push(var->name()); |
| 1221 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
| 1222 } else { // Variable::CONST |
| 1223 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); |
| 1224 } |
| 1225 __ bind(&done); |
| 1226 context()->Plug(rax); |
| 1227 } |
| 1228 break; |
| 1229 } |
1220 | 1230 |
1221 } else if (slot != NULL && slot->type() == Slot::LOOKUP) { | 1231 case Variable::LOOKUP: { |
1222 Label done, slow; | 1232 Label done, slow; |
1223 | 1233 // Generate code for loading from variables potentially shadowed |
1224 // Generate code for loading from variables potentially shadowed | 1234 // by eval-introduced variables. |
1225 // by eval-introduced variables. | 1235 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done); |
1226 EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done); | 1236 __ bind(&slow); |
1227 | 1237 Comment cmnt(masm_, "Lookup slot"); |
1228 __ bind(&slow); | 1238 __ push(rsi); // Context. |
1229 Comment cmnt(masm_, "Lookup slot"); | 1239 __ Push(var->name()); |
1230 __ push(rsi); // Context. | 1240 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
1231 __ Push(var->name()); | |
1232 __ CallRuntime(Runtime::kLoadContextSlot, 2); | |
1233 __ bind(&done); | |
1234 | |
1235 context()->Plug(rax); | |
1236 | |
1237 } else { | |
1238 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT) | |
1239 ? "Context slot" | |
1240 : "Stack slot"); | |
1241 if (var->mode() == Variable::CONST) { | |
1242 // Constants may be the hole value if they have not been initialized. | |
1243 // Unhole them. | |
1244 Label done; | |
1245 MemOperand slot_operand = EmitSlotSearch(slot, rax); | |
1246 __ movq(rax, slot_operand); | |
1247 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); | |
1248 __ j(not_equal, &done, Label::kNear); | |
1249 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); | |
1250 __ bind(&done); | 1241 __ bind(&done); |
1251 context()->Plug(rax); | 1242 context()->Plug(rax); |
1252 } else if (var->mode() == Variable::LET) { | 1243 break; |
1253 // Let bindings may be the hole value if they have not been initialized. | |
1254 // Throw a type error in this case. | |
1255 Label done; | |
1256 MemOperand slot_operand = EmitSlotSearch(slot, rax); | |
1257 __ movq(rax, slot_operand); | |
1258 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); | |
1259 __ j(not_equal, &done, Label::kNear); | |
1260 __ Push(var->name()); | |
1261 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
1262 __ bind(&done); | |
1263 context()->Plug(rax); | |
1264 } else { | |
1265 context()->Plug(slot); | |
1266 } | 1244 } |
1267 } | 1245 } |
1268 } | 1246 } |
1269 | 1247 |
1270 | 1248 |
1271 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { | 1249 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { |
1272 Comment cmnt(masm_, "[ RegExpLiteral"); | 1250 Comment cmnt(masm_, "[ RegExpLiteral"); |
1273 Label materialized; | 1251 Label materialized; |
1274 // Registers will be used as follows: | 1252 // Registers will be used as follows: |
1275 // rdi = JS function. | 1253 // rdi = JS function. |
1276 // rcx = literals array. | 1254 // rcx = literals array. |
1277 // rbx = regexp literal. | 1255 // rbx = regexp literal. |
1278 // rax = regexp literal clone. | 1256 // rax = regexp literal clone. |
1279 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 1257 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
1280 __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset)); | 1258 __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset)); |
1281 int literal_offset = | 1259 int literal_offset = |
1282 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; | 1260 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; |
1283 __ movq(rbx, FieldOperand(rcx, literal_offset)); | 1261 __ movq(rbx, FieldOperand(rcx, literal_offset)); |
1284 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 1262 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
1285 __ j(not_equal, &materialized); | 1263 __ j(not_equal, &materialized, Label::kNear); |
1286 | 1264 |
1287 // Create regexp literal using runtime function | 1265 // Create regexp literal using runtime function |
1288 // Result will be in rax. | 1266 // Result will be in rax. |
1289 __ push(rcx); | 1267 __ push(rcx); |
1290 __ Push(Smi::FromInt(expr->literal_index())); | 1268 __ Push(Smi::FromInt(expr->literal_index())); |
1291 __ Push(expr->pattern()); | 1269 __ Push(expr->pattern()); |
1292 __ Push(expr->flags()); | 1270 __ Push(expr->flags()); |
1293 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); | 1271 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); |
1294 __ movq(rbx, rax); | 1272 __ movq(rbx, rax); |
1295 | 1273 |
(...skipping 444 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1740 break; | 1718 break; |
1741 } | 1719 } |
1742 } | 1720 } |
1743 PrepareForBailoutForId(bailout_ast_id, TOS_REG); | 1721 PrepareForBailoutForId(bailout_ast_id, TOS_REG); |
1744 context()->Plug(rax); | 1722 context()->Plug(rax); |
1745 } | 1723 } |
1746 | 1724 |
1747 | 1725 |
1748 void FullCodeGenerator::EmitVariableAssignment(Variable* var, | 1726 void FullCodeGenerator::EmitVariableAssignment(Variable* var, |
1749 Token::Value op) { | 1727 Token::Value op) { |
1750 ASSERT(var != NULL); | 1728 if (var->IsUnallocated()) { |
1751 ASSERT(var->is_global() || var->AsSlot() != NULL); | 1729 // Global var, const, or let. |
1752 | |
1753 if (var->is_global()) { | |
1754 ASSERT(!var->is_this()); | |
1755 // Assignment to a global variable. Use inline caching for the | |
1756 // assignment. Right-hand-side value is passed in rax, variable name in | |
1757 // rcx, and the global object on the stack. | |
1758 __ Move(rcx, var->name()); | 1730 __ Move(rcx, var->name()); |
1759 __ movq(rdx, GlobalObjectOperand()); | 1731 __ movq(rdx, GlobalObjectOperand()); |
1760 Handle<Code> ic = is_strict_mode() | 1732 Handle<Code> ic = is_strict_mode() |
1761 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 1733 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
1762 : isolate()->builtins()->StoreIC_Initialize(); | 1734 : isolate()->builtins()->StoreIC_Initialize(); |
1763 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); | 1735 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); |
1764 | |
1765 } else if (op == Token::INIT_CONST) { | 1736 } else if (op == Token::INIT_CONST) { |
1766 // Like var declarations, const declarations are hoisted to function | 1737 // Const initializers need a write barrier. |
1767 // scope. However, unlike var initializers, const initializers are able | 1738 ASSERT(!var->IsParameter()); // No const parameters. |
1768 // to drill a hole to that function context, even from inside a 'with' | 1739 if (var->IsStackLocal()) { |
1769 // context. We thus bypass the normal static scope lookup. | 1740 Label skip; |
1770 Slot* slot = var->AsSlot(); | 1741 __ movq(rdx, StackOperand(var)); |
1771 Label skip; | 1742 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); |
1772 switch (slot->type()) { | 1743 __ j(not_equal, &skip); |
1773 case Slot::PARAMETER: | 1744 __ movq(StackOperand(var), rax); |
1774 // No const parameters. | 1745 __ bind(&skip); |
1775 UNREACHABLE(); | 1746 } else { |
1776 break; | 1747 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); |
1777 case Slot::LOCAL: | 1748 // Like var declarations, const declarations are hoisted to function |
1778 __ movq(rdx, Operand(rbp, SlotOffset(slot))); | 1749 // scope. However, unlike var initializers, const initializers are |
1779 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); | 1750 // able to drill a hole to that function context, even from inside a |
1780 __ j(not_equal, &skip); | 1751 // 'with' context. We thus bypass the normal static scope lookup for |
1781 __ movq(Operand(rbp, SlotOffset(slot)), rax); | 1752 // var->IsContextSlot(). |
1782 break; | 1753 __ push(rax); |
1783 case Slot::CONTEXT: | 1754 __ push(rsi); |
1784 case Slot::LOOKUP: | 1755 __ Push(var->name()); |
1785 __ push(rax); | 1756 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
1786 __ push(rsi); | |
1787 __ Push(var->name()); | |
1788 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); | |
1789 break; | |
1790 } | 1757 } |
1791 __ bind(&skip); | |
1792 | 1758 |
1793 } else if (var->mode() == Variable::LET && op != Token::INIT_LET) { | 1759 } else if (var->mode() == Variable::LET && op != Token::INIT_LET) { |
1794 // Perform the assignment for non-const variables. Const assignments | 1760 // Non-initializing assignment to let variable needs a write barrier. |
1795 // are simply skipped. | 1761 if (var->IsLookupSlot()) { |
1796 Slot* slot = var->AsSlot(); | 1762 __ push(rax); // Value. |
1797 switch (slot->type()) { | 1763 __ push(rsi); // Context. |
1798 case Slot::PARAMETER: | 1764 __ Push(var->name()); |
1799 case Slot::LOCAL: { | 1765 __ Push(Smi::FromInt(strict_mode_flag())); |
1800 Label assign; | 1766 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
1801 // Check for an initialized let binding. | 1767 } else { |
1802 __ movq(rdx, Operand(rbp, SlotOffset(slot))); | 1768 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
1803 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); | 1769 Label assign; |
1804 __ j(not_equal, &assign); | 1770 MemOperand location = VarOperand(var, rcx); |
1805 __ Push(var->name()); | 1771 __ movq(rdx, location); |
1806 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1772 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); |
1807 // Perform the assignment. | 1773 __ j(not_equal, &assign, Label::kNear); |
1808 __ bind(&assign); | 1774 __ Push(var->name()); |
1809 __ movq(Operand(rbp, SlotOffset(slot)), rax); | 1775 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
1810 break; | 1776 __ bind(&assign); |
| 1777 __ movq(location, rax); |
| 1778 if (var->IsContextSlot()) { |
| 1779 __ movq(rdx, rax); |
| 1780 __ RecordWrite(rcx, Context::SlotOffset(var->index()), rdx, rbx); |
1811 } | 1781 } |
| 1782 } |
1812 | 1783 |
1813 case Slot::CONTEXT: { | |
1814 // Let variables may be the hole value if they have not been | |
1815 // initialized. Throw a type error in this case. | |
1816 Label assign; | |
1817 MemOperand target = EmitSlotSearch(slot, rcx); | |
1818 // Check for an initialized let binding. | |
1819 __ movq(rdx, target); | |
1820 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); | |
1821 __ j(not_equal, &assign, Label::kNear); | |
1822 __ Push(var->name()); | |
1823 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
1824 // Perform the assignment. | |
1825 __ bind(&assign); | |
1826 __ movq(target, rax); | |
1827 // The value of the assignment is in eax. RecordWrite clobbers its | |
1828 // register arguments. | |
1829 __ movq(rdx, rax); | |
1830 int offset = Context::SlotOffset(slot->index()); | |
1831 __ RecordWrite(rcx, offset, rdx, rbx); | |
1832 break; | |
1833 } | |
1834 | |
1835 case Slot::LOOKUP: | |
1836 // Call the runtime for the assignment. | |
1837 __ push(rax); // Value. | |
1838 __ push(rsi); // Context. | |
1839 __ Push(var->name()); | |
1840 __ Push(Smi::FromInt(strict_mode_flag())); | |
1841 __ CallRuntime(Runtime::kStoreContextSlot, 4); | |
1842 break; | |
1843 } | |
1844 } else if (var->mode() != Variable::CONST) { | 1784 } else if (var->mode() != Variable::CONST) { |
1845 // Perform the assignment for non-const variables. Const assignments | 1785 // Assignment to var or initializing assignment to let. |
1846 // are simply skipped. | 1786 if (var->IsStackAllocated()) { |
1847 Slot* slot = var->AsSlot(); | 1787 __ movq(StackOperand(var), rax); |
1848 switch (slot->type()) { | 1788 } else if (var->IsContextSlot()) { |
1849 case Slot::PARAMETER: | 1789 // Preserve the value in rax against the write barrier. |
1850 case Slot::LOCAL: | 1790 __ movq(rdx, rax); |
1851 // Perform the assignment. | 1791 SetVar(var, rdx, rcx, rbx); |
1852 __ movq(Operand(rbp, SlotOffset(slot)), rax); | 1792 } else { |
1853 break; | 1793 ASSERT(var->IsLookupSlot()); |
1854 | 1794 __ push(rax); // Value. |
1855 case Slot::CONTEXT: { | 1795 __ push(rsi); // Context. |
1856 MemOperand target = EmitSlotSearch(slot, rcx); | 1796 __ Push(var->name()); |
1857 // Perform the assignment and issue the write barrier. | 1797 __ Push(Smi::FromInt(strict_mode_flag())); |
1858 __ movq(target, rax); | 1798 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
1859 // The value of the assignment is in rax. RecordWrite clobbers its | |
1860 // register arguments. | |
1861 __ movq(rdx, rax); | |
1862 int offset = Context::SlotOffset(slot->index()); | |
1863 __ RecordWrite(rcx, offset, rdx, rbx); | |
1864 break; | |
1865 } | |
1866 | |
1867 case Slot::LOOKUP: | |
1868 // Call the runtime for the assignment. | |
1869 __ push(rax); // Value. | |
1870 __ push(rsi); // Context. | |
1871 __ Push(var->name()); | |
1872 __ Push(Smi::FromInt(strict_mode_flag())); | |
1873 __ CallRuntime(Runtime::kStoreContextSlot, 4); | |
1874 break; | |
1875 } | 1799 } |
1876 } | 1800 } |
| 1801 // Non-initializing assignments to consts are ignored. |
1877 } | 1802 } |
1878 | 1803 |
1879 | 1804 |
1880 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | 1805 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
1881 // Assignment to a property, using a named store IC. | 1806 // Assignment to a property, using a named store IC. |
1882 Property* prop = expr->target()->AsProperty(); | 1807 Property* prop = expr->target()->AsProperty(); |
1883 ASSERT(prop != NULL); | 1808 ASSERT(prop != NULL); |
1884 ASSERT(prop->key()->AsLiteral() != NULL); | 1809 ASSERT(prop->key()->AsLiteral() != NULL); |
1885 | 1810 |
1886 // If the assignment starts a block of assignments to the same object, | 1811 // If the assignment starts a block of assignments to the same object, |
(...skipping 200 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2087 | 2012 |
2088 | 2013 |
2089 void FullCodeGenerator::VisitCall(Call* expr) { | 2014 void FullCodeGenerator::VisitCall(Call* expr) { |
2090 #ifdef DEBUG | 2015 #ifdef DEBUG |
2091 // We want to verify that RecordJSReturnSite gets called on all paths | 2016 // We want to verify that RecordJSReturnSite gets called on all paths |
2092 // through this function. Avoid early returns. | 2017 // through this function. Avoid early returns. |
2093 expr->return_is_recorded_ = false; | 2018 expr->return_is_recorded_ = false; |
2094 #endif | 2019 #endif |
2095 | 2020 |
2096 Comment cmnt(masm_, "[ Call"); | 2021 Comment cmnt(masm_, "[ Call"); |
2097 Expression* fun = expr->expression(); | 2022 Expression* callee = expr->expression(); |
2098 Variable* var = fun->AsVariableProxy()->AsVariable(); | 2023 VariableProxy* proxy = callee->AsVariableProxy(); |
| 2024 Property* property = callee->AsProperty(); |
2099 | 2025 |
2100 if (var != NULL && var->is_possibly_eval()) { | 2026 if (proxy != NULL && proxy->var()->is_possibly_eval()) { |
2101 // In a call to eval, we first call %ResolvePossiblyDirectEval to | 2027 // In a call to eval, we first call %ResolvePossiblyDirectEval to |
2102 // resolve the function we need to call and the receiver of the | 2028 // resolve the function we need to call and the receiver of the call. |
2103 // call. Then we call the resolved function using the given | 2029 // Then we call the resolved function using the given arguments. |
2104 // arguments. | |
2105 ZoneList<Expression*>* args = expr->arguments(); | 2030 ZoneList<Expression*>* args = expr->arguments(); |
2106 int arg_count = args->length(); | 2031 int arg_count = args->length(); |
2107 { PreservePositionScope pos_scope(masm()->positions_recorder()); | 2032 { PreservePositionScope pos_scope(masm()->positions_recorder()); |
2108 VisitForStackValue(fun); | 2033 VisitForStackValue(callee); |
2109 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot. | 2034 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot. |
2110 | 2035 |
2111 // Push the arguments. | 2036 // Push the arguments. |
2112 for (int i = 0; i < arg_count; i++) { | 2037 for (int i = 0; i < arg_count; i++) { |
2113 VisitForStackValue(args->at(i)); | 2038 VisitForStackValue(args->at(i)); |
2114 } | 2039 } |
2115 | 2040 |
2116 // If we know that eval can only be shadowed by eval-introduced | 2041 // If we know that eval can only be shadowed by eval-introduced |
2117 // variables we attempt to load the global eval function directly | 2042 // variables we attempt to load the global eval function directly in |
2118 // in generated code. If we succeed, there is no need to perform a | 2043 // generated code. If we succeed, there is no need to perform a |
2119 // context lookup in the runtime system. | 2044 // context lookup in the runtime system. |
2120 Label done; | 2045 Label done; |
2121 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) { | 2046 Variable* var = proxy->var(); |
| 2047 if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) { |
2122 Label slow; | 2048 Label slow; |
2123 EmitLoadGlobalSlotCheckExtensions(var->AsSlot(), | 2049 EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow); |
2124 NOT_INSIDE_TYPEOF, | |
2125 &slow); | |
2126 // Push the function and resolve eval. | 2050 // Push the function and resolve eval. |
2127 __ push(rax); | 2051 __ push(rax); |
2128 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); | 2052 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); |
2129 __ jmp(&done); | 2053 __ jmp(&done); |
2130 __ bind(&slow); | 2054 __ bind(&slow); |
2131 } | 2055 } |
2132 | 2056 |
2133 // Push copy of the function (found below the arguments) and | 2057 // Push a copy of the function (found below the arguments) and resolve |
2134 // resolve eval. | 2058 // eval. |
2135 __ push(Operand(rsp, (arg_count + 1) * kPointerSize)); | 2059 __ push(Operand(rsp, (arg_count + 1) * kPointerSize)); |
2136 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); | 2060 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); |
2137 if (done.is_linked()) { | 2061 __ bind(&done); |
2138 __ bind(&done); | |
2139 } | |
2140 | 2062 |
2141 // The runtime call returns a pair of values in rax (function) and | 2063 // The runtime call returns a pair of values in rax (function) and |
2142 // rdx (receiver). Touch up the stack with the right values. | 2064 // rdx (receiver). Touch up the stack with the right values. |
2143 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); | 2065 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); |
2144 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax); | 2066 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax); |
2145 } | 2067 } |
2146 // Record source position for debugger. | 2068 // Record source position for debugger. |
2147 SetSourcePosition(expr->position()); | 2069 SetSourcePosition(expr->position()); |
2148 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; | 2070 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
2149 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT); | 2071 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT); |
2150 __ CallStub(&stub); | 2072 __ CallStub(&stub); |
2151 RecordJSReturnSite(expr); | 2073 RecordJSReturnSite(expr); |
2152 // Restore context register. | 2074 // Restore context register. |
2153 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2075 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
2154 context()->DropAndPlug(1, rax); | 2076 context()->DropAndPlug(1, rax); |
2155 } else if (var != NULL && !var->is_this() && var->is_global()) { | 2077 } else if (proxy != NULL && proxy->var()->IsUnallocated()) { |
2156 // Call to a global variable. | 2078 // Call to a global variable. Push global object as receiver for the |
2157 // Push global object as receiver for the call IC lookup. | 2079 // call IC lookup. |
2158 __ push(GlobalObjectOperand()); | 2080 __ push(GlobalObjectOperand()); |
2159 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); | 2081 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT); |
2160 } else if (var != NULL && var->AsSlot() != NULL && | 2082 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { |
2161 var->AsSlot()->type() == Slot::LOOKUP) { | |
2162 // Call to a lookup slot (dynamically introduced variable). | 2083 // Call to a lookup slot (dynamically introduced variable). |
2163 Label slow, done; | 2084 Label slow, done; |
2164 | 2085 |
2165 { PreservePositionScope scope(masm()->positions_recorder()); | 2086 { PreservePositionScope scope(masm()->positions_recorder()); |
2166 // Generate code for loading from variables potentially shadowed | 2087 // Generate code for loading from variables potentially shadowed by |
2167 // by eval-introduced variables. | 2088 // eval-introduced variables. |
2168 EmitDynamicLoadFromSlotFastCase(var->AsSlot(), | 2089 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); |
2169 NOT_INSIDE_TYPEOF, | |
2170 &slow, | |
2171 &done); | |
2172 | |
2173 __ bind(&slow); | |
2174 } | 2090 } |
2175 // Call the runtime to find the function to call (returned in rax) | 2091 __ bind(&slow); |
2176 // and the object holding it (returned in rdx). | 2092 // Call the runtime to find the function to call (returned in rax) and |
| 2093 // the object holding it (returned in rdx). |
2177 __ push(context_register()); | 2094 __ push(context_register()); |
2178 __ Push(var->name()); | 2095 __ Push(proxy->name()); |
2179 __ CallRuntime(Runtime::kLoadContextSlot, 2); | 2096 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
2180 __ push(rax); // Function. | 2097 __ push(rax); // Function. |
2181 __ push(rdx); // Receiver. | 2098 __ push(rdx); // Receiver. |
2182 | 2099 |
2183 // If fast case code has been generated, emit code to push the | 2100 // If fast case code has been generated, emit code to push the function |
2184 // function and receiver and have the slow path jump around this | 2101 // and receiver and have the slow path jump around this code. |
2185 // code. | |
2186 if (done.is_linked()) { | 2102 if (done.is_linked()) { |
2187 Label call; | 2103 Label call; |
2188 __ jmp(&call, Label::kNear); | 2104 __ jmp(&call, Label::kNear); |
2189 __ bind(&done); | 2105 __ bind(&done); |
2190 // Push function. | 2106 // Push function. |
2191 __ push(rax); | 2107 __ push(rax); |
2192 // The receiver is implicitly the global receiver. Indicate this | 2108 // The receiver is implicitly the global receiver. Indicate this by |
2193 // by passing the hole to the call function stub. | 2109 // passing the hole to the call function stub. |
2194 __ PushRoot(Heap::kTheHoleValueRootIndex); | 2110 __ PushRoot(Heap::kTheHoleValueRootIndex); |
2195 __ bind(&call); | 2111 __ bind(&call); |
2196 } | 2112 } |
2197 | 2113 |
2198 // The receiver is either the global receiver or an object found | 2114 // The receiver is either the global receiver or an object found by |
2199 // by LoadContextSlot. That object could be the hole if the | 2115 // LoadContextSlot. That object could be the hole if the receiver is |
2200 // receiver is implicitly the global object. | 2116 // implicitly the global object. |
2201 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); | 2117 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); |
2202 } else if (fun->AsProperty() != NULL) { | 2118 } else if (property != NULL) { |
2203 // Call to an object property. | 2119 { PreservePositionScope scope(masm()->positions_recorder()); |
2204 Property* prop = fun->AsProperty(); | 2120 VisitForStackValue(property->obj()); |
2205 Literal* key = prop->key()->AsLiteral(); | 2121 } |
2206 if (key != NULL && key->handle()->IsSymbol()) { | 2122 if (property->key()->IsPropertyName()) { |
2207 // Call to a named property, use call IC. | 2123 EmitCallWithIC(expr, |
2208 { PreservePositionScope scope(masm()->positions_recorder()); | 2124 property->key()->AsLiteral()->handle(), |
2209 VisitForStackValue(prop->obj()); | 2125 RelocInfo::CODE_TARGET); |
2210 } | |
2211 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); | |
2212 } else { | 2126 } else { |
2213 // Call to a keyed property. | 2127 EmitKeyedCallWithIC(expr, property->key()); |
2214 { PreservePositionScope scope(masm()->positions_recorder()); | |
2215 VisitForStackValue(prop->obj()); | |
2216 } | |
2217 EmitKeyedCallWithIC(expr, prop->key()); | |
2218 } | 2128 } |
2219 } else { | 2129 } else { |
| 2130 // Call to an arbitrary expression not handled specially above. |
2220 { PreservePositionScope scope(masm()->positions_recorder()); | 2131 { PreservePositionScope scope(masm()->positions_recorder()); |
2221 VisitForStackValue(fun); | 2132 VisitForStackValue(callee); |
2222 } | 2133 } |
2223 // Load global receiver object. | 2134 // Load global receiver object. |
2224 __ movq(rbx, GlobalObjectOperand()); | 2135 __ movq(rbx, GlobalObjectOperand()); |
2225 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); | 2136 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); |
2226 // Emit function call. | 2137 // Emit function call. |
2227 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); | 2138 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); |
2228 } | 2139 } |
2229 | 2140 |
2230 #ifdef DEBUG | 2141 #ifdef DEBUG |
2231 // RecordJSReturnSite should have been called. | 2142 // RecordJSReturnSite should have been called. |
(...skipping 1337 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3569 __ CallRuntime(expr->function(), arg_count); | 3480 __ CallRuntime(expr->function(), arg_count); |
3570 } | 3481 } |
3571 context()->Plug(rax); | 3482 context()->Plug(rax); |
3572 } | 3483 } |
3573 | 3484 |
3574 | 3485 |
3575 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 3486 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
3576 switch (expr->op()) { | 3487 switch (expr->op()) { |
3577 case Token::DELETE: { | 3488 case Token::DELETE: { |
3578 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); | 3489 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
3579 Property* prop = expr->expression()->AsProperty(); | 3490 Property* property = expr->expression()->AsProperty(); |
3580 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); | 3491 VariableProxy* proxy = expr->expression()->AsVariableProxy(); |
3581 | 3492 |
3582 if (prop != NULL) { | 3493 if (property != NULL) { |
3583 VisitForStackValue(prop->obj()); | 3494 VisitForStackValue(property->obj()); |
3584 VisitForStackValue(prop->key()); | 3495 VisitForStackValue(property->key()); |
3585 __ Push(Smi::FromInt(strict_mode_flag())); | 3496 __ Push(Smi::FromInt(strict_mode_flag())); |
3586 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 3497 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
3587 context()->Plug(rax); | 3498 context()->Plug(rax); |
3588 } else if (var != NULL) { | 3499 } else if (proxy != NULL) { |
| 3500 Variable* var = proxy->var(); |
3589 // Delete of an unqualified identifier is disallowed in strict mode | 3501 // Delete of an unqualified identifier is disallowed in strict mode |
3590 // but "delete this" is. | 3502 // but "delete this" is allowed. |
3591 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); | 3503 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); |
3592 if (var->is_global()) { | 3504 if (var->IsUnallocated()) { |
3593 __ push(GlobalObjectOperand()); | 3505 __ push(GlobalObjectOperand()); |
3594 __ Push(var->name()); | 3506 __ Push(var->name()); |
3595 __ Push(Smi::FromInt(kNonStrictMode)); | 3507 __ Push(Smi::FromInt(kNonStrictMode)); |
3596 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 3508 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
3597 context()->Plug(rax); | 3509 context()->Plug(rax); |
3598 } else if (var->AsSlot() != NULL && | 3510 } else if (var->IsStackAllocated() || var->IsContextSlot()) { |
3599 var->AsSlot()->type() != Slot::LOOKUP) { | 3511 // Result of deleting non-global variables is false. 'this' is |
3600 // Result of deleting non-global, non-dynamic variables is false. | 3512 // not really a variable, though we implement it as one. The |
3601 // The subexpression does not have side effects. | 3513 // subexpression does not have side effects. |
3602 context()->Plug(false); | 3514 context()->Plug(var->is_this()); |
3603 } else { | 3515 } else { |
3604 // Non-global variable. Call the runtime to try to delete from the | 3516 // Non-global variable. Call the runtime to try to delete from the |
3605 // context where the variable was introduced. | 3517 // context where the variable was introduced. |
3606 __ push(context_register()); | 3518 __ push(context_register()); |
3607 __ Push(var->name()); | 3519 __ Push(var->name()); |
3608 __ CallRuntime(Runtime::kDeleteContextSlot, 2); | 3520 __ CallRuntime(Runtime::kDeleteContextSlot, 2); |
3609 context()->Plug(rax); | 3521 context()->Plug(rax); |
3610 } | 3522 } |
3611 } else { | 3523 } else { |
3612 // Result of deleting non-property, non-variable reference is true. | 3524 // Result of deleting non-property, non-variable reference is true. |
(...skipping 265 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3878 } | 3790 } |
3879 } | 3791 } |
3880 } | 3792 } |
3881 | 3793 |
3882 | 3794 |
3883 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { | 3795 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { |
3884 VariableProxy* proxy = expr->AsVariableProxy(); | 3796 VariableProxy* proxy = expr->AsVariableProxy(); |
3885 ASSERT(!context()->IsEffect()); | 3797 ASSERT(!context()->IsEffect()); |
3886 ASSERT(!context()->IsTest()); | 3798 ASSERT(!context()->IsTest()); |
3887 | 3799 |
3888 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { | 3800 if (proxy != NULL && proxy->var()->IsUnallocated()) { |
3889 Comment cmnt(masm_, "Global variable"); | 3801 Comment cmnt(masm_, "Global variable"); |
3890 __ Move(rcx, proxy->name()); | 3802 __ Move(rcx, proxy->name()); |
3891 __ movq(rax, GlobalObjectOperand()); | 3803 __ movq(rax, GlobalObjectOperand()); |
3892 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 3804 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
3893 // Use a regular load, not a contextual load, to avoid a reference | 3805 // Use a regular load, not a contextual load, to avoid a reference |
3894 // error. | 3806 // error. |
3895 __ call(ic); | 3807 __ call(ic); |
3896 PrepareForBailout(expr, TOS_REG); | 3808 PrepareForBailout(expr, TOS_REG); |
3897 context()->Plug(rax); | 3809 context()->Plug(rax); |
3898 } else if (proxy != NULL && | 3810 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { |
3899 proxy->var()->AsSlot() != NULL && | |
3900 proxy->var()->AsSlot()->type() == Slot::LOOKUP) { | |
3901 Label done, slow; | 3811 Label done, slow; |
3902 | 3812 |
3903 // Generate code for loading from variables potentially shadowed | 3813 // Generate code for loading from variables potentially shadowed |
3904 // by eval-introduced variables. | 3814 // by eval-introduced variables. |
3905 Slot* slot = proxy->var()->AsSlot(); | 3815 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); |
3906 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done); | |
3907 | 3816 |
3908 __ bind(&slow); | 3817 __ bind(&slow); |
3909 __ push(rsi); | 3818 __ push(rsi); |
3910 __ Push(proxy->name()); | 3819 __ Push(proxy->name()); |
3911 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); | 3820 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); |
3912 PrepareForBailout(expr, TOS_REG); | 3821 PrepareForBailout(expr, TOS_REG); |
3913 __ bind(&done); | 3822 __ bind(&done); |
3914 | 3823 |
3915 context()->Plug(rax); | 3824 context()->Plug(rax); |
3916 } else { | 3825 } else { |
(...skipping 295 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4212 __ jmp(rdx); | 4121 __ jmp(rdx); |
4213 } | 4122 } |
4214 | 4123 |
4215 | 4124 |
4216 #undef __ | 4125 #undef __ |
4217 | 4126 |
4218 | 4127 |
4219 } } // namespace v8::internal | 4128 } } // namespace v8::internal |
4220 | 4129 |
4221 #endif // V8_TARGET_ARCH_X64 | 4130 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |