Chromium Code Reviews

Side by Side Diff: src/x64/full-codegen-x64.cc

Issue 7860035: Merge bleeding edge up to 9192 into the GC branch. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 3 months ago
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 27 matching lines...)
38 #include "scopes.h" 38 #include "scopes.h"
39 #include "stub-cache.h" 39 #include "stub-cache.h"
40 40
41 namespace v8 { 41 namespace v8 {
42 namespace internal { 42 namespace internal {
43 43
44 #define __ ACCESS_MASM(masm_) 44 #define __ ACCESS_MASM(masm_)
45 45
46 46
47 static unsigned GetPropertyId(Property* property) { 47 static unsigned GetPropertyId(Property* property) {
48 if (property->is_synthetic()) return AstNode::kNoNumber;
49 return property->id(); 48 return property->id();
50 } 49 }
51 50
52 51
53 class JumpPatchSite BASE_EMBEDDED { 52 class JumpPatchSite BASE_EMBEDDED {
54 public: 53 public:
55 explicit JumpPatchSite(MacroAssembler* masm) 54 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
56 : masm_(masm) {
57 #ifdef DEBUG 55 #ifdef DEBUG
58 info_emitted_ = false; 56 info_emitted_ = false;
59 #endif 57 #endif
60 } 58 }
61 59
62 ~JumpPatchSite() { 60 ~JumpPatchSite() {
63 ASSERT(patch_site_.is_bound() == info_emitted_); 61 ASSERT(patch_site_.is_bound() == info_emitted_);
64 } 62 }
65 63
66 void EmitJumpIfNotSmi(Register reg, 64 void EmitJumpIfNotSmi(Register reg,
(...skipping 114 matching lines...)
181 __ CallRuntime(Runtime::kNewFunctionContext, 1); 179 __ CallRuntime(Runtime::kNewFunctionContext, 1);
182 } 180 }
183 function_in_register = false; 181 function_in_register = false;
184 // Context is returned in both rax and rsi. It replaces the context 182 // Context is returned in both rax and rsi. It replaces the context
185 // passed to us. It's saved in the stack and kept live in rsi. 183 // passed to us. It's saved in the stack and kept live in rsi.
186 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); 184 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
187 185
188 // Copy any necessary parameters into the context. 186 // Copy any necessary parameters into the context.
189 int num_parameters = info->scope()->num_parameters(); 187 int num_parameters = info->scope()->num_parameters();
190 for (int i = 0; i < num_parameters; i++) { 188 for (int i = 0; i < num_parameters; i++) {
191 Slot* slot = scope()->parameter(i)->AsSlot(); 189 Variable* var = scope()->parameter(i);
192 if (slot != NULL && slot->type() == Slot::CONTEXT) { 190 if (var->IsContextSlot()) {
193 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 191 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
194 (num_parameters - 1 - i) * kPointerSize; 192 (num_parameters - 1 - i) * kPointerSize;
195 // Load parameter from stack. 193 // Load parameter from stack.
196 __ movq(rax, Operand(rbp, parameter_offset)); 194 __ movq(rax, Operand(rbp, parameter_offset));
197 // Store it in the context. 195 // Store it in the context.
198 int context_offset = Context::SlotOffset(slot->index()); 196 int context_offset = Context::SlotOffset(var->index());
199 __ movq(Operand(rsi, context_offset), rax); 197 __ movq(Operand(rsi, context_offset), rax);
200 // Update the write barrier. This clobbers rax and rbx. 198 // Update the write barrier. This clobbers rax and rbx.
201 __ RecordWriteContextSlot( 199 __ RecordWriteContextSlot(
202 rsi, context_offset, rax, rbx, kDontSaveFPRegs); 200 rsi, context_offset, rax, rbx, kDontSaveFPRegs);
203 } 201 }
204 } 202 }
205 } 203 }
206 204
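
The loop above copies each context-allocated parameter out of its caller-frame slot. A minimal standalone sketch of that offset arithmetic, assuming the usual x64 layout of two slots (saved rbp and return address) between rbp and the incoming arguments; illustrative only, not V8 code:

    #include <cstdio>

    const int kPointerSize = 8;
    const int kCallerSPOffset = 2 * kPointerSize;  // saved rbp + return address (assumed)

    // Offset of (zero-based) parameter i from rbp, as computed above: the last
    // parameter was pushed last and so sits closest to the frame.
    int ParameterOffset(int i, int num_parameters) {
      return kCallerSPOffset + (num_parameters - 1 - i) * kPointerSize;
    }

    int main() {
      for (int i = 0; i < 3; i++)
        std::printf("param %d at rbp+%d\n", i, ParameterOffset(i, 3));
      // param 0 at rbp+32, param 1 at rbp+24, param 2 at rbp+16
      return 0;
    }
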
207 // Possibly allocate an arguments object. 205 // Possibly allocate an arguments object.
208 Variable* arguments = scope()->arguments(); 206 Variable* arguments = scope()->arguments();
(...skipping 15 matching lines...)
224 __ Push(Smi::FromInt(num_parameters)); 222 __ Push(Smi::FromInt(num_parameters));
225 // Arguments to ArgumentsAccessStub: 223 // Arguments to ArgumentsAccessStub:
226 // function, receiver address, parameter count. 224 // function, receiver address, parameter count.
227 // The stub will rewrite receiver and parameter count if the previous 225 // The stub will rewrite receiver and parameter count if the previous
228 // stack frame was an arguments adaptor frame. 226 // stack frame was an arguments adaptor frame.
229 ArgumentsAccessStub stub( 227 ArgumentsAccessStub stub(
230 is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT 228 is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
231 : ArgumentsAccessStub::NEW_NON_STRICT_SLOW); 229 : ArgumentsAccessStub::NEW_NON_STRICT_SLOW);
232 __ CallStub(&stub); 230 __ CallStub(&stub);
233 231
234 Move(arguments->AsSlot(), rax, rbx, rdx); 232 SetVar(arguments, rax, rbx, rdx);
235 } 233 }
236 234
237 if (FLAG_trace) { 235 if (FLAG_trace) {
238 __ CallRuntime(Runtime::kTraceEnter, 0); 236 __ CallRuntime(Runtime::kTraceEnter, 0);
239 } 237 }
240 238
241 // Visit the declarations and body unless there is an illegal 239 // Visit the declarations and body unless there is an illegal
242 // redeclaration. 240 // redeclaration.
243 if (scope()->HasIllegalRedeclaration()) { 241 if (scope()->HasIllegalRedeclaration()) {
244 Comment cmnt(masm_, "[ Declarations"); 242 Comment cmnt(masm_, "[ Declarations");
245 scope()->VisitIllegalRedeclaration(this); 243 scope()->VisitIllegalRedeclaration(this);
244
246 } else { 245 } else {
246 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
247 { Comment cmnt(masm_, "[ Declarations"); 247 { Comment cmnt(masm_, "[ Declarations");
248 // For named function expressions, declare the function name as a 248 // For named function expressions, declare the function name as a
249 // constant. 249 // constant.
250 if (scope()->is_function_scope() && scope()->function() != NULL) { 250 if (scope()->is_function_scope() && scope()->function() != NULL) {
251 EmitDeclaration(scope()->function(), Variable::CONST, NULL); 251 int ignored = 0;
252 EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored);
252 } 253 }
253 VisitDeclarations(scope()->declarations()); 254 VisitDeclarations(scope()->declarations());
254 } 255 }
255 256
256 { Comment cmnt(masm_, "[ Stack check"); 257 { Comment cmnt(masm_, "[ Stack check");
257 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); 258 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
258 Label ok; 259 Label ok;
259 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 260 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
260 __ j(above_equal, &ok, Label::kNear); 261 __ j(above_equal, &ok, Label::kNear);
261 StackCheckStub stub; 262 StackCheckStub stub;
262 __ CallStub(&stub); 263 __ CallStub(&stub);
263 __ bind(&ok); 264 __ bind(&ok);
264 } 265 }
265 266
266 { Comment cmnt(masm_, "[ Body"); 267 { Comment cmnt(masm_, "[ Body");
267 ASSERT(loop_depth() == 0); 268 ASSERT(loop_depth() == 0);
(...skipping 79 matching lines...)
347 } 348 }
348 // Check that the size of the code used for returning is large enough 349 // Check that the size of the code used for returning is large enough
349 // for the debugger's requirements. 350 // for the debugger's requirements.
350 ASSERT(Assembler::kJSReturnSequenceLength <= 351 ASSERT(Assembler::kJSReturnSequenceLength <=
351 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); 352 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
352 #endif 353 #endif
353 } 354 }
354 } 355 }
355 356
356 357
357 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { 358 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
359 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
358 } 360 }
359 361
360 362
361 void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const { 363 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
362 MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register()); 364 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
363 __ movq(result_register(), slot_operand); 365 codegen()->GetVar(result_register(), var);
364 } 366 }
365 367
366 368
367 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const { 369 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
368 MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register()); 370 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
369 __ push(slot_operand); 371 MemOperand operand = codegen()->VarOperand(var, result_register());
372 __ push(operand);
370 } 373 }
371 374
372 375
373 void FullCodeGenerator::TestContext::Plug(Slot* slot) const { 376 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
374 codegen()->Move(result_register(), slot); 377 codegen()->GetVar(result_register(), var);
375 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); 378 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
376 codegen()->DoTest(this); 379 codegen()->DoTest(this);
377 } 380 }
378 381
379 382
380 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 383 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
381 } 384 }
382 385
383 386
384 void FullCodeGenerator::AccumulatorValueContext::Plug( 387 void FullCodeGenerator::AccumulatorValueContext::Plug(
(...skipping 198 matching lines...)
583 __ j(cc, if_true); 586 __ j(cc, if_true);
584 } else if (if_true == fall_through) { 587 } else if (if_true == fall_through) {
585 __ j(NegateCondition(cc), if_false); 588 __ j(NegateCondition(cc), if_false);
586 } else { 589 } else {
587 __ j(cc, if_true); 590 __ j(cc, if_true);
588 __ jmp(if_false); 591 __ jmp(if_false);
589 } 592 }
590 } 593 }
591 594
592 595
593 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) { 596 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
594 switch (slot->type()) { 597 ASSERT(var->IsStackAllocated());
595 case Slot::PARAMETER: 598 // Offset is negative because higher indexes are at lower addresses.
596 case Slot::LOCAL: 599 int offset = -var->index() * kPointerSize;
597 return Operand(rbp, SlotOffset(slot)); 600 // Adjust by a (parameter or local) base offset.
598 case Slot::CONTEXT: { 601 if (var->IsParameter()) {
599 int context_chain_length = 602 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
600 scope()->ContextChainLength(slot->var()->scope()); 603 } else {
601 __ LoadContext(scratch, context_chain_length); 604 offset += JavaScriptFrameConstants::kLocal0Offset;
602 return ContextOperand(scratch, slot->index());
603 }
604 case Slot::LOOKUP:
605 UNREACHABLE();
606 } 605 }
607 UNREACHABLE(); 606 return Operand(rbp, offset);
608 return Operand(rax, 0);
609 } 607 }
610 608
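
StackOperand folds parameters and locals into a single signed rbp-relative offset. A sketch checking that its math agrees with the parameter-copy loop in the prologue, with the kCallerSPOffset and kLocal0Offset values assumed for illustration (not taken from V8's headers):

    #include <cassert>

    const int kPointerSize = 8;
    const int kCallerSPOffset = 2 * kPointerSize;  // assumed, as in the note above
    const int kLocal0Offset = -3 * kPointerSize;   // assumed first local slot

    int StackSlotOffset(bool is_parameter, int index, int num_parameters) {
      int offset = -index * kPointerSize;  // higher indexes at lower addresses
      if (is_parameter) {
        // Rebase index 0 onto the farthest parameter slot,
        // kCallerSPOffset + (num_parameters - 1) * kPointerSize.
        offset += (num_parameters + 1) * kPointerSize;
      } else {
        offset += kLocal0Offset;
      }
      return offset;
    }

    int main() {
      const int n = 3;
      for (int i = 0; i < n; i++)  // agrees with the parameter-copy loop's math
        assert(StackSlotOffset(true, i, n) ==
               kCallerSPOffset + (n - 1 - i) * kPointerSize);
      assert(StackSlotOffset(false, 0, n) == kLocal0Offset);
      return 0;
    }
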
611 609
612 void FullCodeGenerator::Move(Register destination, Slot* source) { 610 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
613 MemOperand location = EmitSlotSearch(source, destination); 611 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
614 __ movq(destination, location); 612 if (var->IsContextSlot()) {
613 int context_chain_length = scope()->ContextChainLength(var->scope());
614 __ LoadContext(scratch, context_chain_length);
615 return ContextOperand(scratch, var->index());
616 } else {
617 return StackOperand(var);
618 }
615 } 619 }
616 620
617 621
618 void FullCodeGenerator::Move(Slot* dst, 622 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
619 Register src, 623 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
620 Register scratch1, 624 MemOperand location = VarOperand(var, dest);
621 Register scratch2) { 625 __ movq(dest, location);
622 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented. 626 }
623 ASSERT(!scratch1.is(src) && !scratch2.is(src)); 627
624 MemOperand location = EmitSlotSearch(dst, scratch1); 628
629 void FullCodeGenerator::SetVar(Variable* var,
630 Register src,
631 Register scratch0,
632 Register scratch1) {
633 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
634 ASSERT(!scratch0.is(src));
635 ASSERT(!scratch0.is(scratch1));
636 ASSERT(!scratch1.is(src));
637 MemOperand location = VarOperand(var, scratch0);
625 __ movq(location, src); 638 __ movq(location, src);
626 639
627 // Emit the write barrier code if the location is in the heap. 640 // Emit the write barrier code if the location is in the heap.
628 if (dst->type() == Slot::CONTEXT) { 641 if (var->IsContextSlot()) {
629 int offset = Context::SlotOffset(dst->index()); 642 int offset = Context::SlotOffset(var->index());
630 __ RecordWriteContextSlot(scratch1, offset, src, scratch2, kDontSaveFPRegs); 643 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
631 } 644 }
632 } 645 }
633 646
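
For context slots, VarOperand first walks ContextChainLength previous-context links and only then indexes the slot; SetVar additionally emits the write barrier because the store targets a heap object. A stand-in model of the walk (types are placeholders, not V8's):

    // Stand-in model of the walk behind VarOperand/LoadContext.
    struct Context {
      Context* previous;  // the Context::PREVIOUS_INDEX link
      void* slots[16];    // slot storage, indexed by Variable::index()
    };

    void** ContextSlot(Context* current, int chain_length, int index) {
      // __ LoadContext(scratch, chain_length) hops this many links ...
      for (int i = 0; i < chain_length; i++) current = current->previous;
      // ... and ContextOperand(scratch, index) addresses the slot.
      return &current->slots[index];
    }
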
634 647
635 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, 648 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
636 bool should_normalize, 649 bool should_normalize,
637 Label* if_true, 650 Label* if_true,
638 Label* if_false) { 651 Label* if_false) {
639 // Only prepare for bailouts before splits if we're in a test 652 // Only prepare for bailouts before splits if we're in a test
640 // context. Otherwise, we let the Visit function deal with the 653 // context. Otherwise, we let the Visit function deal with the
(...skipping 10 matching lines...) Expand all
651 } 664 }
652 665
653 if (should_normalize) { 666 if (should_normalize) {
654 __ CompareRoot(rax, Heap::kTrueValueRootIndex); 667 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
655 Split(equal, if_true, if_false, NULL); 668 Split(equal, if_true, if_false, NULL);
656 __ bind(&skip); 669 __ bind(&skip);
657 } 670 }
658 } 671 }
659 672
660 673
661 void FullCodeGenerator::EmitDeclaration(Variable* variable, 674 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
662 Variable::Mode mode, 675 Variable::Mode mode,
663 FunctionLiteral* function) { 676 FunctionLiteral* function,
664 Comment cmnt(masm_, "[ Declaration"); 677 int* global_count) {
665 ASSERT(variable != NULL); // Must have been resolved. 678 // If it was not possible to allocate the variable at compile time, we
666 Slot* slot = variable->AsSlot(); 679 // need to "declare" it at runtime to make sure it actually exists in the
667 Property* prop = variable->AsProperty(); 680 // local context.
681 Variable* variable = proxy->var();
682 switch (variable->location()) {
683 case Variable::UNALLOCATED:
684 ++(*global_count);
685 break;
668 686
669 if (slot != NULL) { 687 case Variable::PARAMETER:
670 switch (slot->type()) { 688 case Variable::LOCAL:
671 case Slot::PARAMETER: 689 if (function != NULL) {
672 case Slot::LOCAL: 690 Comment cmnt(masm_, "[ Declaration");
673 if (mode == Variable::CONST) { 691 VisitForAccumulatorValue(function);
674 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); 692 __ movq(StackOperand(variable), result_register());
675 __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister); 693 } else if (mode == Variable::CONST || mode == Variable::LET) {
676 } else if (function != NULL) { 694 Comment cmnt(masm_, "[ Declaration");
677 VisitForAccumulatorValue(function); 695 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
678 __ movq(Operand(rbp, SlotOffset(slot)), result_register()); 696 __ movq(StackOperand(variable), kScratchRegister);
679 } 697 }
680 break; 698 break;
681 699
682 case Slot::CONTEXT: 700 case Variable::CONTEXT:
683 // We bypass the general EmitSlotSearch because we know more about 701 // The variable in the decl always resides in the current function
684 // this specific context. 702 // context.
703 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
704 if (FLAG_debug_code) {
705 // Check that we're not inside a with or catch context.
706 __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
707 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
708 __ Check(not_equal, "Declaration in with context.");
709 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
710 __ Check(not_equal, "Declaration in catch context.");
711 }
712 if (function != NULL) {
713 Comment cmnt(masm_, "[ Declaration");
714 VisitForAccumulatorValue(function);
715 __ movq(ContextOperand(rsi, variable->index()), result_register());
716 int offset = Context::SlotOffset(variable->index());
717 // We know that we have written a function, which is not a smi.
718 __ RecordWriteContextSlot(rsi,
719 offset,
720 result_register(),
721 rcx,
722 kDontSaveFPRegs,
723 EMIT_REMEMBERED_SET,
724 OMIT_SMI_CHECK);
725 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
726 } else if (mode == Variable::CONST || mode == Variable::LET) {
727 Comment cmnt(masm_, "[ Declaration");
728 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
729 __ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
730 // No write barrier since the hole value is in old space.
731 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
732 }
733 break;
685 734
686 // The variable in the decl always resides in the current function 735 case Variable::LOOKUP: {
687 // context. 736 Comment cmnt(masm_, "[ Declaration");
688 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); 737 __ push(rsi);
689 if (FLAG_debug_code) { 738 __ Push(variable->name());
690 // Check that we're not inside a with or catch context. 739 // Declaration nodes are always introduced in one of three modes.
691 __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset)); 740 ASSERT(mode == Variable::VAR ||
692 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex); 741 mode == Variable::CONST ||
693 __ Check(not_equal, "Declaration in with context."); 742 mode == Variable::LET);
694 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex); 743 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
695 __ Check(not_equal, "Declaration in catch context."); 744 __ Push(Smi::FromInt(attr));
696 } 745 // Push initial value, if any.
697 if (mode == Variable::CONST) { 746 // Note: For variables we must not push an initial value (such as
698 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); 747 // 'undefined') because we may have a (legal) redeclaration and we
699 __ movq(ContextOperand(rsi, slot->index()), kScratchRegister); 748 // must not destroy the current value.
700 // No write barrier since the hole value is in old space. 749 if (function != NULL) {
701 } else if (function != NULL) { 750 VisitForStackValue(function);
702 VisitForAccumulatorValue(function); 751 } else if (mode == Variable::CONST || mode == Variable::LET) {
703 __ movq(ContextOperand(rsi, slot->index()), result_register()); 752 __ PushRoot(Heap::kTheHoleValueRootIndex);
704 int offset = Context::SlotOffset(slot->index()); 753 } else {
705 // We know that we have written a function, which is not a smi. 754 __ Push(Smi::FromInt(0)); // Indicates no initial value.
706 __ RecordWriteContextSlot(rsi,
707 offset,
708 result_register(),
709 rcx,
710 kDontSaveFPRegs,
711 EMIT_REMEMBERED_SET,
712 OMIT_SMI_CHECK);
713 }
714 break;
715
716 case Slot::LOOKUP: {
717 __ push(rsi);
718 __ Push(variable->name());
719 // Declaration nodes are always introduced in one of two modes.
720 ASSERT(mode == Variable::VAR ||
721 mode == Variable::CONST ||
722 mode == Variable::LET);
723 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
724 __ Push(Smi::FromInt(attr));
725 // Push initial value, if any.
726 // Note: For variables we must not push an initial value (such as
727 // 'undefined') because we may have a (legal) redeclaration and we
728 // must not destroy the current value.
729 if (mode == Variable::CONST) {
730 __ PushRoot(Heap::kTheHoleValueRootIndex);
731 } else if (function != NULL) {
732 VisitForStackValue(function);
733 } else {
734 __ Push(Smi::FromInt(0)); // no initial value!
735 }
736 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
737 break;
738 } 755 }
739 } 756 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
740 757 break;
741 } else if (prop != NULL) {
742 // A const declaration aliasing a parameter is an illegal redeclaration.
743 ASSERT(mode != Variable::CONST);
744 if (function != NULL) {
745 // We are declaring a function that rewrites to a property.
746 // Use (keyed) IC to set the initial value. We cannot visit the
747 // rewrite because it's shared and we risk recording duplicate AST
748 // IDs for bailouts from optimized code.
749 ASSERT(prop->obj()->AsVariableProxy() != NULL);
750 { AccumulatorValueContext for_object(this);
751 EmitVariableLoad(prop->obj()->AsVariableProxy());
752 }
753 __ push(rax);
754 VisitForAccumulatorValue(function);
755 __ pop(rdx);
756 ASSERT(prop->key()->AsLiteral() != NULL &&
757 prop->key()->AsLiteral()->handle()->IsSmi());
758 __ Move(rcx, prop->key()->AsLiteral()->handle());
759
760 Handle<Code> ic = is_strict_mode()
761 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
762 : isolate()->builtins()->KeyedStoreIC_Initialize();
763 __ call(ic);
764 } 758 }
765 } 759 }
766 } 760 }
767 761
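
The rewritten EmitDeclaration dispatches on Variable::location() rather than Slot type, and globals are merely counted here (via *global_count) so DeclareGlobals can process them in one batch. The initial-value rule, common to all locations, as a stand-in summary (not V8 code):

    // Which initial value a declaration stores, per the cases above.
    enum Mode { VAR, CONST, LET };
    enum Init { FUNCTION_VALUE, THE_HOLE, NONE };

    Init InitialValue(bool has_function, Mode mode) {
      if (has_function) return FUNCTION_VALUE;            // named function: its closure
      if (mode == CONST || mode == LET) return THE_HOLE;  // read barrier checks for this
      return NONE;  // plain var: never clobber a (legal) redeclaration
    }
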
768 762
769 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { 763 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
770 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
771 }
772 764
773 765
774 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 766 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
775 // Call the runtime to declare the globals. 767 // Call the runtime to declare the globals.
776 __ push(rsi); // The context is the first argument. 768 __ push(rsi); // The context is the first argument.
777 __ Push(pairs); 769 __ Push(pairs);
778 __ Push(Smi::FromInt(is_eval() ? 1 : 0)); 770 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
779 __ Push(Smi::FromInt(strict_mode_flag())); 771 __ CallRuntime(Runtime::kDeclareGlobals, 3);
780 __ CallRuntime(Runtime::kDeclareGlobals, 4);
781 // Return value is ignored. 772 // Return value is ignored.
782 } 773 }
783 774
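
DeclareGlobals now passes one packed flags Smi instead of separate is_eval and strict-mode arguments, dropping the runtime call's arity from 4 to 3. A plausible sketch of such packing; the constant names and bit positions here are assumptions, not V8's actual layout:

    // Illustrative packing only; the real layout lives in DeclareGlobalsFlags().
    const int kDeclareGlobalsEvalBit = 1 << 0;    // assumed name/bit
    const int kDeclareGlobalsStrictBit = 1 << 1;  // assumed name/bit

    int DeclareGlobalsFlags(bool is_eval, bool is_strict) {
      return (is_eval ? kDeclareGlobalsEvalBit : 0) |
             (is_strict ? kDeclareGlobalsStrictBit : 0);
    }
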
784 775
785 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 776 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
786 Comment cmnt(masm_, "[ SwitchStatement"); 777 Comment cmnt(masm_, "[ SwitchStatement");
787 Breakable nested_statement(this, stmt); 778 Breakable nested_statement(this, stmt);
788 SetStatementPosition(stmt); 779 SetStatementPosition(stmt);
789 780
790 // Keep the switch value on the stack until a case matches. 781 // Keep the switch value on the stack until a case matches.
(...skipping 284 matching lines...)
1075 context()->Plug(rax); 1066 context()->Plug(rax);
1076 } 1067 }
1077 1068
1078 1069
1079 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1070 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1080 Comment cmnt(masm_, "[ VariableProxy"); 1071 Comment cmnt(masm_, "[ VariableProxy");
1081 EmitVariableLoad(expr); 1072 EmitVariableLoad(expr);
1082 } 1073 }
1083 1074
1084 1075
1085 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( 1076 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1086 Slot* slot, 1077 TypeofState typeof_state,
1087 TypeofState typeof_state, 1078 Label* slow) {
1088 Label* slow) {
1089 Register context = rsi; 1079 Register context = rsi;
1090 Register temp = rdx; 1080 Register temp = rdx;
1091 1081
1092 Scope* s = scope(); 1082 Scope* s = scope();
1093 while (s != NULL) { 1083 while (s != NULL) {
1094 if (s->num_heap_slots() > 0) { 1084 if (s->num_heap_slots() > 0) {
1095 if (s->calls_eval()) { 1085 if (s->calls_eval()) {
1096 // Check that extension is NULL. 1086 // Check that extension is NULL.
1097 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), 1087 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1098 Immediate(0)); 1088 Immediate(0));
(...skipping 29 matching lines...) Expand all
1128 __ j(not_equal, slow); 1118 __ j(not_equal, slow);
1129 // Load next context in chain. 1119 // Load next context in chain.
1130 __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX)); 1120 __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1131 __ jmp(&next); 1121 __ jmp(&next);
1132 __ bind(&fast); 1122 __ bind(&fast);
1133 } 1123 }
1134 1124
1135 // All extension objects were empty and it is safe to use a global 1125 // All extension objects were empty and it is safe to use a global
1136 // load IC call. 1126 // load IC call.
1137 __ movq(rax, GlobalObjectOperand()); 1127 __ movq(rax, GlobalObjectOperand());
1138 __ Move(rcx, slot->var()->name()); 1128 __ Move(rcx, var->name());
1139 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1129 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1140 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) 1130 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1141 ? RelocInfo::CODE_TARGET 1131 ? RelocInfo::CODE_TARGET
1142 : RelocInfo::CODE_TARGET_CONTEXT; 1132 : RelocInfo::CODE_TARGET_CONTEXT;
1143 __ call(ic, mode); 1133 __ call(ic, mode);
1144 } 1134 }
1145 1135
1146 1136
1147 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( 1137 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1148 Slot* slot, 1138 Label* slow) {
1149 Label* slow) { 1139 ASSERT(var->IsContextSlot());
1150 ASSERT(slot->type() == Slot::CONTEXT);
1151 Register context = rsi; 1140 Register context = rsi;
1152 Register temp = rbx; 1141 Register temp = rbx;
1153 1142
1154 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { 1143 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1155 if (s->num_heap_slots() > 0) { 1144 if (s->num_heap_slots() > 0) {
1156 if (s->calls_eval()) { 1145 if (s->calls_eval()) {
1157 // Check that extension is NULL. 1146 // Check that extension is NULL.
1158 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), 1147 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1159 Immediate(0)); 1148 Immediate(0));
1160 __ j(not_equal, slow); 1149 __ j(not_equal, slow);
1161 } 1150 }
1162 __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX)); 1151 __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1163 // Walk the rest of the chain without clobbering rsi. 1152 // Walk the rest of the chain without clobbering rsi.
1164 context = temp; 1153 context = temp;
1165 } 1154 }
1166 } 1155 }
1167 // Check that last extension is NULL. 1156 // Check that last extension is NULL.
1168 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0)); 1157 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1169 __ j(not_equal, slow); 1158 __ j(not_equal, slow);
1170 1159
1171 // This function is used only for loads, not stores, so it's safe to 1160 // This function is used only for loads, not stores, so it's safe to
1172 // return an rsi-based operand (the write barrier cannot be allowed to 1161 // return an rsi-based operand (the write barrier cannot be allowed to
1173 // destroy the rsi register). 1162 // destroy the rsi register).
1174 return ContextOperand(context, slot->index()); 1163 return ContextOperand(context, var->index());
1175 } 1164 }
1176 1165
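
Both extension-check helpers walk the enclosing scopes and, for every scope that owns a context and calls eval, emit a runtime null-check of that context's extension slot; a non-null extension means eval may have introduced a shadowing binding. A stand-in predicate folding the compile-time scope walk and the runtime checks together (types are placeholders, not V8's):

    struct Scope {
      Scope* outer;
      bool calls_eval;     // an eval here may have added bindings at runtime
      int num_heap_slots;  // > 0: this scope owns a context
      void* extension;     // runtime extension object, if eval added bindings
    };

    bool WalkIsSafe(Scope* from, Scope* target) {
      for (Scope* s = from; s != target; s = s->outer) {
        if (s->num_heap_slots > 0 && s->calls_eval && s->extension != nullptr)
          return false;  // the emitted cmpq / j(not_equal, slow) pair
      }
      return target->extension == nullptr;  // "Check that last extension is NULL."
    }
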
1177 1166
1178 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( 1167 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1179 Slot* slot, 1168 TypeofState typeof_state,
1180 TypeofState typeof_state, 1169 Label* slow,
1181 Label* slow, 1170 Label* done) {
1182 Label* done) {
1183 // Generate fast-case code for variables that might be shadowed by 1171 // Generate fast-case code for variables that might be shadowed by
1184 // eval-introduced variables. Eval is used a lot without 1172 // eval-introduced variables. Eval is used a lot without
1185 // introducing variables. In those cases, we do not want to 1173 // introducing variables. In those cases, we do not want to
1186 // perform a runtime call for all variables in the scope 1174 // perform a runtime call for all variables in the scope
1187 // containing the eval. 1175 // containing the eval.
1188 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { 1176 if (var->mode() == Variable::DYNAMIC_GLOBAL) {
1189 EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow); 1177 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1190 __ jmp(done); 1178 __ jmp(done);
1191 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { 1179 } else if (var->mode() == Variable::DYNAMIC_LOCAL) {
1192 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot(); 1180 Variable* local = var->local_if_not_shadowed();
1193 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite(); 1181 __ movq(rax, ContextSlotOperandCheckExtensions(local, slow));
1194 if (potential_slot != NULL) { 1182 if (local->mode() == Variable::CONST) {
1195 // Generate fast case for locals that rewrite to slots. 1183 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1196 __ movq(rax, 1184 __ j(not_equal, done);
1197 ContextSlotOperandCheckExtensions(potential_slot, slow)); 1185 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1198 if (potential_slot->var()->mode() == Variable::CONST) {
1199 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1200 __ j(not_equal, done);
1201 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1202 }
1203 __ jmp(done);
1204 } else if (rewrite != NULL) {
1205 // Generate fast case for calls of an argument function.
1206 Property* property = rewrite->AsProperty();
1207 if (property != NULL) {
1208 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1209 Literal* key_literal = property->key()->AsLiteral();
1210 if (obj_proxy != NULL &&
1211 key_literal != NULL &&
1212 obj_proxy->IsArguments() &&
1213 key_literal->handle()->IsSmi()) {
1214 // Load arguments object if there are no eval-introduced
1215 // variables. Then load the argument from the arguments
1216 // object using keyed load.
1217 __ movq(rdx,
1218 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
1219 slow));
1220 __ Move(rax, key_literal->handle());
1221 Handle<Code> ic =
1222 isolate()->builtins()->KeyedLoadIC_Initialize();
1223 __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1224 __ jmp(done);
1225 }
1226 }
1227 } 1186 }
1187 __ jmp(done);
1228 } 1188 }
1229 } 1189 }
1230 1190
1231 1191
1232 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1192 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1233 // Record position before possible IC call. 1193 // Record position before possible IC call.
1234 SetSourcePosition(proxy->position()); 1194 SetSourcePosition(proxy->position());
1235 Variable* var = proxy->var(); 1195 Variable* var = proxy->var();
1236 1196
1237 // Three cases: non-this global variables, lookup slots, and all other 1197 // Three cases: global variables, lookup variables, and all other types of
1238 // types of slots. 1198 // variables.
1239 Slot* slot = var->AsSlot(); 1199 switch (var->location()) {
1240 ASSERT((var->is_global() && !var->is_this()) == (slot == NULL)); 1200 case Variable::UNALLOCATED: {
1201 Comment cmnt(masm_, "Global variable");
1202 // Use inline caching. Variable name is passed in rcx and the global
1203 // object on the stack.
1204 __ Move(rcx, var->name());
1205 __ movq(rax, GlobalObjectOperand());
1206 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1207 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1208 context()->Plug(rax);
1209 break;
1210 }
1241 1211
1242 if (slot == NULL) { 1212 case Variable::PARAMETER:
1243 Comment cmnt(masm_, "Global variable"); 1213 case Variable::LOCAL:
1244 // Use inline caching. Variable name is passed in rcx and the global 1214 case Variable::CONTEXT: {
1245 // object on the stack. 1215 Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
1246 __ Move(rcx, var->name()); 1216 if (var->mode() != Variable::LET && var->mode() != Variable::CONST) {
1247 __ movq(rax, GlobalObjectOperand()); 1217 context()->Plug(var);
1248 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1218 } else {
1249 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1219 // Let and const need a read barrier.
1250 context()->Plug(rax); 1220 Label done;
1221 GetVar(rax, var);
1222 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1223 __ j(not_equal, &done, Label::kNear);
1224 if (var->mode() == Variable::LET) {
1225 __ Push(var->name());
1226 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1227 } else { // Variable::CONST
1228 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1229 }
1230 __ bind(&done);
1231 context()->Plug(rax);
1232 }
1233 break;
1234 }
1251 1235
1252 } else if (slot != NULL && slot->type() == Slot::LOOKUP) { 1236 case Variable::LOOKUP: {
1253 Label done, slow; 1237 Label done, slow;
1254 1238 // Generate code for loading from variables potentially shadowed
1255 // Generate code for loading from variables potentially shadowed 1239 // by eval-introduced variables.
1256 // by eval-introduced variables. 1240 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1257 EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done); 1241 __ bind(&slow);
1258 1242 Comment cmnt(masm_, "Lookup slot");
1259 __ bind(&slow); 1243 __ push(rsi); // Context.
1260 Comment cmnt(masm_, "Lookup slot"); 1244 __ Push(var->name());
1261 __ push(rsi); // Context. 1245 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1262 __ Push(var->name());
1263 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1264 __ bind(&done);
1265
1266 context()->Plug(rax);
1267
1268 } else {
1269 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1270 ? "Context slot"
1271 : "Stack slot");
1272 if (var->mode() == Variable::CONST) {
1273 // Constants may be the hole value if they have not been initialized.
1274 // Unhole them.
1275 Label done;
1276 MemOperand slot_operand = EmitSlotSearch(slot, rax);
1277 __ movq(rax, slot_operand);
1278 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1279 __ j(not_equal, &done, Label::kNear);
1280 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1281 __ bind(&done); 1246 __ bind(&done);
1282 context()->Plug(rax); 1247 context()->Plug(rax);
1283 } else { 1248 break;
1284 context()->Plug(slot);
1285 } 1249 }
1286 } 1250 }
1287 } 1251 }
1288 1252
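
Let and const slots start out holding the hole (see EmitDeclaration above), so loads of them need a read barrier: a holey let throws a ReferenceError, a holey const reads as undefined. The semantics as a sketch with stand-in types:

    #include <stdexcept>
    #include <string>

    struct Value { bool is_the_hole; };
    enum Mode { VAR, CONST, LET };

    Value LoadVariable(Value slot, Mode mode, const std::string& name) {
      if (!slot.is_the_hole) return slot;  // common case: no barrier taken
      if (mode == LET)                     // use before initialization
        throw std::runtime_error("ReferenceError: " + name);
      return Value{false};                 // CONST: uninitialized reads as undefined
    }
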
1289 1253
1290 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1254 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1291 Comment cmnt(masm_, "[ RegExpLiteral"); 1255 Comment cmnt(masm_, "[ RegExpLiteral");
1292 Label materialized; 1256 Label materialized;
1293 // Registers will be used as follows: 1257 // Registers will be used as follows:
1294 // rdi = JS function. 1258 // rdi = JS function.
1295 // rcx = literals array. 1259 // rcx = literals array.
1296 // rbx = regexp literal. 1260 // rbx = regexp literal.
1297 // rax = regexp literal clone. 1261 // rax = regexp literal clone.
1298 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1262 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1299 __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset)); 1263 __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1300 int literal_offset = 1264 int literal_offset =
1301 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; 1265 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1302 __ movq(rbx, FieldOperand(rcx, literal_offset)); 1266 __ movq(rbx, FieldOperand(rcx, literal_offset));
1303 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 1267 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1304 __ j(not_equal, &materialized); 1268 __ j(not_equal, &materialized, Label::kNear);
1305 1269
1306 // Create regexp literal using runtime function 1270 // Create regexp literal using runtime function
1307 // Result will be in rax. 1271 // Result will be in rax.
1308 __ push(rcx); 1272 __ push(rcx);
1309 __ Push(Smi::FromInt(expr->literal_index())); 1273 __ Push(Smi::FromInt(expr->literal_index()));
1310 __ Push(expr->pattern()); 1274 __ Push(expr->pattern());
1311 __ Push(expr->flags()); 1275 __ Push(expr->flags());
1312 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 1276 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1313 __ movq(rbx, rax); 1277 __ movq(rbx, rax);
1314 1278
(...skipping 444 matching lines...)
1759 break; 1723 break;
1760 } 1724 }
1761 } 1725 }
1762 PrepareForBailoutForId(bailout_ast_id, TOS_REG); 1726 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1763 context()->Plug(rax); 1727 context()->Plug(rax);
1764 } 1728 }
1765 1729
1766 1730
1767 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 1731 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1768 Token::Value op) { 1732 Token::Value op) {
1769 ASSERT(var != NULL); 1733 if (var->IsUnallocated()) {
1770 ASSERT(var->is_global() || var->AsSlot() != NULL); 1734 // Global var, const, or let.
1771
1772 if (var->is_global()) {
1773 ASSERT(!var->is_this());
1774 // Assignment to a global variable. Use inline caching for the
1775 // assignment. Right-hand-side value is passed in rax, variable name in
1776 // rcx, and the global object on the stack.
1777 __ Move(rcx, var->name()); 1735 __ Move(rcx, var->name());
1778 __ movq(rdx, GlobalObjectOperand()); 1736 __ movq(rdx, GlobalObjectOperand());
1779 Handle<Code> ic = is_strict_mode() 1737 Handle<Code> ic = is_strict_mode()
1780 ? isolate()->builtins()->StoreIC_Initialize_Strict() 1738 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1781 : isolate()->builtins()->StoreIC_Initialize(); 1739 : isolate()->builtins()->StoreIC_Initialize();
1782 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1740 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1741 } else if (op == Token::INIT_CONST) {
1742 // Const initializers need a write barrier.
1743 ASSERT(!var->IsParameter()); // No const parameters.
1744 if (var->IsStackLocal()) {
1745 Label skip;
1746 __ movq(rdx, StackOperand(var));
1747 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1748 __ j(not_equal, &skip);
1749 __ movq(StackOperand(var), rax);
1750 __ bind(&skip);
1751 } else {
1752 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
1753 // Like var declarations, const declarations are hoisted to function
1754 // scope. However, unlike var initializers, const initializers are
1755 // able to drill a hole to that function context, even from inside a
1756 // 'with' context. We thus bypass the normal static scope lookup for
1757 // var->IsContextSlot().
1758 __ push(rax);
1759 __ push(rsi);
1760 __ Push(var->name());
1761 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1762 }
1783 1763
1784 } else if (op == Token::INIT_CONST) { 1764 } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
1785 // Like var declarations, const declarations are hoisted to function 1765 // Non-initializing assignment to let variable needs a write barrier.
1786 // scope. However, unlike var initializers, const initializers are able 1766 if (var->IsLookupSlot()) {
1787 // to drill a hole to that function context, even from inside a 'with' 1767 __ push(rax); // Value.
1788 // context. We thus bypass the normal static scope lookup. 1768 __ push(rsi); // Context.
1789 Slot* slot = var->AsSlot(); 1769 __ Push(var->name());
1790 Label skip; 1770 __ Push(Smi::FromInt(strict_mode_flag()));
1791 switch (slot->type()) { 1771 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1792 case Slot::PARAMETER: 1772 } else {
1793 // No const parameters. 1773 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
1794 UNREACHABLE(); 1774 Label assign;
1795 break; 1775 MemOperand location = VarOperand(var, rcx);
1796 case Slot::LOCAL: 1776 __ movq(rdx, location);
1797 __ movq(rdx, Operand(rbp, SlotOffset(slot))); 1777 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1798 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 1778 __ j(not_equal, &assign, Label::kNear);
1799 __ j(not_equal, &skip); 1779 __ Push(var->name());
1800 __ movq(Operand(rbp, SlotOffset(slot)), rax); 1780 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1801 break; 1781 __ bind(&assign);
1802 case Slot::CONTEXT: 1782 __ movq(location, rax);
1803 case Slot::LOOKUP: 1783 if (var->IsContextSlot()) {
1804 __ push(rax); 1784 __ movq(rdx, rax);
1805 __ push(rsi); 1785 __ RecordWriteContextSlot(
1806 __ Push(var->name()); 1786 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
1807 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); 1787 }
1808 break;
1809 } 1788 }
1810 __ bind(&skip);
1811 1789
1812 } else if (var->mode() != Variable::CONST) { 1790 } else if (var->mode() != Variable::CONST) {
1813 // Perform the assignment for non-const variables. Const assignments 1791 // Assignment to var or initializing assignment to let.
1814 // are simply skipped. 1792 if (var->IsStackAllocated() || var->IsContextSlot()) {
1815 Slot* slot = var->AsSlot(); 1793 MemOperand location = VarOperand(var, rcx);
1816 switch (slot->type()) { 1794 if (FLAG_debug_code && op == Token::INIT_LET) {
1817 case Slot::PARAMETER: 1795 // Check for an uninitialized let binding.
1818 case Slot::LOCAL: 1796 __ movq(rdx, location);
1819 // Perform the assignment. 1797 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1820 __ movq(Operand(rbp, SlotOffset(slot)), rax); 1798 __ Check(equal, "Let binding re-initialization.");
1821 break; 1799 }
1822 1800 // Perform the assignment.
1823 case Slot::CONTEXT: { 1801 __ movq(location, rax);
1824 MemOperand target = EmitSlotSearch(slot, rcx); 1802 if (var->IsContextSlot()) {
1825 // Perform the assignment and issue the write barrier.
1826 __ movq(target, rax);
1827
1828 // The value of the assignment is in rax. RecordWrite clobbers its
1829 // last two register arguments.
1830 __ movq(rdx, rax); 1803 __ movq(rdx, rax);
1831 int offset = Context::SlotOffset(slot->index()); 1804 __ RecordWriteContextSlot(
1832 __ RecordWriteContextSlot(rcx, offset, rdx, rbx, kDontSaveFPRegs); 1805 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
1833 break;
1834 } 1806 }
1835 1807 } else {
1836 case Slot::LOOKUP: 1808 ASSERT(var->IsLookupSlot());
1837 // Call the runtime for the assignment. 1809 __ push(rax); // Value.
1838 __ push(rax); // Value. 1810 __ push(rsi); // Context.
1839 __ push(rsi); // Context. 1811 __ Push(var->name());
1840 __ Push(var->name()); 1812 __ Push(Smi::FromInt(strict_mode_flag()));
1841 __ Push(Smi::FromInt(strict_mode_flag())); 1813 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1842 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1843 break;
1844 } 1814 }
1845 } 1815 }
1816 // Non-initializing assignments to consts are ignored.
1846 } 1817 }
1847 1818
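
The assignment path mirrors that read barrier: INIT_CONST fills a slot only while it still holds the hole, a non-initializing assignment to an uninitialized let throws, and any other assignment to a const is skipped. A semantic sketch with stand-in types (not V8 code):

    #include <stdexcept>

    struct Slot { bool is_the_hole; int value; };
    enum Mode { VAR, CONST, LET };

    void AssignVariable(Slot* slot, int rhs, Mode mode, bool is_initializer) {
      if (mode == CONST) {
        // INIT_CONST fills the slot only while it still holds the hole;
        // every other assignment to a const is silently ignored.
        if (is_initializer && slot->is_the_hole) {
          slot->is_the_hole = false;
          slot->value = rhs;
        }
        return;
      }
      if (mode == LET && !is_initializer && slot->is_the_hole)
        throw std::runtime_error("ReferenceError");  // Runtime::kThrowReferenceError
      slot->is_the_hole = false;
      slot->value = rhs;  // SetVar/movq; plus a write barrier for context slots
    }
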
1848 1819
1849 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 1820 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1850 // Assignment to a property, using a named store IC. 1821 // Assignment to a property, using a named store IC.
1851 Property* prop = expr->target()->AsProperty(); 1822 Property* prop = expr->target()->AsProperty();
1852 ASSERT(prop != NULL); 1823 ASSERT(prop != NULL);
1853 ASSERT(prop->key()->AsLiteral() != NULL); 1824 ASSERT(prop->key()->AsLiteral() != NULL);
1854 1825
1855 // If the assignment starts a block of assignments to the same object, 1826 // If the assignment starts a block of assignments to the same object,
(...skipping 178 matching lines...) Expand 10 before | Expand all | Expand 10 after
2034 // Push copy of the first argument or undefined if it doesn't exist. 2005 // Push copy of the first argument or undefined if it doesn't exist.
2035 if (arg_count > 0) { 2006 if (arg_count > 0) {
2036 __ push(Operand(rsp, arg_count * kPointerSize)); 2007 __ push(Operand(rsp, arg_count * kPointerSize));
2037 } else { 2008 } else {
2038 __ PushRoot(Heap::kUndefinedValueRootIndex); 2009 __ PushRoot(Heap::kUndefinedValueRootIndex);
2039 } 2010 }
2040 2011
2041 // Push the receiver of the enclosing function and do runtime call. 2012 // Push the receiver of the enclosing function and do runtime call.
2042 __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize)); 2013 __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2043 2014
2044 // Push the strict mode flag. 2015 // Push the strict mode flag. In harmony mode every eval call
2045 __ Push(Smi::FromInt(strict_mode_flag())); 2016 // is a strict mode eval call.
2017 StrictModeFlag strict_mode = strict_mode_flag();
2018 if (FLAG_harmony_block_scoping) {
2019 strict_mode = kStrictMode;
2020 }
2021 __ Push(Smi::FromInt(strict_mode));
2046 2022
2047 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP 2023 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2048 ? Runtime::kResolvePossiblyDirectEvalNoLookup 2024 ? Runtime::kResolvePossiblyDirectEvalNoLookup
2049 : Runtime::kResolvePossiblyDirectEval, 4); 2025 : Runtime::kResolvePossiblyDirectEval, 4);
2050 } 2026 }
2051 2027
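
The only change here is the strict-mode argument: under --harmony-block-scoping every eval call is treated as a strict-mode eval. The selection as a standalone helper (enum names mirror V8's):

    enum StrictModeFlag { kNonStrictMode, kStrictMode };

    StrictModeFlag EvalStrictMode(StrictModeFlag current, bool harmony_block_scoping) {
      return harmony_block_scoping ? kStrictMode : current;
    }
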
2052 2028
2053 void FullCodeGenerator::VisitCall(Call* expr) { 2029 void FullCodeGenerator::VisitCall(Call* expr) {
2054 #ifdef DEBUG 2030 #ifdef DEBUG
2055 // We want to verify that RecordJSReturnSite gets called on all paths 2031 // We want to verify that RecordJSReturnSite gets called on all paths
2056 // through this function. Avoid early returns. 2032 // through this function. Avoid early returns.
2057 expr->return_is_recorded_ = false; 2033 expr->return_is_recorded_ = false;
2058 #endif 2034 #endif
2059 2035
2060 Comment cmnt(masm_, "[ Call"); 2036 Comment cmnt(masm_, "[ Call");
2061 Expression* fun = expr->expression(); 2037 Expression* callee = expr->expression();
2062 Variable* var = fun->AsVariableProxy()->AsVariable(); 2038 VariableProxy* proxy = callee->AsVariableProxy();
2039 Property* property = callee->AsProperty();
2063 2040
2064 if (var != NULL && var->is_possibly_eval()) { 2041 if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2065 // In a call to eval, we first call %ResolvePossiblyDirectEval to 2042 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2066 // resolve the function we need to call and the receiver of the 2043 // resolve the function we need to call and the receiver of the call.
2067 // call. Then we call the resolved function using the given 2044 // Then we call the resolved function using the given arguments.
2068 // arguments.
2069 ZoneList<Expression*>* args = expr->arguments(); 2045 ZoneList<Expression*>* args = expr->arguments();
2070 int arg_count = args->length(); 2046 int arg_count = args->length();
2071 { PreservePositionScope pos_scope(masm()->positions_recorder()); 2047 { PreservePositionScope pos_scope(masm()->positions_recorder());
2072 VisitForStackValue(fun); 2048 VisitForStackValue(callee);
2073 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot. 2049 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
2074 2050
2075 // Push the arguments. 2051 // Push the arguments.
2076 for (int i = 0; i < arg_count; i++) { 2052 for (int i = 0; i < arg_count; i++) {
2077 VisitForStackValue(args->at(i)); 2053 VisitForStackValue(args->at(i));
2078 } 2054 }
2079 2055
2080 // If we know that eval can only be shadowed by eval-introduced 2056 // If we know that eval can only be shadowed by eval-introduced
2081 // variables we attempt to load the global eval function directly 2057 // variables we attempt to load the global eval function directly in
2082 // in generated code. If we succeed, there is no need to perform a 2058 // generated code. If we succeed, there is no need to perform a
2083 // context lookup in the runtime system. 2059 // context lookup in the runtime system.
2084 Label done; 2060 Label done;
2085 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) { 2061 Variable* var = proxy->var();
2062 if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) {
2086 Label slow; 2063 Label slow;
2087 EmitLoadGlobalSlotCheckExtensions(var->AsSlot(), 2064 EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow);
2088 NOT_INSIDE_TYPEOF,
2089 &slow);
2090 // Push the function and resolve eval. 2065 // Push the function and resolve eval.
2091 __ push(rax); 2066 __ push(rax);
2092 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); 2067 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2093 __ jmp(&done); 2068 __ jmp(&done);
2094 __ bind(&slow); 2069 __ bind(&slow);
2095 } 2070 }
2096 2071
2097 // Push copy of the function (found below the arguments) and 2072 // Push a copy of the function (found below the arguments) and resolve
2098 // resolve eval. 2073 // eval.
2099 __ push(Operand(rsp, (arg_count + 1) * kPointerSize)); 2074 __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
2100 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); 2075 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2101 if (done.is_linked()) { 2076 __ bind(&done);
2102 __ bind(&done);
2103 }
2104 2077
2105 // The runtime call returns a pair of values in rax (function) and 2078 // The runtime call returns a pair of values in rax (function) and
2106 // rdx (receiver). Touch up the stack with the right values. 2079 // rdx (receiver). Touch up the stack with the right values.
2107 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); 2080 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2108 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax); 2081 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2109 } 2082 }
2110 // Record source position for debugger. 2083 // Record source position for debugger.
2111 SetSourcePosition(expr->position()); 2084 SetSourcePosition(expr->position());
2112 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; 2085 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2113 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT); 2086 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
2114 __ CallStub(&stub); 2087 __ CallStub(&stub);
2115 RecordJSReturnSite(expr); 2088 RecordJSReturnSite(expr);
2116 // Restore context register. 2089 // Restore context register.
2117 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2090 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2118 context()->DropAndPlug(1, rax); 2091 context()->DropAndPlug(1, rax);
2119 } else if (var != NULL && !var->is_this() && var->is_global()) { 2092 } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2120 // Call to a global variable. 2093 // Call to a global variable. Push global object as receiver for the
2121 // Push global object as receiver for the call IC lookup. 2094 // call IC lookup.
2122 __ push(GlobalObjectOperand()); 2095 __ push(GlobalObjectOperand());
2123 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); 2096 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2124 } else if (var != NULL && var->AsSlot() != NULL && 2097 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2125 var->AsSlot()->type() == Slot::LOOKUP) {
2126 // Call to a lookup slot (dynamically introduced variable). 2098 // Call to a lookup slot (dynamically introduced variable).
2127 Label slow, done; 2099 Label slow, done;
2128 2100
2129 { PreservePositionScope scope(masm()->positions_recorder()); 2101 { PreservePositionScope scope(masm()->positions_recorder());
2130 // Generate code for loading from variables potentially shadowed 2102 // Generate code for loading from variables potentially shadowed by
2131 // by eval-introduced variables. 2103 // eval-introduced variables.
2132 EmitDynamicLoadFromSlotFastCase(var->AsSlot(), 2104 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2133 NOT_INSIDE_TYPEOF,
2134 &slow,
2135 &done);
2136
2137 __ bind(&slow);
2138 } 2105 }
2139 // Call the runtime to find the function to call (returned in rax) 2106 __ bind(&slow);
2140 // and the object holding it (returned in rdx). 2107 // Call the runtime to find the function to call (returned in rax) and
2108 // the object holding it (returned in rdx).
2141 __ push(context_register()); 2109 __ push(context_register());
2142 __ Push(var->name()); 2110 __ Push(proxy->name());
2143 __ CallRuntime(Runtime::kLoadContextSlot, 2); 2111 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2144 __ push(rax); // Function. 2112 __ push(rax); // Function.
2145 __ push(rdx); // Receiver. 2113 __ push(rdx); // Receiver.
2146 2114
2147 // If fast case code has been generated, emit code to push the 2115 // If fast case code has been generated, emit code to push the function
2148 // function and receiver and have the slow path jump around this 2116 // and receiver and have the slow path jump around this code.
2149 // code.
2150 if (done.is_linked()) { 2117 if (done.is_linked()) {
2151 Label call; 2118 Label call;
2152 __ jmp(&call, Label::kNear); 2119 __ jmp(&call, Label::kNear);
2153 __ bind(&done); 2120 __ bind(&done);
2154 // Push function. 2121 // Push function.
2155 __ push(rax); 2122 __ push(rax);
2156 // The receiver is implicitly the global receiver. Indicate this 2123 // The receiver is implicitly the global receiver. Indicate this by
2157 // by passing the hole to the call function stub. 2124 // passing the hole to the call function stub.
2158 __ PushRoot(Heap::kTheHoleValueRootIndex); 2125 __ PushRoot(Heap::kTheHoleValueRootIndex);
2159 __ bind(&call); 2126 __ bind(&call);
2160 } 2127 }
2161 2128
2162 // The receiver is either the global receiver or an object found 2129 // The receiver is either the global receiver or an object found by
2163 // by LoadContextSlot. That object could be the hole if the 2130 // LoadContextSlot. That object could be the hole if the receiver is
2164 // receiver is implicitly the global object. 2131 // implicitly the global object.
2165 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); 2132 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2166 } else if (fun->AsProperty() != NULL) { 2133 } else if (property != NULL) {
2167 // Call to an object property. 2134 { PreservePositionScope scope(masm()->positions_recorder());
2168 Property* prop = fun->AsProperty(); 2135 VisitForStackValue(property->obj());
2169 Literal* key = prop->key()->AsLiteral(); 2136 }
2170 if (key != NULL && key->handle()->IsSymbol()) { 2137 if (property->key()->IsPropertyName()) {
2171 // Call to a named property, use call IC. 2138 EmitCallWithIC(expr,
2172 { PreservePositionScope scope(masm()->positions_recorder()); 2139 property->key()->AsLiteral()->handle(),
2173 VisitForStackValue(prop->obj()); 2140 RelocInfo::CODE_TARGET);
2174 }
2175 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2176 } else { 2141 } else {
2177 // Call to a keyed property. 2142 EmitKeyedCallWithIC(expr, property->key());
2178 // For a synthetic property use keyed load IC followed by function call,
2179 // for a regular property use EmitKeyedCallWithIC.
2180 if (prop->is_synthetic()) {
2181 // Do not visit the object and key subexpressions (they are shared
2182 // by all occurrences of the same rewritten parameter).
2183 ASSERT(prop->obj()->AsVariableProxy() != NULL);
2184 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2185 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2186 MemOperand operand = EmitSlotSearch(slot, rdx);
2187 __ movq(rdx, operand);
2188
2189 ASSERT(prop->key()->AsLiteral() != NULL);
2190 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2191 __ Move(rax, prop->key()->AsLiteral()->handle());
2192
2193 // Record source code position for IC call.
2194 SetSourcePosition(prop->position());
2195
2196 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2197 __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
2198 // Push result (function).
2199 __ push(rax);
2200 // Push Global receiver.
2201 __ movq(rcx, GlobalObjectOperand());
2202 __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
2203 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2204 } else {
2205 { PreservePositionScope scope(masm()->positions_recorder());
2206 VisitForStackValue(prop->obj());
2207 }
2208 EmitKeyedCallWithIC(expr, prop->key());
2209 }
2210 } 2143 }
2211 } else { 2144 } else {
2145 // Call to an arbitrary expression not handled specially above.
2212 { PreservePositionScope scope(masm()->positions_recorder()); 2146 { PreservePositionScope scope(masm()->positions_recorder());
2213 VisitForStackValue(fun); 2147 VisitForStackValue(callee);
2214 } 2148 }
2215 // Load global receiver object. 2149 // Load global receiver object.
2216 __ movq(rbx, GlobalObjectOperand()); 2150 __ movq(rbx, GlobalObjectOperand());
2217 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); 2151 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2218 // Emit function call. 2152 // Emit function call.
2219 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); 2153 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2220 } 2154 }
2221 2155
2222 #ifdef DEBUG 2156 #ifdef DEBUG
2223 // RecordJSReturnSite should have been called. 2157 // RecordJSReturnSite should have been called.
(...skipping 920 matching lines...)
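
Note on the rewritten dispatch above: VisitCall now routes a lookup-slot callee through the runtime slow path, a named property (obj.foo()) through the call IC, a keyed property (obj[expr]()) through the keyed call IC, and any other callee through the generic call stub with the global receiver pushed. A minimal C++ sketch of that decision tree, using illustrative stand-in types rather than the real AST classes:

    enum class CallPath { kLookupSlot, kNamedIC, kKeyedIC, kGenericStub };

    // Illustrative classification mirroring the if/else-if chain above;
    // the booleans stand in for the AST queries (AsVariableProxy(),
    // AsProperty(), IsPropertyName()).
    CallPath ClassifyCall(bool callee_is_lookup_proxy,
                          bool callee_is_property,
                          bool key_is_property_name) {
      if (callee_is_lookup_proxy) return CallPath::kLookupSlot;
      if (callee_is_property) {
        return key_is_property_name ? CallPath::kNamedIC   // obj.foo()
                                    : CallPath::kKeyedIC;  // obj[expr]()
      }
      return CallPath::kGenericStub;  // arbitrary expression
    }
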
3144 __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX)); 3078 __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
3145 __ movq(cache, 3079 __ movq(cache,
3146 FieldOperand(cache, GlobalObject::kGlobalContextOffset)); 3080 FieldOperand(cache, GlobalObject::kGlobalContextOffset));
3147 __ movq(cache, 3081 __ movq(cache,
3148 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 3082 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3149 __ movq(cache, 3083 __ movq(cache,
3150 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); 3084 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3151 3085
3152 Label done, not_found; 3086 Label done, not_found;
3153 // tmp now holds finger offset as a smi. 3087 // tmp now holds finger offset as a smi.
3154 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 3088 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3155 __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset)); 3089 __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3156 SmiIndex index = 3090 SmiIndex index =
3157 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2); 3091 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3158 __ cmpq(key, FieldOperand(cache, 3092 __ cmpq(key, FieldOperand(cache,
3159 index.reg, 3093 index.reg,
3160 index.scale, 3094 index.scale,
3161 FixedArray::kHeaderSize)); 3095 FixedArray::kHeaderSize));
3162 __ j(not_equal, &not_found, Label::kNear); 3096 __ j(not_equal, &not_found, Label::kNear);
3163 __ movq(rax, FieldOperand(cache, 3097 __ movq(rax, FieldOperand(cache,
3164 index.reg, 3098 index.reg,
(...skipping 365 matching lines...)
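
The fast path above (from the result-cache intrinsic) reads the cache's finger offset as a smi, compares the key stored at that offset, and on a match loads the value one slot over; a miss branches to not_found and defers to the runtime. A rough C++ model of such a finger cache follows; the flat key/value layout mirrors what the loads suggest, but the sentinel and insertion policy are invented for the sketch:

    #include <climits>
    #include <cstddef>
    #include <optional>
    #include <vector>

    // Flat key/value array plus a "finger" remembering the last hit.
    // Keys use INT_MIN as an empty-slot sentinel (invented here).
    class FingerCache {
     public:
      explicit FingerCache(size_t entries)
          : slots_(entries * 2, INT_MIN), finger_(0) {}

      std::optional<int> Lookup(int key) {
        // Fast path, as in the generated code: one compare at the finger.
        if (slots_[finger_] == key) return slots_[finger_ + 1];
        // Slow path (handled by the runtime in V8): scan, move the finger.
        for (size_t i = 0; i < slots_.size(); i += 2) {
          if (slots_[i] == key) {
            finger_ = i;
            return slots_[i + 1];
          }
        }
        return std::nullopt;  // caller computes the value and calls Insert()
      }

      void Insert(int key, int value) {
        // Invented policy: overwrite the entry after the finger, round-robin.
        finger_ = (finger_ + 2) % slots_.size();
        slots_[finger_] = key;
        slots_[finger_ + 1] = value;
      }

     private:
      std::vector<int> slots_;  // [key0, value0, key1, value1, ...]
      size_t finger_;           // index of the last-hit key slot
    };
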
3530 __ movq(rax, result_operand); 3464 __ movq(rax, result_operand);
3531 3465
3532 __ bind(&return_result); 3466 __ bind(&return_result);
3533 // Drop temp values from the stack, and restore context register. 3467 // Drop temp values from the stack, and restore context register.
3534 __ addq(rsp, Immediate(3 * kPointerSize)); 3468 __ addq(rsp, Immediate(3 * kPointerSize));
3535 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 3469 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3536 context()->Plug(rax); 3470 context()->Plug(rax);
3537 } 3471 }
3538 3472
3539 3473
3540 void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList<Expression*>* args) {
3541 ASSERT(args->length() == 1);
3542
3543 // Load the function into rax.
3544 VisitForAccumulatorValue(args->at(0));
3545
3546 // Prepare for the test.
3547 Label materialize_true, materialize_false;
3548 Label* if_true = NULL;
3549 Label* if_false = NULL;
3550 Label* fall_through = NULL;
3551 context()->PrepareTest(&materialize_true, &materialize_false,
3552 &if_true, &if_false, &fall_through);
3553
3554 // Test for strict mode function.
3555 __ movq(rdx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
3556 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
3557 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
3558 __ j(not_equal, if_true);
3559
3560 // Test for native function.
3561 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
3562 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
3563 __ j(not_equal, if_true);
3564
3565 // Not native or strict-mode function.
3566 __ jmp(if_false);
3567
3568 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3569 context()->Plug(if_true, if_false);
3570 }
3571
3572
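The helper deleted above boiled down to two byte-granular flag tests on the SharedFunctionInfo. For reference, the `testb FieldOperand(...), Immediate(1 << bit)` pattern corresponds to the following C++; the byte and bit positions here are made up for illustration, not the real layout:

    #include <cstdint>

    // One-byte flag test mirroring `testb FieldOperand(obj, byte_offset),
    // Immediate(1 << bit_within_byte)`: read a single byte and test one
    // bit, without loading the whole flags word.
    inline bool TestByteFlag(const uint8_t* object_base, int byte_offset,
                             int bit_within_byte) {
      return (object_base[byte_offset] & (1u << bit_within_byte)) != 0;
    }

    inline bool IsNativeOrStrict(const uint8_t* shared_info) {
      const int kStrictByte = 0, kStrictBit = 6;  // hypothetical positions
      const int kNativeByte = 1, kNativeBit = 2;  // hypothetical positions
      return TestByteFlag(shared_info, kStrictByte, kStrictBit) ||
             TestByteFlag(shared_info, kNativeByte, kNativeBit);
    }
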
3573 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 3474 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3574 Handle<String> name = expr->name(); 3475 Handle<String> name = expr->name();
3575 if (name->length() > 0 && name->Get(0) == '_') { 3476 if (name->length() > 0 && name->Get(0) == '_') {
3576 Comment cmnt(masm_, "[ InlineRuntimeCall"); 3477 Comment cmnt(masm_, "[ InlineRuntimeCall");
3577 EmitInlineRuntimeCall(expr); 3478 EmitInlineRuntimeCall(expr);
3578 return; 3479 return;
3579 } 3480 }
3580 3481
3581 Comment cmnt(masm_, "[ CallRuntime"); 3482 Comment cmnt(masm_, "[ CallRuntime");
3582 ZoneList<Expression*>* args = expr->arguments(); 3483 ZoneList<Expression*>* args = expr->arguments();
(...skipping 24 matching lines...)
3607 __ CallRuntime(expr->function(), arg_count); 3508 __ CallRuntime(expr->function(), arg_count);
3608 } 3509 }
3609 context()->Plug(rax); 3510 context()->Plug(rax);
3610 } 3511 }
3611 3512
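As the top of VisitCallRuntime shows, the underscore prefix selects the inlined intrinsic path; restated as plain C++, the dispatch test is simply:

    #include <string>

    // Restatement of the check above: runtime calls whose name begins
    // with '_' are expanded inline by the full code generator; all
    // others go through __ CallRuntime.
    inline bool IsInlineRuntimeCall(const std::string& name) {
      return !name.empty() && name[0] == '_';
    }
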
3612 3513
3613 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3514 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3614 switch (expr->op()) { 3515 switch (expr->op()) {
3615 case Token::DELETE: { 3516 case Token::DELETE: {
3616 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 3517 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3617 Property* prop = expr->expression()->AsProperty(); 3518 Property* property = expr->expression()->AsProperty();
3618 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); 3519 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3619 3520
3620 if (prop != NULL) { 3521 if (property != NULL) {
3621 if (prop->is_synthetic()) { 3522 VisitForStackValue(property->obj());
3622 // Result of deleting parameters is false, even when they rewrite 3523 VisitForStackValue(property->key());
3623 // to accesses on the arguments object. 3524 __ Push(Smi::FromInt(strict_mode_flag()));
3624 context()->Plug(false); 3525 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3625 } else { 3526 context()->Plug(rax);
3626 VisitForStackValue(prop->obj()); 3527 } else if (proxy != NULL) {
3627 VisitForStackValue(prop->key()); 3528 Variable* var = proxy->var();
3628 __ Push(Smi::FromInt(strict_mode_flag()));
3629 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3630 context()->Plug(rax);
3631 }
3632 } else if (var != NULL) {
3633 // Delete of an unqualified identifier is disallowed in strict mode 3529 // Delete of an unqualified identifier is disallowed in strict mode
3634 // but "delete this" is. 3530 // but "delete this" is allowed.
3635 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); 3531 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
3636 if (var->is_global()) { 3532 if (var->IsUnallocated()) {
3637 __ push(GlobalObjectOperand()); 3533 __ push(GlobalObjectOperand());
3638 __ Push(var->name()); 3534 __ Push(var->name());
3639 __ Push(Smi::FromInt(kNonStrictMode)); 3535 __ Push(Smi::FromInt(kNonStrictMode));
3640 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3536 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3641 context()->Plug(rax); 3537 context()->Plug(rax);
3642 } else if (var->AsSlot() != NULL && 3538 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3643 var->AsSlot()->type() != Slot::LOOKUP) { 3539 // Result of deleting non-global variables is false. 'this' is
3644 // Result of deleting non-global, non-dynamic variables is false. 3540 // not really a variable, though we implement it as one. The
3645 // The subexpression does not have side effects. 3541 // subexpression does not have side effects.
3646 context()->Plug(false); 3542 context()->Plug(var->is_this());
3647 } else { 3543 } else {
3648 // Non-global variable. Call the runtime to try to delete from the 3544 // Non-global variable. Call the runtime to try to delete from the
3649 // context where the variable was introduced. 3545 // context where the variable was introduced.
3650 __ push(context_register()); 3546 __ push(context_register());
3651 __ Push(var->name()); 3547 __ Push(var->name());
3652 __ CallRuntime(Runtime::kDeleteContextSlot, 2); 3548 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3653 context()->Plug(rax); 3549 context()->Plug(rax);
3654 } 3550 }
3655 } else { 3551 } else {
3656 // Result of deleting non-property, non-variable reference is true. 3552 // Result of deleting non-property, non-variable reference is true.
(...skipping 265 matching lines...)
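
The DELETE branch above now distinguishes four cases: a property expression invokes Builtins::DELETE, an unallocated (global) variable also goes through the builtin, a stack- or context-allocated variable yields a constant (false, or true for 'this'), and a lookup slot defers to Runtime::kDeleteContextSlot. Schematically, with an illustrative (non-V8) enum:

    enum class DeleteOutcome {
      kCallDeleteBuiltin,   // property, or unallocated (global) variable
      kConstantFalse,       // stack- or context-allocated variable
      kConstantTrue,        // 'delete this', or a non-reference expression
      kCallRuntimeDelete,   // lookup slot: Runtime::kDeleteContextSlot
    };

    // Mirrors the if/else-if chain above; the booleans stand in for the
    // AST and variable-location queries.
    DeleteOutcome ClassifyDelete(bool is_property, bool is_variable_proxy,
                                 bool is_unallocated,
                                 bool is_stack_or_context, bool is_this) {
      if (is_property) return DeleteOutcome::kCallDeleteBuiltin;
      if (!is_variable_proxy) return DeleteOutcome::kConstantTrue;
      if (is_unallocated) return DeleteOutcome::kCallDeleteBuiltin;
      if (is_stack_or_context) {
        return is_this ? DeleteOutcome::kConstantTrue
                       : DeleteOutcome::kConstantFalse;
      }
      return DeleteOutcome::kCallRuntimeDelete;
    }
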
3922 } 3818 }
3923 } 3819 }
3924 } 3820 }
3925 3821
3926 3822
3927 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 3823 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
3928 VariableProxy* proxy = expr->AsVariableProxy(); 3824 VariableProxy* proxy = expr->AsVariableProxy();
3929 ASSERT(!context()->IsEffect()); 3825 ASSERT(!context()->IsEffect());
3930 ASSERT(!context()->IsTest()); 3826 ASSERT(!context()->IsTest());
3931 3827
3932 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { 3828 if (proxy != NULL && proxy->var()->IsUnallocated()) {
3933 Comment cmnt(masm_, "Global variable"); 3829 Comment cmnt(masm_, "Global variable");
3934 __ Move(rcx, proxy->name()); 3830 __ Move(rcx, proxy->name());
3935 __ movq(rax, GlobalObjectOperand()); 3831 __ movq(rax, GlobalObjectOperand());
3936 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 3832 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3937 // Use a regular load, not a contextual load, to avoid a reference 3833 // Use a regular load, not a contextual load, to avoid a reference
3938 // error. 3834 // error.
3939 __ call(ic); 3835 __ call(ic);
3940 PrepareForBailout(expr, TOS_REG); 3836 PrepareForBailout(expr, TOS_REG);
3941 context()->Plug(rax); 3837 context()->Plug(rax);
3942 } else if (proxy != NULL && 3838 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
3943 proxy->var()->AsSlot() != NULL &&
3944 proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
3945 Label done, slow; 3839 Label done, slow;
3946 3840
3947 // Generate code for loading from variables potentially shadowed 3841 // Generate code for loading from variables potentially shadowed
3948 // by eval-introduced variables. 3842 // by eval-introduced variables.
3949 Slot* slot = proxy->var()->AsSlot(); 3843 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
3950 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
3951 3844
3952 __ bind(&slow); 3845 __ bind(&slow);
3953 __ push(rsi); 3846 __ push(rsi);
3954 __ Push(proxy->name()); 3847 __ Push(proxy->name());
3955 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 3848 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
3956 PrepareForBailout(expr, TOS_REG); 3849 PrepareForBailout(expr, TOS_REG);
3957 __ bind(&done); 3850 __ bind(&done);
3958 3851
3959 context()->Plug(rax); 3852 context()->Plug(rax);
3960 } else { 3853 } else {
(...skipping 291 matching lines...)
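
The typeof path above intentionally never throws: an unallocated global uses a regular (non-contextual) load so a missing binding just yields undefined, and a lookup slot falls back to kLoadContextSlotNoReferenceError. That contract, reduced to a toy C++ lookup in which the map stands in for the global object or context chain:

    #include <map>
    #include <optional>
    #include <string>

    // Sketch of the no-reference-error load used for typeof: resolve the
    // name if present, otherwise report absence instead of failing, so
    // `typeof undeclared` can evaluate to "undefined".
    std::optional<int> LoadForTypeof(const std::map<std::string, int>& scope,
                                     const std::string& name) {
      auto it = scope.find(name);
      if (it == scope.end()) return std::nullopt;  // no ReferenceError
      return it->second;
    }
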
4252 __ pop(rdx); 4145 __ pop(rdx);
4253 __ SmiToInteger32(rdx, rdx); 4146 __ SmiToInteger32(rdx, rdx);
4254 __ Move(rcx, masm_->CodeObject()); 4147 __ Move(rcx, masm_->CodeObject());
4255 __ addq(rdx, rcx); 4148 __ addq(rdx, rcx);
4256 __ jmp(rdx); 4149 __ jmp(rdx);
4257 } 4150 }
4258 4151
4259 4152
4260 #undef __ 4153 #undef __
4261 4154
4155 #define __ ACCESS_MASM(masm())
4156
4157 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4158 int* stack_depth,
4159 int* context_length) {
4160 // The macros used here must preserve the result register.
4161
4162 // Because the handler block contains the context of the finally
4163 // code, we can restore it directly from there for the finally code
4164 // rather than iteratively unwinding contexts via their previous
4165 // links.
4166 __ Drop(*stack_depth); // Down to the handler block.
4167 if (*context_length > 0) {
4168 // Restore the context to its dedicated register and the stack.
4169 __ movq(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
4170 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
4171 }
4172 __ PopTryHandler();
4173 __ call(finally_entry_);
4174
4175 *stack_depth = 0;
4176 *context_length = 0;
4177 return previous_;
4178 }
4179
4180
4181 #undef __
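
The new TryFinally::Exit above exploits the fact that the handler block snapshots the context, so the context can be restored with one load rather than by walking previous-context links before calling into the finally code. A toy model of that exit protocol, with invented types standing in for the machine stack and handler chain:

    #include <functional>
    #include <vector>

    // Invented handler record: the real handler block lives on the
    // machine stack and holds the saved context.
    struct TryHandler {
      int saved_context;
    };

    void ExitThroughFinally(std::vector<int>* value_stack,
                            std::vector<TryHandler>* handlers,
                            int* context, int stack_depth,
                            bool restore_context,
                            const std::function<void()>& finally_code) {
      // __ Drop(*stack_depth): discard values down to the handler block.
      value_stack->resize(value_stack->size() - stack_depth);
      if (restore_context) {
        // Restore the context directly from the handler; no unwinding.
        *context = handlers->back().saved_context;
      }
      handlers->pop_back();  // __ PopTryHandler()
      finally_code();        // __ call(finally_entry_)
    }
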
4262 4182
4263 } } // namespace v8::internal 4183 } } // namespace v8::internal
4264 4184
4265 #endif // V8_TARGET_ARCH_X64 4185 #endif // V8_TARGET_ARCH_X64