Chromium Code Reviews

Side by Side Diff: src/mips/full-codegen-mips.cc

Issue 7860035: Merge bleeding edge up to 9192 into the GC branch. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 3 months ago
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 37 matching lines...)
48 48
49 #include "mips/code-stubs-mips.h" 49 #include "mips/code-stubs-mips.h"
50 50
51 namespace v8 { 51 namespace v8 {
52 namespace internal { 52 namespace internal {
53 53
54 #define __ ACCESS_MASM(masm_) 54 #define __ ACCESS_MASM(masm_)
55 55
56 56
57 static unsigned GetPropertyId(Property* property) { 57 static unsigned GetPropertyId(Property* property) {
58 if (property->is_synthetic()) return AstNode::kNoNumber;
59 return property->id(); 58 return property->id();
60 } 59 }
61 60
62 61
63 // A patch site is a location in the code which it is possible to patch. This 62 // A patch site is a location in the code which it is possible to patch. This
64 // class has a number of methods to emit the code which is patchable and the 63 // class has a number of methods to emit the code which is patchable and the
65 // method EmitPatchInfo to record a marker back to the patchable code. This 64 // method EmitPatchInfo to record a marker back to the patchable code. This
66 // marker is a andi at, rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 65 // marker is a andi at, rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16
67 // bit immediate value is used) is the delta from the pc to the first 66 // bit immediate value is used) is the delta from the pc to the first
68 // instruction of the patchable code. 67 // instruction of the patchable code.
(...skipping 125 matching lines...)
194 } else { 193 } else {
195 __ CallRuntime(Runtime::kNewFunctionContext, 1); 194 __ CallRuntime(Runtime::kNewFunctionContext, 1);
196 } 195 }
197 function_in_register = false; 196 function_in_register = false;
198 // Context is returned in both v0 and cp. It replaces the context 197 // Context is returned in both v0 and cp. It replaces the context
199 // passed to us. It's saved in the stack and kept live in cp. 198 // passed to us. It's saved in the stack and kept live in cp.
200 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 199 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
201 // Copy any necessary parameters into the context. 200 // Copy any necessary parameters into the context.
202 int num_parameters = info->scope()->num_parameters(); 201 int num_parameters = info->scope()->num_parameters();
203 for (int i = 0; i < num_parameters; i++) { 202 for (int i = 0; i < num_parameters; i++) {
204 Slot* slot = scope()->parameter(i)->AsSlot(); 203 Variable* var = scope()->parameter(i);
205 if (slot != NULL && slot->type() == Slot::CONTEXT) { 204 if (var->IsContextSlot()) {
206 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 205 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
207 (num_parameters - 1 - i) * kPointerSize; 206 (num_parameters - 1 - i) * kPointerSize;
208 // Load parameter from stack. 207 // Load parameter from stack.
209 __ lw(a0, MemOperand(fp, parameter_offset)); 208 __ lw(a0, MemOperand(fp, parameter_offset));
210 // Store it in the context. 209 // Store it in the context.
211 __ li(a1, Operand(Context::SlotOffset(slot->index()))); 210 __ li(a1, Operand(Context::SlotOffset(var->index())));
212 __ addu(a2, cp, a1); 211 __ addu(a2, cp, a1);
213 __ sw(a0, MemOperand(a2, 0)); 212 __ sw(a0, MemOperand(a2, 0));
214 // Update the write barrier. This clobbers all involved 213 // Update the write barrier. This clobbers all involved
215 // registers, so we have to use two more registers to avoid 214 // registers, so we have to use two more registers to avoid
216 // clobbering cp. 215 // clobbering cp.
217 __ mov(a2, cp); 216 __ mov(a2, cp);
218 __ RecordWrite(a2, a1, a3); 217 __ RecordWrite(a2, a1, a3);
219 } 218 }
220 } 219 }
221 } 220 }
(...skipping 24 matching lines...)
246 if (is_strict_mode()) { 245 if (is_strict_mode()) {
247 type = ArgumentsAccessStub::NEW_STRICT; 246 type = ArgumentsAccessStub::NEW_STRICT;
248 } else if (function()->has_duplicate_parameters()) { 247 } else if (function()->has_duplicate_parameters()) {
249 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; 248 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
250 } else { 249 } else {
251 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; 250 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
252 } 251 }
253 ArgumentsAccessStub stub(type); 252 ArgumentsAccessStub stub(type);
254 __ CallStub(&stub); 253 __ CallStub(&stub);
255 254
256 Move(arguments->AsSlot(), v0, a1, a2); 255 SetVar(arguments, v0, a1, a2);
257 } 256 }
258 257
259 if (FLAG_trace) { 258 if (FLAG_trace) {
260 __ CallRuntime(Runtime::kTraceEnter, 0); 259 __ CallRuntime(Runtime::kTraceEnter, 0);
261 } 260 }
262 261
263 // Visit the declarations and body unless there is an illegal 262 // Visit the declarations and body unless there is an illegal
264 // redeclaration. 263 // redeclaration.
265 if (scope()->HasIllegalRedeclaration()) { 264 if (scope()->HasIllegalRedeclaration()) {
266 Comment cmnt(masm_, "[ Declarations"); 265 Comment cmnt(masm_, "[ Declarations");
267 scope()->VisitIllegalRedeclaration(this); 266 scope()->VisitIllegalRedeclaration(this);
268 267
269 } else { 268 } else {
269 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
270 { Comment cmnt(masm_, "[ Declarations"); 270 { Comment cmnt(masm_, "[ Declarations");
271 // For named function expressions, declare the function name as a 271 // For named function expressions, declare the function name as a
272 // constant. 272 // constant.
273 if (scope()->is_function_scope() && scope()->function() != NULL) { 273 if (scope()->is_function_scope() && scope()->function() != NULL) {
274 EmitDeclaration(scope()->function(), Variable::CONST, NULL); 274 int ignored = 0;
275 EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored);
275 } 276 }
276 VisitDeclarations(scope()->declarations()); 277 VisitDeclarations(scope()->declarations());
277 } 278 }
278 279
279 { Comment cmnt(masm_, "[ Stack check"); 280 { Comment cmnt(masm_, "[ Stack check");
280 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); 281 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
281 Label ok; 282 Label ok;
282 __ LoadRoot(t0, Heap::kStackLimitRootIndex); 283 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
283 __ Branch(&ok, hs, sp, Operand(t0)); 284 __ Branch(&ok, hs, sp, Operand(t0));
284 StackCheckStub stub; 285 StackCheckStub stub;
285 __ CallStub(&stub); 286 __ CallStub(&stub);
286 __ bind(&ok); 287 __ bind(&ok);
287 } 288 }
288 289
289 { Comment cmnt(masm_, "[ Body"); 290 { Comment cmnt(masm_, "[ Body");
290 ASSERT(loop_depth() == 0); 291 ASSERT(loop_depth() == 0);
(...skipping 73 matching lines...)
364 #ifdef DEBUG 365 #ifdef DEBUG
365 // Check that the size of the code used for returning is large enough 366 // Check that the size of the code used for returning is large enough
366 // for the debugger's requirements. 367 // for the debugger's requirements.
367 ASSERT(Assembler::kJSReturnSequenceInstructions <= 368 ASSERT(Assembler::kJSReturnSequenceInstructions <=
368 masm_->InstructionsGeneratedSince(&check_exit_codesize)); 369 masm_->InstructionsGeneratedSince(&check_exit_codesize));
369 #endif 370 #endif
370 } 371 }
371 } 372 }
372 373
373 374
374 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { 375 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
376 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
375 } 377 }
376 378
377 379
378 void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const { 380 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
379 codegen()->Move(result_register(), slot); 381 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
382 codegen()->GetVar(result_register(), var);
380 } 383 }
381 384
382 385
383 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const { 386 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
384 codegen()->Move(result_register(), slot); 387 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
388 codegen()->GetVar(result_register(), var);
385 __ push(result_register()); 389 __ push(result_register());
386 } 390 }
387 391
388 392
389 void FullCodeGenerator::TestContext::Plug(Slot* slot) const { 393 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
390 // For simplicity we always test the accumulator register. 394 // For simplicity we always test the accumulator register.
391 codegen()->Move(result_register(), slot); 395 codegen()->GetVar(result_register(), var);
392 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); 396 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
393 codegen()->DoTest(this); 397 codegen()->DoTest(this);
394 } 398 }
395 399
396 400
397 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 401 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
398 } 402 }
399 403
400 404
401 void FullCodeGenerator::AccumulatorValueContext::Plug( 405 void FullCodeGenerator::AccumulatorValueContext::Plug(
(...skipping 212 matching lines...)
614 __ Branch(if_true, cc, lhs, rhs); 618 __ Branch(if_true, cc, lhs, rhs);
615 } else if (if_true == fall_through) { 619 } else if (if_true == fall_through) {
616 __ Branch(if_false, NegateCondition(cc), lhs, rhs); 620 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
617 } else { 621 } else {
618 __ Branch(if_true, cc, lhs, rhs); 622 __ Branch(if_true, cc, lhs, rhs);
619 __ Branch(if_false); 623 __ Branch(if_false);
620 } 624 }
621 } 625 }
622 626
623 627
624 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) { 628 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
625 switch (slot->type()) { 629 ASSERT(var->IsStackAllocated());
626 case Slot::PARAMETER: 630 // Offset is negative because higher indexes are at lower addresses.
627 case Slot::LOCAL: 631 int offset = -var->index() * kPointerSize;
628 return MemOperand(fp, SlotOffset(slot)); 632 // Adjust by a (parameter or local) base offset.
629 case Slot::CONTEXT: { 633 if (var->IsParameter()) {
630 int context_chain_length = 634 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
631 scope()->ContextChainLength(slot->var()->scope()); 635 } else {
632 __ LoadContext(scratch, context_chain_length); 636 offset += JavaScriptFrameConstants::kLocal0Offset;
633 return ContextOperand(scratch, slot->index());
634 }
635 case Slot::LOOKUP:
636 UNREACHABLE();
637 } 637 }
638 UNREACHABLE(); 638 return MemOperand(fp, offset);
639 return MemOperand(v0, 0);
640 } 639 }
641 640
642 641
643 void FullCodeGenerator::Move(Register destination, Slot* source) { 642 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
644 // Use destination as scratch. 643 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
645 MemOperand slot_operand = EmitSlotSearch(source, destination); 644 if (var->IsContextSlot()) {
646 __ lw(destination, slot_operand); 645 int context_chain_length = scope()->ContextChainLength(var->scope());
646 __ LoadContext(scratch, context_chain_length);
647 return ContextOperand(scratch, var->index());
648 } else {
649 return StackOperand(var);
650 }
647 } 651 }
648 652
649 653
654 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
655 // Use destination as scratch.
656 MemOperand location = VarOperand(var, dest);
657 __ lw(dest, location);
658 }
659
660
661 void FullCodeGenerator::SetVar(Variable* var,
662 Register src,
663 Register scratch0,
664 Register scratch1) {
665 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
666 ASSERT(!scratch0.is(src));
667 ASSERT(!scratch0.is(scratch1));
668 ASSERT(!scratch1.is(src));
669 MemOperand location = VarOperand(var, scratch0);
670 __ sw(src, location);
671 // Emit the write barrier code if the location is in the heap.
672 if (var->IsContextSlot()) {
673 __ RecordWrite(scratch0,
674 Operand(Context::SlotOffset(var->index())),
675 scratch1,
676 src);
677 }
678 }
679
680
650 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, 681 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
651 bool should_normalize, 682 bool should_normalize,
652 Label* if_true, 683 Label* if_true,
653 Label* if_false) { 684 Label* if_false) {
654 // Only prepare for bailouts before splits if we're in a test 685 // Only prepare for bailouts before splits if we're in a test
655 // context. Otherwise, we let the Visit function deal with the 686 // context. Otherwise, we let the Visit function deal with the
656 // preparation to avoid preparing with the same AST id twice. 687 // preparation to avoid preparing with the same AST id twice.
657 if (!context()->IsTest() || !info_->IsOptimizable()) return; 688 if (!context()->IsTest() || !info_->IsOptimizable()) return;
658 689
659 Label skip; 690 Label skip;
660 if (should_normalize) __ Branch(&skip); 691 if (should_normalize) __ Branch(&skip);
661 692
662 ForwardBailoutStack* current = forward_bailout_stack_; 693 ForwardBailoutStack* current = forward_bailout_stack_;
663 while (current != NULL) { 694 while (current != NULL) {
664 PrepareForBailout(current->expr(), state); 695 PrepareForBailout(current->expr(), state);
665 current = current->parent(); 696 current = current->parent();
666 } 697 }
667 698
668 if (should_normalize) { 699 if (should_normalize) {
669 __ LoadRoot(t0, Heap::kTrueValueRootIndex); 700 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
670 Split(eq, a0, Operand(t0), if_true, if_false, NULL); 701 Split(eq, a0, Operand(t0), if_true, if_false, NULL);
671 __ bind(&skip); 702 __ bind(&skip);
672 } 703 }
673 } 704 }
674 705
675 706
676 void FullCodeGenerator::Move(Slot* dst, 707 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
677 Register src, 708 Variable::Mode mode,
678 Register scratch1, 709 FunctionLiteral* function,
679 Register scratch2) { 710 int* global_count) {
680 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented. 711 // If it was not possible to allocate the variable at compile time, we
681 ASSERT(!scratch1.is(src) && !scratch2.is(src)); 712 // need to "declare" it at runtime to make sure it actually exists in the
682 MemOperand location = EmitSlotSearch(dst, scratch1); 713 // local context.
683 __ sw(src, location); 714 Variable* variable = proxy->var();
684 // Emit the write barrier code if the location is in the heap. 715 switch (variable->location()) {
685 if (dst->type() == Slot::CONTEXT) { 716 case Variable::UNALLOCATED:
686 __ RecordWrite(scratch1, 717 ++(*global_count);
687 Operand(Context::SlotOffset(dst->index())), 718 break;
688 scratch2,
689 src);
690 }
691 }
692 719
720 case Variable::PARAMETER:
721 case Variable::LOCAL:
722 if (function != NULL) {
723 Comment cmnt(masm_, "[ Declaration");
724 VisitForAccumulatorValue(function);
725 __ sw(result_register(), StackOperand(variable));
726 } else if (mode == Variable::CONST || mode == Variable::LET) {
727 Comment cmnt(masm_, "[ Declaration");
728 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
729 __ sw(t0, StackOperand(variable));
730 }
731 break;
693 732
694 void FullCodeGenerator::EmitDeclaration(Variable* variable, 733 case Variable::CONTEXT:
695 Variable::Mode mode, 734 // The variable in the decl always resides in the current function
696 FunctionLiteral* function) { 735 // context.
697 Comment cmnt(masm_, "[ Declaration"); 736 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
698 ASSERT(variable != NULL); // Must have been resolved. 737 if (FLAG_debug_code) {
699 Slot* slot = variable->AsSlot(); 738 // Check that we're not inside a with or catch context.
700 Property* prop = variable->AsProperty(); 739 __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
740 __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
741 __ Check(ne, "Declaration in with context.",
742 a1, Operand(t0));
743 __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
744 __ Check(ne, "Declaration in catch context.",
745 a1, Operand(t0));
746 }
747 if (function != NULL) {
748 Comment cmnt(masm_, "[ Declaration");
749 VisitForAccumulatorValue(function);
750 __ sw(result_register(), ContextOperand(cp, variable->index()));
751 int offset = Context::SlotOffset(variable->index());
752 // We know that we have written a function, which is not a smi.
753 __ mov(a1, cp);
754 __ RecordWrite(a1, Operand(offset), a2, result_register());
755 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
756 } else if (mode == Variable::CONST || mode == Variable::LET) {
757 Comment cmnt(masm_, "[ Declaration");
758 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
759 __ sw(at, ContextOperand(cp, variable->index()));
760 // No write barrier since the_hole_value is in old space.
761 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
762 }
763 break;
701 764
702 if (slot != NULL) { 765 case Variable::LOOKUP: {
703 switch (slot->type()) { 766 Comment cmnt(masm_, "[ Declaration");
704 case Slot::PARAMETER: 767 __ li(a2, Operand(variable->name()));
705 case Slot::LOCAL: 768 // Declaration nodes are always introduced in one of three modes.
706 if (mode == Variable::CONST) { 769 ASSERT(mode == Variable::VAR ||
707 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); 770 mode == Variable::CONST ||
708 __ sw(t0, MemOperand(fp, SlotOffset(slot))); 771 mode == Variable::LET);
709 } else if (function != NULL) { 772 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
710 VisitForAccumulatorValue(function); 773 __ li(a1, Operand(Smi::FromInt(attr)));
711 __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); 774 // Push initial value, if any.
712 } 775 // Note: For variables we must not push an initial value (such as
713 break; 776 // 'undefined') because we may have a (legal) redeclaration and we
714 777 // must not destroy the current value.
715 case Slot::CONTEXT: 778 if (function != NULL) {
716 // We bypass the general EmitSlotSearch because we know more about 779 __ Push(cp, a2, a1);
717 // this specific context. 780 // Push initial value for function declaration.
718 781 VisitForStackValue(function);
719 // The variable in the decl always resides in the current function 782 } else if (mode == Variable::CONST || mode == Variable::LET) {
720 // context.
721 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
722 if (FLAG_debug_code) {
723 // Check that we're not inside a with or catch context.
724 __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
725 __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
726 __ Check(ne, "Declaration in with context.",
727 a1, Operand(t0));
728 __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
729 __ Check(ne, "Declaration in catch context.",
730 a1, Operand(t0));
731 }
732 if (mode == Variable::CONST) {
733 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
734 __ sw(at, ContextOperand(cp, slot->index()));
735 // No write barrier since the_hole_value is in old space.
736 } else if (function != NULL) {
737 VisitForAccumulatorValue(function);
738 __ sw(result_register(), ContextOperand(cp, slot->index()));
739 int offset = Context::SlotOffset(slot->index());
740 // We know that we have written a function, which is not a smi.
741 __ mov(a1, cp);
742 __ RecordWrite(a1, Operand(offset), a2, result_register());
743 }
744 break;
745
746 case Slot::LOOKUP: {
747 __ li(a2, Operand(variable->name()));
748 // Declaration nodes are always introduced in one of two modes.
749 ASSERT(mode == Variable::VAR ||
750 mode == Variable::CONST ||
751 mode == Variable::LET);
752 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
753 __ li(a1, Operand(Smi::FromInt(attr)));
754 // Push initial value, if any.
755 // Note: For variables we must not push an initial value (such as
756 // 'undefined') because we may have a (legal) redeclaration and we
757 // must not destroy the current value.
758 if (mode == Variable::CONST) {
759 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); 783 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
760 __ Push(cp, a2, a1, a0); 784 __ Push(cp, a2, a1, a0);
761 } else if (function != NULL) { 785 } else {
762 __ Push(cp, a2, a1); 786 ASSERT(Smi::FromInt(0) == 0);
763 // Push initial value for function declaration. 787 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
764 VisitForStackValue(function); 788 __ Push(cp, a2, a1, a0);
765 } else {
766 ASSERT(Smi::FromInt(0) == 0);
767 // No initial value!
768 __ mov(a0, zero_reg); // Operand(Smi::FromInt(0)));
769 __ Push(cp, a2, a1, a0);
770 }
771 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
772 break;
773 } 789 }
774 } 790 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
775 791 break;
776 } else if (prop != NULL) {
777 // A const declaration aliasing a parameter is an illegal redeclaration.
778 ASSERT(mode != Variable::CONST);
779 if (function != NULL) {
780 // We are declaring a function that rewrites to a property.
781 // Use (keyed) IC to set the initial value. We cannot visit the
782 // rewrite because it's shared and we risk recording duplicate AST
783 // IDs for bailouts from optimized code.
784 ASSERT(prop->obj()->AsVariableProxy() != NULL);
785 { AccumulatorValueContext for_object(this);
786 EmitVariableLoad(prop->obj()->AsVariableProxy());
787 }
788
789 __ push(result_register());
790 VisitForAccumulatorValue(function);
791 __ mov(a0, result_register());
792 __ pop(a2);
793
794 ASSERT(prop->key()->AsLiteral() != NULL &&
795 prop->key()->AsLiteral()->handle()->IsSmi());
796 __ li(a1, Operand(prop->key()->AsLiteral()->handle()));
797
798 Handle<Code> ic = is_strict_mode()
799 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
800 : isolate()->builtins()->KeyedStoreIC_Initialize();
801 __ Call(ic);
802 // Value in v0 is ignored (declarations are statements).
803 } 792 }
804 } 793 }
805 } 794 }
806 795
807 796
808 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { 797 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
809 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
810 }
811 798
812 799
813 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 800 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
814 // Call the runtime to declare the globals. 801 // Call the runtime to declare the globals.
815 // The context is the first argument. 802 // The context is the first argument.
816 __ li(a2, Operand(pairs)); 803 __ li(a1, Operand(pairs));
817 __ li(a1, Operand(Smi::FromInt(is_eval() ? 1 : 0))); 804 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
818 __ li(a0, Operand(Smi::FromInt(strict_mode_flag()))); 805 __ Push(cp, a1, a0);
819 __ Push(cp, a2, a1, a0); 806 __ CallRuntime(Runtime::kDeclareGlobals, 3);
820 __ CallRuntime(Runtime::kDeclareGlobals, 4);
821 // Return value is ignored. 807 // Return value is ignored.
822 } 808 }
823 809
824 810
825 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 811 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
826 Comment cmnt(masm_, "[ SwitchStatement"); 812 Comment cmnt(masm_, "[ SwitchStatement");
827 Breakable nested_statement(this, stmt); 813 Breakable nested_statement(this, stmt);
828 SetStatementPosition(stmt); 814 SetStatementPosition(stmt);
829 815
830 // Keep the switch value on the stack until a case matches. 816 // Keep the switch value on the stack until a case matches.
(...skipping 283 matching lines...)
1114 context()->Plug(v0); 1100 context()->Plug(v0);
1115 } 1101 }
1116 1102
1117 1103
1118 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1104 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1119 Comment cmnt(masm_, "[ VariableProxy"); 1105 Comment cmnt(masm_, "[ VariableProxy");
1120 EmitVariableLoad(expr); 1106 EmitVariableLoad(expr);
1121 } 1107 }
1122 1108
1123 1109
1124 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( 1110 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1125 Slot* slot, 1111 TypeofState typeof_state,
1126 TypeofState typeof_state, 1112 Label* slow) {
1127 Label* slow) {
1128 Register current = cp; 1113 Register current = cp;
1129 Register next = a1; 1114 Register next = a1;
1130 Register temp = a2; 1115 Register temp = a2;
1131 1116
1132 Scope* s = scope(); 1117 Scope* s = scope();
1133 while (s != NULL) { 1118 while (s != NULL) {
1134 if (s->num_heap_slots() > 0) { 1119 if (s->num_heap_slots() > 0) {
1135 if (s->calls_eval()) { 1120 if (s->calls_eval()) {
1136 // Check that extension is NULL. 1121 // Check that extension is NULL.
1137 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX)); 1122 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
(...skipping 23 matching lines...)
1161 // Check that extension is NULL. 1146 // Check that extension is NULL.
1162 __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX)); 1147 __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1163 __ Branch(slow, ne, temp, Operand(zero_reg)); 1148 __ Branch(slow, ne, temp, Operand(zero_reg));
1164 // Load next context in chain. 1149 // Load next context in chain.
1165 __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX)); 1150 __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1166 __ Branch(&loop); 1151 __ Branch(&loop);
1167 __ bind(&fast); 1152 __ bind(&fast);
1168 } 1153 }
1169 1154
1170 __ lw(a0, GlobalObjectOperand()); 1155 __ lw(a0, GlobalObjectOperand());
1171 __ li(a2, Operand(slot->var()->name())); 1156 __ li(a2, Operand(var->name()));
1172 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) 1157 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1173 ? RelocInfo::CODE_TARGET 1158 ? RelocInfo::CODE_TARGET
1174 : RelocInfo::CODE_TARGET_CONTEXT; 1159 : RelocInfo::CODE_TARGET_CONTEXT;
1175 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1160 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1176 __ Call(ic, mode); 1161 __ Call(ic, mode);
1177 } 1162 }
1178 1163
1179 1164
1180 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( 1165 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1181 Slot* slot, 1166 Label* slow) {
1182 Label* slow) { 1167 ASSERT(var->IsContextSlot());
1183 ASSERT(slot->type() == Slot::CONTEXT);
1184 Register context = cp; 1168 Register context = cp;
1185 Register next = a3; 1169 Register next = a3;
1186 Register temp = t0; 1170 Register temp = t0;
1187 1171
1188 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { 1172 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1189 if (s->num_heap_slots() > 0) { 1173 if (s->num_heap_slots() > 0) {
1190 if (s->calls_eval()) { 1174 if (s->calls_eval()) {
1191 // Check that extension is NULL. 1175 // Check that extension is NULL.
1192 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1176 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1193 __ Branch(slow, ne, temp, Operand(zero_reg)); 1177 __ Branch(slow, ne, temp, Operand(zero_reg));
1194 } 1178 }
1195 __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX)); 1179 __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1196 // Walk the rest of the chain without clobbering cp. 1180 // Walk the rest of the chain without clobbering cp.
1197 context = next; 1181 context = next;
1198 } 1182 }
1199 } 1183 }
1200 // Check that last extension is NULL. 1184 // Check that last extension is NULL.
1201 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1185 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1202 __ Branch(slow, ne, temp, Operand(zero_reg)); 1186 __ Branch(slow, ne, temp, Operand(zero_reg));
1203 1187
1204 // This function is used only for loads, not stores, so it's safe to 1188 // This function is used only for loads, not stores, so it's safe to
1205 // return an cp-based operand (the write barrier cannot be allowed to 1189 // return an cp-based operand (the write barrier cannot be allowed to
1206 // destroy the cp register). 1190 // destroy the cp register).
1207 return ContextOperand(context, slot->index()); 1191 return ContextOperand(context, var->index());
1208 } 1192 }
1209 1193
1210 1194
1211 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( 1195 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1212 Slot* slot, 1196 TypeofState typeof_state,
1213 TypeofState typeof_state, 1197 Label* slow,
1214 Label* slow, 1198 Label* done) {
1215 Label* done) {
1216 // Generate fast-case code for variables that might be shadowed by 1199 // Generate fast-case code for variables that might be shadowed by
1217 // eval-introduced variables. Eval is used a lot without 1200 // eval-introduced variables. Eval is used a lot without
1218 // introducing variables. In those cases, we do not want to 1201 // introducing variables. In those cases, we do not want to
1219 // perform a runtime call for all variables in the scope 1202 // perform a runtime call for all variables in the scope
1220 // containing the eval. 1203 // containing the eval.
1221 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { 1204 if (var->mode() == Variable::DYNAMIC_GLOBAL) {
1222 EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow); 1205 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1223 __ Branch(done); 1206 __ Branch(done);
1224 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { 1207 } else if (var->mode() == Variable::DYNAMIC_LOCAL) {
1225 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot(); 1208 Variable* local = var->local_if_not_shadowed();
1226 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite(); 1209 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1227 if (potential_slot != NULL) { 1210 if (local->mode() == Variable::CONST) {
1228 // Generate fast case for locals that rewrite to slots. 1211 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1229 __ lw(v0, ContextSlotOperandCheckExtensions(potential_slot, slow)); 1212 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1230 if (potential_slot->var()->mode() == Variable::CONST) { 1213 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1231 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 1214 __ movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1232 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1233 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1234 __ movz(v0, a0, at); // Conditional move.
1235 }
1236 __ Branch(done);
1237 } else if (rewrite != NULL) {
1238 // Generate fast case for calls of an argument function.
1239 Property* property = rewrite->AsProperty();
1240 if (property != NULL) {
1241 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1242 Literal* key_literal = property->key()->AsLiteral();
1243 if (obj_proxy != NULL &&
1244 key_literal != NULL &&
1245 obj_proxy->IsArguments() &&
1246 key_literal->handle()->IsSmi()) {
1247 // Load arguments object if there are no eval-introduced
1248 // variables. Then load the argument from the arguments
1249 // object using keyed load.
1250 __ lw(a1,
1251 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
1252 slow));
1253 __ li(a0, Operand(key_literal->handle()));
1254 Handle<Code> ic =
1255 isolate()->builtins()->KeyedLoadIC_Initialize();
1256 __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1257 __ Branch(done);
1258 }
1259 }
1260 } 1215 }
1216 __ Branch(done);
1261 } 1217 }
1262 } 1218 }
1263 1219
1264 1220
1265 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1221 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1266 // Record position before possible IC call. 1222 // Record position before possible IC call.
1267 SetSourcePosition(proxy->position()); 1223 SetSourcePosition(proxy->position());
1268 Variable* var = proxy->var(); 1224 Variable* var = proxy->var();
1269 1225
1270 // Three cases: non-this global variables, lookup slots, and all other 1226 // Three cases: global variables, lookup variables, and all other types of
1271 // types of slots. 1227 // variables.
1272 Slot* slot = var->AsSlot(); 1228 switch (var->location()) {
1273 ASSERT((var->is_global() && !var->is_this()) == (slot == NULL)); 1229 case Variable::UNALLOCATED: {
1230 Comment cmnt(masm_, "Global variable");
1231 // Use inline caching. Variable name is passed in a2 and the global
1232 // object (receiver) in a0.
1233 __ lw(a0, GlobalObjectOperand());
1234 __ li(a2, Operand(var->name()));
1235 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1236 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1237 context()->Plug(v0);
1238 break;
1239 }
1274 1240
1275 if (slot == NULL) { 1241 case Variable::PARAMETER:
1276 Comment cmnt(masm_, "Global variable"); 1242 case Variable::LOCAL:
1277 // Use inline caching. Variable name is passed in a2 and the global 1243 case Variable::CONTEXT: {
1278 // object (receiver) in a0. 1244 Comment cmnt(masm_, var->IsContextSlot()
1279 __ lw(a0, GlobalObjectOperand()); 1245 ? "Context variable"
1280 __ li(a2, Operand(var->name())); 1246 : "Stack variable");
1281 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1247 if (var->mode() != Variable::LET && var->mode() != Variable::CONST) {
1282 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1248 context()->Plug(var);
1283 context()->Plug(v0); 1249 } else {
1250 // Let and const need a read barrier.
1251 GetVar(v0, var);
1252 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1253 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1254 if (var->mode() == Variable::LET) {
1255 Label done;
1256 __ Branch(&done, ne, at, Operand(zero_reg));
1257 __ li(a0, Operand(var->name()));
1258 __ push(a0);
1259 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1260 __ bind(&done);
1261 } else {
1262 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1263 __ movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1264 }
1265 context()->Plug(v0);
1266 }
1267 break;
1268 }
1284 1269
1285 } else if (slot->type() == Slot::LOOKUP) { 1270 case Variable::LOOKUP: {
1286 Label done, slow; 1271 Label done, slow;
1287 1272 // Generate code for loading from variables potentially shadowed
1288 // Generate code for loading from variables potentially shadowed 1273 // by eval-introduced variables.
1289 // by eval-introduced variables. 1274 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1290 EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done); 1275 __ bind(&slow);
1291 1276 Comment cmnt(masm_, "Lookup variable");
1292 __ bind(&slow); 1277 __ li(a1, Operand(var->name()));
1293 Comment cmnt(masm_, "Lookup slot"); 1278 __ Push(cp, a1); // Context and name.
1294 __ li(a1, Operand(var->name())); 1279 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1295 __ Push(cp, a1); // Context and name. 1280 __ bind(&done);
1296 __ CallRuntime(Runtime::kLoadContextSlot, 2); 1281 context()->Plug(v0);
1297 __ bind(&done); 1282 }
1298
1299 context()->Plug(v0);
1300
1301 } else {
1302 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1303 ? "Context slot"
1304 : "Stack slot");
1305 if (var->mode() == Variable::CONST) {
1306 // Constants may be the hole value if they have not been initialized.
1307 // Unhole them.
1308 MemOperand slot_operand = EmitSlotSearch(slot, a0);
1309 __ lw(v0, slot_operand);
1310 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1311 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1312 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1313 __ movz(v0, a0, at); // Conditional move.
1314 context()->Plug(v0);
1315 } else {
1316 context()->Plug(slot);
1317 }
1318 } 1283 }
1319 } 1284 }
1320 1285
1321 1286
1322 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1287 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1323 Comment cmnt(masm_, "[ RegExpLiteral"); 1288 Comment cmnt(masm_, "[ RegExpLiteral");
1324 Label materialized; 1289 Label materialized;
1325 // Registers will be used as follows: 1290 // Registers will be used as follows:
1326 // t1 = materialized value (RegExp literal) 1291 // t1 = materialized value (RegExp literal)
1327 // t0 = JS function, literals array 1292 // t0 = JS function, literals array
(...skipping 198 matching lines...)
1526 VisitForAccumulatorValue(subexpr); 1491 VisitForAccumulatorValue(subexpr);
1527 1492
1528 // Store the subexpression value in the array's elements. 1493 // Store the subexpression value in the array's elements.
1529 __ lw(a1, MemOperand(sp)); // Copy of array literal. 1494 __ lw(a1, MemOperand(sp)); // Copy of array literal.
1530 __ lw(a1, FieldMemOperand(a1, JSObject::kElementsOffset)); 1495 __ lw(a1, FieldMemOperand(a1, JSObject::kElementsOffset));
1531 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1496 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1532 __ sw(result_register(), FieldMemOperand(a1, offset)); 1497 __ sw(result_register(), FieldMemOperand(a1, offset));
1533 1498
1534 // Update the write barrier for the array store with v0 as the scratch 1499 // Update the write barrier for the array store with v0 as the scratch
1535 // register. 1500 // register.
1536 __ li(a2, Operand(offset)); 1501 __ RecordWrite(a1, Operand(offset), a2, result_register());
1537 // TODO(PJ): double check this RecordWrite call.
1538 __ RecordWrite(a1, a2, result_register());
1539 1502
1540 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); 1503 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1541 } 1504 }
1542 1505
1543 if (result_saved) { 1506 if (result_saved) {
1544 context()->PlugTOS(); 1507 context()->PlugTOS();
1545 } else { 1508 } else {
1546 context()->Plug(v0); 1509 context()->Plug(v0);
1547 } 1510 }
1548 } 1511 }
(...skipping 298 matching lines...)
1847 break; 1810 break;
1848 } 1811 }
1849 } 1812 }
1850 PrepareForBailoutForId(bailout_ast_id, TOS_REG); 1813 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1851 context()->Plug(v0); 1814 context()->Plug(v0);
1852 } 1815 }
1853 1816
1854 1817
1855 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 1818 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1856 Token::Value op) { 1819 Token::Value op) {
1857 ASSERT(var != NULL); 1820 if (var->IsUnallocated()) {
1858 ASSERT(var->is_global() || var->AsSlot() != NULL); 1821 // Global var, const, or let.
1859
1860 if (var->is_global()) {
1861 ASSERT(!var->is_this());
1862 // Assignment to a global variable. Use inline caching for the
1863 // assignment. Right-hand-side value is passed in a0, variable name in
1864 // a2, and the global object in a1.
1865 __ mov(a0, result_register()); 1822 __ mov(a0, result_register());
1866 __ li(a2, Operand(var->name())); 1823 __ li(a2, Operand(var->name()));
1867 __ lw(a1, GlobalObjectOperand()); 1824 __ lw(a1, GlobalObjectOperand());
1868 Handle<Code> ic = is_strict_mode() 1825 Handle<Code> ic = is_strict_mode()
1869 ? isolate()->builtins()->StoreIC_Initialize_Strict() 1826 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1870 : isolate()->builtins()->StoreIC_Initialize(); 1827 : isolate()->builtins()->StoreIC_Initialize();
1871 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1828 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1872 1829
1873 } else if (op == Token::INIT_CONST) { 1830 } else if (op == Token::INIT_CONST) {
1874 // Like var declarations, const declarations are hoisted to function 1831 // Const initializers need a write barrier.
1875 // scope. However, unlike var initializers, const initializers are able 1832 ASSERT(!var->IsParameter()); // No const parameters.
1876 // to drill a hole to that function context, even from inside a 'with' 1833 if (var->IsStackLocal()) {
1877 // context. We thus bypass the normal static scope lookup. 1834 Label skip;
1878 Slot* slot = var->AsSlot(); 1835 __ lw(a1, StackOperand(var));
1879 Label skip; 1836 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
1880 switch (slot->type()) { 1837 __ Branch(&skip, ne, a1, Operand(t0));
1881 case Slot::PARAMETER: 1838 __ sw(result_register(), StackOperand(var));
1882 // No const parameters. 1839 __ bind(&skip);
1883 UNREACHABLE(); 1840 } else {
1884 break; 1841 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
1885 case Slot::LOCAL: 1842 // Like var declarations, const declarations are hoisted to function
1886 // Detect const reinitialization by checking for the hole value. 1843 // scope. However, unlike var initializers, const initializers are
1887 __ lw(a1, MemOperand(fp, SlotOffset(slot))); 1844 // able to drill a hole to that function context, even from inside a
1888 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); 1845 // 'with' context. We thus bypass the normal static scope lookup for
1889 __ Branch(&skip, ne, a1, Operand(t0)); 1846 // var->IsContextSlot().
1890 __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); 1847 __ push(v0);
1891 break; 1848 __ li(a0, Operand(var->name()));
1892 case Slot::CONTEXT: 1849 __ Push(cp, a0); // Context and name.
1893 case Slot::LOOKUP: 1850 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1894 __ push(result_register());
1895 __ li(a0, Operand(slot->var()->name()));
1896 __ Push(cp, a0); // Context and name.
1897 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1898 break;
1899 } 1851 }
1900 __ bind(&skip); 1852
1853 } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
1854 // Non-initializing assignment to let variable needs a write barrier.
1855 if (var->IsLookupSlot()) {
1856 __ push(v0); // Value.
1857 __ li(a1, Operand(var->name()));
1858 __ li(a0, Operand(Smi::FromInt(strict_mode_flag())));
1859 __ Push(cp, a1, a0); // Context, name, strict mode.
1860 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1861 } else {
1862 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
1863 Label assign;
1864 MemOperand location = VarOperand(var, a1);
1865 __ lw(a3, location);
1866 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
1867 __ Branch(&assign, ne, a3, Operand(t0));
1868 __ li(a3, Operand(var->name()));
1869 __ push(a3);
1870 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1871 // Perform the assignment.
1872 __ bind(&assign);
1873 __ sw(result_register(), location);
1874 if (var->IsContextSlot()) {
1875 // RecordWrite may destroy all its register arguments.
1876 __ mov(a3, result_register());
1877 int offset = Context::SlotOffset(var->index());
1878 __ RecordWrite(a1, Operand(offset), a2, a3);
1879 }
1880 }
1901 1881
1902 } else if (var->mode() != Variable::CONST) { 1882 } else if (var->mode() != Variable::CONST) {
1903 // Perform the assignment for non-const variables. Const assignments 1883 // Assignment to var or initializing assignment to let.
1904 // are simply skipped. 1884 if (var->IsStackAllocated() || var->IsContextSlot()) {
1905 Slot* slot = var->AsSlot(); 1885 MemOperand location = VarOperand(var, a1);
1906 switch (slot->type()) { 1886 if (FLAG_debug_code && op == Token::INIT_LET) {
1907 case Slot::PARAMETER: 1887 // Check for an uninitialized let binding.
1908 case Slot::LOCAL: 1888 __ lw(a2, location);
1909 // Perform the assignment. 1889 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
1910 __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); 1890 __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
1911 break;
1912
1913 case Slot::CONTEXT: {
1914 MemOperand target = EmitSlotSearch(slot, a1);
1915 // Perform the assignment and issue the write barrier.
1916 __ sw(result_register(), target);
1917 // RecordWrite may destroy all its register arguments.
1918 __ mov(a3, result_register());
1919 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
1920 __ RecordWrite(a1, Operand(offset), a2, a3);
1921 break;
1922 } 1891 }
1923 1892 // Perform the assignment.
1924 case Slot::LOOKUP: 1893 __ sw(v0, location);
1925 // Call the runtime for the assignment. 1894 if (var->IsContextSlot()) {
1926 __ push(v0); // Value. 1895 __ mov(a3, v0);
1927 __ li(a1, Operand(slot->var()->name())); 1896 __ RecordWrite(a1, Operand(Context::SlotOffset(var->index())), a2, a3);
1928 __ li(a0, Operand(Smi::FromInt(strict_mode_flag()))); 1897 }
1929 __ Push(cp, a1, a0); // Context, name, strict mode. 1898 } else {
1930 __ CallRuntime(Runtime::kStoreContextSlot, 4); 1899 ASSERT(var->IsLookupSlot());
1931 break; 1900 __ push(v0); // Value.
1901 __ li(a1, Operand(var->name()));
1902 __ li(a0, Operand(Smi::FromInt(strict_mode_flag())));
1903 __ Push(cp, a1, a0); // Context, name, strict mode.
1904 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1932 } 1905 }
1933 } 1906 }
1907 // Non-initializing assignments to consts are ignored.
1934 } 1908 }
1935 1909
1936 1910
1937 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 1911 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1938 // Assignment to a property, using a named store IC. 1912 // Assignment to a property, using a named store IC.
1939 Property* prop = expr->target()->AsProperty(); 1913 Property* prop = expr->target()->AsProperty();
1940 ASSERT(prop != NULL); 1914 ASSERT(prop != NULL);
1941 ASSERT(prop->key()->AsLiteral() != NULL); 1915 ASSERT(prop->key()->AsLiteral() != NULL);
1942 1916
1943 // If the assignment starts a block of assignments to the same object, 1917 // If the assignment starts a block of assignments to the same object,
(...skipping 197 matching lines...)
2141 __ lw(a1, MemOperand(sp, arg_count * kPointerSize)); 2115 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2142 } else { 2116 } else {
2143 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); 2117 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2144 } 2118 }
2145 __ push(a1); 2119 __ push(a1);
2146 2120
2147 // Push the receiver of the enclosing function and do runtime call. 2121 // Push the receiver of the enclosing function and do runtime call.
2148 int receiver_offset = 2 + info_->scope()->num_parameters(); 2122 int receiver_offset = 2 + info_->scope()->num_parameters();
2149 __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize)); 2123 __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
2150 __ push(a1); 2124 __ push(a1);
2151 // Push the strict mode flag. 2125 // Push the strict mode flag. In harmony mode every eval call
2152 __ li(a1, Operand(Smi::FromInt(strict_mode_flag()))); 2126 // is a strict mode eval call.
2127 StrictModeFlag strict_mode = strict_mode_flag();
2128 if (FLAG_harmony_block_scoping) {
2129 strict_mode = kStrictMode;
2130 }
2131 __ li(a1, Operand(Smi::FromInt(strict_mode)));
2153 __ push(a1); 2132 __ push(a1);
2154 2133
2155 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP 2134 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2156 ? Runtime::kResolvePossiblyDirectEvalNoLookup 2135 ? Runtime::kResolvePossiblyDirectEvalNoLookup
2157 : Runtime::kResolvePossiblyDirectEval, 4); 2136 : Runtime::kResolvePossiblyDirectEval, 4);
2158 } 2137 }
2159 2138
2160 2139
2161 void FullCodeGenerator::VisitCall(Call* expr) { 2140 void FullCodeGenerator::VisitCall(Call* expr) {
2162 #ifdef DEBUG 2141 #ifdef DEBUG
2163 // We want to verify that RecordJSReturnSite gets called on all paths 2142 // We want to verify that RecordJSReturnSite gets called on all paths
2164 // through this function. Avoid early returns. 2143 // through this function. Avoid early returns.
2165 expr->return_is_recorded_ = false; 2144 expr->return_is_recorded_ = false;
2166 #endif 2145 #endif
2167 2146
2168 Comment cmnt(masm_, "[ Call"); 2147 Comment cmnt(masm_, "[ Call");
2169 Expression* fun = expr->expression(); 2148 Expression* callee = expr->expression();
2170 Variable* var = fun->AsVariableProxy()->AsVariable(); 2149 VariableProxy* proxy = callee->AsVariableProxy();
2150 Property* property = callee->AsProperty();
2171 2151
2172 if (var != NULL && var->is_possibly_eval()) { 2152 if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2173 // In a call to eval, we first call %ResolvePossiblyDirectEval to 2153 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2174 // resolve the function we need to call and the receiver of the 2154 // resolve the function we need to call and the receiver of the
2175 // call. Then we call the resolved function using the given 2155 // call. Then we call the resolved function using the given
2176 // arguments. 2156 // arguments.
2177 ZoneList<Expression*>* args = expr->arguments(); 2157 ZoneList<Expression*>* args = expr->arguments();
2178 int arg_count = args->length(); 2158 int arg_count = args->length();
2179 2159
2180 { PreservePositionScope pos_scope(masm()->positions_recorder()); 2160 { PreservePositionScope pos_scope(masm()->positions_recorder());
2181 VisitForStackValue(fun); 2161 VisitForStackValue(callee);
2182 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); 2162 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2183 __ push(a2); // Reserved receiver slot. 2163 __ push(a2); // Reserved receiver slot.
2184 2164
2185 // Push the arguments. 2165 // Push the arguments.
2186 for (int i = 0; i < arg_count; i++) { 2166 for (int i = 0; i < arg_count; i++) {
2187 VisitForStackValue(args->at(i)); 2167 VisitForStackValue(args->at(i));
2188 } 2168 }
2169
2189 // If we know that eval can only be shadowed by eval-introduced 2170 // If we know that eval can only be shadowed by eval-introduced
2190 // variables we attempt to load the global eval function directly 2171 // variables we attempt to load the global eval function directly
2191 // in generated code. If we succeed, there is no need to perform a 2172 // in generated code. If we succeed, there is no need to perform a
2192 // context lookup in the runtime system. 2173 // context lookup in the runtime system.
2193 Label done; 2174 Label done;
2194 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) { 2175 Variable* var = proxy->var();
2176 if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) {
2195 Label slow; 2177 Label slow;
2196 EmitLoadGlobalSlotCheckExtensions(var->AsSlot(), 2178 EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow);
2197 NOT_INSIDE_TYPEOF,
2198 &slow);
2199 // Push the function and resolve eval. 2179 // Push the function and resolve eval.
2200 __ push(v0); 2180 __ push(v0);
2201 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); 2181 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2202 __ jmp(&done); 2182 __ jmp(&done);
2203 __ bind(&slow); 2183 __ bind(&slow);
2204 } 2184 }
2205 2185
2206 // Push copy of the function (found below the arguments) and 2186 // Push a copy of the function (found below the arguments) and
2207 // resolve eval. 2187 // resolve eval.
2208 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2188 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2209 __ push(a1); 2189 __ push(a1);
2210 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); 2190 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2211 if (done.is_linked()) { 2191 __ bind(&done);
2212 __ bind(&done);
2213 }
2214 2192
2215 // The runtime call returns a pair of values in v0 (function) and 2193 // The runtime call returns a pair of values in v0 (function) and
2216 // v1 (receiver). Touch up the stack with the right values. 2194 // v1 (receiver). Touch up the stack with the right values.
2217 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2195 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2218 __ sw(v1, MemOperand(sp, arg_count * kPointerSize)); 2196 __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
2219 } 2197 }
2220 // Record source position for debugger. 2198 // Record source position for debugger.
2221 SetSourcePosition(expr->position()); 2199 SetSourcePosition(expr->position());
2222 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; 2200 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2223 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT); 2201 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
2224 __ CallStub(&stub); 2202 __ CallStub(&stub);
2225 RecordJSReturnSite(expr); 2203 RecordJSReturnSite(expr);
2226 // Restore context register. 2204 // Restore context register.
2227 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2205 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2228 context()->DropAndPlug(1, v0); 2206 context()->DropAndPlug(1, v0);
2229 } else if (var != NULL && !var->is_this() && var->is_global()) { 2207 } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2230 // Push global object as receiver for the call IC. 2208 // Push global object as receiver for the call IC.
2231 __ lw(a0, GlobalObjectOperand()); 2209 __ lw(a0, GlobalObjectOperand());
2232 __ push(a0); 2210 __ push(a0);
2233 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); 2211 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2234 } else if (var != NULL && var->AsSlot() != NULL && 2212 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2235 var->AsSlot()->type() == Slot::LOOKUP) {
2236 // Call to a lookup slot (dynamically introduced variable). 2213 // Call to a lookup slot (dynamically introduced variable).
2237 Label slow, done; 2214 Label slow, done;
2238 2215
2239 { PreservePositionScope scope(masm()->positions_recorder()); 2216 { PreservePositionScope scope(masm()->positions_recorder());
2240 // Generate code for loading from variables potentially shadowed 2217 // Generate code for loading from variables potentially shadowed
2241 // by eval-introduced variables. 2218 // by eval-introduced variables.
2242 EmitDynamicLoadFromSlotFastCase(var->AsSlot(), 2219 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2243 NOT_INSIDE_TYPEOF,
2244 &slow,
2245 &done);
2246 } 2220 }
2247 2221
2248 __ bind(&slow); 2222 __ bind(&slow);
2249 // Call the runtime to find the function to call (returned in v0) 2223 // Call the runtime to find the function to call (returned in v0)
2250 // and the object holding it (returned in v1). 2224 // and the object holding it (returned in v1).
2251 __ push(context_register()); 2225 __ push(context_register());
2252 __ li(a2, Operand(var->name())); 2226 __ li(a2, Operand(proxy->name()));
2253 __ push(a2); 2227 __ push(a2);
2254 __ CallRuntime(Runtime::kLoadContextSlot, 2); 2228 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2255 __ Push(v0, v1); // Function, receiver. 2229 __ Push(v0, v1); // Function, receiver.
2256 2230
2257 // If fast case code has been generated, emit code to push the 2231 // If fast case code has been generated, emit code to push the
2258 // function and receiver and have the slow path jump around this 2232 // function and receiver and have the slow path jump around this
2259 // code. 2233 // code.
2260 if (done.is_linked()) { 2234 if (done.is_linked()) {
2261 Label call; 2235 Label call;
2262 __ Branch(&call); 2236 __ Branch(&call);
2263 __ bind(&done); 2237 __ bind(&done);
2264 // Push function. 2238 // Push function.
2265 __ push(v0); 2239 __ push(v0);
2266 // The receiver is implicitly the global receiver. Indicate this 2240 // The receiver is implicitly the global receiver. Indicate this
2267 // by passing the hole to the call function stub. 2241 // by passing the hole to the call function stub.
2268 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex); 2242 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
2269 __ push(a1); 2243 __ push(a1);
2270 __ bind(&call); 2244 __ bind(&call);
2271 } 2245 }
2272 2246
2273 // The receiver is either the global receiver or an object found 2247 // The receiver is either the global receiver or an object found
2274 // by LoadContextSlot. That object could be the hole if the 2248 // by LoadContextSlot. That object could be the hole if the
2275 // receiver is implicitly the global object. 2249 // receiver is implicitly the global object.
2276 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); 2250 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2277 } else if (fun->AsProperty() != NULL) { 2251 } else if (property != NULL) {
2278 // Call to an object property. 2252 { PreservePositionScope scope(masm()->positions_recorder());
2279 Property* prop = fun->AsProperty(); 2253 VisitForStackValue(property->obj());
2280 Literal* key = prop->key()->AsLiteral(); 2254 }
2281 if (key != NULL && key->handle()->IsSymbol()) { 2255 if (property->key()->IsPropertyName()) {
2282 // Call to a named property, use call IC. 2256 EmitCallWithIC(expr,
2283 { PreservePositionScope scope(masm()->positions_recorder()); 2257 property->key()->AsLiteral()->handle(),
2284 VisitForStackValue(prop->obj()); 2258 RelocInfo::CODE_TARGET);
2285 }
2286 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2287 } else { 2259 } else {
2288 // Call to a keyed property. 2260 EmitKeyedCallWithIC(expr, property->key());
2289 // For a synthetic property use keyed load IC followed by function call,
2290 // for a regular property use EmitKeyedCallWithIC.
2291 if (prop->is_synthetic()) {
2292 // Do not visit the object and key subexpressions (they are shared
2293 // by all occurrences of the same rewritten parameter).
2294 ASSERT(prop->obj()->AsVariableProxy() != NULL);
2295 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2296 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2297 MemOperand operand = EmitSlotSearch(slot, a1);
2298 __ lw(a1, operand);
2299
2300 ASSERT(prop->key()->AsLiteral() != NULL);
2301 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2302 __ li(a0, Operand(prop->key()->AsLiteral()->handle()));
2303
2304 // Record source code position for IC call.
2305 SetSourcePosition(prop->position());
2306
2307 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2308 __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
2309 __ lw(a1, GlobalObjectOperand());
2310 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2311 __ Push(v0, a1); // Function, receiver.
2312 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2313 } else {
2314 { PreservePositionScope scope(masm()->positions_recorder());
2315 VisitForStackValue(prop->obj());
2316 }
2317 EmitKeyedCallWithIC(expr, prop->key());
2318 }
2319 } 2261 }
2320 } else { 2262 } else {
2263 // Call to an arbitrary expression not handled specially above.
2321 { PreservePositionScope scope(masm()->positions_recorder()); 2264 { PreservePositionScope scope(masm()->positions_recorder());
2322 VisitForStackValue(fun); 2265 VisitForStackValue(callee);
2323 } 2266 }
2324 // Load global receiver object. 2267 // Load global receiver object.
2325 __ lw(a1, GlobalObjectOperand()); 2268 __ lw(a1, GlobalObjectOperand());
2326 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset)); 2269 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2327 __ push(a1); 2270 __ push(a1);
2328 // Emit function call. 2271 // Emit function call.
2329 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); 2272 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2330 } 2273 }
2331 2274
2332 #ifdef DEBUG 2275 #ifdef DEBUG
(...skipping 927 matching lines...)
3260 __ lw(cache, ContextOperand(cp, Context::GLOBAL_INDEX)); 3203 __ lw(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
3261 __ lw(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset)); 3204 __ lw(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
3262 __ lw(cache, 3205 __ lw(cache,
3263 ContextOperand( 3206 ContextOperand(
3264 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 3207 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3265 __ lw(cache, 3208 __ lw(cache,
3266 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); 3209 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3267 3210
3268 3211
3269 Label done, not_found; 3212 Label done, not_found;
3270 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 3213 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3271 __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); 3214 __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3272 // a2 now holds finger offset as a smi. 3215 // a2 now holds finger offset as a smi.
3273 __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 3216 __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3274 // a3 now points to the start of fixed array elements. 3217 // a3 now points to the start of fixed array elements.
3275 __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize); 3218 __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
3276 __ addu(a3, a3, at); 3219 __ addu(a3, a3, at);
3277 // a3 now points to key of indexed element of cache. 3220 // a3 now points to key of indexed element of cache.
3278 __ lw(a2, MemOperand(a3)); 3221 __ lw(a2, MemOperand(a3));
3279 __ Branch(&not_found, ne, key, Operand(a2)); 3222 __ Branch(&not_found, ne, key, Operand(a2));
3280 3223
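The sll/addu pair above turns the smi-encoded finger straight into a byte offset; a minimal sketch of that arithmetic (illustrative only — assumes MIPS32 with 4-byte pointers, i.e. kPointerSizeLog2 == 2 and kSmiTagSize == 1, and a helper name of our own):

    #include <stdint.h>

    static const int kPointerSizeLog2 = 2;  // 4-byte pointers on 32-bit MIPS.
    static const int kSmiTagSize = 1;

    // A smi stores index << kSmiTagSize, so shifting the smi left by
    // (kPointerSizeLog2 - kSmiTagSize) yields index * kPointerSize directly:
    // the byte offset of element |index| from the start of the fixed array data.
    uint32_t KeySlotAddress(uint32_t elements_start, uint32_t finger_smi) {
      return elements_start + (finger_smi << (kPointerSizeLog2 - kSmiTagSize));
    }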
(...skipping 314 matching lines...)
3595 ASSERT(result.is(v0)); 3538 ASSERT(result.is(v0));
3596 __ Branch(&done); 3539 __ Branch(&done);
3597 3540
3598 __ bind(&bailout); 3541 __ bind(&bailout);
3599 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); 3542 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3600 __ bind(&done); 3543 __ bind(&done);
3601 context()->Plug(v0); 3544 context()->Plug(v0);
3602 } 3545 }
3603 3546
3604 3547
3605 void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList<Expression*>* args) {
3606 ASSERT(args->length() == 1);
3607
3608 // Load the function into v0.
3609 VisitForAccumulatorValue(args->at(0));
3610
3611 // Prepare for the test.
3612 Label materialize_true, materialize_false;
3613 Label* if_true = NULL;
3614 Label* if_false = NULL;
3615 Label* fall_through = NULL;
3616 context()->PrepareTest(&materialize_true, &materialize_false,
3617 &if_true, &if_false, &fall_through);
3618
3619 // Test for strict mode function.
3620 __ lw(a1, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3621 __ lw(a1, FieldMemOperand(a1, SharedFunctionInfo::kCompilerHintsOffset));
3622 __ And(at, a1, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
3623 kSmiTagSize)));
3624 __ Branch(if_true, ne, at, Operand(zero_reg));
3625
3626 // Test for native function.
3627 __ And(at, a1, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
3628 __ Branch(if_true, ne, at, Operand(zero_reg));
3629
3630 // Not native or strict-mode function.
3631 __ Branch(if_false);
3632
3633 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3634 context()->Plug(if_true, if_false);
3635 }
3636
3637
3638 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 3548 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3639 Handle<String> name = expr->name(); 3549 Handle<String> name = expr->name();
3640 if (name->length() > 0 && name->Get(0) == '_') { 3550 if (name->length() > 0 && name->Get(0) == '_') {
3641 Comment cmnt(masm_, "[ InlineRuntimeCall"); 3551 Comment cmnt(masm_, "[ InlineRuntimeCall");
3642 EmitInlineRuntimeCall(expr); 3552 EmitInlineRuntimeCall(expr);
3643 return; 3553 return;
3644 } 3554 }
3645 3555
3646 Comment cmnt(masm_, "[ CallRuntime"); 3556 Comment cmnt(masm_, "[ CallRuntime");
3647 ZoneList<Expression*>* args = expr->arguments(); 3557 ZoneList<Expression*>* args = expr->arguments();
(...skipping 27 matching lines...)
3675 __ CallRuntime(expr->function(), arg_count); 3585 __ CallRuntime(expr->function(), arg_count);
3676 } 3586 }
3677 context()->Plug(v0); 3587 context()->Plug(v0);
3678 } 3588 }
3679 3589
3680 3590
3681 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3591 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3682 switch (expr->op()) { 3592 switch (expr->op()) {
3683 case Token::DELETE: { 3593 case Token::DELETE: {
3684 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 3594 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3685 Property* prop = expr->expression()->AsProperty(); 3595 Property* property = expr->expression()->AsProperty();
3686 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); 3596 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3687 3597
3688 if (prop != NULL) { 3598 if (property != NULL) {
3689 if (prop->is_synthetic()) { 3599 VisitForStackValue(property->obj());
3690 // Result of deleting parameters is false, even when they rewrite 3600 VisitForStackValue(property->key());
3691 // to accesses on the arguments object. 3601 __ li(a1, Operand(Smi::FromInt(strict_mode_flag())));
3692 context()->Plug(false); 3602 __ push(a1);
3693 } else { 3603 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3694 VisitForStackValue(prop->obj()); 3604 context()->Plug(v0);
3695 VisitForStackValue(prop->key()); 3605 } else if (proxy != NULL) {
3696 __ li(a1, Operand(Smi::FromInt(strict_mode_flag()))); 3606 Variable* var = proxy->var();
3697 __ push(a1);
3698 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3699 context()->Plug(v0);
3700 }
3701 } else if (var != NULL) {
3702 // Delete of an unqualified identifier is disallowed in strict mode 3607 // Delete of an unqualified identifier is disallowed in strict mode
3703 // but "delete this" is. 3608 // but "delete this" is allowed.
3704 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); 3609 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
3705 if (var->is_global()) { 3610 if (var->IsUnallocated()) {
3706 __ lw(a2, GlobalObjectOperand()); 3611 __ lw(a2, GlobalObjectOperand());
3707 __ li(a1, Operand(var->name())); 3612 __ li(a1, Operand(var->name()));
3708 __ li(a0, Operand(Smi::FromInt(kNonStrictMode))); 3613 __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
3709 __ Push(a2, a1, a0); 3614 __ Push(a2, a1, a0);
3710 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3615 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3711 context()->Plug(v0); 3616 context()->Plug(v0);
3712 } else if (var->AsSlot() != NULL && 3617 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3713 var->AsSlot()->type() != Slot::LOOKUP) {
3714 // Result of deleting non-global, non-dynamic variables is false. 3618 // Result of deleting non-global, non-dynamic variables is false.
3715 // The subexpression does not have side effects. 3619 // The subexpression does not have side effects.
3716 context()->Plug(false); 3620 context()->Plug(var->is_this());
3717 } else { 3621 } else {
3718 // Non-global variable. Call the runtime to try to delete from the 3622 // Non-global variable. Call the runtime to try to delete from the
3719 // context where the variable was introduced. 3623 // context where the variable was introduced.
3720 __ push(context_register()); 3624 __ push(context_register());
3721 __ li(a2, Operand(var->name())); 3625 __ li(a2, Operand(var->name()));
3722 __ push(a2); 3626 __ push(a2);
3723 __ CallRuntime(Runtime::kDeleteContextSlot, 2); 3627 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3724 context()->Plug(v0); 3628 context()->Plug(v0);
3725 } 3629 }
3726 } else { 3630 } else {
(...skipping 254 matching lines...)
3981 } else { 3885 } else {
3982 context()->Plug(v0); 3886 context()->Plug(v0);
3983 } 3887 }
3984 break; 3888 break;
3985 } 3889 }
3986 } 3890 }
3987 } 3891 }
3988 3892
3989 3893
3990 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 3894 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
3895 ASSERT(!context()->IsEffect());
3896 ASSERT(!context()->IsTest());
3991 VariableProxy* proxy = expr->AsVariableProxy(); 3897 VariableProxy* proxy = expr->AsVariableProxy();
3992 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { 3898 if (proxy != NULL && proxy->var()->IsUnallocated()) {
3993 Comment cmnt(masm_, "Global variable"); 3899 Comment cmnt(masm_, "Global variable");
3994 __ lw(a0, GlobalObjectOperand()); 3900 __ lw(a0, GlobalObjectOperand());
3995 __ li(a2, Operand(proxy->name())); 3901 __ li(a2, Operand(proxy->name()));
3996 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 3902 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3997 // Use a regular load, not a contextual load, to avoid a reference 3903 // Use a regular load, not a contextual load, to avoid a reference
3998 // error. 3904 // error.
3999 __ Call(ic); 3905 __ Call(ic);
4000 PrepareForBailout(expr, TOS_REG); 3906 PrepareForBailout(expr, TOS_REG);
4001 context()->Plug(v0); 3907 context()->Plug(v0);
4002 } else if (proxy != NULL && 3908 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4003 proxy->var()->AsSlot() != NULL &&
4004 proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
4005 Label done, slow; 3909 Label done, slow;
4006 3910
4007 // Generate code for loading from variables potentially shadowed 3911 // Generate code for loading from variables potentially shadowed
4008 // by eval-introduced variables. 3912 // by eval-introduced variables.
4009 Slot* slot = proxy->var()->AsSlot(); 3913 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4010 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
4011 3914
4012 __ bind(&slow); 3915 __ bind(&slow);
4013 __ li(a0, Operand(proxy->name())); 3916 __ li(a0, Operand(proxy->name()));
4014 __ Push(cp, a0); 3917 __ Push(cp, a0);
4015 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 3918 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4016 PrepareForBailout(expr, TOS_REG); 3919 PrepareForBailout(expr, TOS_REG);
4017 __ bind(&done); 3920 __ bind(&done);
4018 3921
4019 context()->Plug(v0); 3922 context()->Plug(v0);
4020 } else { 3923 } else {
(...skipping 273 matching lines...)
4294 // ---------------------------------------------------------------------------- 4197 // ----------------------------------------------------------------------------
4295 // Non-local control flow support. 4198 // Non-local control flow support.
4296 4199
4297 void FullCodeGenerator::EnterFinallyBlock() { 4200 void FullCodeGenerator::EnterFinallyBlock() {
4298 ASSERT(!result_register().is(a1)); 4201 ASSERT(!result_register().is(a1));
4299 // Store result register while executing finally block. 4202 // Store result register while executing finally block.
4300 __ push(result_register()); 4203 __ push(result_register());
4301 // Cook return address in link register to stack (smi encoded Code* delta). 4204 // Cook return address in link register to stack (smi encoded Code* delta).
4302 __ Subu(a1, ra, Operand(masm_->CodeObject())); 4205 __ Subu(a1, ra, Operand(masm_->CodeObject()));
4303 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); 4206 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4304 ASSERT_EQ(0, kSmiTag); 4207 STATIC_ASSERT(0 == kSmiTag);
4305 __ Addu(a1, a1, Operand(a1)); // Convert to smi. 4208 __ Addu(a1, a1, Operand(a1)); // Convert to smi.
4306 __ push(a1); 4209 __ push(a1);
4307 } 4210 }
4308 4211
4309 4212
4310 void FullCodeGenerator::ExitFinallyBlock() { 4213 void FullCodeGenerator::ExitFinallyBlock() {
4311 ASSERT(!result_register().is(a1)); 4214 ASSERT(!result_register().is(a1));
4312 // Restore result register from stack. 4215 // Restore result register from stack.
4313 __ pop(a1); 4216 __ pop(a1);
4314 // Uncook return address and return. 4217 // Uncook return address and return.
4315 __ pop(result_register()); 4218 __ pop(result_register());
4316 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); 4219 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4317 __ sra(a1, a1, 1); // Un-smi-tag value. 4220 __ sra(a1, a1, 1); // Un-smi-tag value.
4318 __ Addu(at, a1, Operand(masm_->CodeObject())); 4221 __ Addu(at, a1, Operand(masm_->CodeObject()));
4319 __ Jump(at); 4222 __ Jump(at);
4320 } 4223 }
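The cook/uncook pair above leans on the one-bit smi encoding that the asserts check (kSmiTag == 0, kSmiTagSize + kSmiShiftSize == 1); a minimal sketch of the arithmetic, with helper names of our own:

    #include <stdint.h>

    // "Cooking": Addu(a1, a1, a1) doubles the pc delta, which is exactly
    // delta << 1 with a zero tag bit, i.e. the smi encoding of delta.
    int32_t CookDelta(int32_t delta) { return delta + delta; }

    // "Uncooking": sra(a1, a1, 1) arithmetically shifts the tag bit back out,
    // recovering the original delta to add back onto the code object address.
    int32_t UncookDelta(int32_t cooked) { return cooked >> 1; }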
4321 4224
4322 4225
4323 #undef __ 4226 #undef __
4324 4227
4228 #define __ ACCESS_MASM(masm())
4229
4230 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4231 int* stack_depth,
4232 int* context_length) {
4233 // The macros used here must preserve the result register.
4234
4235 // Because the handler block contains the context of the finally
4236 // code, we can restore it directly from there for the finally code
4237 // rather than iteratively unwinding contexts via their previous
4238 // links.
4239 __ Drop(*stack_depth); // Down to the handler block.
4240 if (*context_length > 0) {
4241 // Restore the context to its dedicated register and the stack.
4242 __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4243 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4244 }
4245 __ PopTryHandler();
4246 __ Call(finally_entry_);
4247
4248 *stack_depth = 0;
4249 *context_length = 0;
4250 return previous_;
4251 }
4252
4253
4254 #undef __
4255
4325 } } // namespace v8::internal 4256 } } // namespace v8::internal
4326 4257
4327 #endif // V8_TARGET_ARCH_MIPS 4258 #endif // V8_TARGET_ARCH_MIPS