| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 182 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 193 } else { | 193 } else { |
| 194 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 194 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
| 195 } | 195 } |
| 196 function_in_register = false; | 196 function_in_register = false; |
| 197 // Context is returned in both v0 and cp. It replaces the context | 197 // Context is returned in both v0 and cp. It replaces the context |
| 198 // passed to us. It's saved in the stack and kept live in cp. | 198 // passed to us. It's saved in the stack and kept live in cp. |
| 199 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 199 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 200 // Copy any necessary parameters into the context. | 200 // Copy any necessary parameters into the context. |
| 201 int num_parameters = info->scope()->num_parameters(); | 201 int num_parameters = info->scope()->num_parameters(); |
| 202 for (int i = 0; i < num_parameters; i++) { | 202 for (int i = 0; i < num_parameters; i++) { |
| 203 Slot* slot = scope()->parameter(i)->rewrite(); | 203 Variable* var = scope()->parameter(i); |
| 204 if (slot != NULL && slot->type() == Slot::CONTEXT) { | 204 if (var->IsContextSlot()) { |
| 205 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 205 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
| 206 (num_parameters - 1 - i) * kPointerSize; | 206 (num_parameters - 1 - i) * kPointerSize; |
| 207 // Load parameter from stack. | 207 // Load parameter from stack. |
| 208 __ lw(a0, MemOperand(fp, parameter_offset)); | 208 __ lw(a0, MemOperand(fp, parameter_offset)); |
| 209 // Store it in the context. | 209 // Store it in the context. |
| 210 __ li(a1, Operand(Context::SlotOffset(slot->index()))); | 210 __ li(a1, Operand(Context::SlotOffset(var->index()))); |
| 211 __ addu(a2, cp, a1); | 211 __ addu(a2, cp, a1); |
| 212 __ sw(a0, MemOperand(a2, 0)); | 212 __ sw(a0, MemOperand(a2, 0)); |
| 213 // Update the write barrier. This clobbers all involved | 213 // Update the write barrier. This clobbers all involved |
| 214 // registers, so we have to use two more registers to avoid | 214 // registers, so we have to use two more registers to avoid |
| 215 // clobbering cp. | 215 // clobbering cp. |
| 216 __ mov(a2, cp); | 216 __ mov(a2, cp); |
| 217 __ RecordWrite(a2, a1, a3); | 217 __ RecordWrite(a2, a1, a3); |
| 218 } | 218 } |
| 219 } | 219 } |
| 220 } | 220 } |
| (...skipping 24 matching lines...) Expand all Loading... |
| 245 if (is_strict_mode()) { | 245 if (is_strict_mode()) { |
| 246 type = ArgumentsAccessStub::NEW_STRICT; | 246 type = ArgumentsAccessStub::NEW_STRICT; |
| 247 } else if (function()->has_duplicate_parameters()) { | 247 } else if (function()->has_duplicate_parameters()) { |
| 248 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; | 248 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; |
| 249 } else { | 249 } else { |
| 250 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; | 250 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; |
| 251 } | 251 } |
| 252 ArgumentsAccessStub stub(type); | 252 ArgumentsAccessStub stub(type); |
| 253 __ CallStub(&stub); | 253 __ CallStub(&stub); |
| 254 | 254 |
| 255 Move(arguments->rewrite(), v0, a1, a2); | 255 SetVar(arguments, v0, a1, a2); |
| 256 } | 256 } |
| 257 | 257 |
| 258 if (FLAG_trace) { | 258 if (FLAG_trace) { |
| 259 __ CallRuntime(Runtime::kTraceEnter, 0); | 259 __ CallRuntime(Runtime::kTraceEnter, 0); |
| 260 } | 260 } |
| 261 | 261 |
| 262 // Visit the declarations and body unless there is an illegal | 262 // Visit the declarations and body unless there is an illegal |
| 263 // redeclaration. | 263 // redeclaration. |
| 264 if (scope()->HasIllegalRedeclaration()) { | 264 if (scope()->HasIllegalRedeclaration()) { |
| 265 Comment cmnt(masm_, "[ Declarations"); | 265 Comment cmnt(masm_, "[ Declarations"); |
| 266 scope()->VisitIllegalRedeclaration(this); | 266 scope()->VisitIllegalRedeclaration(this); |
| 267 | 267 |
| 268 } else { | 268 } else { |
| 269 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); | 269 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); |
| 270 { Comment cmnt(masm_, "[ Declarations"); | 270 { Comment cmnt(masm_, "[ Declarations"); |
| 271 // For named function expressions, declare the function name as a | 271 // For named function expressions, declare the function name as a |
| 272 // constant. | 272 // constant. |
| 273 if (scope()->is_function_scope() && scope()->function() != NULL) { | 273 if (scope()->is_function_scope() && scope()->function() != NULL) { |
| 274 EmitDeclaration(scope()->function(), Variable::CONST, NULL); | 274 int ignored = 0; |
| 275 EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored); |
| 275 } | 276 } |
| 276 VisitDeclarations(scope()->declarations()); | 277 VisitDeclarations(scope()->declarations()); |
| 277 } | 278 } |
| 278 | 279 |
| 279 { Comment cmnt(masm_, "[ Stack check"); | 280 { Comment cmnt(masm_, "[ Stack check"); |
| 280 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS); | 281 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS); |
| 281 Label ok; | 282 Label ok; |
| 282 __ LoadRoot(t0, Heap::kStackLimitRootIndex); | 283 __ LoadRoot(t0, Heap::kStackLimitRootIndex); |
| 283 __ Branch(&ok, hs, sp, Operand(t0)); | 284 __ Branch(&ok, hs, sp, Operand(t0)); |
| 284 StackCheckStub stub; | 285 StackCheckStub stub; |
| (...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 364 #ifdef DEBUG | 365 #ifdef DEBUG |
| 365 // Check that the size of the code used for returning is large enough | 366 // Check that the size of the code used for returning is large enough |
| 366 // for the debugger's requirements. | 367 // for the debugger's requirements. |
| 367 ASSERT(Assembler::kJSReturnSequenceInstructions <= | 368 ASSERT(Assembler::kJSReturnSequenceInstructions <= |
| 368 masm_->InstructionsGeneratedSince(&check_exit_codesize)); | 369 masm_->InstructionsGeneratedSince(&check_exit_codesize)); |
| 369 #endif | 370 #endif |
| 370 } | 371 } |
| 371 } | 372 } |
| 372 | 373 |
| 373 | 374 |
| 374 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { | 375 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { |
| 376 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
| 375 } | 377 } |
| 376 | 378 |
| 377 | 379 |
| 378 void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const { | 380 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { |
| 379 codegen()->Move(result_register(), slot); | 381 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
| 382 codegen()->GetVar(result_register(), var); |
| 380 } | 383 } |
| 381 | 384 |
| 382 | 385 |
| 383 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const { | 386 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { |
| 384 codegen()->Move(result_register(), slot); | 387 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
| 388 codegen()->GetVar(result_register(), var); |
| 385 __ push(result_register()); | 389 __ push(result_register()); |
| 386 } | 390 } |
| 387 | 391 |
| 388 | 392 |
| 389 void FullCodeGenerator::TestContext::Plug(Slot* slot) const { | 393 void FullCodeGenerator::TestContext::Plug(Variable* var) const { |
| 390 // For simplicity we always test the accumulator register. | 394 // For simplicity we always test the accumulator register. |
| 391 codegen()->Move(result_register(), slot); | 395 codegen()->GetVar(result_register(), var); |
| 392 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); | 396 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
| 393 codegen()->DoTest(this); | 397 codegen()->DoTest(this); |
| 394 } | 398 } |
| 395 | 399 |
| 396 | 400 |
| 397 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { | 401 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { |
| 398 } | 402 } |
| 399 | 403 |
| 400 | 404 |
| 401 void FullCodeGenerator::AccumulatorValueContext::Plug( | 405 void FullCodeGenerator::AccumulatorValueContext::Plug( |
| (...skipping 212 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 614 __ Branch(if_true, cc, lhs, rhs); | 618 __ Branch(if_true, cc, lhs, rhs); |
| 615 } else if (if_true == fall_through) { | 619 } else if (if_true == fall_through) { |
| 616 __ Branch(if_false, NegateCondition(cc), lhs, rhs); | 620 __ Branch(if_false, NegateCondition(cc), lhs, rhs); |
| 617 } else { | 621 } else { |
| 618 __ Branch(if_true, cc, lhs, rhs); | 622 __ Branch(if_true, cc, lhs, rhs); |
| 619 __ Branch(if_false); | 623 __ Branch(if_false); |
| 620 } | 624 } |
| 621 } | 625 } |
| 622 | 626 |
| 623 | 627 |
| 624 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) { | 628 MemOperand FullCodeGenerator::StackOperand(Variable* var) { |
| 625 switch (slot->type()) { | 629 ASSERT(var->IsStackAllocated()); |
| 626 case Slot::PARAMETER: | 630 // Offset is negative because higher indexes are at lower addresses. |
| 627 case Slot::LOCAL: | 631 int offset = -var->index() * kPointerSize; |
| 628 return MemOperand(fp, SlotOffset(slot)); | 632 // Adjust by a (parameter or local) base offset. |
| 629 case Slot::CONTEXT: { | 633 if (var->IsParameter()) { |
| 630 int context_chain_length = | 634 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; |
| 631 scope()->ContextChainLength(slot->var()->scope()); | 635 } else { |
| 632 __ LoadContext(scratch, context_chain_length); | 636 offset += JavaScriptFrameConstants::kLocal0Offset; |
| 633 return ContextOperand(scratch, slot->index()); | |
| 634 } | |
| 635 case Slot::LOOKUP: | |
| 636 case Slot::GLOBAL: | |
| 637 UNREACHABLE(); | |
| 638 } | 637 } |
| 639 UNREACHABLE(); | 638 return MemOperand(fp, offset); |
| 640 return MemOperand(v0, 0); | |
| 641 } | 639 } |
| 642 | 640 |
| 643 | 641 |
| 644 void FullCodeGenerator::Move(Register destination, Slot* source) { | 642 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { |
| 645 // Use destination as scratch. | 643 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
| 646 MemOperand slot_operand = EmitSlotSearch(source, destination); | 644 if (var->IsContextSlot()) { |
| 647 __ lw(destination, slot_operand); | 645 int context_chain_length = scope()->ContextChainLength(var->scope()); |
| 646 __ LoadContext(scratch, context_chain_length); |
| 647 return ContextOperand(scratch, var->index()); |
| 648 } else { |
| 649 return StackOperand(var); |
| 650 } |
| 648 } | 651 } |
| 649 | 652 |
| 650 | 653 |
| 654 void FullCodeGenerator::GetVar(Register dest, Variable* var) { |
| 655 // Use destination as scratch. |
| 656 MemOperand location = VarOperand(var, dest); |
| 657 __ lw(dest, location); |
| 658 } |
| 659 |
| 660 |
| 661 void FullCodeGenerator::SetVar(Variable* var, |
| 662 Register src, |
| 663 Register scratch0, |
| 664 Register scratch1) { |
| 665 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
| 666 ASSERT(!scratch0.is(src)); |
| 667 ASSERT(!scratch0.is(scratch1)); |
| 668 ASSERT(!scratch1.is(src)); |
| 669 MemOperand location = VarOperand(var, scratch0); |
| 670 __ sw(src, location); |
| 671 // Emit the write barrier code if the location is in the heap. |
| 672 if (var->IsContextSlot()) { |
| 673 __ RecordWrite(scratch0, |
| 674 Operand(Context::SlotOffset(var->index())), |
| 675 scratch1, |
| 676 src); |
| 677 } |
| 678 } |
| 679 |
| 680 |
| 651 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, | 681 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, |
| 652 bool should_normalize, | 682 bool should_normalize, |
| 653 Label* if_true, | 683 Label* if_true, |
| 654 Label* if_false) { | 684 Label* if_false) { |
| 655 // Only prepare for bailouts before splits if we're in a test | 685 // Only prepare for bailouts before splits if we're in a test |
| 656 // context. Otherwise, we let the Visit function deal with the | 686 // context. Otherwise, we let the Visit function deal with the |
| 657 // preparation to avoid preparing with the same AST id twice. | 687 // preparation to avoid preparing with the same AST id twice. |
| 658 if (!context()->IsTest() || !info_->IsOptimizable()) return; | 688 if (!context()->IsTest() || !info_->IsOptimizable()) return; |
| 659 | 689 |
| 660 Label skip; | 690 Label skip; |
| 661 if (should_normalize) __ Branch(&skip); | 691 if (should_normalize) __ Branch(&skip); |
| 662 | 692 |
| 663 ForwardBailoutStack* current = forward_bailout_stack_; | 693 ForwardBailoutStack* current = forward_bailout_stack_; |
| 664 while (current != NULL) { | 694 while (current != NULL) { |
| 665 PrepareForBailout(current->expr(), state); | 695 PrepareForBailout(current->expr(), state); |
| 666 current = current->parent(); | 696 current = current->parent(); |
| 667 } | 697 } |
| 668 | 698 |
| 669 if (should_normalize) { | 699 if (should_normalize) { |
| 670 __ LoadRoot(t0, Heap::kTrueValueRootIndex); | 700 __ LoadRoot(t0, Heap::kTrueValueRootIndex); |
| 671 Split(eq, a0, Operand(t0), if_true, if_false, NULL); | 701 Split(eq, a0, Operand(t0), if_true, if_false, NULL); |
| 672 __ bind(&skip); | 702 __ bind(&skip); |
| 673 } | 703 } |
| 674 } | 704 } |
| 675 | 705 |
| 676 | 706 |
| 677 void FullCodeGenerator::Move(Slot* dst, | |
| 678 Register src, | |
| 679 Register scratch1, | |
| 680 Register scratch2) { | |
| 681 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented. | |
| 682 ASSERT(!scratch1.is(src) && !scratch2.is(src)); | |
| 683 MemOperand location = EmitSlotSearch(dst, scratch1); | |
| 684 __ sw(src, location); | |
| 685 // Emit the write barrier code if the location is in the heap. | |
| 686 if (dst->type() == Slot::CONTEXT) { | |
| 687 __ RecordWrite(scratch1, | |
| 688 Operand(Context::SlotOffset(dst->index())), | |
| 689 scratch2, | |
| 690 src); | |
| 691 } | |
| 692 } | |
| 693 | |
| 694 | |
| 695 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, | 707 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, |
| 696 Variable::Mode mode, | 708 Variable::Mode mode, |
| 697 FunctionLiteral* function) { | 709 FunctionLiteral* function, |
| 698 Comment cmnt(masm_, "[ Declaration"); | 710 int* global_count) { |
| 711 // If it was not possible to allocate the variable at compile time, we |
| 712 // need to "declare" it at runtime to make sure it actually exists in the |
| 713 // local context. |
| 699 Variable* variable = proxy->var(); | 714 Variable* variable = proxy->var(); |
| 700 ASSERT(variable != NULL); // Must have been resolved. | 715 switch (variable->location()) { |
| 701 Slot* slot = variable->rewrite(); | 716 case Variable::UNALLOCATED: |
| 702 ASSERT(slot != NULL); | 717 ++(*global_count); |
| 703 switch (slot->type()) { | 718 break; |
| 704 case Slot::PARAMETER: | 719 |
| 705 case Slot::LOCAL: | 720 case Variable::PARAMETER: |
| 721 case Variable::LOCAL: |
| 706 if (function != NULL) { | 722 if (function != NULL) { |
| 723 Comment cmnt(masm_, "[ Declaration"); |
| 707 VisitForAccumulatorValue(function); | 724 VisitForAccumulatorValue(function); |
| 708 __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); | 725 __ sw(result_register(), StackOperand(variable)); |
| 709 } else if (mode == Variable::CONST || mode == Variable::LET) { | 726 } else if (mode == Variable::CONST || mode == Variable::LET) { |
| 727 Comment cmnt(masm_, "[ Declaration"); |
| 710 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | 728 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
| 711 __ sw(t0, MemOperand(fp, SlotOffset(slot))); | 729 __ sw(t0, StackOperand(variable)); |
| 712 } | 730 } |
| 713 break; | 731 break; |
| 714 | 732 |
| 715 case Slot::CONTEXT: | 733 case Variable::CONTEXT: |
| 716 // We bypass the general EmitSlotSearch because we know more about | |
| 717 // this specific context. | |
| 718 | |
| 719 // The variable in the decl always resides in the current function | 734 // The variable in the decl always resides in the current function |
| 720 // context. | 735 // context. |
| 721 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); | 736 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); |
| 722 if (FLAG_debug_code) { | 737 if (FLAG_debug_code) { |
| 723 // Check that we're not inside a with or catch context. | 738 // Check that we're not inside a with or catch context. |
| 724 __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); | 739 __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); |
| 725 __ LoadRoot(t0, Heap::kWithContextMapRootIndex); | 740 __ LoadRoot(t0, Heap::kWithContextMapRootIndex); |
| 726 __ Check(ne, "Declaration in with context.", | 741 __ Check(ne, "Declaration in with context.", |
| 727 a1, Operand(t0)); | 742 a1, Operand(t0)); |
| 728 __ LoadRoot(t0, Heap::kCatchContextMapRootIndex); | 743 __ LoadRoot(t0, Heap::kCatchContextMapRootIndex); |
| 729 __ Check(ne, "Declaration in catch context.", | 744 __ Check(ne, "Declaration in catch context.", |
| 730 a1, Operand(t0)); | 745 a1, Operand(t0)); |
| 731 } | 746 } |
| 732 if (function != NULL) { | 747 if (function != NULL) { |
| 748 Comment cmnt(masm_, "[ Declaration"); |
| 733 VisitForAccumulatorValue(function); | 749 VisitForAccumulatorValue(function); |
| 734 __ sw(result_register(), ContextOperand(cp, slot->index())); | 750 __ sw(result_register(), ContextOperand(cp, variable->index())); |
| 735 int offset = Context::SlotOffset(slot->index()); | 751 int offset = Context::SlotOffset(variable->index()); |
| 736 // We know that we have written a function, which is not a smi. | 752 // We know that we have written a function, which is not a smi. |
| 737 __ mov(a1, cp); | 753 __ mov(a1, cp); |
| 738 __ RecordWrite(a1, Operand(offset), a2, result_register()); | 754 __ RecordWrite(a1, Operand(offset), a2, result_register()); |
| 739 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 755 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
| 740 } else if (mode == Variable::CONST || mode == Variable::LET) { | 756 } else if (mode == Variable::CONST || mode == Variable::LET) { |
| 757 Comment cmnt(masm_, "[ Declaration"); |
| 741 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 758 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 742 __ sw(at, ContextOperand(cp, slot->index())); | 759 __ sw(at, ContextOperand(cp, variable->index())); |
| 743 // No write barrier since the_hole_value is in old space. | 760 // No write barrier since the_hole_value is in old space. |
| 744 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 761 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
| 745 } | 762 } |
| 746 break; | 763 break; |
| 747 | 764 |
| 748 case Slot::LOOKUP: { | 765 case Variable::LOOKUP: { |
| 766 Comment cmnt(masm_, "[ Declaration"); |
| 749 __ li(a2, Operand(variable->name())); | 767 __ li(a2, Operand(variable->name())); |
| 750 // Declaration nodes are always introduced in one of two modes. | 768 // Declaration nodes are always introduced in one of three modes. |
| 751 ASSERT(mode == Variable::VAR || | 769 ASSERT(mode == Variable::VAR || |
| 752 mode == Variable::CONST || | 770 mode == Variable::CONST || |
| 753 mode == Variable::LET); | 771 mode == Variable::LET); |
| 754 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE; | 772 PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE; |
| 755 __ li(a1, Operand(Smi::FromInt(attr))); | 773 __ li(a1, Operand(Smi::FromInt(attr))); |
| 756 // Push initial value, if any. | 774 // Push initial value, if any. |
| 757 // Note: For variables we must not push an initial value (such as | 775 // Note: For variables we must not push an initial value (such as |
| 758 // 'undefined') because we may have a (legal) redeclaration and we | 776 // 'undefined') because we may have a (legal) redeclaration and we |
| 759 // must not destroy the current value. | 777 // must not destroy the current value. |
| 760 if (function != NULL) { | 778 if (function != NULL) { |
| 761 __ Push(cp, a2, a1); | 779 __ Push(cp, a2, a1); |
| 762 // Push initial value for function declaration. | 780 // Push initial value for function declaration. |
| 763 VisitForStackValue(function); | 781 VisitForStackValue(function); |
| 764 } else if (mode == Variable::CONST || mode == Variable::LET) { | 782 } else if (mode == Variable::CONST || mode == Variable::LET) { |
| 765 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); | 783 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); |
| 766 __ Push(cp, a2, a1, a0); | 784 __ Push(cp, a2, a1, a0); |
| 767 } else { | 785 } else { |
| 768 ASSERT(Smi::FromInt(0) == 0); | 786 ASSERT(Smi::FromInt(0) == 0); |
| 769 // No initial value! | 787 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value. |
| 770 __ mov(a0, zero_reg); // Operand(Smi::FromInt(0))); | |
| 771 __ Push(cp, a2, a1, a0); | 788 __ Push(cp, a2, a1, a0); |
| 772 } | 789 } |
| 773 __ CallRuntime(Runtime::kDeclareContextSlot, 4); | 790 __ CallRuntime(Runtime::kDeclareContextSlot, 4); |
| 774 break; | 791 break; |
| 775 } | 792 } |
| 776 | |
| 777 case Slot::GLOBAL: | |
| 778 UNREACHABLE(); | |
| 779 } | 793 } |
| 780 } | 794 } |
| 781 | 795 |
| 782 | 796 |
| 783 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { | 797 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { } |
| 784 EmitDeclaration(decl->proxy(), decl->mode(), decl->fun()); | |
| 785 } | |
| 786 | 798 |
| 787 | 799 |
| 788 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 800 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
| 789 // Call the runtime to declare the globals. | 801 // Call the runtime to declare the globals. |
| 790 // The context is the first argument. | 802 // The context is the first argument. |
| 791 __ li(a1, Operand(pairs)); | 803 __ li(a1, Operand(pairs)); |
| 792 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); | 804 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); |
| 793 __ Push(cp, a1, a0); | 805 __ Push(cp, a1, a0); |
| 794 __ CallRuntime(Runtime::kDeclareGlobals, 3); | 806 __ CallRuntime(Runtime::kDeclareGlobals, 3); |
| 795 // Return value is ignored. | 807 // Return value is ignored. |
| (...skipping 292 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1088 context()->Plug(v0); | 1100 context()->Plug(v0); |
| 1089 } | 1101 } |
| 1090 | 1102 |
| 1091 | 1103 |
| 1092 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { | 1104 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { |
| 1093 Comment cmnt(masm_, "[ VariableProxy"); | 1105 Comment cmnt(masm_, "[ VariableProxy"); |
| 1094 EmitVariableLoad(expr); | 1106 EmitVariableLoad(expr); |
| 1095 } | 1107 } |
| 1096 | 1108 |
| 1097 | 1109 |
| 1098 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( | 1110 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, |
| 1099 Slot* slot, | 1111 TypeofState typeof_state, |
| 1100 TypeofState typeof_state, | 1112 Label* slow) { |
| 1101 Label* slow) { | |
| 1102 Register current = cp; | 1113 Register current = cp; |
| 1103 Register next = a1; | 1114 Register next = a1; |
| 1104 Register temp = a2; | 1115 Register temp = a2; |
| 1105 | 1116 |
| 1106 Scope* s = scope(); | 1117 Scope* s = scope(); |
| 1107 while (s != NULL) { | 1118 while (s != NULL) { |
| 1108 if (s->num_heap_slots() > 0) { | 1119 if (s->num_heap_slots() > 0) { |
| 1109 if (s->calls_eval()) { | 1120 if (s->calls_eval()) { |
| 1110 // Check that extension is NULL. | 1121 // Check that extension is NULL. |
| 1111 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX)); | 1122 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX)); |
| (...skipping 23 matching lines...) Expand all Loading... |
| 1135 // Check that extension is NULL. | 1146 // Check that extension is NULL. |
| 1136 __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX)); | 1147 __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX)); |
| 1137 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1148 __ Branch(slow, ne, temp, Operand(zero_reg)); |
| 1138 // Load next context in chain. | 1149 // Load next context in chain. |
| 1139 __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX)); | 1150 __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX)); |
| 1140 __ Branch(&loop); | 1151 __ Branch(&loop); |
| 1141 __ bind(&fast); | 1152 __ bind(&fast); |
| 1142 } | 1153 } |
| 1143 | 1154 |
| 1144 __ lw(a0, GlobalObjectOperand()); | 1155 __ lw(a0, GlobalObjectOperand()); |
| 1145 __ li(a2, Operand(slot->var()->name())); | 1156 __ li(a2, Operand(var->name())); |
| 1146 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) | 1157 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) |
| 1147 ? RelocInfo::CODE_TARGET | 1158 ? RelocInfo::CODE_TARGET |
| 1148 : RelocInfo::CODE_TARGET_CONTEXT; | 1159 : RelocInfo::CODE_TARGET_CONTEXT; |
| 1149 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 1160 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 1150 __ Call(ic, mode); | 1161 __ Call(ic, mode); |
| 1151 } | 1162 } |
| 1152 | 1163 |
| 1153 | 1164 |
| 1154 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( | 1165 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, |
| 1155 Slot* slot, | 1166 Label* slow) { |
| 1156 Label* slow) { | 1167 ASSERT(var->IsContextSlot()); |
| 1157 ASSERT(slot->type() == Slot::CONTEXT); | |
| 1158 Register context = cp; | 1168 Register context = cp; |
| 1159 Register next = a3; | 1169 Register next = a3; |
| 1160 Register temp = t0; | 1170 Register temp = t0; |
| 1161 | 1171 |
| 1162 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { | 1172 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { |
| 1163 if (s->num_heap_slots() > 0) { | 1173 if (s->num_heap_slots() > 0) { |
| 1164 if (s->calls_eval()) { | 1174 if (s->calls_eval()) { |
| 1165 // Check that extension is NULL. | 1175 // Check that extension is NULL. |
| 1166 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); | 1176 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
| 1167 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1177 __ Branch(slow, ne, temp, Operand(zero_reg)); |
| 1168 } | 1178 } |
| 1169 __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX)); | 1179 __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX)); |
| 1170 // Walk the rest of the chain without clobbering cp. | 1180 // Walk the rest of the chain without clobbering cp. |
| 1171 context = next; | 1181 context = next; |
| 1172 } | 1182 } |
| 1173 } | 1183 } |
| 1174 // Check that last extension is NULL. | 1184 // Check that last extension is NULL. |
| 1175 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); | 1185 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
| 1176 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1186 __ Branch(slow, ne, temp, Operand(zero_reg)); |
| 1177 | 1187 |
| 1178 // This function is used only for loads, not stores, so it's safe to | 1188 // This function is used only for loads, not stores, so it's safe to |
| 1179 // return a cp-based operand (the write barrier cannot be allowed to | 1189 // return a cp-based operand (the write barrier cannot be allowed to |
| 1180 // destroy the cp register). | 1190 // destroy the cp register). |
| 1181 return ContextOperand(context, slot->index()); | 1191 return ContextOperand(context, var->index()); |
| 1182 } | 1192 } |
| 1183 | 1193 |
| 1184 | 1194 |
| 1185 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( | 1195 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, |
| 1186 Slot* slot, | 1196 TypeofState typeof_state, |
| 1187 TypeofState typeof_state, | 1197 Label* slow, |
| 1188 Label* slow, | 1198 Label* done) { |
| 1189 Label* done) { | |
| 1190 // Generate fast-case code for variables that might be shadowed by | 1199 // Generate fast-case code for variables that might be shadowed by |
| 1191 // eval-introduced variables. Eval is used a lot without | 1200 // eval-introduced variables. Eval is used a lot without |
| 1192 // introducing variables. In those cases, we do not want to | 1201 // introducing variables. In those cases, we do not want to |
| 1193 // perform a runtime call for all variables in the scope | 1202 // perform a runtime call for all variables in the scope |
| 1194 // containing the eval. | 1203 // containing the eval. |
| 1195 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { | 1204 if (var->mode() == Variable::DYNAMIC_GLOBAL) { |
| 1196 EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow); | 1205 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); |
| 1197 __ Branch(done); | 1206 __ Branch(done); |
| 1198 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { | 1207 } else if (var->mode() == Variable::DYNAMIC_LOCAL) { |
| 1199 Slot* potential_slot = slot->var()->local_if_not_shadowed()->rewrite(); | 1208 Variable* local = var->local_if_not_shadowed(); |
| 1200 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite(); | 1209 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow)); |
| 1201 if (potential_slot != NULL) { | 1210 if (local->mode() == Variable::CONST) { |
| 1202 // Generate fast case for locals that rewrite to slots. | 1211 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 1203 __ lw(v0, ContextSlotOperandCheckExtensions(potential_slot, slow)); | 1212 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. |
| 1204 if (potential_slot->var()->mode() == Variable::CONST) { | 1213 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); |
| 1205 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 1214 __ movz(v0, a0, at); // Conditional move: return Undefined if TheHole. |
| 1206 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. | |
| 1207 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); | |
| 1208 __ movz(v0, a0, at); // Conditional move. | |
| 1209 } | |
| 1210 __ Branch(done); | |
| 1211 } else if (rewrite != NULL) { | |
| 1212 // Generate fast case for calls of an argument function. | |
| 1213 Property* property = rewrite->AsProperty(); | |
| 1214 if (property != NULL) { | |
| 1215 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); | |
| 1216 Literal* key_literal = property->key()->AsLiteral(); | |
| 1217 if (obj_proxy != NULL && | |
| 1218 key_literal != NULL && | |
| 1219 obj_proxy->IsArguments() && | |
| 1220 key_literal->handle()->IsSmi()) { | |
| 1221 // Load arguments object if there are no eval-introduced | |
| 1222 // variables. Then load the argument from the arguments | |
| 1223 // object using keyed load. | |
| 1224 __ lw(a1, | |
| 1225 ContextSlotOperandCheckExtensions(obj_proxy->var()->rewrite(), | |
| 1226 slow)); | |
| 1227 __ li(a0, Operand(key_literal->handle())); | |
| 1228 Handle<Code> ic = | |
| 1229 isolate()->builtins()->KeyedLoadIC_Initialize(); | |
| 1230 __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property)); | |
| 1231 __ Branch(done); | |
| 1232 } | |
| 1233 } | |
| 1234 } | 1215 } |
| 1216 __ Branch(done); |
| 1235 } | 1217 } |
| 1236 } | 1218 } |
| 1237 | 1219 |
| 1238 | 1220 |
| 1239 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { | 1221 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { |
| 1240 // Record position before possible IC call. | 1222 // Record position before possible IC call. |
| 1241 SetSourcePosition(proxy->position()); | 1223 SetSourcePosition(proxy->position()); |
| 1242 Variable* var = proxy->var(); | 1224 Variable* var = proxy->var(); |
| 1243 | 1225 |
| 1244 // Three cases: non-this global variables, lookup slots, and all other | 1226 // Three cases: global variables, lookup variables, and all other types of |
| 1245 // types of slots. | 1227 // variables. |
| 1246 Slot* slot = var->rewrite(); | 1228 switch (var->location()) { |
| 1247 ASSERT((var->is_global() && !var->is_this()) == (slot == NULL)); | 1229 case Variable::UNALLOCATED: { |
| 1230 Comment cmnt(masm_, "Global variable"); |
| 1231 // Use inline caching. Variable name is passed in a2 and the global |
| 1232 // object (receiver) in a0. |
| 1233 __ lw(a0, GlobalObjectOperand()); |
| 1234 __ li(a2, Operand(var->name())); |
| 1235 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 1236 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); |
| 1237 context()->Plug(v0); |
| 1238 break; |
| 1239 } |
| 1248 | 1240 |
| 1249 if (slot == NULL) { | 1241 case Variable::PARAMETER: |
| 1250 Comment cmnt(masm_, "Global variable"); | 1242 case Variable::LOCAL: |
| 1251 // Use inline caching. Variable name is passed in a2 and the global | 1243 case Variable::CONTEXT: { |
| 1252 // object (receiver) in a0. | 1244 Comment cmnt(masm_, var->IsContextSlot() |
| 1253 __ lw(a0, GlobalObjectOperand()); | 1245 ? "Context variable" |
| 1254 __ li(a2, Operand(var->name())); | 1246 : "Stack variable"); |
| 1255 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 1247 if (var->mode() != Variable::LET && var->mode() != Variable::CONST) { |
| 1256 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); | 1248 context()->Plug(var); |
| 1257 context()->Plug(v0); | 1249 } else { |
| 1250 // Let and const need a read barrier. |
| 1251 GetVar(v0, var); |
| 1252 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 1253 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. |
| 1254 if (var->mode() == Variable::LET) { |
| 1255 Label done; |
| 1256 __ Branch(&done, ne, at, Operand(zero_reg)); |
| 1257 __ li(a0, Operand(var->name())); |
| 1258 __ push(a0); |
| 1259 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
| 1260 __ bind(&done); |
| 1261 } else { |
| 1262 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); |
| 1263 __ movz(v0, a0, at); // Conditional move: Undefined if TheHole. |
| 1264 } |
| 1265 context()->Plug(v0); |
| 1266 } |
| 1267 break; |
| 1268 } |
| 1258 | 1269 |
| 1259 } else if (slot->type() == Slot::LOOKUP) { | 1270 case Variable::LOOKUP: { |
| 1260 Label done, slow; | 1271 Label done, slow; |
| 1261 | 1272 // Generate code for loading from variables potentially shadowed |
| 1262 // Generate code for loading from variables potentially shadowed | 1273 // by eval-introduced variables. |
| 1263 // by eval-introduced variables. | 1274 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done); |
| 1264 EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done); | 1275 __ bind(&slow); |
| 1265 | 1276 Comment cmnt(masm_, "Lookup variable"); |
| 1266 __ bind(&slow); | 1277 __ li(a1, Operand(var->name())); |
| 1267 Comment cmnt(masm_, "Lookup slot"); | 1278 __ Push(cp, a1); // Context and name. |
| 1268 __ li(a1, Operand(var->name())); | 1279 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
| 1269 __ Push(cp, a1); // Context and name. | |
| 1270 __ CallRuntime(Runtime::kLoadContextSlot, 2); | |
| 1271 __ bind(&done); | |
| 1272 | |
| 1273 context()->Plug(v0); | |
| 1274 | |
| 1275 } else { | |
| 1276 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT) | |
| 1277 ? "Context slot" | |
| 1278 : "Stack slot"); | |
| 1279 if (var->mode() == Variable::CONST) { | |
| 1280 // Constants may be the hole value if they have not been initialized. | |
| 1281 // Unhole them. | |
| 1282 MemOperand slot_operand = EmitSlotSearch(slot, a0); | |
| 1283 __ lw(v0, slot_operand); | |
| 1284 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | |
| 1285 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. | |
| 1286 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); | |
| 1287 __ movz(v0, a0, at); // Conditional move. | |
| 1288 context()->Plug(v0); | |
| 1289 } else if (var->mode() == Variable::LET) { | |
| 1290 // Let bindings may be the hole value if they have not been initialized. | |
| 1291 // Throw a type error in this case. | |
| 1292 Label done; | |
| 1293 MemOperand slot_operand = EmitSlotSearch(slot, a0); | |
| 1294 __ lw(v0, slot_operand); | |
| 1295 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex); | |
| 1296 __ Branch(&done, ne, v0, Operand(a1)); | |
| 1297 __ li(v0, Operand(var->name())); | |
| 1298 __ push(v0); | |
| 1299 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
| 1300 __ bind(&done); | 1280 __ bind(&done); |
| 1301 context()->Plug(v0); | 1281 context()->Plug(v0); |
| 1302 } else { | |
| 1303 context()->Plug(slot); | |
| 1304 } | 1282 } |
| 1305 } | 1283 } |
| 1306 } | 1284 } |
| 1307 | 1285 |
| 1308 | 1286 |
| 1309 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { | 1287 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { |
| 1310 Comment cmnt(masm_, "[ RegExpLiteral"); | 1288 Comment cmnt(masm_, "[ RegExpLiteral"); |
| 1311 Label materialized; | 1289 Label materialized; |
| 1312 // Registers will be used as follows: | 1290 // Registers will be used as follows: |
| 1313 // t1 = materialized value (RegExp literal) | 1291 // t1 = materialized value (RegExp literal) |
| (...skipping 518 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1832 break; | 1810 break; |
| 1833 } | 1811 } |
| 1834 } | 1812 } |
| 1835 PrepareForBailoutForId(bailout_ast_id, TOS_REG); | 1813 PrepareForBailoutForId(bailout_ast_id, TOS_REG); |
| 1836 context()->Plug(v0); | 1814 context()->Plug(v0); |
| 1837 } | 1815 } |
| 1838 | 1816 |
| 1839 | 1817 |
| 1840 void FullCodeGenerator::EmitVariableAssignment(Variable* var, | 1818 void FullCodeGenerator::EmitVariableAssignment(Variable* var, |
| 1841 Token::Value op) { | 1819 Token::Value op) { |
| 1842 ASSERT(var != NULL); | 1820 if (var->IsUnallocated()) { |
| 1843 ASSERT(var->is_global() || var->rewrite() != NULL); | 1821 // Global var, const, or let. |
| 1844 | |
| 1845 if (var->is_global()) { | |
| 1846 ASSERT(!var->is_this()); | |
| 1847 // Assignment to a global variable. Use inline caching for the | |
| 1848 // assignment. Right-hand-side value is passed in a0, variable name in | |
| 1849 // a2, and the global object in a1. | |
| 1850 __ mov(a0, result_register()); | 1822 __ mov(a0, result_register()); |
| 1851 __ li(a2, Operand(var->name())); | 1823 __ li(a2, Operand(var->name())); |
| 1852 __ lw(a1, GlobalObjectOperand()); | 1824 __ lw(a1, GlobalObjectOperand()); |
| 1853 Handle<Code> ic = is_strict_mode() | 1825 Handle<Code> ic = is_strict_mode() |
| 1854 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 1826 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 1855 : isolate()->builtins()->StoreIC_Initialize(); | 1827 : isolate()->builtins()->StoreIC_Initialize(); |
| 1856 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); | 1828 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); |
| 1857 | 1829 |
| 1858 } else if (op == Token::INIT_CONST) { | 1830 } else if (op == Token::INIT_CONST) { |
| 1859 // Like var declarations, const declarations are hoisted to function | 1831 // Const initializers need a write barrier. |
| 1860 // scope. However, unlike var initializers, const initializers are able | 1832 ASSERT(!var->IsParameter()); // No const parameters. |
| 1861 // to drill a hole to that function context, even from inside a 'with' | 1833 if (var->IsStackLocal()) { |
| 1862 // context. We thus bypass the normal static scope lookup. | 1834 Label skip; |
| 1863 Slot* slot = var->rewrite(); | 1835 __ lw(a1, StackOperand(var)); |
| 1864 Label skip; | 1836 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
| 1865 switch (slot->type()) { | 1837 __ Branch(&skip, ne, a1, Operand(t0)); |
| 1866 case Slot::PARAMETER: | 1838 __ sw(result_register(), StackOperand(var)); |
| 1867 // No const parameters. | 1839 __ bind(&skip); |
| 1868 UNREACHABLE(); | 1840 } else { |
| 1869 break; | 1841 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); |
| 1870 case Slot::LOCAL: | 1842 // Like var declarations, const declarations are hoisted to function |
| 1871 // Detect const reinitialization by checking for the hole value. | 1843 // scope. However, unlike var initializers, const initializers are |
| 1872 __ lw(a1, MemOperand(fp, SlotOffset(slot))); | 1844 // able to drill a hole to that function context, even from inside a |
| 1873 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | 1845 // 'with' context. We thus bypass the normal static scope lookup for |
| 1874 __ Branch(&skip, ne, a1, Operand(t0)); | 1846 // var->IsContextSlot(). |
| 1875 __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); | 1847 __ push(v0); |
| 1876 break; | 1848 __ li(a0, Operand(var->name())); |
| 1877 case Slot::CONTEXT: | 1849 __ Push(cp, a0); // Context and name. |
| 1878 case Slot::LOOKUP: | 1850 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
| 1879 __ push(result_register()); | |
| 1880 __ li(a0, Operand(slot->var()->name())); | |
| 1881 __ Push(cp, a0); // Context and name. | |
| 1882 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); | |
| 1883 break; | |
| 1884 case Slot::GLOBAL: | |
| 1885 UNREACHABLE(); | |
| 1886 } | 1851 } |
| 1887 __ bind(&skip); | 1852 |
| 1888 } else if (var->mode() == Variable::LET && op != Token::INIT_LET) { | 1853 } else if (var->mode() == Variable::LET && op != Token::INIT_LET) { |
| 1889 // Perform the assignment for non-const variables. Const assignments | 1854 // Non-initializing assignment to let variable needs a write barrier. |
| 1890 // are simply skipped. | 1855 if (var->IsLookupSlot()) { |
| 1891 Slot* slot = var->AsSlot(); | 1856 __ push(v0); // Value. |
| 1892 switch (slot->type()) { | 1857 __ li(a1, Operand(var->name())); |
| 1893 case Slot::PARAMETER: | 1858 __ li(a0, Operand(Smi::FromInt(strict_mode_flag()))); |
| 1894 case Slot::LOCAL: { | 1859 __ Push(cp, a1, a0); // Context, name, strict mode. |
| 1895 Label assign; | 1860 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
| 1896 // Check for an initialized let binding. | 1861 } else { |
| 1897 __ lw(a1, MemOperand(fp, SlotOffset(slot))); | 1862 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
| 1898 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | 1863 Label assign; |
| 1899 __ Branch(&assign, ne, a1, Operand(t0)); | 1864 MemOperand location = VarOperand(var, a1); |
| 1900 __ li(a1, Operand(var->name())); | 1865 __ lw(a3, location); |
| 1901 __ push(a1); | 1866 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
| 1902 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1867 __ Branch(&assign, ne, a3, Operand(t0)); |
| 1903 // Perform the assignment. | 1868 __ li(a3, Operand(var->name())); |
| 1904 __ bind(&assign); | 1869 __ push(a3); |
| 1905 __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); | 1870 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
| 1906 break; | 1871 // Perform the assignment. |
| 1907 } | 1872 __ bind(&assign); |
| 1908 case Slot::CONTEXT: { | 1873 __ sw(result_register(), location); |
| 1909 // Let variables may be the hole value if they have not been | 1874 if (var->IsContextSlot()) { |
| 1910 // initialized. Throw a type error in this case. | |
| 1911 Label assign; | |
| 1912 MemOperand target = EmitSlotSearch(slot, a1); | |
| 1913 // Check for an initialized let binding. | |
| 1914 __ lw(a3, target); | |
| 1915 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | |
| 1916 __ Branch(&assign, ne, a3, Operand(t0)); | |
| 1917 __ li(a3, Operand(var->name())); | |
| 1918 __ push(a3); | |
| 1919 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
| 1920 // Perform the assignment. | |
| 1921 __ bind(&assign); | |
| 1922 __ sw(result_register(), target); | |
| 1923 // RecordWrite may destroy all its register arguments. | 1875 // RecordWrite may destroy all its register arguments. |
| 1924 __ mov(a3, result_register()); | 1876 __ mov(a3, result_register()); |
| 1925 int offset = Context::SlotOffset(slot->index()); | 1877 int offset = Context::SlotOffset(var->index()); |
| 1926 __ RecordWrite(a1, Operand(offset), a2, a3); | 1878 __ RecordWrite(a1, Operand(offset), a2, a3); |
| 1927 break; | |
| 1928 } | 1879 } |
| 1929 case Slot::LOOKUP: | |
| 1930 // Call the runtime for the assignment. | |
| 1931 __ push(v0); // Value. | |
| 1932 __ li(a1, Operand(slot->var()->name())); | |
| 1933 __ li(a0, Operand(Smi::FromInt(strict_mode_flag()))); | |
| 1934 __ Push(cp, a1, a0); // Context, name, strict mode. | |
| 1935 __ CallRuntime(Runtime::kStoreContextSlot, 4); | |
| 1936 break; | |
| 1937 } | 1880 } |
| 1938 | 1881 |
| 1939 } else if (var->mode() != Variable::CONST) { | 1882 } else if (var->mode() != Variable::CONST) { |
| 1940 // Perform the assignment for non-const variables. Const assignments | 1883 // Assignment to var or initializing assignment to let. |
| 1941 // are simply skipped. | 1884 if (var->IsStackAllocated() || var->IsContextSlot()) { |
| 1942 Slot* slot = var->rewrite(); | 1885 MemOperand location = VarOperand(var, a1); |
| 1943 switch (slot->type()) { | 1886 if (FLAG_debug_code && op == Token::INIT_LET) { |
| 1944 case Slot::PARAMETER: | 1887 // Check for an uninitialized let binding. |
| 1945 case Slot::LOCAL: | 1888 __ lw(a2, location); |
| 1946 // Perform the assignment. | 1889 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
| 1947 __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); | 1890 __ Check(eq, "Let binding re-initialization.", a2, Operand(t0)); |
| 1948 break; | |
| 1949 | |
| 1950 case Slot::CONTEXT: { | |
| 1951 MemOperand target = EmitSlotSearch(slot, a1); | |
| 1952 // Perform the assignment and issue the write barrier. | |
| 1953 __ sw(result_register(), target); | |
| 1954 // RecordWrite may destroy all its register arguments. | |
| 1955 __ mov(a3, result_register()); | |
| 1956 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; | |
| 1957 __ RecordWrite(a1, Operand(offset), a2, a3); | |
| 1958 break; | |
| 1959 } | 1891 } |
| 1960 | 1892 // Perform the assignment. |
| 1961 case Slot::LOOKUP: | 1893 __ sw(v0, location); |
| 1962 // Call the runtime for the assignment. | 1894 if (var->IsContextSlot()) { |
| 1963 __ push(v0); // Value. | 1895 __ mov(a3, v0); |
| 1964 __ li(a1, Operand(slot->var()->name())); | 1896 __ RecordWrite(a1, Operand(Context::SlotOffset(var->index())), a2, a3); |
| 1965 __ li(a0, Operand(Smi::FromInt(strict_mode_flag()))); | 1897 } |
| 1966 __ Push(cp, a1, a0); // Context, name, strict mode. | 1898 } else { |
| 1967 __ CallRuntime(Runtime::kStoreContextSlot, 4); | 1899 ASSERT(var->IsLookupSlot()); |
| 1968 break; | 1900 __ push(v0); // Value. |
| 1969 | 1901 __ li(a1, Operand(var->name())); |
| 1970 case Slot::GLOBAL: | 1902 __ li(a0, Operand(Smi::FromInt(strict_mode_flag()))); |
| 1971 UNREACHABLE(); | 1903 __ Push(cp, a1, a0); // Context, name, strict mode. |
| 1904 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
| 1972 } | 1905 } |
| 1973 } | 1906 } |
| 1907 // Non-initializing assignments to consts are ignored. |
| 1974 } | 1908 } |
| 1975 | 1909 |
| 1976 | 1910 |
| 1977 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | 1911 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
| 1978 // Assignment to a property, using a named store IC. | 1912 // Assignment to a property, using a named store IC. |
| 1979 Property* prop = expr->target()->AsProperty(); | 1913 Property* prop = expr->target()->AsProperty(); |
| 1980 ASSERT(prop != NULL); | 1914 ASSERT(prop != NULL); |
| 1981 ASSERT(prop->key()->AsLiteral() != NULL); | 1915 ASSERT(prop->key()->AsLiteral() != NULL); |
| 1982 | 1916 |
| 1983 // If the assignment starts a block of assignments to the same object, | 1917 // If the assignment starts a block of assignments to the same object, |
| (...skipping 220 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2204 | 2138 |
| 2205 | 2139 |
| 2206 void FullCodeGenerator::VisitCall(Call* expr) { | 2140 void FullCodeGenerator::VisitCall(Call* expr) { |
| 2207 #ifdef DEBUG | 2141 #ifdef DEBUG |
| 2208 // We want to verify that RecordJSReturnSite gets called on all paths | 2142 // We want to verify that RecordJSReturnSite gets called on all paths |
| 2209 // through this function. Avoid early returns. | 2143 // through this function. Avoid early returns. |
| 2210 expr->return_is_recorded_ = false; | 2144 expr->return_is_recorded_ = false; |
| 2211 #endif | 2145 #endif |
| 2212 | 2146 |
| 2213 Comment cmnt(masm_, "[ Call"); | 2147 Comment cmnt(masm_, "[ Call"); |
| 2214 Expression* fun = expr->expression(); | 2148 Expression* callee = expr->expression(); |
| 2215 Variable* var = fun->AsVariableProxy()->AsVariable(); | 2149 VariableProxy* proxy = callee->AsVariableProxy(); |
| 2150 Property* property = callee->AsProperty(); |
| 2216 | 2151 |
| 2217 if (var != NULL && var->is_possibly_eval()) { | 2152 if (proxy != NULL && proxy->var()->is_possibly_eval()) { |
| 2218 // In a call to eval, we first call %ResolvePossiblyDirectEval to | 2153 // In a call to eval, we first call %ResolvePossiblyDirectEval to |
| 2219 // resolve the function we need to call and the receiver of the | 2154 // resolve the function we need to call and the receiver of the |
| 2220 // call. Then we call the resolved function using the given | 2155 // call. Then we call the resolved function using the given |
| 2221 // arguments. | 2156 // arguments. |
| 2222 ZoneList<Expression*>* args = expr->arguments(); | 2157 ZoneList<Expression*>* args = expr->arguments(); |
| 2223 int arg_count = args->length(); | 2158 int arg_count = args->length(); |
| 2224 | 2159 |
| 2225 { PreservePositionScope pos_scope(masm()->positions_recorder()); | 2160 { PreservePositionScope pos_scope(masm()->positions_recorder()); |
| 2226 VisitForStackValue(fun); | 2161 VisitForStackValue(callee); |
| 2227 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | 2162 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
| 2228 __ push(a2); // Reserved receiver slot. | 2163 __ push(a2); // Reserved receiver slot. |
| 2229 | 2164 |
| 2230 // Push the arguments. | 2165 // Push the arguments. |
| 2231 for (int i = 0; i < arg_count; i++) { | 2166 for (int i = 0; i < arg_count; i++) { |
| 2232 VisitForStackValue(args->at(i)); | 2167 VisitForStackValue(args->at(i)); |
| 2233 } | 2168 } |
| 2169 |
| 2234 // If we know that eval can only be shadowed by eval-introduced | 2170 // If we know that eval can only be shadowed by eval-introduced |
| 2235 // variables we attempt to load the global eval function directly | 2171 // variables we attempt to load the global eval function directly |
| 2236 // in generated code. If we succeed, there is no need to perform a | 2172 // in generated code. If we succeed, there is no need to perform a |
| 2237 // context lookup in the runtime system. | 2173 // context lookup in the runtime system. |
| 2238 Label done; | 2174 Label done; |
| 2239 if (var->rewrite() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) { | 2175 Variable* var = proxy->var(); |
| 2176 if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) { |
| 2240 Label slow; | 2177 Label slow; |
| 2241 EmitLoadGlobalSlotCheckExtensions(var->rewrite(), | 2178 EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow); |
| 2242 NOT_INSIDE_TYPEOF, | |
| 2243 &slow); | |
| 2244 // Push the function and resolve eval. | 2179 // Push the function and resolve eval. |
| 2245 __ push(v0); | 2180 __ push(v0); |
| 2246 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); | 2181 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); |
| 2247 __ jmp(&done); | 2182 __ jmp(&done); |
| 2248 __ bind(&slow); | 2183 __ bind(&slow); |
| 2249 } | 2184 } |
| 2250 | 2185 |
| 2251 // Push copy of the function (found below the arguments) and | 2186 // Push a copy of the function (found below the arguments) and |
| 2252 // resolve eval. | 2187 // resolve eval. |
| 2253 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 2188 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 2254 __ push(a1); | 2189 __ push(a1); |
| 2255 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); | 2190 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); |
| 2256 if (done.is_linked()) { | 2191 __ bind(&done); |
| 2257 __ bind(&done); | |
| 2258 } | |
| 2259 | 2192 |
| 2260 // The runtime call returns a pair of values in v0 (function) and | 2193 // The runtime call returns a pair of values in v0 (function) and |
| 2261 // v1 (receiver). Touch up the stack with the right values. | 2194 // v1 (receiver). Touch up the stack with the right values. |
| 2262 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 2195 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 2263 __ sw(v1, MemOperand(sp, arg_count * kPointerSize)); | 2196 __ sw(v1, MemOperand(sp, arg_count * kPointerSize)); |
| 2264 } | 2197 } |
| 2265 // Record source position for debugger. | 2198 // Record source position for debugger. |
| 2266 SetSourcePosition(expr->position()); | 2199 SetSourcePosition(expr->position()); |
| 2267 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; | 2200 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
| 2268 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT); | 2201 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT); |
| 2269 __ CallStub(&stub); | 2202 __ CallStub(&stub); |
| 2270 RecordJSReturnSite(expr); | 2203 RecordJSReturnSite(expr); |
| 2271 // Restore context register. | 2204 // Restore context register. |
| 2272 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2205 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2273 context()->DropAndPlug(1, v0); | 2206 context()->DropAndPlug(1, v0); |
| 2274 } else if (var != NULL && !var->is_this() && var->is_global()) { | 2207 } else if (proxy != NULL && proxy->var()->IsUnallocated()) { |
| 2275 // Push global object as receiver for the call IC. | 2208 // Push global object as receiver for the call IC. |
| 2276 __ lw(a0, GlobalObjectOperand()); | 2209 __ lw(a0, GlobalObjectOperand()); |
| 2277 __ push(a0); | 2210 __ push(a0); |
| 2278 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); | 2211 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT); |
| 2279 } else if (var != NULL && var->rewrite() != NULL && | 2212 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { |
| 2280 var->rewrite()->type() == Slot::LOOKUP) { | |
| 2281 // Call to a lookup slot (dynamically introduced variable). | 2213 // Call to a lookup slot (dynamically introduced variable). |
| 2282 Label slow, done; | 2214 Label slow, done; |
| 2283 | 2215 |
| 2284 { PreservePositionScope scope(masm()->positions_recorder()); | 2216 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2285 // Generate code for loading from variables potentially shadowed | 2217 // Generate code for loading from variables potentially shadowed |
| 2286 // by eval-introduced variables. | 2218 // by eval-introduced variables. |
| 2287 EmitDynamicLoadFromSlotFastCase(var->rewrite(), | 2219 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); |
| 2288 NOT_INSIDE_TYPEOF, | |
| 2289 &slow, | |
| 2290 &done); | |
| 2291 } | 2220 } |
| 2292 | 2221 |
| 2293 __ bind(&slow); | 2222 __ bind(&slow); |
| 2294 // Call the runtime to find the function to call (returned in v0) | 2223 // Call the runtime to find the function to call (returned in v0) |
| 2295 // and the object holding it (returned in v1). | 2224 // and the object holding it (returned in v1). |
| 2296 __ push(context_register()); | 2225 __ push(context_register()); |
| 2297 __ li(a2, Operand(var->name())); | 2226 __ li(a2, Operand(proxy->name())); |
| 2298 __ push(a2); | 2227 __ push(a2); |
| 2299 __ CallRuntime(Runtime::kLoadContextSlot, 2); | 2228 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
| 2300 __ Push(v0, v1); // Function, receiver. | 2229 __ Push(v0, v1); // Function, receiver. |
| 2301 | 2230 |
| 2302 // If fast case code has been generated, emit code to push the | 2231 // If fast case code has been generated, emit code to push the |
| 2303 // function and receiver and have the slow path jump around this | 2232 // function and receiver and have the slow path jump around this |
| 2304 // code. | 2233 // code. |
| 2305 if (done.is_linked()) { | 2234 if (done.is_linked()) { |
| 2306 Label call; | 2235 Label call; |
| 2307 __ Branch(&call); | 2236 __ Branch(&call); |
| 2308 __ bind(&done); | 2237 __ bind(&done); |
| 2309 // Push function. | 2238 // Push function. |
| 2310 __ push(v0); | 2239 __ push(v0); |
| 2311 // The receiver is implicitly the global receiver. Indicate this | 2240 // The receiver is implicitly the global receiver. Indicate this |
| 2312 // by passing the hole to the call function stub. | 2241 // by passing the hole to the call function stub. |
| 2313 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex); | 2242 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex); |
| 2314 __ push(a1); | 2243 __ push(a1); |
| 2315 __ bind(&call); | 2244 __ bind(&call); |
| 2316 } | 2245 } |
| 2317 | 2246 |
| 2318 // The receiver is either the global receiver or an object found | 2247 // The receiver is either the global receiver or an object found |
| 2319 // by LoadContextSlot. That object could be the hole if the | 2248 // by LoadContextSlot. That object could be the hole if the |
| 2320 // receiver is implicitly the global object. | 2249 // receiver is implicitly the global object. |
| 2321 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); | 2250 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); |
| 2322 } else if (fun->AsProperty() != NULL) { | 2251 } else if (property != NULL) { |
| 2323 // Call to an object property. | 2252 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2324 Property* prop = fun->AsProperty(); | 2253 VisitForStackValue(property->obj()); |
| 2325 Literal* key = prop->key()->AsLiteral(); | 2254 } |
| 2326 if (key != NULL && key->handle()->IsSymbol()) { | 2255 if (property->key()->IsPropertyName()) { |
| 2327 // Call to a named property, use call IC. | 2256 EmitCallWithIC(expr, |
| 2328 { PreservePositionScope scope(masm()->positions_recorder()); | 2257 property->key()->AsLiteral()->handle(), |
| 2329 VisitForStackValue(prop->obj()); | 2258 RelocInfo::CODE_TARGET); |
| 2330 } | |
| 2331 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); | |
| 2332 } else { | 2259 } else { |
| 2333 // Call to a keyed property. | 2260 EmitKeyedCallWithIC(expr, property->key()); |
| 2334 { PreservePositionScope scope(masm()->positions_recorder()); | |
| 2335 VisitForStackValue(prop->obj()); | |
| 2336 } | |
| 2337 EmitKeyedCallWithIC(expr, prop->key()); | |
| 2338 } | 2261 } |
| 2339 } else { | 2262 } else { |
| 2263 // Call to an arbitrary expression not handled specially above. |
| 2340 { PreservePositionScope scope(masm()->positions_recorder()); | 2264 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2341 VisitForStackValue(fun); | 2265 VisitForStackValue(callee); |
| 2342 } | 2266 } |
| 2343 // Load global receiver object. | 2267 // Load global receiver object. |
| 2344 __ lw(a1, GlobalObjectOperand()); | 2268 __ lw(a1, GlobalObjectOperand()); |
| 2345 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset)); | 2269 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset)); |
| 2346 __ push(a1); | 2270 __ push(a1); |
| 2347 // Emit function call. | 2271 // Emit function call. |
| 2348 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); | 2272 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); |
| 2349 } | 2273 } |
| 2350 | 2274 |
| 2351 #ifdef DEBUG | 2275 #ifdef DEBUG |
| (...skipping 1309 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3661 __ CallRuntime(expr->function(), arg_count); | 3585 __ CallRuntime(expr->function(), arg_count); |
| 3662 } | 3586 } |
| 3663 context()->Plug(v0); | 3587 context()->Plug(v0); |
| 3664 } | 3588 } |
| 3665 | 3589 |
| 3666 | 3590 |
| 3667 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 3591 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
| 3668 switch (expr->op()) { | 3592 switch (expr->op()) { |
| 3669 case Token::DELETE: { | 3593 case Token::DELETE: { |
| 3670 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); | 3594 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
| 3671 Property* prop = expr->expression()->AsProperty(); | 3595 Property* property = expr->expression()->AsProperty(); |
| 3672 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); | 3596 VariableProxy* proxy = expr->expression()->AsVariableProxy(); |
| 3673 | 3597 |
| 3674 if (prop != NULL) { | 3598 if (property != NULL) { |
| 3675 VisitForStackValue(prop->obj()); | 3599 VisitForStackValue(property->obj()); |
| 3676 VisitForStackValue(prop->key()); | 3600 VisitForStackValue(property->key()); |
| 3677 __ li(a1, Operand(Smi::FromInt(strict_mode_flag()))); | 3601 __ li(a1, Operand(Smi::FromInt(strict_mode_flag()))); |
| 3678 __ push(a1); | 3602 __ push(a1); |
| 3679 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 3603 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
| 3680 context()->Plug(v0); | 3604 context()->Plug(v0); |
| 3681 } else if (var != NULL) { | 3605 } else if (proxy != NULL) { |
| 3606 Variable* var = proxy->var(); |
| 3682 // Delete of an unqualified identifier is disallowed in strict mode | 3607 // Delete of an unqualified identifier is disallowed in strict mode |
| 3683 // but "delete this" is. | 3608 // but "delete this" is allowed. |
| 3684 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); | 3609 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); |
| 3685 if (var->is_global()) { | 3610 if (var->IsUnallocated()) { |
| 3686 __ lw(a2, GlobalObjectOperand()); | 3611 __ lw(a2, GlobalObjectOperand()); |
| 3687 __ li(a1, Operand(var->name())); | 3612 __ li(a1, Operand(var->name())); |
| 3688 __ li(a0, Operand(Smi::FromInt(kNonStrictMode))); | 3613 __ li(a0, Operand(Smi::FromInt(kNonStrictMode))); |
| 3689 __ Push(a2, a1, a0); | 3614 __ Push(a2, a1, a0); |
| 3690 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 3615 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
| 3691 context()->Plug(v0); | 3616 context()->Plug(v0); |
| 3692 } else if (var->rewrite() != NULL && | 3617 } else if (var->IsStackAllocated() || var->IsContextSlot()) { |
| 3693 var->rewrite()->type() != Slot::LOOKUP) { | |
| 3694 // Result of deleting non-global, non-dynamic variables is false. | 3618 // Result of deleting non-global, non-dynamic variables is false. |
| 3695 // The subexpression does not have side effects. | 3619 // The subexpression does not have side effects. |
| 3696 context()->Plug(false); | 3620 context()->Plug(var->is_this()); |
| 3697 } else { | 3621 } else { |
| 3698 // Non-global variable. Call the runtime to try to delete from the | 3622 // Non-global variable. Call the runtime to try to delete from the |
| 3699 // context where the variable was introduced. | 3623 // context where the variable was introduced. |
| 3700 __ push(context_register()); | 3624 __ push(context_register()); |
| 3701 __ li(a2, Operand(var->name())); | 3625 __ li(a2, Operand(var->name())); |
| 3702 __ push(a2); | 3626 __ push(a2); |
| 3703 __ CallRuntime(Runtime::kDeleteContextSlot, 2); | 3627 __ CallRuntime(Runtime::kDeleteContextSlot, 2); |
| 3704 context()->Plug(v0); | 3628 context()->Plug(v0); |
| 3705 } | 3629 } |
| 3706 } else { | 3630 } else { |
| (...skipping 254 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3961 } else { | 3885 } else { |
| 3962 context()->Plug(v0); | 3886 context()->Plug(v0); |
| 3963 } | 3887 } |
| 3964 break; | 3888 break; |
| 3965 } | 3889 } |
| 3966 } | 3890 } |
| 3967 } | 3891 } |
| 3968 | 3892 |
| 3969 | 3893 |
| 3970 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { | 3894 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { |
| 3895 ASSERT(!context()->IsEffect()); |
| 3896 ASSERT(!context()->IsTest()); |
| 3971 VariableProxy* proxy = expr->AsVariableProxy(); | 3897 VariableProxy* proxy = expr->AsVariableProxy(); |
| 3972 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { | 3898 if (proxy != NULL && proxy->var()->IsUnallocated()) { |
| 3973 Comment cmnt(masm_, "Global variable"); | 3899 Comment cmnt(masm_, "Global variable"); |
| 3974 __ lw(a0, GlobalObjectOperand()); | 3900 __ lw(a0, GlobalObjectOperand()); |
| 3975 __ li(a2, Operand(proxy->name())); | 3901 __ li(a2, Operand(proxy->name())); |
| 3976 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 3902 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 3977 // Use a regular load, not a contextual load, to avoid a reference | 3903 // Use a regular load, not a contextual load, to avoid a reference |
| 3978 // error. | 3904 // error. |
| 3979 __ Call(ic); | 3905 __ Call(ic); |
| 3980 PrepareForBailout(expr, TOS_REG); | 3906 PrepareForBailout(expr, TOS_REG); |
| 3981 context()->Plug(v0); | 3907 context()->Plug(v0); |
| 3982 } else if (proxy != NULL && | 3908 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { |
| 3983 proxy->var()->rewrite() != NULL && | |
| 3984 proxy->var()->rewrite()->type() == Slot::LOOKUP) { | |
| 3985 Label done, slow; | 3909 Label done, slow; |
| 3986 | 3910 |
| 3987 // Generate code for loading from variables potentially shadowed | 3911 // Generate code for loading from variables potentially shadowed |
| 3988 // by eval-introduced variables. | 3912 // by eval-introduced variables. |
| 3989 Slot* slot = proxy->var()->rewrite(); | 3913 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); |
| 3990 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done); | |
| 3991 | 3914 |
| 3992 __ bind(&slow); | 3915 __ bind(&slow); |
| 3993 __ li(a0, Operand(proxy->name())); | 3916 __ li(a0, Operand(proxy->name())); |
| 3994 __ Push(cp, a0); | 3917 __ Push(cp, a0); |
| 3995 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); | 3918 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); |
| 3996 PrepareForBailout(expr, TOS_REG); | 3919 PrepareForBailout(expr, TOS_REG); |
| 3997 __ bind(&done); | 3920 __ bind(&done); |
| 3998 | 3921 |
| 3999 context()->Plug(v0); | 3922 context()->Plug(v0); |
| 4000 } else { | 3923 } else { |
| (...skipping 325 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4326 *context_length = 0; | 4249 *context_length = 0; |
| 4327 return previous_; | 4250 return previous_; |
| 4328 } | 4251 } |
| 4329 | 4252 |
| 4330 | 4253 |
| 4331 #undef __ | 4254 #undef __ |
| 4332 | 4255 |
| 4333 } } // namespace v8::internal | 4256 } } // namespace v8::internal |
| 4334 | 4257 |
| 4335 #endif // V8_TARGET_ARCH_MIPS | 4258 #endif // V8_TARGET_ARCH_MIPS |
| OLD | NEW |