Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1407)

Side by Side Diff: src/ia32/codegen-ia32.cc

Issue 660095: Merge revision 3813 to 3930 from bleeding_edge to partial snapshots branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/partial_snapshots/
Patch Set: '' Created 10 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/ia32/codegen-ia32.h ('k') | src/ia32/debug-ia32.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after
118 118
119 Scope* CodeGenerator::scope() { return info_->function()->scope(); } 119 Scope* CodeGenerator::scope() { return info_->function()->scope(); }
120 120
121 121
122 // Calling conventions: 122 // Calling conventions:
123 // ebp: caller's frame pointer 123 // ebp: caller's frame pointer
124 // esp: stack pointer 124 // esp: stack pointer
125 // edi: called JS function 125 // edi: called JS function
126 // esi: callee's context 126 // esi: callee's context
127 127
128 void CodeGenerator::Generate(CompilationInfo* info, Mode mode) { 128 void CodeGenerator::Generate(CompilationInfo* info) {
129 // Record the position for debugging purposes. 129 // Record the position for debugging purposes.
130 CodeForFunctionPosition(info->function()); 130 CodeForFunctionPosition(info->function());
131 131
132 // Initialize state. 132 // Initialize state.
133 info_ = info; 133 info_ = info;
134 ASSERT(allocator_ == NULL); 134 ASSERT(allocator_ == NULL);
135 RegisterAllocator register_allocator(this); 135 RegisterAllocator register_allocator(this);
136 allocator_ = &register_allocator; 136 allocator_ = &register_allocator;
137 ASSERT(frame_ == NULL); 137 ASSERT(frame_ == NULL);
138 frame_ = new VirtualFrame(); 138 frame_ = new VirtualFrame();
(...skipping 18 matching lines...) Expand all
157 CodeGenState state(this); 157 CodeGenState state(this);
158 158
159 // Entry: 159 // Entry:
160 // Stack: receiver, arguments, return address. 160 // Stack: receiver, arguments, return address.
161 // ebp: caller's frame pointer 161 // ebp: caller's frame pointer
162 // esp: stack pointer 162 // esp: stack pointer
163 // edi: called JS function 163 // edi: called JS function
164 // esi: callee's context 164 // esi: callee's context
165 allocator_->Initialize(); 165 allocator_->Initialize();
166 166
167 if (mode == PRIMARY) { 167 if (info->mode() == CompilationInfo::PRIMARY) {
168 frame_->Enter(); 168 frame_->Enter();
169 169
170 // Allocate space for locals and initialize them. 170 // Allocate space for locals and initialize them.
171 frame_->AllocateStackSlots(); 171 frame_->AllocateStackSlots();
172 172
173 // Allocate the local context if needed. 173 // Allocate the local context if needed.
174 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 174 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
175 if (heap_slots > 0) { 175 if (heap_slots > 0) {
176 Comment cmnt(masm_, "[ allocate local context"); 176 Comment cmnt(masm_, "[ allocate local context");
177 // Allocate local context. 177 // Allocate local context.
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after
248 if (scope()->is_function_scope() && scope()->function() != NULL) { 248 if (scope()->is_function_scope() && scope()->function() != NULL) {
249 frame_->Push(Factory::the_hole_value()); 249 frame_->Push(Factory::the_hole_value());
250 StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT); 250 StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
251 } 251 }
252 } else { 252 } else {
253 // When used as the secondary compiler for splitting, ebp, esi, 253 // When used as the secondary compiler for splitting, ebp, esi,
254 // and edi have been pushed on the stack. Adjust the virtual 254 // and edi have been pushed on the stack. Adjust the virtual
255 // frame to match this state. 255 // frame to match this state.
256 frame_->Adjust(3); 256 frame_->Adjust(3);
257 allocator_->Unuse(edi); 257 allocator_->Unuse(edi);
258
259 // Bind all the bailout labels to the beginning of the function.
260 List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
261 for (int i = 0; i < bailouts->length(); i++) {
262 __ bind(bailouts->at(i)->label());
263 }
258 } 264 }
259 265
260 // Initialize the function return target after the locals are set 266 // Initialize the function return target after the locals are set
261 // up, because it needs the expected frame height from the frame. 267 // up, because it needs the expected frame height from the frame.
262 function_return_.set_direction(JumpTarget::BIDIRECTIONAL); 268 function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
263 function_return_is_shadowed_ = false; 269 function_return_is_shadowed_ = false;
264 270
265 // Generate code to 'execute' declarations and initialize functions 271 // Generate code to 'execute' declarations and initialize functions
266 // (source elements). In case of an illegal redeclaration we need to 272 // (source elements). In case of an illegal redeclaration we need to
267 // handle that instead of processing the declarations. 273 // handle that instead of processing the declarations.
(...skipping 299 matching lines...) Expand 10 before | Expand all | Expand 10 after
567 // <global>.<variable> and perform a (regular non-contextual) property 573 // <global>.<variable> and perform a (regular non-contextual) property
568 // load to make sure we do not get reference errors. 574 // load to make sure we do not get reference errors.
569 Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX); 575 Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
570 Literal key(variable->name()); 576 Literal key(variable->name());
571 Property property(&global, &key, RelocInfo::kNoPosition); 577 Property property(&global, &key, RelocInfo::kNoPosition);
572 Reference ref(this, &property); 578 Reference ref(this, &property);
573 ref.GetValue(); 579 ref.GetValue();
574 } else if (variable != NULL && variable->slot() != NULL) { 580 } else if (variable != NULL && variable->slot() != NULL) {
575 // For a variable that rewrites to a slot, we signal it is the immediate 581 // For a variable that rewrites to a slot, we signal it is the immediate
576 // subexpression of a typeof. 582 // subexpression of a typeof.
577 LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF); 583 Result result =
584 LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
585 frame()->Push(&result);
578 } else { 586 } else {
579 // Anything else can be handled normally. 587 // Anything else can be handled normally.
580 Load(expr); 588 Load(expr);
581 } 589 }
582 } 590 }
583 591
584 592
585 ArgumentsAllocationMode CodeGenerator::ArgumentsMode() { 593 ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
586 if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION; 594 if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
587 ASSERT(scope()->arguments_shadow() != NULL); 595 ASSERT(scope()->arguments_shadow() != NULL);
(...skipping 28 matching lines...) Expand all
616 Variable* arguments = scope()->arguments()->var(); 624 Variable* arguments = scope()->arguments()->var();
617 Variable* shadow = scope()->arguments_shadow()->var(); 625 Variable* shadow = scope()->arguments_shadow()->var();
618 ASSERT(arguments != NULL && arguments->slot() != NULL); 626 ASSERT(arguments != NULL && arguments->slot() != NULL);
619 ASSERT(shadow != NULL && shadow->slot() != NULL); 627 ASSERT(shadow != NULL && shadow->slot() != NULL);
620 JumpTarget done; 628 JumpTarget done;
621 bool skip_arguments = false; 629 bool skip_arguments = false;
622 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) { 630 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
623 // We have to skip storing into the arguments slot if it has already 631 // We have to skip storing into the arguments slot if it has already
624 // been written to. This can happen if the a function has a local 632 // been written to. This can happen if the a function has a local
625 // variable named 'arguments'. 633 // variable named 'arguments'.
626 LoadFromSlot(arguments->slot(), NOT_INSIDE_TYPEOF); 634 Result probe = LoadFromSlot(arguments->slot(), NOT_INSIDE_TYPEOF);
627 Result probe = frame_->Pop();
628 if (probe.is_constant()) { 635 if (probe.is_constant()) {
629 // We have to skip updating the arguments object if it has 636 // We have to skip updating the arguments object if it has
630 // been assigned a proper value. 637 // been assigned a proper value.
631 skip_arguments = !probe.handle()->IsTheHole(); 638 skip_arguments = !probe.handle()->IsTheHole();
632 } else { 639 } else {
633 __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value())); 640 __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
634 probe.Unuse(); 641 probe.Unuse();
635 done.Branch(not_equal); 642 done.Branch(not_equal);
636 } 643 }
637 } 644 }
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
681 if (property->key()->IsPropertyName()) { 688 if (property->key()->IsPropertyName()) {
682 ref->set_type(Reference::NAMED); 689 ref->set_type(Reference::NAMED);
683 } else { 690 } else {
684 Load(property->key()); 691 Load(property->key());
685 ref->set_type(Reference::KEYED); 692 ref->set_type(Reference::KEYED);
686 } 693 }
687 } else if (var != NULL) { 694 } else if (var != NULL) {
688 // The expression is a variable proxy that does not rewrite to a 695 // The expression is a variable proxy that does not rewrite to a
689 // property. Global variables are treated as named property references. 696 // property. Global variables are treated as named property references.
690 if (var->is_global()) { 697 if (var->is_global()) {
698 // If eax is free, the register allocator prefers it. Thus the code
699 // generator will load the global object into eax, which is where
700 // LoadIC wants it. Most uses of Reference call LoadIC directly
701 // after the reference is created.
702 frame_->Spill(eax);
691 LoadGlobal(); 703 LoadGlobal();
692 ref->set_type(Reference::NAMED); 704 ref->set_type(Reference::NAMED);
693 } else { 705 } else {
694 ASSERT(var->slot() != NULL); 706 ASSERT(var->slot() != NULL);
695 ref->set_type(Reference::SLOT); 707 ref->set_type(Reference::SLOT);
696 } 708 }
697 } else { 709 } else {
698 // Anything else is a runtime error. 710 // Anything else is a runtime error.
699 Load(e); 711 Load(e);
700 frame_->CallRuntime(Runtime::kThrowReferenceError, 1); 712 frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
(...skipping 13 matching lines...) Expand all
714 726
715 // ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and 727 // ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
716 // convert it to a boolean in the condition code register or jump to 728 // convert it to a boolean in the condition code register or jump to
717 // 'false_target'/'true_target' as appropriate. 729 // 'false_target'/'true_target' as appropriate.
718 void CodeGenerator::ToBoolean(ControlDestination* dest) { 730 void CodeGenerator::ToBoolean(ControlDestination* dest) {
719 Comment cmnt(masm_, "[ ToBoolean"); 731 Comment cmnt(masm_, "[ ToBoolean");
720 732
721 // The value to convert should be popped from the frame. 733 // The value to convert should be popped from the frame.
722 Result value = frame_->Pop(); 734 Result value = frame_->Pop();
723 value.ToRegister(); 735 value.ToRegister();
724 // Fast case checks.
725 736
726 // 'false' => false. 737 if (value.is_number()) {
727 __ cmp(value.reg(), Factory::false_value()); 738 Comment cmnt(masm_, "ONLY_NUMBER");
728 dest->false_target()->Branch(equal); 739 // Fast case if NumberInfo indicates only numbers.
740 if (FLAG_debug_code) {
741 __ AbortIfNotNumber(value.reg(), "ToBoolean operand is not a number.");
742 }
743 // Smi => false iff zero.
744 ASSERT(kSmiTag == 0);
745 __ test(value.reg(), Operand(value.reg()));
746 dest->false_target()->Branch(zero);
747 __ test(value.reg(), Immediate(kSmiTagMask));
748 dest->true_target()->Branch(zero);
749 __ fldz();
750 __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
751 __ FCmp();
752 value.Unuse();
753 dest->Split(not_zero);
754 } else {
755 // Fast case checks.
756 // 'false' => false.
757 __ cmp(value.reg(), Factory::false_value());
758 dest->false_target()->Branch(equal);
729 759
730 // 'true' => true. 760 // 'true' => true.
731 __ cmp(value.reg(), Factory::true_value()); 761 __ cmp(value.reg(), Factory::true_value());
732 dest->true_target()->Branch(equal); 762 dest->true_target()->Branch(equal);
733 763
734 // 'undefined' => false. 764 // 'undefined' => false.
735 __ cmp(value.reg(), Factory::undefined_value()); 765 __ cmp(value.reg(), Factory::undefined_value());
736 dest->false_target()->Branch(equal); 766 dest->false_target()->Branch(equal);
737 767
738 // Smi => false iff zero. 768 // Smi => false iff zero.
739 ASSERT(kSmiTag == 0); 769 ASSERT(kSmiTag == 0);
740 __ test(value.reg(), Operand(value.reg())); 770 __ test(value.reg(), Operand(value.reg()));
741 dest->false_target()->Branch(zero); 771 dest->false_target()->Branch(zero);
742 __ test(value.reg(), Immediate(kSmiTagMask)); 772 __ test(value.reg(), Immediate(kSmiTagMask));
743 dest->true_target()->Branch(zero); 773 dest->true_target()->Branch(zero);
744 774
745 // Call the stub for all other cases. 775 // Call the stub for all other cases.
746 frame_->Push(&value); // Undo the Pop() from above. 776 frame_->Push(&value); // Undo the Pop() from above.
747 ToBooleanStub stub; 777 ToBooleanStub stub;
748 Result temp = frame_->CallStub(&stub, 1); 778 Result temp = frame_->CallStub(&stub, 1);
749 // Convert the result to a condition code. 779 // Convert the result to a condition code.
750 __ test(temp.reg(), Operand(temp.reg())); 780 __ test(temp.reg(), Operand(temp.reg()));
751 temp.Unuse(); 781 temp.Unuse();
752 dest->Split(not_equal); 782 dest->Split(not_equal);
783 }
753 } 784 }
754 785
755 786
756 class FloatingPointHelper : public AllStatic { 787 class FloatingPointHelper : public AllStatic {
757 public: 788 public:
758 789
759 enum ArgLocation { 790 enum ArgLocation {
760 ARGS_ON_STACK, 791 ARGS_ON_STACK,
761 ARGS_IN_REGISTERS 792 ARGS_IN_REGISTERS
762 }; 793 };
(...skipping 19 matching lines...) Expand all
782 // operand_1 in eax, operand_2 in edx; falls through on float 813 // operand_1 in eax, operand_2 in edx; falls through on float
783 // operands, jumps to the non_float label otherwise. 814 // operands, jumps to the non_float label otherwise.
784 static void CheckFloatOperands(MacroAssembler* masm, 815 static void CheckFloatOperands(MacroAssembler* masm,
785 Label* non_float, 816 Label* non_float,
786 Register scratch); 817 Register scratch);
787 // Takes the operands in edx and eax and loads them as integers in eax 818 // Takes the operands in edx and eax and loads them as integers in eax
788 // and ecx. 819 // and ecx.
789 static void LoadAsIntegers(MacroAssembler* masm, 820 static void LoadAsIntegers(MacroAssembler* masm,
790 bool use_sse3, 821 bool use_sse3,
791 Label* operand_conversion_failure); 822 Label* operand_conversion_failure);
823 // Test if operands are smis or heap numbers and load them
824 // into xmm0 and xmm1 if they are. Operands are in edx and eax.
825 // Leaves operands unchanged.
826 static void LoadSSE2Operands(MacroAssembler* masm);
792 // Test if operands are numbers (smi or HeapNumber objects), and load 827 // Test if operands are numbers (smi or HeapNumber objects), and load
793 // them into xmm0 and xmm1 if they are. Jump to label not_numbers if 828 // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
794 // either operand is not a number. Operands are in edx and eax. 829 // either operand is not a number. Operands are in edx and eax.
795 // Leaves operands unchanged. 830 // Leaves operands unchanged.
796 static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers); 831 static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
797 832
798 // Similar to LoadSSE2Operands but assumes that both operands are smis. 833 // Similar to LoadSSE2Operands but assumes that both operands are smis.
799 // Expects operands in edx, eax. 834 // Expects operands in edx, eax.
800 static void LoadSSE2Smis(MacroAssembler* masm, Register scratch); 835 static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);
801 }; 836 };
802 837
803 838
804 const char* GenericBinaryOpStub::GetName() { 839 const char* GenericBinaryOpStub::GetName() {
805 if (name_ != NULL) return name_; 840 if (name_ != NULL) return name_;
806 const int kMaxNameLength = 100; 841 const int kMaxNameLength = 100;
807 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength); 842 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
808 if (name_ == NULL) return "OOM"; 843 if (name_ == NULL) return "OOM";
809 const char* op_name = Token::Name(op_); 844 const char* op_name = Token::Name(op_);
810 const char* overwrite_name; 845 const char* overwrite_name;
811 switch (mode_) { 846 switch (mode_) {
812 case NO_OVERWRITE: overwrite_name = "Alloc"; break; 847 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
813 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; 848 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
814 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; 849 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
815 default: overwrite_name = "UnknownOverwrite"; break; 850 default: overwrite_name = "UnknownOverwrite"; break;
816 } 851 }
817 852
818 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), 853 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
819 "GenericBinaryOpStub_%s_%s%s_%s%s", 854 "GenericBinaryOpStub_%s_%s%s_%s%s_%s",
820 op_name, 855 op_name,
821 overwrite_name, 856 overwrite_name,
822 (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "", 857 (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
823 args_in_registers_ ? "RegArgs" : "StackArgs", 858 args_in_registers_ ? "RegArgs" : "StackArgs",
824 args_reversed_ ? "_R" : ""); 859 args_reversed_ ? "_R" : "",
860 NumberInfo::ToString(operands_type_));
825 return name_; 861 return name_;
826 } 862 }
827 863
828 864
829 // Call the specialized stub for a binary operation. 865 // Call the specialized stub for a binary operation.
830 class DeferredInlineBinaryOperation: public DeferredCode { 866 class DeferredInlineBinaryOperation: public DeferredCode {
831 public: 867 public:
832 DeferredInlineBinaryOperation(Token::Value op, 868 DeferredInlineBinaryOperation(Token::Value op,
833 Register dst, 869 Register dst,
834 Register left, 870 Register left,
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after
964 } else if (right_is_string) { 1000 } else if (right_is_string) {
965 answer = 1001 answer =
966 frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2); 1002 frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2);
967 } 1003 }
968 frame_->Push(&answer); 1004 frame_->Push(&answer);
969 return; 1005 return;
970 } 1006 }
971 // Neither operand is known to be a string. 1007 // Neither operand is known to be a string.
972 } 1008 }
973 1009
974 bool left_is_smi = left.is_constant() && left.handle()->IsSmi(); 1010 bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
975 bool left_is_non_smi = left.is_constant() && !left.handle()->IsSmi(); 1011 bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
976 bool right_is_smi = right.is_constant() && right.handle()->IsSmi(); 1012 bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
977 bool right_is_non_smi = right.is_constant() && !right.handle()->IsSmi(); 1013 bool right_is_non_smi_constant =
1014 right.is_constant() && !right.handle()->IsSmi();
978 1015
979 if (left_is_smi && right_is_smi) { 1016 if (left_is_smi_constant && right_is_smi_constant) {
980 // Compute the constant result at compile time, and leave it on the frame. 1017 // Compute the constant result at compile time, and leave it on the frame.
981 int left_int = Smi::cast(*left.handle())->value(); 1018 int left_int = Smi::cast(*left.handle())->value();
982 int right_int = Smi::cast(*right.handle())->value(); 1019 int right_int = Smi::cast(*right.handle())->value();
983 if (FoldConstantSmis(op, left_int, right_int)) return; 1020 if (FoldConstantSmis(op, left_int, right_int)) return;
984 } 1021 }
985 1022
1023 // Get number type of left and right sub-expressions.
1024 NumberInfo::Type operands_type =
1025 NumberInfo::Combine(left.number_info(), right.number_info());
1026
986 Result answer; 1027 Result answer;
987 if (left_is_non_smi || right_is_non_smi) { 1028 if (left_is_non_smi_constant || right_is_non_smi_constant) {
988 // Go straight to the slow case, with no smi code. 1029 // Go straight to the slow case, with no smi code.
989 GenericBinaryOpStub stub(op, overwrite_mode, NO_SMI_CODE_IN_STUB); 1030 GenericBinaryOpStub stub(op,
1031 overwrite_mode,
1032 NO_SMI_CODE_IN_STUB,
1033 operands_type);
990 answer = stub.GenerateCall(masm_, frame_, &left, &right); 1034 answer = stub.GenerateCall(masm_, frame_, &left, &right);
991 } else if (right_is_smi) { 1035 } else if (right_is_smi_constant) {
992 answer = ConstantSmiBinaryOperation(op, &left, right.handle(), 1036 answer = ConstantSmiBinaryOperation(op, &left, right.handle(),
993 type, false, overwrite_mode); 1037 type, false, overwrite_mode);
994 } else if (left_is_smi) { 1038 } else if (left_is_smi_constant) {
995 answer = ConstantSmiBinaryOperation(op, &right, left.handle(), 1039 answer = ConstantSmiBinaryOperation(op, &right, left.handle(),
996 type, true, overwrite_mode); 1040 type, true, overwrite_mode);
997 } else { 1041 } else {
998 // Set the flags based on the operation, type and loop nesting level. 1042 // Set the flags based on the operation, type and loop nesting level.
999 // Bit operations always assume they likely operate on Smis. Still only 1043 // Bit operations always assume they likely operate on Smis. Still only
1000 // generate the inline Smi check code if this operation is part of a loop. 1044 // generate the inline Smi check code if this operation is part of a loop.
1001 // For all other operations only inline the Smi check code for likely smis 1045 // For all other operations only inline the Smi check code for likely smis
1002 // if the operation is part of a loop. 1046 // if the operation is part of a loop.
1003 if (loop_nesting() > 0 && (Token::IsBitOp(op) || type->IsLikelySmi())) { 1047 if (loop_nesting() > 0 && (Token::IsBitOp(op) || type->IsLikelySmi())) {
1004 answer = LikelySmiBinaryOperation(op, &left, &right, overwrite_mode); 1048 answer = LikelySmiBinaryOperation(op, &left, &right, overwrite_mode);
1005 } else { 1049 } else {
1006 GenericBinaryOpStub stub(op, overwrite_mode, NO_GENERIC_BINARY_FLAGS); 1050 GenericBinaryOpStub stub(op,
1051 overwrite_mode,
1052 NO_GENERIC_BINARY_FLAGS,
1053 operands_type);
1007 answer = stub.GenerateCall(masm_, frame_, &left, &right); 1054 answer = stub.GenerateCall(masm_, frame_, &left, &right);
1008 } 1055 }
1009 } 1056 }
1057
1058 // Set NumberInfo of result according to the operation performed.
1059 // Rely on the fact that smis have a 31 bit payload on ia32.
1060 ASSERT(kSmiValueSize == 31);
1061 NumberInfo::Type result_type = NumberInfo::kUnknown;
1062 switch (op) {
1063 case Token::COMMA:
1064 result_type = right.number_info();
1065 break;
1066 case Token::OR:
1067 case Token::AND:
1068 // Result type can be either of the two input types.
1069 result_type = operands_type;
1070 break;
1071 case Token::BIT_OR:
1072 case Token::BIT_XOR:
1073 case Token::BIT_AND:
1074 // Result is always a number. Smi property of inputs is preserved.
1075 result_type = (operands_type == NumberInfo::kSmi)
1076 ? NumberInfo::kSmi
1077 : NumberInfo::kNumber;
1078 break;
1079 case Token::SAR:
1080 // Result is a smi if we shift by a constant >= 1, otherwise a number.
1081 result_type = (right.is_constant() && right.handle()->IsSmi()
1082 && Smi::cast(*right.handle())->value() >= 1)
1083 ? NumberInfo::kSmi
1084 : NumberInfo::kNumber;
1085 break;
1086 case Token::SHR:
1087 // Result is a smi if we shift by a constant >= 2, otherwise a number.
1088 result_type = (right.is_constant() && right.handle()->IsSmi()
1089 && Smi::cast(*right.handle())->value() >= 2)
1090 ? NumberInfo::kSmi
1091 : NumberInfo::kNumber;
1092 break;
1093 case Token::ADD:
1094 // Result could be a string or a number. Check types of inputs.
1095 result_type = NumberInfo::IsNumber(operands_type)
1096 ? NumberInfo::kNumber
1097 : NumberInfo::kUnknown;
1098 break;
1099 case Token::SHL:
1100 case Token::SUB:
1101 case Token::MUL:
1102 case Token::DIV:
1103 case Token::MOD:
1104 // Result is always a number.
1105 result_type = NumberInfo::kNumber;
1106 break;
1107 default:
1108 UNREACHABLE();
1109 }
1110 answer.set_number_info(result_type);
1010 frame_->Push(&answer); 1111 frame_->Push(&answer);
1011 } 1112 }
1012 1113
1013 1114
1014 bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) { 1115 bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
1015 Object* answer_object = Heap::undefined_value(); 1116 Object* answer_object = Heap::undefined_value();
1016 switch (op) { 1117 switch (op) {
1017 case Token::ADD: 1118 case Token::ADD:
1018 if (Smi::IsValid(left + right)) { 1119 if (Smi::IsValid(left + right)) {
1019 answer_object = Smi::FromInt(left + right); 1120 answer_object = Smi::FromInt(left + right);
(...skipping 821 matching lines...) Expand 10 before | Expand all | Expand 10 after
1841 ASSERT(op == Token::BIT_OR); 1942 ASSERT(op == Token::BIT_OR);
1842 if (int_value != 0) { 1943 if (int_value != 0) {
1843 __ or_(Operand(operand->reg()), Immediate(value)); 1944 __ or_(Operand(operand->reg()), Immediate(value));
1844 } 1945 }
1845 } 1946 }
1846 deferred->BindExit(); 1947 deferred->BindExit();
1847 answer = *operand; 1948 answer = *operand;
1848 break; 1949 break;
1849 } 1950 }
1850 1951
1952 case Token::DIV:
1953 if (!reversed && int_value == 2) {
1954 operand->ToRegister();
1955 frame_->Spill(operand->reg());
1956
1957 DeferredInlineSmiOperation* deferred =
1958 new DeferredInlineSmiOperation(op,
1959 operand->reg(),
1960 operand->reg(),
1961 smi_value,
1962 overwrite_mode);
1963 // Check that lowest log2(value) bits of operand are zero, and test
1964 // smi tag at the same time.
1965 ASSERT_EQ(0, kSmiTag);
1966 ASSERT_EQ(1, kSmiTagSize);
1967 __ test(operand->reg(), Immediate(3));
1968 deferred->Branch(not_zero); // Branch if non-smi or odd smi.
1969 __ sar(operand->reg(), 1);
1970 deferred->BindExit();
1971 answer = *operand;
1972 } else {
1973 // Cannot fall through MOD to default case, so we duplicate the
1974 // default case here.
1975 Result constant_operand(value);
1976 if (reversed) {
1977 answer = LikelySmiBinaryOperation(op, &constant_operand, operand,
1978 overwrite_mode);
1979 } else {
1980 answer = LikelySmiBinaryOperation(op, operand, &constant_operand,
1981 overwrite_mode);
1982 }
1983 }
1984 break;
1851 // Generate inline code for mod of powers of 2 and negative powers of 2. 1985 // Generate inline code for mod of powers of 2 and negative powers of 2.
1852 case Token::MOD: 1986 case Token::MOD:
1853 if (!reversed && 1987 if (!reversed &&
1854 int_value != 0 && 1988 int_value != 0 &&
1855 (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) { 1989 (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
1856 operand->ToRegister(); 1990 operand->ToRegister();
1857 frame_->Spill(operand->reg()); 1991 frame_->Spill(operand->reg());
1858 DeferredCode* deferred = new DeferredInlineSmiOperation(op, 1992 DeferredCode* deferred = new DeferredInlineSmiOperation(op,
1859 operand->reg(), 1993 operand->reg(),
1860 operand->reg(), 1994 operand->reg(),
(...skipping 459 matching lines...) Expand 10 before | Expand all | Expand 10 after
2320 // just copies y and the arguments of the current function on the 2454 // just copies y and the arguments of the current function on the
2321 // stack, as receiver and arguments, and calls x. 2455 // stack, as receiver and arguments, and calls x.
2322 // In the implementation comments, we call x the applicand 2456 // In the implementation comments, we call x the applicand
2323 // and y the receiver. 2457 // and y the receiver.
2324 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION); 2458 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
2325 ASSERT(arguments->IsArguments()); 2459 ASSERT(arguments->IsArguments());
2326 2460
2327 // Load applicand.apply onto the stack. This will usually 2461 // Load applicand.apply onto the stack. This will usually
2328 // give us a megamorphic load site. Not super, but it works. 2462 // give us a megamorphic load site. Not super, but it works.
2329 Load(applicand); 2463 Load(applicand);
2464 frame()->Dup();
2330 Handle<String> name = Factory::LookupAsciiSymbol("apply"); 2465 Handle<String> name = Factory::LookupAsciiSymbol("apply");
2331 frame()->Push(name); 2466 frame()->Push(name);
2332 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET); 2467 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
2333 __ nop(); 2468 __ nop();
2334 frame()->Push(&answer); 2469 frame()->Push(&answer);
2335 2470
2336 // Load the receiver and the existing arguments object onto the 2471 // Load the receiver and the existing arguments object onto the
2337 // expression stack. Avoid allocating the arguments object here. 2472 // expression stack. Avoid allocating the arguments object here.
2338 Load(receiver); 2473 Load(receiver);
2339 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); 2474 Result existing_args =
2475 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
2476 frame()->Push(&existing_args);
2340 2477
2341 // Emit the source position information after having loaded the 2478 // Emit the source position information after having loaded the
2342 // receiver and the arguments. 2479 // receiver and the arguments.
2343 CodeForSourcePosition(position); 2480 CodeForSourcePosition(position);
2344 // Contents of frame at this point: 2481 // Contents of frame at this point:
2345 // Frame[0]: arguments object of the current function or the hole. 2482 // Frame[0]: arguments object of the current function or the hole.
2346 // Frame[1]: receiver 2483 // Frame[1]: receiver
2347 // Frame[2]: applicand.apply 2484 // Frame[2]: applicand.apply
2348 // Frame[3]: applicand. 2485 // Frame[3]: applicand.
2349 2486
(...skipping 1549 matching lines...) Expand 10 before | Expand all | Expand 10 after
3899 4036
3900 4037
3901 void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) { 4038 void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
3902 ASSERT(!in_spilled_code()); 4039 ASSERT(!in_spilled_code());
3903 Comment cmnt(masm_, "[ DebuggerStatement"); 4040 Comment cmnt(masm_, "[ DebuggerStatement");
3904 CodeForStatementPosition(node); 4041 CodeForStatementPosition(node);
3905 #ifdef ENABLE_DEBUGGER_SUPPORT 4042 #ifdef ENABLE_DEBUGGER_SUPPORT
3906 // Spill everything, even constants, to the frame. 4043 // Spill everything, even constants, to the frame.
3907 frame_->SpillAll(); 4044 frame_->SpillAll();
3908 4045
3909 DebuggerStatementStub ces; 4046 frame_->DebugBreak();
3910 frame_->CallStub(&ces, 0);
3911 // Ignore the return value. 4047 // Ignore the return value.
3912 #endif 4048 #endif
3913 } 4049 }
3914 4050
3915 4051
3916 void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) { 4052 Result CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) {
3917 ASSERT(boilerplate->IsBoilerplate()); 4053 ASSERT(boilerplate->IsBoilerplate());
3918 4054
3919 // The inevitable call will sync frame elements to memory anyway, so 4055 // The inevitable call will sync frame elements to memory anyway, so
3920 // we do it eagerly to allow us to push the arguments directly into 4056 // we do it eagerly to allow us to push the arguments directly into
3921 // place. 4057 // place.
3922 frame_->SyncRange(0, frame_->element_count() - 1); 4058 frame()->SyncRange(0, frame()->element_count() - 1);
3923 4059
3924 // Use the fast case closure allocation code that allocates in new 4060 // Use the fast case closure allocation code that allocates in new
3925 // space for nested functions that don't need literals cloning. 4061 // space for nested functions that don't need literals cloning.
3926 if (scope()->is_function_scope() && boilerplate->NumberOfLiterals() == 0) { 4062 if (scope()->is_function_scope() && boilerplate->NumberOfLiterals() == 0) {
3927 FastNewClosureStub stub; 4063 FastNewClosureStub stub;
3928 frame_->EmitPush(Immediate(boilerplate)); 4064 frame()->EmitPush(Immediate(boilerplate));
3929 Result answer = frame_->CallStub(&stub, 1); 4065 return frame()->CallStub(&stub, 1);
3930 frame_->Push(&answer);
3931 } else { 4066 } else {
3932 // Call the runtime to instantiate the function boilerplate 4067 // Call the runtime to instantiate the function boilerplate
3933 // object. 4068 // object.
3934 frame_->EmitPush(esi); 4069 frame()->EmitPush(esi);
3935 frame_->EmitPush(Immediate(boilerplate)); 4070 frame()->EmitPush(Immediate(boilerplate));
3936 Result result = frame_->CallRuntime(Runtime::kNewClosure, 2); 4071 return frame()->CallRuntime(Runtime::kNewClosure, 2);
3937 frame_->Push(&result);
3938 } 4072 }
3939 } 4073 }
3940 4074
3941 4075
3942 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { 4076 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
3943 Comment cmnt(masm_, "[ FunctionLiteral"); 4077 Comment cmnt(masm_, "[ FunctionLiteral");
3944 4078
3945 // Build the function boilerplate and instantiate it. 4079 // Build the function boilerplate and instantiate it.
3946 Handle<JSFunction> boilerplate = 4080 Handle<JSFunction> boilerplate =
3947 Compiler::BuildBoilerplate(node, script(), this); 4081 Compiler::BuildBoilerplate(node, script(), this);
3948 // Check for stack-overflow exception. 4082 // Check for stack-overflow exception.
3949 if (HasStackOverflow()) return; 4083 if (HasStackOverflow()) return;
3950 InstantiateBoilerplate(boilerplate); 4084 Result result = InstantiateBoilerplate(boilerplate);
4085 frame()->Push(&result);
3951 } 4086 }
3952 4087
3953 4088
3954 void CodeGenerator::VisitFunctionBoilerplateLiteral( 4089 void CodeGenerator::VisitFunctionBoilerplateLiteral(
3955 FunctionBoilerplateLiteral* node) { 4090 FunctionBoilerplateLiteral* node) {
3956 Comment cmnt(masm_, "[ FunctionBoilerplateLiteral"); 4091 Comment cmnt(masm_, "[ FunctionBoilerplateLiteral");
3957 InstantiateBoilerplate(node->boilerplate()); 4092 Result result = InstantiateBoilerplate(node->boilerplate());
4093 frame()->Push(&result);
3958 } 4094 }
3959 4095
3960 4096
3961 void CodeGenerator::VisitConditional(Conditional* node) { 4097 void CodeGenerator::VisitConditional(Conditional* node) {
3962 Comment cmnt(masm_, "[ Conditional"); 4098 Comment cmnt(masm_, "[ Conditional");
3963 JumpTarget then; 4099 JumpTarget then;
3964 JumpTarget else_; 4100 JumpTarget else_;
3965 JumpTarget exit; 4101 JumpTarget exit;
3966 ControlDestination dest(&then, &else_, true); 4102 ControlDestination dest(&then, &else_, true);
3967 LoadCondition(node->condition(), &dest, true); 4103 LoadCondition(node->condition(), &dest, true);
(...skipping 15 matching lines...) Expand all
3983 exit.Jump(); 4119 exit.Jump();
3984 else_.Bind(); 4120 else_.Bind();
3985 Load(node->else_expression()); 4121 Load(node->else_expression());
3986 } 4122 }
3987 } 4123 }
3988 4124
3989 exit.Bind(); 4125 exit.Bind();
3990 } 4126 }
3991 4127
3992 4128
3993 void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) { 4129 Result CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
4130 Result result;
3994 if (slot->type() == Slot::LOOKUP) { 4131 if (slot->type() == Slot::LOOKUP) {
3995 ASSERT(slot->var()->is_dynamic()); 4132 ASSERT(slot->var()->is_dynamic());
3996
3997 JumpTarget slow; 4133 JumpTarget slow;
3998 JumpTarget done; 4134 JumpTarget done;
3999 Result value;
4000 4135
4001 // Generate fast-case code for variables that might be shadowed by 4136 // Generate fast-case code for variables that might be shadowed by
4002 // eval-introduced variables. Eval is used a lot without 4137 // eval-introduced variables. Eval is used a lot without
4003 // introducing variables. In those cases, we do not want to 4138 // introducing variables. In those cases, we do not want to
4004 // perform a runtime call for all variables in the scope 4139 // perform a runtime call for all variables in the scope
4005 // containing the eval. 4140 // containing the eval.
4006 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { 4141 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
4007 value = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, &slow); 4142 result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, &slow);
4008 // If there was no control flow to slow, we can exit early. 4143 // If there was no control flow to slow, we can exit early.
4009 if (!slow.is_linked()) { 4144 if (!slow.is_linked()) return result;
4010 frame_->Push(&value); 4145 done.Jump(&result);
4011 return;
4012 }
4013
4014 done.Jump(&value);
4015 4146
4016 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { 4147 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
4017 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot(); 4148 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
4018 // Only generate the fast case for locals that rewrite to slots. 4149 // Only generate the fast case for locals that rewrite to slots.
4019 // This rules out argument loads. 4150 // This rules out argument loads.
4020 if (potential_slot != NULL) { 4151 if (potential_slot != NULL) {
4021 // Allocate a fresh register to use as a temp in 4152 // Allocate a fresh register to use as a temp in
4022 // ContextSlotOperandCheckExtensions and to hold the result 4153 // ContextSlotOperandCheckExtensions and to hold the result
4023 // value. 4154 // value.
4024 value = allocator_->Allocate(); 4155 result = allocator()->Allocate();
4025 ASSERT(value.is_valid()); 4156 ASSERT(result.is_valid());
4026 __ mov(value.reg(), 4157 __ mov(result.reg(),
4027 ContextSlotOperandCheckExtensions(potential_slot, 4158 ContextSlotOperandCheckExtensions(potential_slot,
4028 value, 4159 result,
4029 &slow)); 4160 &slow));
4030 if (potential_slot->var()->mode() == Variable::CONST) { 4161 if (potential_slot->var()->mode() == Variable::CONST) {
4031 __ cmp(value.reg(), Factory::the_hole_value()); 4162 __ cmp(result.reg(), Factory::the_hole_value());
4032 done.Branch(not_equal, &value); 4163 done.Branch(not_equal, &result);
4033 __ mov(value.reg(), Factory::undefined_value()); 4164 __ mov(result.reg(), Factory::undefined_value());
4034 } 4165 }
4035 // There is always control flow to slow from 4166 // There is always control flow to slow from
4036 // ContextSlotOperandCheckExtensions so we have to jump around 4167 // ContextSlotOperandCheckExtensions so we have to jump around
4037 // it. 4168 // it.
4038 done.Jump(&value); 4169 done.Jump(&result);
4039 } 4170 }
4040 } 4171 }
4041 4172
4042 slow.Bind(); 4173 slow.Bind();
4043 // A runtime call is inevitable. We eagerly sync frame elements 4174 // A runtime call is inevitable. We eagerly sync frame elements
4044 // to memory so that we can push the arguments directly into place 4175 // to memory so that we can push the arguments directly into place
4045 // on top of the frame. 4176 // on top of the frame.
4046 frame_->SyncRange(0, frame_->element_count() - 1); 4177 frame()->SyncRange(0, frame()->element_count() - 1);
4047 frame_->EmitPush(esi); 4178 frame()->EmitPush(esi);
4048 frame_->EmitPush(Immediate(slot->var()->name())); 4179 frame()->EmitPush(Immediate(slot->var()->name()));
4049 if (typeof_state == INSIDE_TYPEOF) { 4180 if (typeof_state == INSIDE_TYPEOF) {
4050 value = 4181 result =
4051 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 4182 frame()->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4052 } else { 4183 } else {
4053 value = frame_->CallRuntime(Runtime::kLoadContextSlot, 2); 4184 result = frame()->CallRuntime(Runtime::kLoadContextSlot, 2);
4054 } 4185 }
4055 4186
4056 done.Bind(&value); 4187 done.Bind(&result);
4057 frame_->Push(&value); 4188 return result;
4058 4189
4059 } else if (slot->var()->mode() == Variable::CONST) { 4190 } else if (slot->var()->mode() == Variable::CONST) {
4060 // Const slots may contain 'the hole' value (the constant hasn't been 4191 // Const slots may contain 'the hole' value (the constant hasn't been
4061 // initialized yet) which needs to be converted into the 'undefined' 4192 // initialized yet) which needs to be converted into the 'undefined'
4062 // value. 4193 // value.
4063 // 4194 //
4064 // We currently spill the virtual frame because constants use the 4195 // We currently spill the virtual frame because constants use the
4065 // potentially unsafe direct-frame access of SlotOperand. 4196 // potentially unsafe direct-frame access of SlotOperand.
4066 VirtualFrame::SpilledScope spilled_scope; 4197 VirtualFrame::SpilledScope spilled_scope;
4067 Comment cmnt(masm_, "[ Load const"); 4198 Comment cmnt(masm_, "[ Load const");
4068 JumpTarget exit; 4199 Label exit;
4069 __ mov(ecx, SlotOperand(slot, ecx)); 4200 __ mov(ecx, SlotOperand(slot, ecx));
4070 __ cmp(ecx, Factory::the_hole_value()); 4201 __ cmp(ecx, Factory::the_hole_value());
4071 exit.Branch(not_equal); 4202 __ j(not_equal, &exit);
4072 __ mov(ecx, Factory::undefined_value()); 4203 __ mov(ecx, Factory::undefined_value());
4073 exit.Bind(); 4204 __ bind(&exit);
4074 frame_->EmitPush(ecx); 4205 return Result(ecx);
4075 4206
4076 } else if (slot->type() == Slot::PARAMETER) { 4207 } else if (slot->type() == Slot::PARAMETER) {
4077 frame_->PushParameterAt(slot->index()); 4208 frame()->PushParameterAt(slot->index());
4209 return frame()->Pop();
4078 4210
4079 } else if (slot->type() == Slot::LOCAL) { 4211 } else if (slot->type() == Slot::LOCAL) {
4080 frame_->PushLocalAt(slot->index()); 4212 frame()->PushLocalAt(slot->index());
4213 return frame()->Pop();
4081 4214
4082 } else { 4215 } else {
4083 // The other remaining slot types (LOOKUP and GLOBAL) cannot reach 4216 // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
4084 // here. 4217 // here.
4085 // 4218 //
4086 // The use of SlotOperand below is safe for an unspilled frame 4219 // The use of SlotOperand below is safe for an unspilled frame
4087 // because it will always be a context slot. 4220 // because it will always be a context slot.
4088 ASSERT(slot->type() == Slot::CONTEXT); 4221 ASSERT(slot->type() == Slot::CONTEXT);
4089 Result temp = allocator_->Allocate(); 4222 result = allocator()->Allocate();
4090 ASSERT(temp.is_valid()); 4223 ASSERT(result.is_valid());
4091 __ mov(temp.reg(), SlotOperand(slot, temp.reg())); 4224 __ mov(result.reg(), SlotOperand(slot, result.reg()));
4092 frame_->Push(&temp); 4225 return result;
4093 } 4226 }
4094 } 4227 }
4095 4228
4096 4229
4097 void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot, 4230 Result CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
4098 TypeofState state) { 4231 TypeofState state) {
4099 LoadFromSlot(slot, state); 4232 Result result = LoadFromSlot(slot, state);
4100 4233
4101 // Bail out quickly if we're not using lazy arguments allocation. 4234 // Bail out quickly if we're not using lazy arguments allocation.
4102 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return; 4235 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return result;
4103 4236
4104 // ... or if the slot isn't a non-parameter arguments slot. 4237 // ... or if the slot isn't a non-parameter arguments slot.
4105 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return; 4238 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return result;
4106
4107 // Pop the loaded value from the stack.
4108 Result value = frame_->Pop();
4109 4239
4110 // If the loaded value is a constant, we know if the arguments 4240 // If the loaded value is a constant, we know if the arguments
4111 // object has been lazily loaded yet. 4241 // object has been lazily loaded yet.
4112 if (value.is_constant()) { 4242 if (result.is_constant()) {
4113 if (value.handle()->IsTheHole()) { 4243 if (result.handle()->IsTheHole()) {
4114 Result arguments = StoreArgumentsObject(false); 4244 result.Unuse();
4115 frame_->Push(&arguments); 4245 return StoreArgumentsObject(false);
4116 } else { 4246 } else {
4117 frame_->Push(&value); 4247 return result;
4118 } 4248 }
4119 return;
4120 } 4249 }
4121 4250
4122 // The loaded value is in a register. If it is the sentinel that 4251 // The loaded value is in a register. If it is the sentinel that
4123 // indicates that we haven't loaded the arguments object yet, we 4252 // indicates that we haven't loaded the arguments object yet, we
4124 // need to do it now. 4253 // need to do it now.
4125 JumpTarget exit; 4254 JumpTarget exit;
4126 __ cmp(Operand(value.reg()), Immediate(Factory::the_hole_value())); 4255 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
4127 frame_->Push(&value); 4256 exit.Branch(not_equal, &result);
4128 exit.Branch(not_equal); 4257
4129 Result arguments = StoreArgumentsObject(false); 4258 result.Unuse();
4130 frame_->SetElementAt(0, &arguments); 4259 result = StoreArgumentsObject(false);
4131 exit.Bind(); 4260 exit.Bind(&result);
4261 return result;
4132 } 4262 }
4133 4263
4134 4264
4135 Result CodeGenerator::LoadFromGlobalSlotCheckExtensions( 4265 Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
4136 Slot* slot, 4266 Slot* slot,
4137 TypeofState typeof_state, 4267 TypeofState typeof_state,
4138 JumpTarget* slow) { 4268 JumpTarget* slow) {
4139 // Check that no extension objects have been created by calls to 4269 // Check that no extension objects have been created by calls to
4140 // eval from the current scope to the global scope. 4270 // eval from the current scope to the global scope.
4141 Register context = esi; 4271 Register context = esi;
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
4181 // Load next context in chain. 4311 // Load next context in chain.
4182 __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX)); 4312 __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
4183 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset)); 4313 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
4184 __ jmp(&next); 4314 __ jmp(&next);
4185 __ bind(&fast); 4315 __ bind(&fast);
4186 } 4316 }
4187 tmp.Unuse(); 4317 tmp.Unuse();
4188 4318
4189 // All extension objects were empty and it is safe to use a global 4319 // All extension objects were empty and it is safe to use a global
4190 // load IC call. 4320 // load IC call.
4321 // The register allocator prefers eax if it is free, so the code generator
4322 // will load the global object directly into eax, which is where the LoadIC
4323 // expects it.
4324 frame_->Spill(eax);
4191 LoadGlobal(); 4325 LoadGlobal();
4192 frame_->Push(slot->var()->name()); 4326 frame_->Push(slot->var()->name());
4193 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) 4327 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
4194 ? RelocInfo::CODE_TARGET 4328 ? RelocInfo::CODE_TARGET
4195 : RelocInfo::CODE_TARGET_CONTEXT; 4329 : RelocInfo::CODE_TARGET_CONTEXT;
4196 Result answer = frame_->CallLoadIC(mode); 4330 Result answer = frame_->CallLoadIC(mode);
4197 // A test eax instruction following the call signals that the inobject 4331 // A test eax instruction following the call signals that the inobject
4198 // property case was inlined. Ensure that there is not a test eax 4332 // property case was inlined. Ensure that there is not a test eax
4199 // instruction here. 4333 // instruction here.
4200 __ nop(); 4334 __ nop();
4201 // Discard the global object. The result is in answer.
4202 frame_->Drop();
4203 return answer; 4335 return answer;
4204 } 4336 }
4205 4337
4206 4338
4207 void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) { 4339 void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
4208 if (slot->type() == Slot::LOOKUP) { 4340 if (slot->type() == Slot::LOOKUP) {
4209 ASSERT(slot->var()->is_dynamic()); 4341 ASSERT(slot->var()->is_dynamic());
4210 4342
4211 // For now, just do a runtime call. Since the call is inevitable, 4343 // For now, just do a runtime call. Since the call is inevitable,
4212 // we eagerly sync the virtual frame so we can directly push the 4344 // we eagerly sync the virtual frame so we can directly push the
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
4297 // scope. 4429 // scope.
4298 } 4430 }
4299 4431
4300 exit.Bind(); 4432 exit.Bind();
4301 } 4433 }
4302 } 4434 }
4303 4435
4304 4436
4305 void CodeGenerator::VisitSlot(Slot* node) { 4437 void CodeGenerator::VisitSlot(Slot* node) {
4306 Comment cmnt(masm_, "[ Slot"); 4438 Comment cmnt(masm_, "[ Slot");
4307 LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF); 4439 Result result = LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
4440 frame()->Push(&result);
4308 } 4441 }
4309 4442
4310 4443
4311 void CodeGenerator::VisitVariableProxy(VariableProxy* node) { 4444 void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
4312 Comment cmnt(masm_, "[ VariableProxy"); 4445 Comment cmnt(masm_, "[ VariableProxy");
4313 Variable* var = node->var(); 4446 Variable* var = node->var();
4314 Expression* expr = var->rewrite(); 4447 Expression* expr = var->rewrite();
4315 if (expr != NULL) { 4448 if (expr != NULL) {
4316 Visit(expr); 4449 Visit(expr);
4317 } else { 4450 } else {
(...skipping 149 matching lines...) Expand 10 before | Expand all | Expand 10 after
4467 break; 4600 break;
4468 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 4601 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
4469 if (CompileTimeValue::IsCompileTimeValue(property->value())) break; 4602 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
4470 // else fall through. 4603 // else fall through.
4471 case ObjectLiteral::Property::COMPUTED: { 4604 case ObjectLiteral::Property::COMPUTED: {
4472 Handle<Object> key(property->key()->handle()); 4605 Handle<Object> key(property->key()->handle());
4473 if (key->IsSymbol()) { 4606 if (key->IsSymbol()) {
4474 // Duplicate the object as the IC receiver. 4607 // Duplicate the object as the IC receiver.
4475 frame_->Dup(); 4608 frame_->Dup();
4476 Load(property->value()); 4609 Load(property->value());
4477 frame_->Push(key); 4610 Result dummy = frame_->CallStoreIC(Handle<String>::cast(key), false);
4478 Result ignored = frame_->CallStoreIC(); 4611 dummy.Unuse();
4479 break; 4612 break;
4480 } 4613 }
4481 // Fall through 4614 // Fall through
4482 } 4615 }
4483 case ObjectLiteral::Property::PROTOTYPE: { 4616 case ObjectLiteral::Property::PROTOTYPE: {
4484 // Duplicate the object as an argument to the runtime call. 4617 // Duplicate the object as an argument to the runtime call.
4485 frame_->Dup(); 4618 frame_->Dup();
4486 Load(property->key()); 4619 Load(property->key());
4487 Load(property->value()); 4620 Load(property->value());
4488 Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3); 4621 Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
(...skipping 103 matching lines...) Expand 10 before | Expand all | Expand 10 after
4592 // assign the exception value to the catch variable. 4725 // assign the exception value to the catch variable.
4593 Comment cmnt(masm_, "[ CatchExtensionObject"); 4726 Comment cmnt(masm_, "[ CatchExtensionObject");
4594 Load(node->key()); 4727 Load(node->key());
4595 Load(node->value()); 4728 Load(node->value());
4596 Result result = 4729 Result result =
4597 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2); 4730 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
4598 frame_->Push(&result); 4731 frame_->Push(&result);
4599 } 4732 }
4600 4733
4601 4734
4735 void CodeGenerator::EmitSlotAssignment(Assignment* node) {
4736 #ifdef DEBUG
4737 int original_height = frame()->height();
4738 #endif
4739 Comment cmnt(masm(), "[ Variable Assignment");
4740 Variable* var = node->target()->AsVariableProxy()->AsVariable();
4741 ASSERT(var != NULL);
4742 Slot* slot = var->slot();
4743 ASSERT(slot != NULL);
4744
4745 // Evaluate the right-hand side.
4746 if (node->is_compound()) {
4747 Result result = LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
4748 frame()->Push(&result);
4749 Load(node->value());
4750
4751 bool overwrite_value =
4752 (node->value()->AsBinaryOperation() != NULL &&
4753 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
4754 GenericBinaryOperation(node->binary_op(),
4755 node->type(),
4756 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
4757 } else {
4758 Load(node->value());
4759 }
4760
4761 // Perform the assignment.
4762 if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
4763 CodeForSourcePosition(node->position());
4764 StoreToSlot(slot,
4765 node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
4766 }
4767 ASSERT(frame()->height() == original_height + 1);
4768 }
4769
4770
4771 void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
4772 #ifdef DEBUG
4773 int original_height = frame()->height();
4774 #endif
4775 Comment cmnt(masm(), "[ Named Property Assignment");
4776 Variable* var = node->target()->AsVariableProxy()->AsVariable();
4777 Property* prop = node->target()->AsProperty();
4778 ASSERT(var == NULL || (prop == NULL && var->is_global()));
4779
4780 // Initialize name and evaluate the receiver subexpression if necessary.
4781 Handle<String> name;
4782 bool is_trivial_receiver = false;
4783 if (var != NULL) {
4784 name = var->name();
4785 } else {
4786 Literal* lit = prop->key()->AsLiteral();
4787 ASSERT_NOT_NULL(lit);
4788 name = Handle<String>::cast(lit->handle());
4789 // Do not materialize the receiver on the frame if it is trivial.
4790 is_trivial_receiver = prop->obj()->IsTrivial();
4791 if (!is_trivial_receiver) Load(prop->obj());
4792 }
4793
4794 if (node->starts_initialization_block()) {
4795 ASSERT_EQ(NULL, var);
4796 // Change to slow case in the beginning of an initialization block to
4797 // avoid the quadratic behavior of repeatedly adding fast properties.
4798 if (is_trivial_receiver) {
4799 frame()->Push(prop->obj());
4800 } else {
4801 frame()->Dup();
4802 }
4803 Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1);
4804 }
4805
4806 if (node->ends_initialization_block() && !is_trivial_receiver) {
4807 // Add an extra copy of the receiver to the frame, so that it can be
4808 // converted back to fast case after the assignment.
4809 frame()->Dup();
4810 }
4811
4812 // Evaluate the right-hand side.
4813 if (node->is_compound()) {
4814 if (is_trivial_receiver) {
4815 frame()->Push(prop->obj());
4816 } else if (var != NULL) {
4817 // The LoadIC stub expects the object in eax.
4818 // Freeing eax causes the code generator to load the global into it.
4819 frame_->Spill(eax);
4820 LoadGlobal();
4821 } else {
4822 frame()->Dup();
4823 }
4824 Result value = EmitNamedLoad(name, var != NULL);
4825 frame()->Push(&value);
4826 Load(node->value());
4827
4828 bool overwrite_value =
4829 (node->value()->AsBinaryOperation() != NULL &&
4830 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
4831 GenericBinaryOperation(node->binary_op(),
4832 node->type(),
4833 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
4834 } else {
4835 Load(node->value());
4836 }
4837
4838 // Perform the assignment. It is safe to ignore constants here.
4839 ASSERT(var == NULL || var->mode() != Variable::CONST);
4840 ASSERT_NE(Token::INIT_CONST, node->op());
4841 if (is_trivial_receiver) {
4842 Result value = frame()->Pop();
4843 frame()->Push(prop->obj());
4844 frame()->Push(&value);
4845 }
4846 CodeForSourcePosition(node->position());
4847 bool is_contextual = (var != NULL);
4848 Result answer = EmitNamedStore(name, is_contextual);
4849 frame()->Push(&answer);
4850
4851 if (node->ends_initialization_block()) {
4852 ASSERT_EQ(NULL, var);
4853 // The argument to the runtime call is the receiver.
4854 if (is_trivial_receiver) {
4855 frame()->Push(prop->obj());
4856 } else {
4857 // A copy of the receiver is below the value of the assignment. Swap
4858 // the receiver and the value of the assignment expression.
4859 Result result = frame()->Pop();
4860 Result receiver = frame()->Pop();
4861 frame()->Push(&result);
4862 frame()->Push(&receiver);
4863 }
4864 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
4865 }
4866
4867 ASSERT_EQ(frame()->height(), original_height + 1);
4868 }
4869
4870
4871 void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
4872 #ifdef DEBUG
4873 int original_height = frame()->height();
4874 #endif
4875 Comment cmnt(masm_, "[ Named Property Assignment");
4876 Property* prop = node->target()->AsProperty();
4877 ASSERT_NOT_NULL(prop);
4878
4879 // Evaluate the receiver subexpression.
4880 Load(prop->obj());
4881
4882 if (node->starts_initialization_block()) {
4883 // Change to slow case in the beginning of an initialization block to
4884 // avoid the quadratic behavior of repeatedly adding fast properties.
4885 frame_->Dup();
4886 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
4887 }
4888
4889 if (node->ends_initialization_block()) {
4890 // Add an extra copy of the receiver to the frame, so that it can be
4891 // converted back to fast case after the assignment.
4892 frame_->Dup();
4893 }
4894
4895 // Evaluate the key subexpression.
4896 Load(prop->key());
4897
4898 // Evaluate the right-hand side.
4899 if (node->is_compound()) {
4900 // Duplicate receiver and key.
4901 frame()->PushElementAt(1);
4902 frame()->PushElementAt(1);
4903 Result value = EmitKeyedLoad();
4904 frame()->Push(&value);
4905 Load(node->value());
4906
4907 bool overwrite_value =
4908 (node->value()->AsBinaryOperation() != NULL &&
4909 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
4910 GenericBinaryOperation(node->binary_op(),
4911 node->type(),
4912 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
4913 } else {
4914 Load(node->value());
4915 }
4916
4917 // Perform the assignment. It is safe to ignore constants here.
4918 ASSERT(node->op() != Token::INIT_CONST);
4919 CodeForSourcePosition(node->position());
4920 Result answer = EmitKeyedStore(prop->key()->type());
4921 frame()->Push(&answer);
4922
4923 if (node->ends_initialization_block()) {
4924 // The argument to the runtime call is the extra copy of the receiver,
4925 // which is below the value of the assignment. Swap the receiver and
4926 // the value of the assignment expression.
4927 Result result = frame()->Pop();
4928 Result receiver = frame()->Pop();
4929 frame()->Push(&result);
4930 frame()->Push(&receiver);
4931 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
4932 }
4933
4934 ASSERT(frame()->height() == original_height + 1);
4935 }
4936
4937
4602 void CodeGenerator::VisitAssignment(Assignment* node) { 4938 void CodeGenerator::VisitAssignment(Assignment* node) {
4603 #ifdef DEBUG 4939 #ifdef DEBUG
4604 int original_height = frame_->height(); 4940 int original_height = frame()->height();
4605 #endif 4941 #endif
4606 Comment cmnt(masm_, "[ Assignment"); 4942 Variable* var = node->target()->AsVariableProxy()->AsVariable();
4607 4943 Property* prop = node->target()->AsProperty();
4608 { Reference target(this, node->target(), node->is_compound()); 4944
4609 if (target.is_illegal()) { 4945 if (var != NULL && !var->is_global()) {
4610 // Fool the virtual frame into thinking that we left the assignment's 4946 EmitSlotAssignment(node);
4611 // value on the frame. 4947
4612 frame_->Push(Smi::FromInt(0)); 4948 } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
4613 return; 4949 (var != NULL && var->is_global())) {
4614 } 4950 // Properties whose keys are property names and global variables are
4615 Variable* var = node->target()->AsVariableProxy()->AsVariable(); 4951 // treated as named property references. We do not need to consider
4616 4952 // global 'this' because it is not a valid left-hand side.
4617 if (node->starts_initialization_block()) { 4953 EmitNamedPropertyAssignment(node);
4618 ASSERT(target.type() == Reference::NAMED || 4954
4619 target.type() == Reference::KEYED); 4955 } else if (prop != NULL) {
4620 // Change to slow case in the beginning of an initialization 4956 // Other properties (including rewritten parameters for a function that
4621 // block to avoid the quadratic behavior of repeatedly adding 4957 // uses arguments) are keyed property assignments.
4622 // fast properties. 4958 EmitKeyedPropertyAssignment(node);
4623 4959
4624 // The receiver is the argument to the runtime call. It is the 4960 } else {
4625 // first value pushed when the reference was loaded to the 4961 // Invalid left-hand side.
4626 // frame. 4962 Load(node->target());
4627 frame_->PushElementAt(target.size() - 1); 4963 Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1);
4628 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1); 4964 // The runtime call doesn't actually return but the code generator will
4629 } 4965 // still generate code and expects a certain frame height.
4630 if (node->ends_initialization_block()) { 4966 frame()->Push(&result);
4631 // Add an extra copy of the receiver to the frame, so that it can be 4967 }
4632 // converted back to fast case after the assignment. 4968
4633 ASSERT(target.type() == Reference::NAMED || 4969 ASSERT(frame()->height() == original_height + 1);
4634 target.type() == Reference::KEYED);
4635 if (target.type() == Reference::NAMED) {
4636 frame_->Dup();
4637 // Dup target receiver on stack.
4638 } else {
4639 ASSERT(target.type() == Reference::KEYED);
4640 Result temp = frame_->Pop();
4641 frame_->Dup();
4642 frame_->Push(&temp);
4643 }
4644 }
4645 if (node->op() == Token::ASSIGN ||
4646 node->op() == Token::INIT_VAR ||
4647 node->op() == Token::INIT_CONST) {
4648 Load(node->value());
4649
4650 } else { // Assignment is a compound assignment.
4651 Literal* literal = node->value()->AsLiteral();
4652 bool overwrite_value =
4653 (node->value()->AsBinaryOperation() != NULL &&
4654 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
4655 Variable* right_var = node->value()->AsVariableProxy()->AsVariable();
4656 // There are two cases where the target is not read in the right hand
4657 // side, that are easy to test for: the right hand side is a literal,
4658 // or the right hand side is a different variable. TakeValue invalidates
4659 // the target, with an implicit promise that it will be written to again
4660 // before it is read.
4661 if (literal != NULL || (right_var != NULL && right_var != var)) {
4662 target.TakeValue();
4663 } else {
4664 target.GetValue();
4665 }
4666 Load(node->value());
4667 GenericBinaryOperation(node->binary_op(),
4668 node->type(),
4669 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
4670 }
4671
4672 if (var != NULL &&
4673 var->mode() == Variable::CONST &&
4674 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
4675 // Assignment ignored - leave the value on the stack.
4676 UnloadReference(&target);
4677 } else {
4678 CodeForSourcePosition(node->position());
4679 if (node->op() == Token::INIT_CONST) {
4680 // Dynamic constant initializations must use the function context
4681 // and initialize the actual constant declared. Dynamic variable
4682 // initializations are simply assignments and use SetValue.
4683 target.SetValue(CONST_INIT);
4684 } else {
4685 target.SetValue(NOT_CONST_INIT);
4686 }
4687 if (node->ends_initialization_block()) {
4688 ASSERT(target.type() == Reference::UNLOADED);
4689 // End of initialization block. Revert to fast case. The
4690 // argument to the runtime call is the extra copy of the receiver,
4691 // which is below the value of the assignment.
4692 // Swap the receiver and the value of the assignment expression.
4693 Result lhs = frame_->Pop();
4694 Result receiver = frame_->Pop();
4695 frame_->Push(&lhs);
4696 frame_->Push(&receiver);
4697 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
4698 }
4699 }
4700 }
4701 ASSERT(frame_->height() == original_height + 1);
4702 } 4970 }
4703 4971
4704 4972
4705 void CodeGenerator::VisitThrow(Throw* node) { 4973 void CodeGenerator::VisitThrow(Throw* node) {
4706 Comment cmnt(masm_, "[ Throw"); 4974 Comment cmnt(masm_, "[ Throw");
4707 Load(node->exception()); 4975 Load(node->exception());
4708 Result result = frame_->CallRuntime(Runtime::kThrow, 1); 4976 Result result = frame_->CallRuntime(Runtime::kThrow, 1);
4709 frame_->Push(&result); 4977 frame_->Push(&result);
4710 } 4978 }
4711 4979
(...skipping 184 matching lines...) Expand 10 before | Expand all | Expand 10 after
4896 // Load the function to call from the property through a reference. 5164 // Load the function to call from the property through a reference.
4897 5165
4898 // Pass receiver to called function. 5166 // Pass receiver to called function.
4899 if (property->is_synthetic()) { 5167 if (property->is_synthetic()) {
4900 Reference ref(this, property); 5168 Reference ref(this, property);
4901 ref.GetValue(); 5169 ref.GetValue();
4902 // Use global object as receiver. 5170 // Use global object as receiver.
4903 LoadGlobalReceiver(); 5171 LoadGlobalReceiver();
4904 } else { 5172 } else {
4905 Load(property->obj()); 5173 Load(property->obj());
5174 frame()->Dup();
4906 Load(property->key()); 5175 Load(property->key());
4907 Result function = EmitKeyedLoad(false); 5176 Result function = EmitKeyedLoad();
4908 frame_->Drop(); // Key.
4909 Result receiver = frame_->Pop(); 5177 Result receiver = frame_->Pop();
4910 frame_->Push(&function); 5178 frame_->Push(&function);
4911 frame_->Push(&receiver); 5179 frame_->Push(&receiver);
4912 } 5180 }
4913 5181
4914 // Call the function. 5182 // Call the function.
4915 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position()); 5183 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
4916 } 5184 }
4917 5185
4918 } else { 5186 } else {
(...skipping 247 matching lines...) Expand 10 before | Expand all | Expand 10 after
5166 Result temp = allocator()->Allocate(); 5434 Result temp = allocator()->Allocate();
5167 ASSERT(temp.is_valid()); 5435 ASSERT(temp.is_valid());
5168 // Check if the object is a JS array or not. 5436 // Check if the object is a JS array or not.
5169 __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg()); 5437 __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg());
5170 value.Unuse(); 5438 value.Unuse();
5171 temp.Unuse(); 5439 temp.Unuse();
5172 destination()->Split(equal); 5440 destination()->Split(equal);
5173 } 5441 }
5174 5442
5175 5443
5444 void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
5445 ASSERT(args->length() == 1);
5446 Load(args->at(0));
5447 Result value = frame_->Pop();
5448 value.ToRegister();
5449 ASSERT(value.is_valid());
5450 __ test(value.reg(), Immediate(kSmiTagMask));
5451 destination()->false_target()->Branch(equal);
5452 // It is a heap object - get map.
5453 Result temp = allocator()->Allocate();
5454 ASSERT(temp.is_valid());
5455 // Check if the object is a regexp.
5456 __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, temp.reg());
5457 value.Unuse();
5458 temp.Unuse();
5459 destination()->Split(equal);
5460 }
5461
5462
5176 void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) { 5463 void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
5177 // This generates a fast version of: 5464 // This generates a fast version of:
5178 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp') 5465 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
5179 ASSERT(args->length() == 1); 5466 ASSERT(args->length() == 1);
5180 Load(args->at(0)); 5467 Load(args->at(0));
5181 Result obj = frame_->Pop(); 5468 Result obj = frame_->Pop();
5182 obj.ToRegister(); 5469 obj.ToRegister();
5183 5470
5184 __ test(obj.reg(), Immediate(kSmiTagMask)); 5471 __ test(obj.reg(), Immediate(kSmiTagMask));
5185 destination()->false_target()->Branch(zero); 5472 destination()->false_target()->Branch(zero);
(...skipping 334 matching lines...) Expand 10 before | Expand all | Expand 10 after
5520 Load(args->at(0)); 5807 Load(args->at(0));
5521 Load(args->at(1)); 5808 Load(args->at(1));
5522 Load(args->at(2)); 5809 Load(args->at(2));
5523 Load(args->at(3)); 5810 Load(args->at(3));
5524 RegExpExecStub stub; 5811 RegExpExecStub stub;
5525 Result result = frame_->CallStub(&stub, 4); 5812 Result result = frame_->CallStub(&stub, 4);
5526 frame_->Push(&result); 5813 frame_->Push(&result);
5527 } 5814 }
5528 5815
5529 5816
5817 void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
5818 ASSERT_EQ(args->length(), 1);
5819
5820 // Load the argument on the stack and call the stub.
5821 Load(args->at(0));
5822 NumberToStringStub stub;
5823 Result result = frame_->CallStub(&stub, 1);
5824 frame_->Push(&result);
5825 }
5826
5827
5828 void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
5829 ASSERT_EQ(args->length(), 1);
5830 Load(args->at(0));
5831 TranscendentalCacheStub stub(TranscendentalCache::SIN);
5832 Result result = frame_->CallStub(&stub, 1);
5833 frame_->Push(&result);
5834 }
5835
5836
5837 void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
5838 ASSERT_EQ(args->length(), 1);
5839 Load(args->at(0));
5840 TranscendentalCacheStub stub(TranscendentalCache::COS);
5841 Result result = frame_->CallStub(&stub, 1);
5842 frame_->Push(&result);
5843 }
5844
5845
5530 void CodeGenerator::VisitCallRuntime(CallRuntime* node) { 5846 void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
5531 if (CheckForInlineRuntimeCall(node)) { 5847 if (CheckForInlineRuntimeCall(node)) {
5532 return; 5848 return;
5533 } 5849 }
5534 5850
5535 ZoneList<Expression*>* args = node->arguments(); 5851 ZoneList<Expression*>* args = node->arguments();
5536 Comment cmnt(masm_, "[ CallRuntime"); 5852 Comment cmnt(masm_, "[ CallRuntime");
5537 Runtime::Function* function = node->function(); 5853 Runtime::Function* function = node->function();
5538 5854
5539 if (function == NULL) { 5855 if (function == NULL) {
(...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after
5654 } 5970 }
5655 5971
5656 } else { 5972 } else {
5657 Load(node->expression()); 5973 Load(node->expression());
5658 bool overwrite = 5974 bool overwrite =
5659 (node->expression()->AsBinaryOperation() != NULL && 5975 (node->expression()->AsBinaryOperation() != NULL &&
5660 node->expression()->AsBinaryOperation()->ResultOverwriteAllowed()); 5976 node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
5661 switch (op) { 5977 switch (op) {
5662 case Token::SUB: { 5978 case Token::SUB: {
5663 GenericUnaryOpStub stub(Token::SUB, overwrite); 5979 GenericUnaryOpStub stub(Token::SUB, overwrite);
5664 // TODO(1222589): remove dependency of TOS being cached inside stub
5665 Result operand = frame_->Pop(); 5980 Result operand = frame_->Pop();
5666 Result answer = frame_->CallStub(&stub, &operand); 5981 Result answer = frame_->CallStub(&stub, &operand);
5667 frame_->Push(&answer); 5982 frame_->Push(&answer);
5668 break; 5983 break;
5669 } 5984 }
5670 5985
5671 case Token::BIT_NOT: { 5986 case Token::BIT_NOT: {
5672 // Smi check. 5987 // Smi check.
5673 JumpTarget smi_label; 5988 JumpTarget smi_label;
5674 JumpTarget continue_label; 5989 JumpTarget continue_label;
(...skipping 429 matching lines...) Expand 10 before | Expand all | Expand 10 after
6104 __ test(answer.reg(), Immediate(kSmiTagMask)); 6419 __ test(answer.reg(), Immediate(kSmiTagMask));
6105 destination()->false_target()->Branch(zero); 6420 destination()->false_target()->Branch(zero);
6106 6421
6107 // It can be an undetectable string object. 6422 // It can be an undetectable string object.
6108 Result temp = allocator()->Allocate(); 6423 Result temp = allocator()->Allocate();
6109 ASSERT(temp.is_valid()); 6424 ASSERT(temp.is_valid());
6110 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); 6425 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
6111 __ movzx_b(temp.reg(), FieldOperand(temp.reg(), Map::kBitFieldOffset)); 6426 __ movzx_b(temp.reg(), FieldOperand(temp.reg(), Map::kBitFieldOffset));
6112 __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable)); 6427 __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable));
6113 destination()->false_target()->Branch(not_zero); 6428 destination()->false_target()->Branch(not_zero);
6114 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); 6429 __ CmpObjectType(answer.reg(), FIRST_NONSTRING_TYPE, temp.reg());
6115 __ movzx_b(temp.reg(),
6116 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
6117 __ cmp(temp.reg(), FIRST_NONSTRING_TYPE);
6118 temp.Unuse(); 6430 temp.Unuse();
6119 answer.Unuse(); 6431 answer.Unuse();
6120 destination()->Split(less); 6432 destination()->Split(below);
6121 6433
6122 } else if (check->Equals(Heap::boolean_symbol())) { 6434 } else if (check->Equals(Heap::boolean_symbol())) {
6123 __ cmp(answer.reg(), Factory::true_value()); 6435 __ cmp(answer.reg(), Factory::true_value());
6124 destination()->true_target()->Branch(equal); 6436 destination()->true_target()->Branch(equal);
6125 __ cmp(answer.reg(), Factory::false_value()); 6437 __ cmp(answer.reg(), Factory::false_value());
6126 answer.Unuse(); 6438 answer.Unuse();
6127 destination()->Split(equal); 6439 destination()->Split(equal);
6128 6440
6129 } else if (check->Equals(Heap::undefined_symbol())) { 6441 } else if (check->Equals(Heap::undefined_symbol())) {
6130 __ cmp(answer.reg(), Factory::undefined_value()); 6442 __ cmp(answer.reg(), Factory::undefined_value());
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after
6270 return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0)) 6582 return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0))
6271 && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0)) 6583 && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0))
6272 && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0)) 6584 && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0))
6273 && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0)) 6585 && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0))
6274 && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0)); 6586 && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0));
6275 } 6587 }
6276 #endif 6588 #endif
6277 6589
6278 6590
6279 // Emit a LoadIC call to get the value from receiver and leave it in 6591 // Emit a LoadIC call to get the value from receiver and leave it in
6280 // dst. The receiver register is restored after the call. 6592 // dst.
6281 class DeferredReferenceGetNamedValue: public DeferredCode { 6593 class DeferredReferenceGetNamedValue: public DeferredCode {
6282 public: 6594 public:
6283 DeferredReferenceGetNamedValue(Register dst, 6595 DeferredReferenceGetNamedValue(Register dst,
6284 Register receiver, 6596 Register receiver,
6285 Handle<String> name) 6597 Handle<String> name)
6286 : dst_(dst), receiver_(receiver), name_(name) { 6598 : dst_(dst), receiver_(receiver), name_(name) {
6287 set_comment("[ DeferredReferenceGetNamedValue"); 6599 set_comment("[ DeferredReferenceGetNamedValue");
6288 } 6600 }
6289 6601
6290 virtual void Generate(); 6602 virtual void Generate();
6291 6603
6292 Label* patch_site() { return &patch_site_; } 6604 Label* patch_site() { return &patch_site_; }
6293 6605
6294 private: 6606 private:
6295 Label patch_site_; 6607 Label patch_site_;
6296 Register dst_; 6608 Register dst_;
6297 Register receiver_; 6609 Register receiver_;
6298 Handle<String> name_; 6610 Handle<String> name_;
6299 }; 6611 };
6300 6612
6301 6613
6302 void DeferredReferenceGetNamedValue::Generate() { 6614 void DeferredReferenceGetNamedValue::Generate() {
6303 __ push(receiver_); 6615 if (!receiver_.is(eax)) {
6616 __ mov(eax, receiver_);
6617 }
6304 __ Set(ecx, Immediate(name_)); 6618 __ Set(ecx, Immediate(name_));
6305 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); 6619 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
6306 __ call(ic, RelocInfo::CODE_TARGET); 6620 __ call(ic, RelocInfo::CODE_TARGET);
6307 // The call must be followed by a test eax instruction to indicate 6621 // The call must be followed by a test eax instruction to indicate
6308 // that the inobject property case was inlined. 6622 // that the inobject property case was inlined.
6309 // 6623 //
6310 // Store the delta to the map check instruction here in the test 6624 // Store the delta to the map check instruction here in the test
6311 // instruction. Use masm_-> instead of the __ macro since the 6625 // instruction. Use masm_-> instead of the __ macro since the
6312 // latter can't return a value. 6626 // latter can't return a value.
6313 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); 6627 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
6314 // Here we use masm_-> instead of the __ macro because this is the 6628 // Here we use masm_-> instead of the __ macro because this is the
6315 // instruction that gets patched and coverage code gets in the way. 6629 // instruction that gets patched and coverage code gets in the way.
6316 masm_->test(eax, Immediate(-delta_to_patch_site)); 6630 masm_->test(eax, Immediate(-delta_to_patch_site));
6317 __ IncrementCounter(&Counters::named_load_inline_miss, 1); 6631 __ IncrementCounter(&Counters::named_load_inline_miss, 1);
6318 6632
6319 if (!dst_.is(eax)) __ mov(dst_, eax); 6633 if (!dst_.is(eax)) __ mov(dst_, eax);
6320 __ pop(receiver_);
6321 } 6634 }
6322 6635
6323 6636
6324 class DeferredReferenceGetKeyedValue: public DeferredCode { 6637 class DeferredReferenceGetKeyedValue: public DeferredCode {
6325 public: 6638 public:
6326 explicit DeferredReferenceGetKeyedValue(Register dst, 6639 explicit DeferredReferenceGetKeyedValue(Register dst,
6327 Register receiver, 6640 Register receiver,
6328 Register key, 6641 Register key)
6329 bool is_global) 6642 : dst_(dst), receiver_(receiver), key_(key) {
6330 : dst_(dst), receiver_(receiver), key_(key), is_global_(is_global) {
6331 set_comment("[ DeferredReferenceGetKeyedValue"); 6643 set_comment("[ DeferredReferenceGetKeyedValue");
6332 } 6644 }
6333 6645
6334 virtual void Generate(); 6646 virtual void Generate();
6335 6647
6336 Label* patch_site() { return &patch_site_; } 6648 Label* patch_site() { return &patch_site_; }
6337 6649
6338 private: 6650 private:
6339 Label patch_site_; 6651 Label patch_site_;
6340 Register dst_; 6652 Register dst_;
6341 Register receiver_; 6653 Register receiver_;
6342 Register key_; 6654 Register key_;
6343 bool is_global_;
6344 }; 6655 };
6345 6656
6346 6657
6347 void DeferredReferenceGetKeyedValue::Generate() { 6658 void DeferredReferenceGetKeyedValue::Generate() {
6348 __ push(receiver_); // First IC argument. 6659 if (!receiver_.is(eax)) {
6349 __ push(key_); // Second IC argument. 6660 // Register eax is available for key.
6350 6661 if (!key_.is(eax)) {
6662 __ mov(eax, key_);
6663 }
6664 if (!receiver_.is(edx)) {
6665 __ mov(edx, receiver_);
6666 }
6667 } else if (!key_.is(edx)) {
6668 // Register edx is available for receiver.
6669 if (!receiver_.is(edx)) {
6670 __ mov(edx, receiver_);
6671 }
6672 if (!key_.is(eax)) {
6673 __ mov(eax, key_);
6674 }
6675 } else {
6676 __ xchg(edx, eax);
6677 }
6351 // Calculate the delta from the IC call instruction to the map check 6678 // Calculate the delta from the IC call instruction to the map check
6352 // cmp instruction in the inlined version. This delta is stored in 6679 // cmp instruction in the inlined version. This delta is stored in
6353 // a test(eax, delta) instruction after the call so that we can find 6680 // a test(eax, delta) instruction after the call so that we can find
6354 // it in the IC initialization code and patch the cmp instruction. 6681 // it in the IC initialization code and patch the cmp instruction.
6355 // This means that we cannot allow test instructions after calls to 6682 // This means that we cannot allow test instructions after calls to
6356 // KeyedLoadIC stubs in other places. 6683 // KeyedLoadIC stubs in other places.
6357 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); 6684 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
6358 RelocInfo::Mode mode = is_global_ 6685 __ call(ic, RelocInfo::CODE_TARGET);
6359 ? RelocInfo::CODE_TARGET_CONTEXT
6360 : RelocInfo::CODE_TARGET;
6361 __ call(ic, mode);
6362 // The delta from the start of the map-compare instruction to the 6686 // The delta from the start of the map-compare instruction to the
6363 // test instruction. We use masm_-> directly here instead of the __ 6687 // test instruction. We use masm_-> directly here instead of the __
6364 // macro because the macro sometimes uses macro expansion to turn 6688 // macro because the macro sometimes uses macro expansion to turn
6365 // into something that can't return a value. This is encountered 6689 // into something that can't return a value. This is encountered
6366 // when doing generated code coverage tests. 6690 // when doing generated code coverage tests.
6367 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); 6691 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
6368 // Here we use masm_-> instead of the __ macro because this is the 6692 // Here we use masm_-> instead of the __ macro because this is the
6369 // instruction that gets patched and coverage code gets in the way. 6693 // instruction that gets patched and coverage code gets in the way.
6370 masm_->test(eax, Immediate(-delta_to_patch_site)); 6694 masm_->test(eax, Immediate(-delta_to_patch_site));
6371 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1); 6695 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
6372 6696
6373 if (!dst_.is(eax)) __ mov(dst_, eax); 6697 if (!dst_.is(eax)) __ mov(dst_, eax);
6374 __ pop(key_);
6375 __ pop(receiver_);
6376 } 6698 }
6377 6699
6378 6700
6379 class DeferredReferenceSetKeyedValue: public DeferredCode { 6701 class DeferredReferenceSetKeyedValue: public DeferredCode {
6380 public: 6702 public:
6381 DeferredReferenceSetKeyedValue(Register value, 6703 DeferredReferenceSetKeyedValue(Register value,
6382 Register key, 6704 Register key,
6383 Register receiver) 6705 Register receiver)
6384 : value_(value), key_(key), receiver_(receiver) { 6706 : value_(value), key_(key), receiver_(receiver) {
6385 set_comment("[ DeferredReferenceSetKeyedValue"); 6707 set_comment("[ DeferredReferenceSetKeyedValue");
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
6417 // instruction that gets patched and coverage code gets in the way. 6739 // instruction that gets patched and coverage code gets in the way.
6418 masm_->test(eax, Immediate(-delta_to_patch_site)); 6740 masm_->test(eax, Immediate(-delta_to_patch_site));
6419 // Restore value (returned from store IC), key and receiver 6741 // Restore value (returned from store IC), key and receiver
6420 // registers. 6742 // registers.
6421 if (!value_.is(eax)) __ mov(value_, eax); 6743 if (!value_.is(eax)) __ mov(value_, eax);
6422 __ pop(key_); 6744 __ pop(key_);
6423 __ pop(receiver_); 6745 __ pop(receiver_);
6424 } 6746 }
6425 6747
6426 6748
6427 Result CodeGenerator::EmitKeyedLoad(bool is_global) { 6749 Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
6428 Comment cmnt(masm_, "[ Load from keyed Property"); 6750 #ifdef DEBUG
6429 // Inline array load code if inside of a loop. We do not know 6751 int original_height = frame()->height();
6430 // the receiver map yet, so we initially generate the code with 6752 #endif
6431 // a check against an invalid map. In the inline cache code, we 6753 Result result;
6432 // patch the map check if appropriate. 6754 // Do not inline the inobject property case for loads from the global
6755 // object. Also do not inline for unoptimized code. This saves time in
6756 // the code generator. Unoptimized code is toplevel code or code that is
6757 // not in a loop.
6758 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
6759 Comment cmnt(masm(), "[ Load from named Property");
6760 frame()->Push(name);
6761
6762 RelocInfo::Mode mode = is_contextual
6763 ? RelocInfo::CODE_TARGET_CONTEXT
6764 : RelocInfo::CODE_TARGET;
6765 result = frame()->CallLoadIC(mode);
6766 // A test eax instruction following the call signals that the inobject
6767 // property case was inlined. Ensure that there is not a test eax
6768 // instruction here.
6769 __ nop();
6770 } else {
6771 // Inline the inobject property case.
6772 Comment cmnt(masm(), "[ Inlined named property load");
6773 Result receiver = frame()->Pop();
6774 receiver.ToRegister();
6775
6776 result = allocator()->Allocate();
6777 ASSERT(result.is_valid());
6778 DeferredReferenceGetNamedValue* deferred =
6779 new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name);
6780
6781 // Check that the receiver is a heap object.
6782 __ test(receiver.reg(), Immediate(kSmiTagMask));
6783 deferred->Branch(zero);
6784
6785 __ bind(deferred->patch_site());
6786 // This is the map check instruction that will be patched (so we can't
6787 // use the double underscore macro that may insert instructions).
6788 // Initially use an invalid map to force a failure.
6789 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
6790 Immediate(Factory::null_value()));
6791 // This branch is always a forwards branch so it's always a fixed size
6792 // which allows the assert below to succeed and patching to work.
6793 deferred->Branch(not_equal);
6794
6795 // The delta from the patch label to the load offset must be statically
6796 // known.
6797 ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
6798 LoadIC::kOffsetToLoadInstruction);
6799 // The initial (invalid) offset has to be large enough to force a 32-bit
6800 // instruction encoding to allow patching with an arbitrary offset. Use
6801 // kMaxInt (minus kHeapObjectTag).
6802 int offset = kMaxInt;
6803 masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
6804
6805 __ IncrementCounter(&Counters::named_load_inline, 1);
6806 deferred->BindExit();
6807 }
6808 ASSERT(frame()->height() == original_height - 1);
6809 return result;
6810 }
6811
6812
6813 Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
6814 #ifdef DEBUG
6815 int expected_height = frame()->height() - (is_contextual ? 1 : 2);
6816 #endif
6817 Result result = frame()->CallStoreIC(name, is_contextual);
6818
6819 ASSERT_EQ(expected_height, frame()->height());
6820 return result;
6821 }
6822
6823
6824 Result CodeGenerator::EmitKeyedLoad() {
6825 #ifdef DEBUG
6826 int original_height = frame()->height();
6827 #endif
6828 Result result;
6829 // Inline array load code if inside of a loop. We do not know the
6830 // receiver map yet, so we initially generate the code with a check
6831 // against an invalid map. In the inline cache code, we patch the map
6832 // check if appropriate.
6433 if (loop_nesting() > 0) { 6833 if (loop_nesting() > 0) {
6434 Comment cmnt(masm_, "[ Inlined load from keyed Property"); 6834 Comment cmnt(masm_, "[ Inlined load from keyed Property");
6435 6835
6436 Result key = frame_->Pop(); 6836 Result key = frame_->Pop();
6437 Result receiver = frame_->Pop(); 6837 Result receiver = frame_->Pop();
6438 key.ToRegister(); 6838 key.ToRegister();
6439 receiver.ToRegister(); 6839 receiver.ToRegister();
6440 6840
6441 // Use a fresh temporary to load the elements without destroying 6841 // Use a fresh temporary to load the elements without destroying
6442 // the receiver which is needed for the deferred slow case. 6842 // the receiver which is needed for the deferred slow case.
6443 Result elements = allocator()->Allocate(); 6843 Result elements = allocator()->Allocate();
6444 ASSERT(elements.is_valid()); 6844 ASSERT(elements.is_valid());
6445 6845
6446 // Use a fresh temporary for the index and later the loaded 6846 // Use a fresh temporary for the index and later the loaded
6447 // value. 6847 // value.
6448 Result index = allocator()->Allocate(); 6848 result = allocator()->Allocate();
6449 ASSERT(index.is_valid()); 6849 ASSERT(result.is_valid());
6450 6850
6451 DeferredReferenceGetKeyedValue* deferred = 6851 DeferredReferenceGetKeyedValue* deferred =
6452 new DeferredReferenceGetKeyedValue(index.reg(), 6852 new DeferredReferenceGetKeyedValue(result.reg(),
6453 receiver.reg(), 6853 receiver.reg(),
6454 key.reg(), 6854 key.reg());
6455 is_global);
6456 6855
6457 // Check that the receiver is not a smi (only needed if this 6856 __ test(receiver.reg(), Immediate(kSmiTagMask));
6458 // is not a load from the global context) and that it has the 6857 deferred->Branch(zero);
6459 // expected map.
6460 if (!is_global) {
6461 __ test(receiver.reg(), Immediate(kSmiTagMask));
6462 deferred->Branch(zero);
6463 }
6464 6858
6465 // Initially, use an invalid map. The map is patched in the IC 6859 // Initially, use an invalid map. The map is patched in the IC
6466 // initialization code. 6860 // initialization code.
6467 __ bind(deferred->patch_site()); 6861 __ bind(deferred->patch_site());
6468 // Use masm-> here instead of the double underscore macro since extra 6862 // Use masm-> here instead of the double underscore macro since extra
6469 // coverage code can interfere with the patching. 6863 // coverage code can interfere with the patching.
6470 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset), 6864 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
6471 Immediate(Factory::null_value())); 6865 Immediate(Factory::null_value()));
6472 deferred->Branch(not_equal); 6866 deferred->Branch(not_equal);
6473 6867
6474 // Check that the key is a smi. 6868 // Check that the key is a smi.
6475 __ test(key.reg(), Immediate(kSmiTagMask)); 6869 __ test(key.reg(), Immediate(kSmiTagMask));
6476 deferred->Branch(not_zero); 6870 deferred->Branch(not_zero);
6477 6871
6478 // Get the elements array from the receiver and check that it 6872 // Get the elements array from the receiver and check that it
6479 // is not a dictionary. 6873 // is not a dictionary.
6480 __ mov(elements.reg(), 6874 __ mov(elements.reg(),
6481 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); 6875 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
6482 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset), 6876 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
6483 Immediate(Factory::fixed_array_map())); 6877 Immediate(Factory::fixed_array_map()));
6484 deferred->Branch(not_equal); 6878 deferred->Branch(not_equal);
6485 6879
6486 // Shift the key to get the actual index value and check that 6880 // Shift the key to get the actual index value and check that
6487 // it is within bounds. 6881 // it is within bounds.
6488 __ mov(index.reg(), key.reg()); 6882 __ mov(result.reg(), key.reg());
6489 __ SmiUntag(index.reg()); 6883 __ SmiUntag(result.reg());
6490 __ cmp(index.reg(), 6884 __ cmp(result.reg(),
6491 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); 6885 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
6492 deferred->Branch(above_equal); 6886 deferred->Branch(above_equal);
6493 6887
6494 // Load and check that the result is not the hole. We could 6888 // Load and check that the result is not the hole.
6495 // reuse the index or elements register for the value. 6889 __ mov(result.reg(), Operand(elements.reg(),
6496 // 6890 result.reg(),
6497 // TODO(206): Consider whether it makes sense to try some 6891 times_4,
6498 // heuristic about which register to reuse. For example, if 6892 FixedArray::kHeaderSize - kHeapObjectTag));
6499 // one is eax, the we can reuse that one because the value
6500 // coming from the deferred code will be in eax.
6501 Result value = index;
6502 __ mov(value.reg(), Operand(elements.reg(),
6503 index.reg(),
6504 times_4,
6505 FixedArray::kHeaderSize - kHeapObjectTag));
6506 elements.Unuse(); 6893 elements.Unuse();
6507 index.Unuse(); 6894 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
6508 __ cmp(Operand(value.reg()), Immediate(Factory::the_hole_value()));
6509 deferred->Branch(equal); 6895 deferred->Branch(equal);
6510 __ IncrementCounter(&Counters::keyed_load_inline, 1); 6896 __ IncrementCounter(&Counters::keyed_load_inline, 1);
6511 6897
6512 deferred->BindExit(); 6898 deferred->BindExit();
6513 // Restore the receiver and key to the frame and push the
6514 // result on top of it.
6515 frame_->Push(&receiver);
6516 frame_->Push(&key);
6517 return value;
6518 } else { 6899 } else {
6519 Comment cmnt(masm_, "[ Load from keyed Property"); 6900 Comment cmnt(masm_, "[ Load from keyed Property");
6520 RelocInfo::Mode mode = is_global 6901 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
6521 ? RelocInfo::CODE_TARGET_CONTEXT
6522 : RelocInfo::CODE_TARGET;
6523 Result answer = frame_->CallKeyedLoadIC(mode);
6524 // Make sure that we do not have a test instruction after the 6902 // Make sure that we do not have a test instruction after the
6525 // call. A test instruction after the call is used to 6903 // call. A test instruction after the call is used to
6526 // indicate that we have generated an inline version of the 6904 // indicate that we have generated an inline version of the
6527 // keyed load. The explicit nop instruction is here because 6905 // keyed load. The explicit nop instruction is here because
6528 // the push that follows might be peep-hole optimized away. 6906 // the push that follows might be peep-hole optimized away.
6529 __ nop(); 6907 __ nop();
6530 return answer;
6531 } 6908 }
6909 ASSERT(frame()->height() == original_height - 2);
6910 return result;
6911 }
6912
6913
6914 Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
6915 #ifdef DEBUG
6916 int original_height = frame()->height();
6917 #endif
6918 Result result;
6919 // Generate inlined version of the keyed store if the code is in a loop
6920 // and the key is likely to be a smi.
6921 if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
6922 Comment cmnt(masm(), "[ Inlined store to keyed Property");
6923
6924 // Get the receiver, key and value into registers.
6925 result = frame()->Pop();
6926 Result key = frame()->Pop();
6927 Result receiver = frame()->Pop();
6928
6929 Result tmp = allocator_->Allocate();
6930 ASSERT(tmp.is_valid());
6931
6932 // Determine whether the value is a constant before putting it in a
6933 // register.
6934 bool value_is_constant = result.is_constant();
6935
6936 // Make sure that value, key and receiver are in registers.
6937 result.ToRegister();
6938 key.ToRegister();
6939 receiver.ToRegister();
6940
6941 DeferredReferenceSetKeyedValue* deferred =
6942 new DeferredReferenceSetKeyedValue(result.reg(),
6943 key.reg(),
6944 receiver.reg());
6945
6946 // Check that the value is a smi if it is not a constant. We can skip
6947 // the write barrier for smis and constants.
6948 if (!value_is_constant) {
6949 __ test(result.reg(), Immediate(kSmiTagMask));
6950 deferred->Branch(not_zero);
6951 }
6952
6953 // Check that the key is a non-negative smi.
6954 __ test(key.reg(), Immediate(kSmiTagMask | 0x80000000));
6955 deferred->Branch(not_zero);
6956
6957 // Check that the receiver is not a smi.
6958 __ test(receiver.reg(), Immediate(kSmiTagMask));
6959 deferred->Branch(zero);
6960
6961 // Check that the receiver is a JSArray.
6962 __ mov(tmp.reg(),
6963 FieldOperand(receiver.reg(), HeapObject::kMapOffset));
6964 __ movzx_b(tmp.reg(),
6965 FieldOperand(tmp.reg(), Map::kInstanceTypeOffset));
6966 __ cmp(tmp.reg(), JS_ARRAY_TYPE);
6967 deferred->Branch(not_equal);
6968
6969 // Check that the key is within bounds. Both the key and the length of
6970 // the JSArray are smis.
6971 __ cmp(key.reg(),
6972 FieldOperand(receiver.reg(), JSArray::kLengthOffset));
6973 deferred->Branch(greater_equal);
6974
6975 // Get the elements array from the receiver and check that it is not a
6976 // dictionary.
6977 __ mov(tmp.reg(),
6978 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
6979 // Bind the deferred code patch site to be able to locate the fixed
6980 // array map comparison. When debugging, we patch this comparison to
6981 // always fail so that we will hit the IC call in the deferred code
6982 // which will allow the debugger to break for fast case stores.
6983 __ bind(deferred->patch_site());
6984 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
6985 Immediate(Factory::fixed_array_map()));
6986 deferred->Branch(not_equal);
6987
6988 // Store the value.
6989 __ mov(Operand(tmp.reg(),
6990 key.reg(),
6991 times_2,
6992 FixedArray::kHeaderSize - kHeapObjectTag),
6993 result.reg());
6994 __ IncrementCounter(&Counters::keyed_store_inline, 1);
6995
6996 deferred->BindExit();
6997 } else {
6998 result = frame()->CallKeyedStoreIC();
6999 // Make sure that we do not have a test instruction after the
7000 // call. A test instruction after the call is used to
7001 // indicate that we have generated an inline version of the
7002 // keyed store.
7003 __ nop();
7004 frame()->Drop(2);
7005 }
7006 ASSERT(frame()->height() == original_height - 3);
7007 return result;
6532 } 7008 }
6533 7009
6534 7010
6535 #undef __ 7011 #undef __
6536 #define __ ACCESS_MASM(masm) 7012 #define __ ACCESS_MASM(masm)
6537 7013
6538 7014
6539 Handle<String> Reference::GetName() { 7015 Handle<String> Reference::GetName() {
6540 ASSERT(type_ == NAMED); 7016 ASSERT(type_ == NAMED);
6541 Property* property = expression_->AsProperty(); 7017 Property* property = expression_->AsProperty();
6542 if (property == NULL) { 7018 if (property == NULL) {
6543 // Global variable reference treated as a named property reference. 7019 // Global variable reference treated as a named property reference.
6544 VariableProxy* proxy = expression_->AsVariableProxy(); 7020 VariableProxy* proxy = expression_->AsVariableProxy();
6545 ASSERT(proxy->AsVariable() != NULL); 7021 ASSERT(proxy->AsVariable() != NULL);
6546 ASSERT(proxy->AsVariable()->is_global()); 7022 ASSERT(proxy->AsVariable()->is_global());
6547 return proxy->name(); 7023 return proxy->name();
6548 } else { 7024 } else {
6549 Literal* raw_name = property->key()->AsLiteral(); 7025 Literal* raw_name = property->key()->AsLiteral();
6550 ASSERT(raw_name != NULL); 7026 ASSERT(raw_name != NULL);
6551 return Handle<String>(String::cast(*raw_name->handle())); 7027 return Handle<String>::cast(raw_name->handle());
6552 } 7028 }
6553 } 7029 }
6554 7030
6555 7031
6556 void Reference::GetValue() { 7032 void Reference::GetValue() {
6557 ASSERT(!cgen_->in_spilled_code()); 7033 ASSERT(!cgen_->in_spilled_code());
6558 ASSERT(cgen_->HasValidEntryRegisters()); 7034 ASSERT(cgen_->HasValidEntryRegisters());
6559 ASSERT(!is_illegal()); 7035 ASSERT(!is_illegal());
6560 MacroAssembler* masm = cgen_->masm(); 7036 MacroAssembler* masm = cgen_->masm();
6561 7037
6562 // Record the source position for the property load. 7038 // Record the source position for the property load.
6563 Property* property = expression_->AsProperty(); 7039 Property* property = expression_->AsProperty();
6564 if (property != NULL) { 7040 if (property != NULL) {
6565 cgen_->CodeForSourcePosition(property->position()); 7041 cgen_->CodeForSourcePosition(property->position());
6566 } 7042 }
6567 7043
6568 switch (type_) { 7044 switch (type_) {
6569 case SLOT: { 7045 case SLOT: {
6570 Comment cmnt(masm, "[ Load from Slot"); 7046 Comment cmnt(masm, "[ Load from Slot");
6571 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); 7047 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
6572 ASSERT(slot != NULL); 7048 ASSERT(slot != NULL);
6573 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF); 7049 Result result =
7050 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
7051 if (!persist_after_get_) set_unloaded();
7052 cgen_->frame()->Push(&result);
6574 break; 7053 break;
6575 } 7054 }
6576 7055
6577 case NAMED: { 7056 case NAMED: {
6578 Variable* var = expression_->AsVariableProxy()->AsVariable(); 7057 Variable* var = expression_->AsVariableProxy()->AsVariable();
6579 bool is_global = var != NULL; 7058 bool is_global = var != NULL;
6580 ASSERT(!is_global || var->is_global()); 7059 ASSERT(!is_global || var->is_global());
6581 7060 if (persist_after_get_) cgen_->frame()->Dup();
6582 // Do not inline the inobject property case for loads from the global 7061 Result result = cgen_->EmitNamedLoad(GetName(), is_global);
6583 // object. Also do not inline for unoptimized code. This saves time 7062 if (!persist_after_get_) set_unloaded();
6584 // in the code generator. Unoptimized code is toplevel code or code 7063 cgen_->frame()->Push(&result);
6585 // that is not in a loop.
6586 if (is_global ||
6587 cgen_->scope()->is_global_scope() ||
6588 cgen_->loop_nesting() == 0) {
6589 Comment cmnt(masm, "[ Load from named Property");
6590 cgen_->frame()->Push(GetName());
6591
6592 RelocInfo::Mode mode = is_global
6593 ? RelocInfo::CODE_TARGET_CONTEXT
6594 : RelocInfo::CODE_TARGET;
6595 Result answer = cgen_->frame()->CallLoadIC(mode);
6596 // A test eax instruction following the call signals that the
6597 // inobject property case was inlined. Ensure that there is not
6598 // a test eax instruction here.
6599 __ nop();
6600 cgen_->frame()->Push(&answer);
6601 } else {
6602 // Inline the inobject property case.
6603 Comment cmnt(masm, "[ Inlined named property load");
6604 Result receiver = cgen_->frame()->Pop();
6605 receiver.ToRegister();
6606
6607 Result value = cgen_->allocator()->Allocate();
6608 ASSERT(value.is_valid());
6609 DeferredReferenceGetNamedValue* deferred =
6610 new DeferredReferenceGetNamedValue(value.reg(),
6611 receiver.reg(),
6612 GetName());
6613
6614 // Check that the receiver is a heap object.
6615 __ test(receiver.reg(), Immediate(kSmiTagMask));
6616 deferred->Branch(zero);
6617
6618 __ bind(deferred->patch_site());
6619 // This is the map check instruction that will be patched (so we can't
6620 // use the double underscore macro that may insert instructions).
6621 // Initially use an invalid map to force a failure.
6622 masm->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
6623 Immediate(Factory::null_value()));
6624 // This branch is always a forwards branch so it's always a fixed
6625 // size which allows the assert below to succeed and patching to work.
6626 deferred->Branch(not_equal);
6627
6628 // The delta from the patch label to the load offset must be
6629 // statically known.
6630 ASSERT(masm->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
6631 LoadIC::kOffsetToLoadInstruction);
6632 // The initial (invalid) offset has to be large enough to force
6633 // a 32-bit instruction encoding to allow patching with an
6634 // arbitrary offset. Use kMaxInt (minus kHeapObjectTag).
6635 int offset = kMaxInt;
6636 masm->mov(value.reg(), FieldOperand(receiver.reg(), offset));
6637
6638 __ IncrementCounter(&Counters::named_load_inline, 1);
6639 deferred->BindExit();
6640 cgen_->frame()->Push(&receiver);
6641 cgen_->frame()->Push(&value);
6642 }
6643 break; 7064 break;
6644 } 7065 }
6645 7066
6646 case KEYED: { 7067 case KEYED: {
6647 Variable* var = expression_->AsVariableProxy()->AsVariable(); 7068 if (persist_after_get_) {
6648 bool is_global = var != NULL; 7069 cgen_->frame()->PushElementAt(1);
6649 ASSERT(!is_global || var->is_global()); 7070 cgen_->frame()->PushElementAt(1);
6650 Result value = cgen_->EmitKeyedLoad(is_global); 7071 }
7072 Result value = cgen_->EmitKeyedLoad();
6651 cgen_->frame()->Push(&value); 7073 cgen_->frame()->Push(&value);
7074 if (!persist_after_get_) set_unloaded();
6652 break; 7075 break;
6653 } 7076 }
6654 7077
6655 default: 7078 default:
6656 UNREACHABLE(); 7079 UNREACHABLE();
6657 } 7080 }
6658
6659 if (!persist_after_get_) {
6660 cgen_->UnloadReference(this);
6661 }
6662 } 7081 }
6663 7082
6664 7083
6665 void Reference::TakeValue() { 7084 void Reference::TakeValue() {
6666 // For non-constant frame-allocated slots, we invalidate the value in the 7085 // For non-constant frame-allocated slots, we invalidate the value in the
6667 // slot. For all others, we fall back on GetValue. 7086 // slot. For all others, we fall back on GetValue.
6668 ASSERT(!cgen_->in_spilled_code()); 7087 ASSERT(!cgen_->in_spilled_code());
6669 ASSERT(!is_illegal()); 7088 ASSERT(!is_illegal());
6670 if (type_ != SLOT) { 7089 if (type_ != SLOT) {
6671 GetValue(); 7090 GetValue();
(...skipping 29 matching lines...) Expand all
6701 void Reference::SetValue(InitState init_state) { 7120 void Reference::SetValue(InitState init_state) {
6702 ASSERT(cgen_->HasValidEntryRegisters()); 7121 ASSERT(cgen_->HasValidEntryRegisters());
6703 ASSERT(!is_illegal()); 7122 ASSERT(!is_illegal());
6704 MacroAssembler* masm = cgen_->masm(); 7123 MacroAssembler* masm = cgen_->masm();
6705 switch (type_) { 7124 switch (type_) {
6706 case SLOT: { 7125 case SLOT: {
6707 Comment cmnt(masm, "[ Store to Slot"); 7126 Comment cmnt(masm, "[ Store to Slot");
6708 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); 7127 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
6709 ASSERT(slot != NULL); 7128 ASSERT(slot != NULL);
6710 cgen_->StoreToSlot(slot, init_state); 7129 cgen_->StoreToSlot(slot, init_state);
6711 cgen_->UnloadReference(this); 7130 set_unloaded();
6712 break; 7131 break;
6713 } 7132 }
6714 7133
6715 case NAMED: { 7134 case NAMED: {
6716 Comment cmnt(masm, "[ Store to named Property"); 7135 Comment cmnt(masm, "[ Store to named Property");
6717 cgen_->frame()->Push(GetName()); 7136 Result answer = cgen_->EmitNamedStore(GetName(), false);
6718 Result answer = cgen_->frame()->CallStoreIC();
6719 cgen_->frame()->Push(&answer); 7137 cgen_->frame()->Push(&answer);
6720 set_unloaded(); 7138 set_unloaded();
6721 break; 7139 break;
6722 } 7140 }
6723 7141
6724 case KEYED: { 7142 case KEYED: {
6725 Comment cmnt(masm, "[ Store to keyed Property"); 7143 Comment cmnt(masm, "[ Store to keyed Property");
6726
6727 // Generate inlined version of the keyed store if the code is in
6728 // a loop and the key is likely to be a smi.
6729 Property* property = expression()->AsProperty(); 7144 Property* property = expression()->AsProperty();
6730 ASSERT(property != NULL); 7145 ASSERT(property != NULL);
6731 StaticType* key_smi_analysis = property->key()->type(); 7146 Result answer = cgen_->EmitKeyedStore(property->key()->type());
6732 7147 cgen_->frame()->Push(&answer);
6733 if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) { 7148 set_unloaded();
6734 Comment cmnt(masm, "[ Inlined store to keyed Property");
6735
6736 // Get the receiver, key and value into registers.
6737 Result value = cgen_->frame()->Pop();
6738 Result key = cgen_->frame()->Pop();
6739 Result receiver = cgen_->frame()->Pop();
6740
6741 Result tmp = cgen_->allocator_->Allocate();
6742 ASSERT(tmp.is_valid());
6743
6744 // Determine whether the value is a constant before putting it
6745 // in a register.
6746 bool value_is_constant = value.is_constant();
6747
6748 // Make sure that value, key and receiver are in registers.
6749 value.ToRegister();
6750 key.ToRegister();
6751 receiver.ToRegister();
6752
6753 DeferredReferenceSetKeyedValue* deferred =
6754 new DeferredReferenceSetKeyedValue(value.reg(),
6755 key.reg(),
6756 receiver.reg());
6757
6758 // Check that the value is a smi if it is not a constant. We
6759 // can skip the write barrier for smis and constants.
6760 if (!value_is_constant) {
6761 __ test(value.reg(), Immediate(kSmiTagMask));
6762 deferred->Branch(not_zero);
6763 }
6764
6765 // Check that the key is a non-negative smi.
6766 __ test(key.reg(), Immediate(kSmiTagMask | 0x80000000));
6767 deferred->Branch(not_zero);
6768
6769 // Check that the receiver is not a smi.
6770 __ test(receiver.reg(), Immediate(kSmiTagMask));
6771 deferred->Branch(zero);
6772
6773 // Check that the receiver is a JSArray.
6774 __ mov(tmp.reg(),
6775 FieldOperand(receiver.reg(), HeapObject::kMapOffset));
6776 __ movzx_b(tmp.reg(),
6777 FieldOperand(tmp.reg(), Map::kInstanceTypeOffset));
6778 __ cmp(tmp.reg(), JS_ARRAY_TYPE);
6779 deferred->Branch(not_equal);
6780
6781 // Check that the key is within bounds. Both the key and the
6782 // length of the JSArray are smis.
6783 __ cmp(key.reg(),
6784 FieldOperand(receiver.reg(), JSArray::kLengthOffset));
6785 deferred->Branch(greater_equal);
6786
6787 // Get the elements array from the receiver and check that it
6788 // is not a dictionary.
6789 __ mov(tmp.reg(),
6790 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
6791 // Bind the deferred code patch site to be able to locate the
6792 // fixed array map comparison. When debugging, we patch this
6793 // comparison to always fail so that we will hit the IC call
6794 // in the deferred code which will allow the debugger to
6795 // break for fast case stores.
6796 __ bind(deferred->patch_site());
6797 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
6798 Immediate(Factory::fixed_array_map()));
6799 deferred->Branch(not_equal);
6800
6801 // Store the value.
6802 __ mov(Operand(tmp.reg(),
6803 key.reg(),
6804 times_2,
6805 FixedArray::kHeaderSize - kHeapObjectTag),
6806 value.reg());
6807 __ IncrementCounter(&Counters::keyed_store_inline, 1);
6808
6809 deferred->BindExit();
6810
6811 cgen_->frame()->Push(&receiver);
6812 cgen_->frame()->Push(&key);
6813 cgen_->frame()->Push(&value);
6814 } else {
6815 Result answer = cgen_->frame()->CallKeyedStoreIC();
6816 // Make sure that we do not have a test instruction after the
6817 // call. A test instruction after the call is used to
6818 // indicate that we have generated an inline version of the
6819 // keyed store.
6820 __ nop();
6821 cgen_->frame()->Push(&answer);
6822 }
6823 cgen_->UnloadReference(this);
6824 break; 7149 break;
6825 } 7150 }
6826 7151
6827 default: 7152 case UNLOADED:
7153 case ILLEGAL:
6828 UNREACHABLE(); 7154 UNREACHABLE();
6829 } 7155 }
6830 } 7156 }
6831 7157
6832 7158
6833 void FastNewClosureStub::Generate(MacroAssembler* masm) { 7159 void FastNewClosureStub::Generate(MacroAssembler* masm) {
6834 // Clone the boilerplate in new space. Set the context to the 7160 // Clone the boilerplate in new space. Set the context to the
6835 // current context in esi. 7161 // current context in esi.
6836 Label gc; 7162 Label gc;
6837 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT); 7163 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after
6911 __ mov(esi, Operand(eax)); 7237 __ mov(esi, Operand(eax));
6912 __ ret(1 * kPointerSize); 7238 __ ret(1 * kPointerSize);
6913 7239
6914 // Need to collect. Call into runtime system. 7240 // Need to collect. Call into runtime system.
6915 __ bind(&gc); 7241 __ bind(&gc);
6916 __ TailCallRuntime(ExternalReference(Runtime::kNewContext), 1, 1); 7242 __ TailCallRuntime(ExternalReference(Runtime::kNewContext), 1, 1);
6917 } 7243 }
6918 7244
6919 7245
6920 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { 7246 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
7247 // Stack layout on entry:
7248 //
7249 // [esp + kPointerSize]: constant elements.
7250 // [esp + (2 * kPointerSize)]: literal index.
7251 // [esp + (3 * kPointerSize)]: literals array.
7252
7253 // All sizes here are multiples of kPointerSize.
6921 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; 7254 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
6922 int size = JSArray::kSize + elements_size; 7255 int size = JSArray::kSize + elements_size;
6923 7256
6924 // Load boilerplate object into ecx and check if we need to create a 7257 // Load boilerplate object into ecx and check if we need to create a
6925 // boilerplate. 7258 // boilerplate.
6926 Label slow_case; 7259 Label slow_case;
6927 __ mov(ecx, Operand(esp, 3 * kPointerSize)); 7260 __ mov(ecx, Operand(esp, 3 * kPointerSize));
6928 __ mov(eax, Operand(esp, 2 * kPointerSize)); 7261 __ mov(eax, Operand(esp, 2 * kPointerSize));
6929 ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0)); 7262 ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
6930 __ mov(ecx, FieldOperand(ecx, eax, times_2, FixedArray::kHeaderSize)); 7263 __ mov(ecx, FieldOperand(ecx, eax, times_2, FixedArray::kHeaderSize));
(...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after
7030 Register right_arg = eax; 7363 Register right_arg = eax;
7031 if (!(left.is(left_arg) && right.is(right_arg))) { 7364 if (!(left.is(left_arg) && right.is(right_arg))) {
7032 if (left.is(right_arg) && right.is(left_arg)) { 7365 if (left.is(right_arg) && right.is(left_arg)) {
7033 if (IsOperationCommutative()) { 7366 if (IsOperationCommutative()) {
7034 SetArgsReversed(); 7367 SetArgsReversed();
7035 } else { 7368 } else {
7036 __ xchg(left, right); 7369 __ xchg(left, right);
7037 } 7370 }
7038 } else if (left.is(left_arg)) { 7371 } else if (left.is(left_arg)) {
7039 __ mov(right_arg, right); 7372 __ mov(right_arg, right);
7373 } else if (right.is(right_arg)) {
7374 __ mov(left_arg, left);
7040 } else if (left.is(right_arg)) { 7375 } else if (left.is(right_arg)) {
7041 if (IsOperationCommutative()) { 7376 if (IsOperationCommutative()) {
7042 __ mov(left_arg, right); 7377 __ mov(left_arg, right);
7043 SetArgsReversed(); 7378 SetArgsReversed();
7044 } else { 7379 } else {
7045 // Order of moves important to avoid destroying left argument. 7380 // Order of moves important to avoid destroying left argument.
7046 __ mov(left_arg, left); 7381 __ mov(left_arg, left);
7047 __ mov(right_arg, right); 7382 __ mov(right_arg, right);
7048 } 7383 }
7049 } else if (right.is(left_arg)) { 7384 } else if (right.is(left_arg)) {
7050 if (IsOperationCommutative()) { 7385 if (IsOperationCommutative()) {
7051 __ mov(right_arg, left); 7386 __ mov(right_arg, left);
7052 SetArgsReversed(); 7387 SetArgsReversed();
7053 } else { 7388 } else {
7054 // Order of moves important to avoid destroying right argument. 7389 // Order of moves important to avoid destroying right argument.
7055 __ mov(right_arg, right); 7390 __ mov(right_arg, right);
7056 __ mov(left_arg, left); 7391 __ mov(left_arg, left);
7057 } 7392 }
7058 } else if (right.is(right_arg)) {
7059 __ mov(left_arg, left);
7060 } else { 7393 } else {
7061 // Order of moves is not important. 7394 // Order of moves is not important.
7062 __ mov(left_arg, left); 7395 __ mov(left_arg, left);
7063 __ mov(right_arg, right); 7396 __ mov(right_arg, right);
7064 } 7397 }
7065 } 7398 }
7066 7399
7067 // Update flags to indicate that arguments are in registers. 7400 // Update flags to indicate that arguments are in registers.
7068 SetArgsInRegisters(); 7401 SetArgsInRegisters();
7069 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1); 7402 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
(...skipping 15 matching lines...) Expand all
7085 } else { 7418 } else {
7086 // The calling convention with registers is left in edx and right in eax. 7419 // The calling convention with registers is left in edx and right in eax.
7087 Register left_arg = edx; 7420 Register left_arg = edx;
7088 Register right_arg = eax; 7421 Register right_arg = eax;
7089 if (left.is(left_arg)) { 7422 if (left.is(left_arg)) {
7090 __ mov(right_arg, Immediate(right)); 7423 __ mov(right_arg, Immediate(right));
7091 } else if (left.is(right_arg) && IsOperationCommutative()) { 7424 } else if (left.is(right_arg) && IsOperationCommutative()) {
7092 __ mov(left_arg, Immediate(right)); 7425 __ mov(left_arg, Immediate(right));
7093 SetArgsReversed(); 7426 SetArgsReversed();
7094 } else { 7427 } else {
7428 // For non-commutative operations, left and right_arg might be
7429 // the same register. Therefore, the order of the moves is
7430 // important here in order to not overwrite left before moving
7431 // it to left_arg.
7095 __ mov(left_arg, left); 7432 __ mov(left_arg, left);
7096 __ mov(right_arg, Immediate(right)); 7433 __ mov(right_arg, Immediate(right));
7097 } 7434 }
7098 7435
7099 // Update flags to indicate that arguments are in registers. 7436 // Update flags to indicate that arguments are in registers.
7100 SetArgsInRegisters(); 7437 SetArgsInRegisters();
7101 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1); 7438 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
7102 } 7439 }
7103 7440
7104 // Call the stub. 7441 // Call the stub.
(...skipping 12 matching lines...) Expand all
7117 } else { 7454 } else {
7118 // The calling convention with registers is left in edx and right in eax. 7455 // The calling convention with registers is left in edx and right in eax.
7119 Register left_arg = edx; 7456 Register left_arg = edx;
7120 Register right_arg = eax; 7457 Register right_arg = eax;
7121 if (right.is(right_arg)) { 7458 if (right.is(right_arg)) {
7122 __ mov(left_arg, Immediate(left)); 7459 __ mov(left_arg, Immediate(left));
7123 } else if (right.is(left_arg) && IsOperationCommutative()) { 7460 } else if (right.is(left_arg) && IsOperationCommutative()) {
7124 __ mov(right_arg, Immediate(left)); 7461 __ mov(right_arg, Immediate(left));
7125 SetArgsReversed(); 7462 SetArgsReversed();
7126 } else { 7463 } else {
7464 // For non-commutative operations, right and left_arg might be
7465 // the same register. Therefore, the order of the moves is
7466 // important here in order to not overwrite right before moving
7467 // it to right_arg.
7468 __ mov(right_arg, right);
7127 __ mov(left_arg, Immediate(left)); 7469 __ mov(left_arg, Immediate(left));
7128 __ mov(right_arg, right);
7129 } 7470 }
7130 // Update flags to indicate that arguments are in registers. 7471 // Update flags to indicate that arguments are in registers.
7131 SetArgsInRegisters(); 7472 SetArgsInRegisters();
7132 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1); 7473 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
7133 } 7474 }
7134 7475
7135 // Call the stub. 7476 // Call the stub.
7136 __ CallStub(this); 7477 __ CallStub(this);
7137 } 7478 }
7138 7479
(...skipping 347 matching lines...) Expand 10 before | Expand all | Expand 10 after
7486 } 7827 }
7487 7828
7488 // Floating point case. 7829 // Floating point case.
7489 switch (op_) { 7830 switch (op_) {
7490 case Token::ADD: 7831 case Token::ADD:
7491 case Token::SUB: 7832 case Token::SUB:
7492 case Token::MUL: 7833 case Token::MUL:
7493 case Token::DIV: { 7834 case Token::DIV: {
7494 if (CpuFeatures::IsSupported(SSE2)) { 7835 if (CpuFeatures::IsSupported(SSE2)) {
7495 CpuFeatures::Scope use_sse2(SSE2); 7836 CpuFeatures::Scope use_sse2(SSE2);
7496 FloatingPointHelper::LoadSSE2Operands(masm, &call_runtime); 7837 if (NumberInfo::IsNumber(operands_type_)) {
7838 if (FLAG_debug_code) {
7839 // Assert at runtime that inputs are only numbers.
7840 __ AbortIfNotNumber(edx,
7841 "GenericBinaryOpStub operand not a number.");
7842 __ AbortIfNotNumber(eax,
7843 "GenericBinaryOpStub operand not a number.");
7844 }
7845 FloatingPointHelper::LoadSSE2Operands(masm);
7846 } else {
7847 FloatingPointHelper::LoadSSE2Operands(masm, &call_runtime);
7848 }
7497 7849
7498 switch (op_) { 7850 switch (op_) {
7499 case Token::ADD: __ addsd(xmm0, xmm1); break; 7851 case Token::ADD: __ addsd(xmm0, xmm1); break;
7500 case Token::SUB: __ subsd(xmm0, xmm1); break; 7852 case Token::SUB: __ subsd(xmm0, xmm1); break;
7501 case Token::MUL: __ mulsd(xmm0, xmm1); break; 7853 case Token::MUL: __ mulsd(xmm0, xmm1); break;
7502 case Token::DIV: __ divsd(xmm0, xmm1); break; 7854 case Token::DIV: __ divsd(xmm0, xmm1); break;
7503 default: UNREACHABLE(); 7855 default: UNREACHABLE();
7504 } 7856 }
7505 GenerateHeapResultAllocation(masm, &call_runtime); 7857 GenerateHeapResultAllocation(masm, &call_runtime);
7506 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 7858 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
7507 GenerateReturn(masm); 7859 GenerateReturn(masm);
7508 } else { // SSE2 not available, use FPU. 7860 } else { // SSE2 not available, use FPU.
7509 FloatingPointHelper::CheckFloatOperands(masm, &call_runtime, ebx); 7861 if (NumberInfo::IsNumber(operands_type_)) {
7862 if (FLAG_debug_code) {
7863 // Assert at runtime that inputs are only numbers.
7864 __ AbortIfNotNumber(edx,
7865 "GenericBinaryOpStub operand not a number.");
7866 __ AbortIfNotNumber(eax,
7867 "GenericBinaryOpStub operand not a number.");
7868 }
7869 } else {
7870 FloatingPointHelper::CheckFloatOperands(masm, &call_runtime, ebx);
7871 }
7510 FloatingPointHelper::LoadFloatOperands( 7872 FloatingPointHelper::LoadFloatOperands(
7511 masm, 7873 masm,
7512 ecx, 7874 ecx,
7513 FloatingPointHelper::ARGS_IN_REGISTERS); 7875 FloatingPointHelper::ARGS_IN_REGISTERS);
7514 switch (op_) { 7876 switch (op_) {
7515 case Token::ADD: __ faddp(1); break; 7877 case Token::ADD: __ faddp(1); break;
7516 case Token::SUB: __ fsubp(1); break; 7878 case Token::SUB: __ fsubp(1); break;
7517 case Token::MUL: __ fmulp(1); break; 7879 case Token::MUL: __ fmulp(1); break;
7518 case Token::DIV: __ fdivp(1); break; 7880 case Token::DIV: __ fdivp(1); break;
7519 default: UNREACHABLE(); 7881 default: UNREACHABLE();
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after
7611 __ push(edx); 7973 __ push(edx);
7612 } else { 7974 } else {
7613 __ push(edx); 7975 __ push(edx);
7614 __ push(eax); 7976 __ push(eax);
7615 } 7977 }
7616 __ push(ecx); 7978 __ push(ecx);
7617 } 7979 }
7618 switch (op_) { 7980 switch (op_) {
7619 case Token::ADD: { 7981 case Token::ADD: {
7620 // Test for string arguments before calling runtime. 7982 // Test for string arguments before calling runtime.
7621 Label not_strings, not_string1, string1; 7983 Label not_strings, not_string1, string1, string1_smi2;
7622 Result answer; 7984 Result answer;
7623 __ test(edx, Immediate(kSmiTagMask)); 7985 __ test(edx, Immediate(kSmiTagMask));
7624 __ j(zero, &not_string1); 7986 __ j(zero, &not_string1);
7625 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ecx); 7987 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ecx);
7626 __ j(above_equal, &not_string1); 7988 __ j(above_equal, &not_string1);
7627 7989
7628 // First argument is a string, test second. 7990 // First argument is a string, test second.
7629 __ test(eax, Immediate(kSmiTagMask)); 7991 __ test(eax, Immediate(kSmiTagMask));
7630 __ j(zero, &string1); 7992 __ j(zero, &string1_smi2);
7631 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ecx); 7993 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ecx);
7632 __ j(above_equal, &string1); 7994 __ j(above_equal, &string1);
7633 7995
7634 // First and second argument are strings. Jump to the string add stub. 7996 // First and second argument are strings. Jump to the string add stub.
7635 StringAddStub stub(NO_STRING_CHECK_IN_STUB); 7997 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
7636 __ TailCallStub(&stub); 7998 __ TailCallStub(&string_add_stub);
7637 7999
7638 // Only first argument is a string. 8000 __ bind(&string1_smi2);
8001 // First argument is a string, second is a smi. Try to lookup the number
8002 // string for the smi in the number string cache.
8003 NumberToStringStub::GenerateLookupNumberStringCache(
8004 masm, eax, edi, ebx, ecx, true, &string1);
8005
8006 // Call the string add stub to make the result.
8007 __ EnterInternalFrame();
8008 __ push(edx); // Original first argument.
8009 __ push(edi); // Number to string result for second argument.
8010 __ CallStub(&string_add_stub);
8011 __ LeaveInternalFrame();
8012 __ ret(2 * kPointerSize);
8013
7639 __ bind(&string1); 8014 __ bind(&string1);
7640 __ InvokeBuiltin( 8015 __ InvokeBuiltin(
7641 HasArgsReversed() ? 8016 HasArgsReversed() ?
7642 Builtins::STRING_ADD_RIGHT : 8017 Builtins::STRING_ADD_RIGHT :
7643 Builtins::STRING_ADD_LEFT, 8018 Builtins::STRING_ADD_LEFT,
7644 JUMP_FUNCTION); 8019 JUMP_FUNCTION);
7645 8020
7646 // First argument was not a string, test second. 8021 // First argument was not a string, test second.
7647 __ bind(&not_string1); 8022 __ bind(&not_string1);
7648 __ test(eax, Immediate(kSmiTagMask)); 8023 __ test(eax, Immediate(kSmiTagMask));
(...skipping 110 matching lines...) Expand 10 before | Expand all | Expand 10 after
7759 // If arguments are not passed in registers remove them from the stack before 8134 // If arguments are not passed in registers remove them from the stack before
7760 // returning. 8135 // returning.
7761 if (!HasArgsInRegisters()) { 8136 if (!HasArgsInRegisters()) {
7762 __ ret(2 * kPointerSize); // Remove both operands 8137 __ ret(2 * kPointerSize); // Remove both operands
7763 } else { 8138 } else {
7764 __ ret(0); 8139 __ ret(0);
7765 } 8140 }
7766 } 8141 }
7767 8142
7768 8143
     // Stub that computes a transcendental function (sin or cos, selected by
     // type_) of the number argument at esp[4]. Results are memoized in
     // TranscendentalCache (one cache per type_, 12-byte entries of
     // {low word, high word, result pointer}) so repeated inputs are served
     // from the cache. Non-number input, an uninitialized cache, or a failed
     // HeapNumber allocation falls back to the runtime (RuntimeFunction()).
8144 void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
8145 // Input on stack:
8146 // esp[4]: argument (should be number).
8147 // esp[0]: return address.
8148 // Test that eax is a number.
8149 Label runtime_call;
8150 Label runtime_call_clear_stack;
8151 Label input_not_smi;
8152 Label loaded;
8153 __ mov(eax, Operand(esp, kPointerSize));
8154 __ test(eax, Immediate(kSmiTagMask));
8155 __ j(not_zero, &input_not_smi);
8156 // Input is a smi. Untag and load it onto the FPU stack.
8157 // Then load the low and high words of the double into ebx, edx.
8158 ASSERT_EQ(1, kSmiTagSize);
8159 __ sar(eax, 1);
8160 __ sub(Operand(esp), Immediate(2 * kPointerSize));
8161 __ mov(Operand(esp, 0), eax);
8162 __ fild_s(Operand(esp, 0));
     // Store ST(0) back as a double so the two 32-bit words can be popped.
8163 __ fst_d(Operand(esp, 0));
8164 __ pop(edx);
8165 __ pop(ebx);
8166 __ jmp(&loaded);
8167 __ bind(&input_not_smi);
8168 // Check if input is a HeapNumber.
8169 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
8170 __ cmp(Operand(ebx), Immediate(Factory::heap_number_map()));
8171 __ j(not_equal, &runtime_call);
8172 // Input is a HeapNumber. Push it on the FPU stack and load its
8173 // low and high words into ebx, edx.
8174 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
8175 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
8176 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));
8177 
8178 __ bind(&loaded);
8179 // ST[0] == double value
8180 // ebx = low 32 bits of double value
8181 // edx = high 32 bits of double value
8182 // Compute hash:
8183 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
8184 __ mov(ecx, ebx);
8185 __ xor_(ecx, Operand(edx));
8186 __ mov(eax, ecx);
8187 __ sar(eax, 16);
8188 __ xor_(ecx, Operand(eax));
8189 __ mov(eax, ecx);
8190 __ sar(eax, 8);
8191 __ xor_(ecx, Operand(eax));
     // Masking with kCacheSize - 1 only works as a modulus for powers of two.
8192 ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
8193 __ and_(Operand(ecx), Immediate(TranscendentalCache::kCacheSize - 1));
8194 // ST[0] == double value.
8195 // ebx = low 32 bits of double value.
8196 // edx = high 32 bits of double value.
8197 // ecx = TranscendentalCache::hash(double value).
8198 __ mov(eax,
8199 Immediate(ExternalReference::transcendental_cache_array_address()));
8200 // Eax points to cache array.
8201 __ mov(eax, Operand(eax, type_ * sizeof(TranscendentalCache::caches_[0])));
8202 // Eax points to the cache for the type type_.
8203 // If NULL, the cache hasn't been initialized yet, so go through runtime.
8204 __ test(eax, Operand(eax));
8205 __ j(zero, &runtime_call_clear_stack);
8206 #ifdef DEBUG
8207 // Check that the layout of cache elements matches expectations.
8208 { // NOLINT - doesn't like a single brace on a line.
8209 TranscendentalCache::Element test_elem[2];
8210 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
8211 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
8212 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
8213 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
8214 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
8215 CHECK_EQ(12, elem2_start - elem_start); // Two uint_32's and a pointer.
8216 CHECK_EQ(0, elem_in0 - elem_start);
8217 CHECK_EQ(kIntSize, elem_in1 - elem_start);
8218 CHECK_EQ(2 * kIntSize, elem_out - elem_start);
8219 }
8220 #endif
8221 // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
8222 __ lea(ecx, Operand(ecx, ecx, times_2, 0));
8223 __ lea(ecx, Operand(eax, ecx, times_4, 0));
8224 // Check if cache matches: Double value is stored in uint32_t[2] array.
8225 Label cache_miss;
8226 __ cmp(ebx, Operand(ecx, 0));
8227 __ j(not_equal, &cache_miss);
8228 __ cmp(edx, Operand(ecx, kIntSize));
8229 __ j(not_equal, &cache_miss);
8230 // Cache hit!
8231 __ mov(eax, Operand(ecx, 2 * kIntSize));
     // Discard the input still on the FPU stack before returning.
8232 __ fstp(0);
8233 __ ret(kPointerSize);
8234 
8235 __ bind(&cache_miss);
8236 // Update cache with new value.
8237 // We are short on registers, so use no_reg as scratch.
8238 // This gives slightly larger code.
8239 __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
     // Computes fsin/fcos of ST(0) in place; preserves eax (the new HeapNumber).
8240 GenerateOperation(masm);
8241 __ mov(Operand(ecx, 0), ebx);
8242 __ mov(Operand(ecx, kIntSize), edx);
8243 __ mov(Operand(ecx, 2 * kIntSize), eax);
8244 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
8245 __ ret(kPointerSize);
8246 
8247 __ bind(&runtime_call_clear_stack);
     // Pop the value left on the FPU stack before tail-calling the runtime.
8248 __ fstp(0);
8249 __ bind(&runtime_call);
8250 __ TailCallRuntime(ExternalReference(RuntimeFunction()), 1, 1);
8251 }
8252
8253
     // Maps the stub's cache type to the runtime function used as the
     // slow-path fallback (non-number input or uninitialized cache).
8254 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
8255 switch (type_) {
8256 // Add more cases when necessary.
8257 case TranscendentalCache::SIN: return Runtime::kMath_sin;
8258 case TranscendentalCache::COS: return Runtime::kMath_cos;
8259 default:
8260 UNIMPLEMENTED();
8261 return Runtime::kAbort;
8262 }
8263 }
8264
8265
     // Emits the actual fsin/fcos for the value in ST(0), leaving the result
     // in ST(0). The x87 instructions only accept arguments in +/-2^63, so
     // larger finite inputs are first reduced with fprem1 (modulo 2*pi);
     // infinities and NaN produce NaN directly.
8266 void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
8267 // Only free register is edi.
8268 Label done;
8269 ASSERT(type_ == TranscendentalCache::SIN ||
8270 type_ == TranscendentalCache::COS);
8271 // More transcendental types can be added later.
8272 
8273 // Both fsin and fcos require arguments in the range +/-2^63 and
8274 // return NaN for infinities and NaN. They can share all code except
8275 // the actual fsin/fcos operation.
8276 Label in_range;
8277 // If argument is outside the range -2^63..2^63, fsin/cos doesn't
8278 // work. We must reduce it to the appropriate range.
8279 __ mov(edi, edx);
8280 __ and_(Operand(edi), Immediate(0x7ff00000)); // Exponent only.
8281 int supported_exponent_limit =
8282 (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
8283 __ cmp(Operand(edi), Immediate(supported_exponent_limit));
8284 __ j(below, &in_range, taken);
8285 // Check for infinity and NaN. Both return NaN for sin.
8286 __ cmp(Operand(edi), Immediate(0x7ff00000));
8287 Label non_nan_result;
8288 __ j(not_equal, &non_nan_result, taken);
8289 // Input is +/-Infinity or NaN. Result is NaN.
8290 __ fstp(0);
8291 // NaN is represented by 0x7ff8000000000000.
8292 __ push(Immediate(0x7ff80000));
8293 __ push(Immediate(0));
8294 __ fld_d(Operand(esp, 0));
8295 __ add(Operand(esp), Immediate(2 * kPointerSize));
8296 __ jmp(&done);
8297 
8298 __ bind(&non_nan_result);
8299 
8300 // Use fpmod to restrict argument to the range +/-2*PI.
8301 __ mov(edi, eax); // Save eax before using fnstsw_ax.
8302 __ fldpi();
8303 __ fadd(0); // st(0) = pi + pi = 2*pi.
8304 __ fld(1);
8305 // FPU Stack: input, 2*pi, input.
8306 {
8307 Label no_exceptions;
8308 __ fwait();
8309 __ fnstsw_ax();
8310 // Clear if Illegal Operand or Zero Division exceptions are set.
8311 __ test(Operand(eax), Immediate(5));
8312 __ j(zero, &no_exceptions);
8313 __ fnclex();
8314 __ bind(&no_exceptions);
8315 }
8316 
8317 // Compute st(0) % st(1)
8318 {
     // fprem1 may return a partial remainder for large quotients, so it is
     // applied repeatedly until the C2 status flag is clear.
8319 Label partial_remainder_loop;
8320 __ bind(&partial_remainder_loop);
8321 __ fprem1();
8322 __ fwait();
8323 __ fnstsw_ax();
8324 __ test(Operand(eax), Immediate(0x400 /* C2 */));
8325 // If C2 is set, computation only has partial result. Loop to
8326 // continue computation.
8327 __ j(not_zero, &partial_remainder_loop);
8328 }
8329 // FPU Stack: input, 2*pi, input % 2*pi
     // Drop the original input and 2*pi, leaving only the reduced argument.
8330 __ fstp(2);
8331 __ fstp(0);
8332 __ mov(eax, edi); // Restore eax (allocated HeapNumber pointer).
8333 
8334 // FPU Stack: input % 2*pi
8335 __ bind(&in_range);
8336 switch (type_) {
8337 case TranscendentalCache::SIN:
8338 __ fsin();
8339 break;
8340 case TranscendentalCache::COS:
8341 __ fcos();
8342 break;
8343 default:
8344 UNREACHABLE();
8345 }
8346 __ bind(&done);
8347 }
8348
8349
7769 // Get the integer part of a heap number. Surprisingly, all this bit twiddling 8350 // Get the integer part of a heap number. Surprisingly, all this bit twiddling
7770 // is faster than using the built-in instructions on floating point registers. 8351 // is faster than using the built-in instructions on floating point registers.
7771 // Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the 8352 // Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
7772 // trashed registers. 8353 // trashed registers.
7773 void IntegerConvert(MacroAssembler* masm, 8354 void IntegerConvert(MacroAssembler* masm,
7774 Register source, 8355 Register source,
7775 bool use_sse3, 8356 bool use_sse3,
7776 Label* conversion_failure) { 8357 Label* conversion_failure) {
7777 ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx)); 8358 ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
7778 Label done, right_exponent, normal_exponent; 8359 Label done, right_exponent, normal_exponent;
(...skipping 191 matching lines...) Expand 10 before | Expand all | Expand 10 after
7970 __ bind(&load_smi); 8551 __ bind(&load_smi);
7971 __ SmiUntag(number); 8552 __ SmiUntag(number);
7972 __ push(number); 8553 __ push(number);
7973 __ fild_s(Operand(esp, 0)); 8554 __ fild_s(Operand(esp, 0));
7974 __ pop(number); 8555 __ pop(number);
7975 8556
7976 __ bind(&done); 8557 __ bind(&done);
7977 } 8558 }
7978 8559
7979 8560
     // Loads the operands in edx and eax into xmm0 and xmm1 respectively.
     // Each operand may be a smi (untagged, converted, then retagged so later
     // heap-number-overwrite checks still see the smi) or a HeapNumber.
     // NOTE(review): unlike the overload taking a Label*, this version does
     // no map check — presumably callers guarantee both inputs are numbers;
     // confirm at call sites.
8561 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
8562 Label load_smi_edx, load_eax, load_smi_eax, done;
8563 // Load operand in edx into xmm0.
8564 __ test(edx, Immediate(kSmiTagMask));
8565 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
8566 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
8567 
8568 __ bind(&load_eax);
8569 // Load operand in eax into xmm1.
8570 __ test(eax, Immediate(kSmiTagMask));
8571 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi.
8572 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
8573 __ jmp(&done);
8574 
8575 __ bind(&load_smi_edx);
8576 __ SmiUntag(edx); // Untag smi before converting to float.
8577 __ cvtsi2sd(xmm0, Operand(edx));
8578 __ SmiTag(edx); // Retag smi for heap number overwriting test.
8579 __ jmp(&load_eax);
8580 
8581 __ bind(&load_smi_eax);
8582 __ SmiUntag(eax); // Untag smi before converting to float.
8583 __ cvtsi2sd(xmm1, Operand(eax));
8584 __ SmiTag(eax); // Retag smi for heap number overwriting test.
8585 
8586 __ bind(&done);
8587 }
8588
8589
7980 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm, 8590 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
7981 Label* not_numbers) { 8591 Label* not_numbers) {
7982 Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done; 8592 Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
7983 // Load operand in edx into xmm0, or branch to not_numbers. 8593 // Load operand in edx into xmm0, or branch to not_numbers.
7984 __ test(edx, Immediate(kSmiTagMask)); 8594 __ test(edx, Immediate(kSmiTagMask));
7985 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi. 8595 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
7986 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), Factory::heap_number_map()); 8596 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), Factory::heap_number_map());
7987 __ j(not_equal, not_numbers); // Argument in edx is not a number. 8597 __ j(not_equal, not_numbers); // Argument in edx is not a number.
7988 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); 8598 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
7989 __ bind(&load_eax); 8599 __ bind(&load_eax);
(...skipping 309 matching lines...) Expand 10 before | Expand all | Expand 10 after
8299 // by calling the runtime system. 8909 // by calling the runtime system.
8300 __ bind(&slow); 8910 __ bind(&slow);
8301 __ pop(ebx); // Return address. 8911 __ pop(ebx); // Return address.
8302 __ push(edx); 8912 __ push(edx);
8303 __ push(ebx); 8913 __ push(ebx);
8304 __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1, 1); 8914 __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1, 1);
8305 } 8915 }
8306 8916
8307 8917
8308 void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) { 8918 void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
8919 // esp[0] : return address
8920 // esp[4] : number of parameters
8921 // esp[8] : receiver displacement
8922 // esp[16] : function
8923
8309 // The displacement is used for skipping the return address and the 8924 // The displacement is used for skipping the return address and the
8310 // frame pointer on the stack. It is the offset of the last 8925 // frame pointer on the stack. It is the offset of the last
8311 // parameter (if any) relative to the frame pointer. 8926 // parameter (if any) relative to the frame pointer.
8312 static const int kDisplacement = 2 * kPointerSize; 8927 static const int kDisplacement = 2 * kPointerSize;
8313 8928
8314 // Check if the calling frame is an arguments adaptor frame. 8929 // Check if the calling frame is an arguments adaptor frame.
8315 Label adaptor_frame, try_allocate, runtime; 8930 Label adaptor_frame, try_allocate, runtime;
8316 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); 8931 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
8317 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); 8932 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
8318 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 8933 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
(...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after
8382 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); 8997 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
8383 8998
8384 // Copy the fixed array slots. 8999 // Copy the fixed array slots.
8385 Label loop; 9000 Label loop;
8386 __ bind(&loop); 9001 __ bind(&loop);
8387 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. 9002 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
8388 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); 9003 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
8389 __ add(Operand(edi), Immediate(kPointerSize)); 9004 __ add(Operand(edi), Immediate(kPointerSize));
8390 __ sub(Operand(edx), Immediate(kPointerSize)); 9005 __ sub(Operand(edx), Immediate(kPointerSize));
8391 __ dec(ecx); 9006 __ dec(ecx);
8392 __ test(ecx, Operand(ecx));
8393 __ j(not_zero, &loop); 9007 __ j(not_zero, &loop);
8394 9008
8395 // Return and remove the on-stack parameters. 9009 // Return and remove the on-stack parameters.
8396 __ bind(&done); 9010 __ bind(&done);
8397 __ ret(3 * kPointerSize); 9011 __ ret(3 * kPointerSize);
8398 9012
8399 // Do the runtime call to allocate the arguments object. 9013 // Do the runtime call to allocate the arguments object.
8400 __ bind(&runtime); 9014 __ bind(&runtime);
8401 __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3, 1); 9015 __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3, 1);
8402 } 9016 }
(...skipping 327 matching lines...) Expand 10 before | Expand all | Expand 10 after
8730 __ mov(eax, Operand(esp, kLastMatchInfoOffset)); 9344 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
8731 __ ret(4 * kPointerSize); 9345 __ ret(4 * kPointerSize);
8732 9346
8733 // Do the runtime call to execute the regexp. 9347 // Do the runtime call to execute the regexp.
8734 __ bind(&runtime); 9348 __ bind(&runtime);
8735 __ TailCallRuntime(ExternalReference(Runtime::kRegExpExec), 4, 1); 9349 __ TailCallRuntime(ExternalReference(Runtime::kRegExpExec), 4, 1);
8736 #endif // V8_NATIVE_REGEXP 9350 #endif // V8_NATIVE_REGEXP
8737 } 9351 }
8738 9352
8739 9353
     // Probes the number string cache for the string form of |object|.
     // On a hit, the cached string is left in |result| and execution falls
     // through (with the number_to_string_native counter bumped); on a miss,
     // or if |object| is not a smi, control jumps to |not_found|. Only smi
     // keys are currently handled. |result| doubles as a temporary; scratch1
     // and scratch2 are clobbered.
9354 void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
9355 Register object,
9356 Register result,
9357 Register scratch1,
9358 Register scratch2,
9359 bool object_is_smi,
9360 Label* not_found) {
9361 // Currently only lookup for smis. Check for smi if object is not known to be
9362 // a smi.
9363 if (!object_is_smi) {
9364 ASSERT(kSmiTag == 0);
9365 __ test(object, Immediate(kSmiTagMask));
9366 __ j(not_zero, not_found);
9367 }
9368 
9369 // Use of registers. Register result is used as a temporary.
9370 Register number_string_cache = result;
9371 Register mask = scratch1;
9372 Register scratch = scratch2;
9373 
9374 // Load the number string cache.
9375 ExternalReference roots_address = ExternalReference::roots_address();
9376 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
9377 __ mov(number_string_cache,
9378 Operand::StaticArray(scratch, times_pointer_size, roots_address));
9379 // Make the hash mask from the length of the number string cache. It
9380 // contains two elements (number and string) for each cache entry.
9381 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
9382 __ shr(mask, 1); // Divide length by two (length is not a smi).
9383 __ sub(Operand(mask), Immediate(1)); // Make mask.
9384 // Calculate the entry in the number string cache. The hash value in the
9385 // number string cache for smis is just the smi value.
9386 __ mov(scratch, object);
9387 __ SmiUntag(scratch);
9388 __ and_(scratch, Operand(mask));
9389 // Check if the entry is the smi we are looking for.
9390 __ cmp(object,
9391 FieldOperand(number_string_cache,
9392 scratch,
9393 times_twice_pointer_size,
9394 FixedArray::kHeaderSize));
9395 __ j(not_equal, not_found);
9396 
9397 // Get the result from the cache.
     // The string sits one pointer past its key in the {number, string} pair.
9398 __ mov(result,
9399 FieldOperand(number_string_cache,
9400 scratch,
9401 times_twice_pointer_size,
9402 FixedArray::kHeaderSize + kPointerSize));
9403 __ IncrementCounter(&Counters::number_to_string_native, 1);
9404 }
9405
9406
     // Converts the number at esp[4] to a string: fast path through the
     // number string cache, runtime call on a cache miss.
9407 void NumberToStringStub::Generate(MacroAssembler* masm) {
9408 Label runtime;
9409 
     // Load the argument (the number to convert) from the stack.
9410 __ mov(ebx, Operand(esp, kPointerSize));
9411 
9412 // Generate code to lookup number in the number string cache.
9413 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
9414 __ ret(1 * kPointerSize);
9415 
9416 __ bind(&runtime);
9417 // Handle number to string in the runtime system if not found in the cache.
9418 __ TailCallRuntime(ExternalReference(Runtime::kNumberToString), 1, 1);
9419 }
9420
9421
8740 void CompareStub::Generate(MacroAssembler* masm) { 9422 void CompareStub::Generate(MacroAssembler* masm) {
8741 Label call_builtin, done; 9423 Label call_builtin, done;
8742 9424
8743 // NOTICE! This code is only reached after a smi-fast-case check, so 9425 // NOTICE! This code is only reached after a smi-fast-case check, so
8744 // it is certain that at least one operand isn't a smi. 9426 // it is certain that at least one operand isn't a smi.
8745 9427
8746 if (cc_ == equal) { // Both strict and non-strict. 9428 if (cc_ == equal) { // Both strict and non-strict.
8747 Label slow; // Fallthrough label. 9429 Label slow; // Fallthrough label.
8748 // Equality is almost reflexive (everything but NaN), so start by testing 9430 // Equality is almost reflexive (everything but NaN), so start by testing
8749 // for "identity and not NaN". 9431 // for "identity and not NaN".
(...skipping 312 matching lines...) Expand 10 before | Expand all | Expand 10 after
9062 // Goto slow case if we do not have a function. 9744 // Goto slow case if we do not have a function.
9063 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); 9745 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
9064 __ j(not_equal, &slow, not_taken); 9746 __ j(not_equal, &slow, not_taken);
9065 9747
9066 // Fast-case: Just invoke the function. 9748 // Fast-case: Just invoke the function.
9067 ParameterCount actual(argc_); 9749 ParameterCount actual(argc_);
9068 __ InvokeFunction(edi, actual, JUMP_FUNCTION); 9750 __ InvokeFunction(edi, actual, JUMP_FUNCTION);
9069 9751
9070 // Slow-case: Non-function called. 9752 // Slow-case: Non-function called.
9071 __ bind(&slow); 9753 __ bind(&slow);
9754 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
9755 // of the original receiver from the call site).
9756 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
9072 __ Set(eax, Immediate(argc_)); 9757 __ Set(eax, Immediate(argc_));
9073 __ Set(ebx, Immediate(0)); 9758 __ Set(ebx, Immediate(0));
9074 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); 9759 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
9075 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)); 9760 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
9076 __ jmp(adaptor, RelocInfo::CODE_TARGET); 9761 __ jmp(adaptor, RelocInfo::CODE_TARGET);
9077 } 9762 }
9078 9763
9079 9764
9080 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { 9765 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
9081 // eax holds the exception. 9766 // eax holds the exception.
(...skipping 553 matching lines...) Expand 10 before | Expand all | Expand 10 after
9635 __ mov(eax, edx); 10320 __ mov(eax, edx);
9636 __ IncrementCounter(&Counters::string_add_native, 1); 10321 __ IncrementCounter(&Counters::string_add_native, 1);
9637 __ ret(2 * kPointerSize); 10322 __ ret(2 * kPointerSize);
9638 10323
9639 // Both strings are non-empty. 10324 // Both strings are non-empty.
9640 // eax: first string 10325 // eax: first string
9641 // ebx: length of first string 10326 // ebx: length of first string
9642 // ecx: length of second string 10327 // ecx: length of second string
9643 // edx: second string 10328 // edx: second string
9644 // Look at the length of the result of adding the two strings. 10329 // Look at the length of the result of adding the two strings.
9645 Label string_add_flat_result; 10330 Label string_add_flat_result, longer_than_two;
9646 __ bind(&both_not_zero_length); 10331 __ bind(&both_not_zero_length);
9647 __ add(ebx, Operand(ecx)); 10332 __ add(ebx, Operand(ecx));
9648 // Use the runtime system when adding two one character strings, as it 10333 // Use the runtime system when adding two one character strings, as it
9649 // contains optimizations for this specific case using the symbol table. 10334 // contains optimizations for this specific case using the symbol table.
9650 __ cmp(ebx, 2); 10335 __ cmp(ebx, 2);
9651 __ j(equal, &string_add_runtime); 10336 __ j(not_equal, &longer_than_two);
10337
10338 // Check that both strings are non-external ascii strings.
10339 __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
10340 &string_add_runtime);
10341
10342 // Get the two characters forming the sub string.
10343 __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
10344 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
10345
10346 // Try to lookup two character string in symbol table. If it is not found
10347 // just allocate a new one.
10348 Label make_two_character_string, make_flat_ascii_string;
10349 GenerateTwoCharacterSymbolTableProbe(masm, ebx, ecx, eax, edx, edi,
10350 &make_two_character_string);
10351 __ ret(2 * kPointerSize);
10352
10353 __ bind(&make_two_character_string);
10354 __ Set(ebx, Immediate(2));
10355 __ jmp(&make_flat_ascii_string);
10356
10357 __ bind(&longer_than_two);
9652 // Check if resulting string will be flat. 10358 // Check if resulting string will be flat.
9653 __ cmp(ebx, String::kMinNonFlatLength); 10359 __ cmp(ebx, String::kMinNonFlatLength);
9654 __ j(below, &string_add_flat_result); 10360 __ j(below, &string_add_flat_result);
9655 // Handle exceptionally long strings in the runtime system. 10361 // Handle exceptionally long strings in the runtime system.
9656 ASSERT((String::kMaxLength & 0x80000000) == 0); 10362 ASSERT((String::kMaxLength & 0x80000000) == 0);
9657 __ cmp(ebx, String::kMaxLength); 10363 __ cmp(ebx, String::kMaxLength);
9658 __ j(above, &string_add_runtime); 10364 __ j(above, &string_add_runtime);
9659 10365
9660 // If result is not supposed to be flat allocate a cons string object. If both 10366 // If result is not supposed to be flat allocate a cons string object. If both
9661 // strings are ascii the result is an ascii cons string. 10367 // strings are ascii the result is an ascii cons string.
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
9708 Label non_ascii_string_add_flat_result; 10414 Label non_ascii_string_add_flat_result;
9709 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); 10415 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
9710 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); 10416 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
9711 ASSERT(kStringEncodingMask == kAsciiStringTag); 10417 ASSERT(kStringEncodingMask == kAsciiStringTag);
9712 __ test(ecx, Immediate(kAsciiStringTag)); 10418 __ test(ecx, Immediate(kAsciiStringTag));
9713 __ j(zero, &non_ascii_string_add_flat_result); 10419 __ j(zero, &non_ascii_string_add_flat_result);
9714 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); 10420 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
9715 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); 10421 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
9716 __ test(ecx, Immediate(kAsciiStringTag)); 10422 __ test(ecx, Immediate(kAsciiStringTag));
9717 __ j(zero, &string_add_runtime); 10423 __ j(zero, &string_add_runtime);
10424
10425 __ bind(&make_flat_ascii_string);
9718 // Both strings are ascii strings. As they are short they are both flat. 10426 // Both strings are ascii strings. As they are short they are both flat.
10427 // ebx: length of resulting flat string
9719 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime); 10428 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
9720 // eax: result string 10429 // eax: result string
9721 __ mov(ecx, eax); 10430 __ mov(ecx, eax);
9722 // Locate first character of result. 10431 // Locate first character of result.
9723 __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 10432 __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
9724 // Load first argument and locate first character. 10433 // Load first argument and locate first character.
9725 __ mov(edx, Operand(esp, 2 * kPointerSize)); 10434 __ mov(edx, Operand(esp, 2 * kPointerSize));
9726 __ mov(edi, FieldOperand(edx, String::kLengthOffset)); 10435 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
9727 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 10436 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
9728 // eax: result string 10437 // eax: result string
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after
9865 __ mov_b(Operand(dest, 0), scratch); 10574 __ mov_b(Operand(dest, 0), scratch);
9866 __ add(Operand(src), Immediate(1)); 10575 __ add(Operand(src), Immediate(1));
9867 __ add(Operand(dest), Immediate(1)); 10576 __ add(Operand(dest), Immediate(1));
9868 __ sub(Operand(count), Immediate(1)); 10577 __ sub(Operand(count), Immediate(1));
9869 __ j(not_zero, &loop); 10578 __ j(not_zero, &loop);
9870 10579
9871 __ bind(&done); 10580 __ bind(&done);
9872 } 10581 }
9873 10582
9874 10583
      // Probes the symbol table for a two-character ascii symbol made from the
      // character codes in c1 (first char) and c2 (second char). On success
      // the symbol ends up in eax and execution falls through; otherwise
      // control jumps to |not_found|. All of c1, c2 and the scratch registers
      // are clobbered along the way (c1 is reused for the packed characters,
      // c2 for the symbol table).
10584 void StringStubBase::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
10585 Register c1,
10586 Register c2,
10587 Register scratch1,
10588 Register scratch2,
10589 Register scratch3,
10590 Label* not_found) {
10591 // Register scratch3 is the general scratch register in this function.
10592 Register scratch = scratch3;
10593 
10594 // Make sure that both characters are not digits as such strings have a
10595 // different hash algorithm. Don't try to look for these in the symbol table.
10596 Label not_array_index;
10597 __ mov(scratch, c1);
10598 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
10599 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
10600 __ j(above, &not_array_index);
10601 __ mov(scratch, c2);
10602 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
10603 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
10604 __ j(below_equal, not_found);
10605 
10606 __ bind(&not_array_index);
10607 // Calculate the two character string hash.
10608 Register hash = scratch1;
10609 GenerateHashInit(masm, hash, c1, scratch);
10610 GenerateHashAddCharacter(masm, hash, c2, scratch);
10611 GenerateHashGetHash(masm, hash, scratch);
10612 
10613 // Collect the two characters in a register.
10614 Register chars = c1;
10615 __ shl(c2, kBitsPerByte);
10616 __ or_(chars, Operand(c2));
10617 
10618 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
10619 // hash: hash of two character string.
10620 
10621 // Load the symbol table.
10622 Register symbol_table = c2;
10623 ExternalReference roots_address = ExternalReference::roots_address();
10624 __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
10625 __ mov(symbol_table,
10626 Operand::StaticArray(scratch, times_pointer_size, roots_address));
10627 
10628 // Calculate capacity mask from the symbol table capacity.
10629 Register mask = scratch2;
10630 static const int kCapacityOffset =
10631 FixedArray::kHeaderSize +
10632 SymbolTable::kCapacityIndex * kPointerSize;
10633 __ mov(mask, FieldOperand(symbol_table, kCapacityOffset));
10634 __ SmiUntag(mask);
10635 __ sub(Operand(mask), Immediate(1));
10636 
10637 // Registers
10638 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
10639 // hash: hash of two character string
10640 // symbol_table: symbol table
10641 // mask: capacity mask
10642 // scratch: -
10643 
10644 // Perform a number of probes in the symbol table.
      // The probe sequence is unrolled: kProbes attempts before giving up.
10645 static const int kProbes = 4;
10646 Label found_in_symbol_table;
10647 Label next_probe[kProbes], next_probe_pop_mask[kProbes];
10648 for (int i = 0; i < kProbes; i++) {
10649 // Calculate entry in symbol table.
10650 __ mov(scratch, hash);
10651 if (i > 0) {
10652 __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i)));
10653 }
10654 __ and_(scratch, Operand(mask));
10655 
10656 // Load the entry from the symbol table.
10657 Register candidate = scratch; // Scratch register contains candidate.
10658 ASSERT_EQ(1, SymbolTableShape::kEntrySize);
10659 static const int kFirstElementOffset =
10660 FixedArray::kHeaderSize +
10661 SymbolTable::kPrefixStartIndex * kPointerSize +
10662 SymbolTableShape::kPrefixSize * kPointerSize;
10663 __ mov(candidate,
10664 FieldOperand(symbol_table,
10665 scratch,
10666 times_pointer_size,
10667 kFirstElementOffset));
10668 
10669 // If entry is undefined no string with this hash can be found.
10670 __ cmp(candidate, Factory::undefined_value());
10671 __ j(equal, not_found);
10672 
10673 // If length is not 2 the string is not a candidate.
10674 __ cmp(FieldOperand(candidate, String::kLengthOffset), Immediate(2));
10675 __ j(not_equal, &next_probe[i]);
10676 
10677 // As we are out of registers save the mask on the stack and use that
10678 // register as a temporary.
10679 __ push(mask);
10680 Register temp = mask;
10681 
10682 // Check that the candidate is a non-external ascii string.
10683 __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
10684 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
10685 __ JumpIfInstanceTypeIsNotSequentialAscii(
10686 temp, temp, &next_probe_pop_mask[i]);
10687 
10688 // Check if the two characters match.
      // Load four bytes of string data and mask off the first two characters.
10689 __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
10690 __ and_(temp, 0x0000ffff);
10691 __ cmp(chars, Operand(temp));
10692 __ j(equal, &found_in_symbol_table);
10693 __ bind(&next_probe_pop_mask[i]);
10694 __ pop(mask);
10695 __ bind(&next_probe[i]);
10696 }
10697 
10698 // No matching 2 character string found by probing.
10699 __ jmp(not_found);
10700 
10701 // Scratch register contains result when we fall through to here.
10702 Register result = scratch;
10703 __ bind(&found_in_symbol_table);
10704 __ pop(mask); // Pop temporarily saved mask from the stack.
10705 if (!result.is(eax)) {
10706 __ mov(eax, result);
10707 }
10708 }
10709
10710
      // Seeds the running string-hash in |hash| from the first character
      // code in |character|. |scratch| is clobbered.
10711 void StringStubBase::GenerateHashInit(MacroAssembler* masm,
10712 Register hash,
10713 Register character,
10714 Register scratch) {
10715 // hash = character + (character << 10);
10716 __ mov(hash, character);
10717 __ shl(hash, 10);
10718 __ add(hash, Operand(character));
10719 // hash ^= hash >> 6;
10720 __ mov(scratch, hash);
10721 __ sar(scratch, 6);
10722 __ xor_(hash, Operand(scratch));
10723 }
10724
10725
      // Folds one more character code (|character|) into the running
      // string-hash in |hash|. |scratch| is clobbered.
10726 void StringStubBase::GenerateHashAddCharacter(MacroAssembler* masm,
10727 Register hash,
10728 Register character,
10729 Register scratch) {
10730 // hash += character;
10731 __ add(hash, Operand(character));
10732 // hash += hash << 10;
10733 __ mov(scratch, hash);
10734 __ shl(scratch, 10);
10735 __ add(hash, Operand(scratch));
10736 // hash ^= hash >> 6;
10737 __ mov(scratch, hash);
10738 __ sar(scratch, 6);
10739 __ xor_(hash, Operand(scratch));
10740 }
10741
10742
      // Finalizes the running string-hash in |hash| with the closing
      // mixing steps; a zero result is replaced by 27 so the final hash
      // is never 0. |scratch| is clobbered.
10743 void StringStubBase::GenerateHashGetHash(MacroAssembler* masm,
10744 Register hash,
10745 Register scratch) {
10746 // hash += hash << 3;
10747 __ mov(scratch, hash);
10748 __ shl(scratch, 3);
10749 __ add(hash, Operand(scratch));
10750 // hash ^= hash >> 11;
10751 __ mov(scratch, hash);
10752 __ sar(scratch, 11);
10753 __ xor_(hash, Operand(scratch));
10754 // hash += hash << 15;
10755 __ mov(scratch, hash);
10756 __ shl(scratch, 15);
10757 __ add(hash, Operand(scratch));
10758 
10759 // if (hash == 0) hash = 27;
10760 Label hash_not_zero;
10761 __ test(hash, Operand(hash));
10762 __ j(not_zero, &hash_not_zero);
10763 __ mov(hash, Immediate(27));
10764 __ bind(&hash_not_zero);
10765 }
10766
10767
9875 void SubStringStub::Generate(MacroAssembler* masm) { 10768 void SubStringStub::Generate(MacroAssembler* masm) {
9876 Label runtime; 10769 Label runtime;
9877 10770
9878 // Stack frame on entry. 10771 // Stack frame on entry.
9879 // esp[0]: return address 10772 // esp[0]: return address
9880 // esp[4]: to 10773 // esp[4]: to
9881 // esp[8]: from 10774 // esp[8]: from
9882 // esp[12]: string 10775 // esp[12]: string
9883 10776
9884 // Make sure first argument is a string. 10777 // Make sure first argument is a string.
9885 __ mov(eax, Operand(esp, 3 * kPointerSize)); 10778 __ mov(eax, Operand(esp, 3 * kPointerSize));
9886 ASSERT_EQ(0, kSmiTag); 10779 ASSERT_EQ(0, kSmiTag);
9887 __ test(eax, Immediate(kSmiTagMask)); 10780 __ test(eax, Immediate(kSmiTagMask));
9888 __ j(zero, &runtime); 10781 __ j(zero, &runtime);
9889 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx); 10782 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
9890 __ j(NegateCondition(is_string), &runtime); 10783 __ j(NegateCondition(is_string), &runtime);
9891 10784
9892 // eax: string 10785 // eax: string
9893 // ebx: instance type 10786 // ebx: instance type
9894 // Calculate length of sub string using the smi values. 10787 // Calculate length of sub string using the smi values.
9895 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // to 10788 Label result_longer_than_two;
10789 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
9896 __ test(ecx, Immediate(kSmiTagMask)); 10790 __ test(ecx, Immediate(kSmiTagMask));
9897 __ j(not_zero, &runtime); 10791 __ j(not_zero, &runtime);
9898 __ mov(edx, Operand(esp, 2 * kPointerSize)); // from 10792 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
9899 __ test(edx, Immediate(kSmiTagMask)); 10793 __ test(edx, Immediate(kSmiTagMask));
9900 __ j(not_zero, &runtime); 10794 __ j(not_zero, &runtime);
9901 __ sub(ecx, Operand(edx)); 10795 __ sub(ecx, Operand(edx));
9902 // Handle sub-strings of length 2 and less in the runtime system. 10796 // Special handling of sub-strings of length 1 and 2. One character strings
10797 // are handled in the runtime system (looked up in the single character
10798 // cache). Two character strings are looked for in the symbol cache.
9903 __ SmiUntag(ecx); // Result length is no longer smi. 10799 __ SmiUntag(ecx); // Result length is no longer smi.
9904 __ cmp(ecx, 2); 10800 __ cmp(ecx, 2);
9905 __ j(below_equal, &runtime); 10801 __ j(greater, &result_longer_than_two);
10802 __ j(less, &runtime);
9906 10803
10804 // Sub string of length 2 requested.
10805 // eax: string
10806 // ebx: instance type
10807 // ecx: sub string length (value is 2)
10808 // edx: from index (smi)
10809 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);
10810
10811 // Get the two characters forming the sub string.
10812 __ SmiUntag(edx); // From index is no longer smi.
10813 __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
10814 __ movzx_b(ecx,
10815 FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));
10816
10817 // Try to lookup two character string in symbol table.
10818 Label make_two_character_string;
10819 GenerateTwoCharacterSymbolTableProbe(masm, ebx, ecx, eax, edx, edi,
10820 &make_two_character_string);
10821 __ ret(2 * kPointerSize);
10822
10823 __ bind(&make_two_character_string);
10824 // Setup registers for allocating the two character string.
10825 __ mov(eax, Operand(esp, 3 * kPointerSize));
10826 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
10827 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
10828 __ Set(ecx, Immediate(2));
10829
10830 __ bind(&result_longer_than_two);
9907 // eax: string 10831 // eax: string
9908 // ebx: instance type 10832 // ebx: instance type
9909 // ecx: result string length 10833 // ecx: result string length
9910 // Check for flat ascii string 10834 // Check for flat ascii string
9911 Label non_ascii_flat; 10835 Label non_ascii_flat;
9912 __ and_(ebx, kStringRepresentationMask | kStringEncodingMask); 10836 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);
9913 __ cmp(ebx, kSeqStringTag | kAsciiStringTag);
9914 __ j(not_equal, &non_ascii_flat);
9915 10837
9916 // Allocate the result. 10838 // Allocate the result.
9917 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime); 10839 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);
9918 10840
9919 // eax: result string 10841 // eax: result string
9920 // ecx: result string length 10842 // ecx: result string length
9921 __ mov(edx, esi); // esi used by following code. 10843 __ mov(edx, esi); // esi used by following code.
9922 // Locate first character of result. 10844 // Locate first character of result.
9923 __ mov(edi, eax); 10845 __ mov(edi, eax);
9924 __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 10846 __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
(...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after
10095 11017
10096 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 11018 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
10097 // tagged as a small integer. 11019 // tagged as a small integer.
10098 __ bind(&runtime); 11020 __ bind(&runtime);
10099 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1); 11021 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1);
10100 } 11022 }
10101 11023
10102 #undef __ 11024 #undef __
10103 11025
10104 } } // namespace v8::internal 11026 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/ia32/codegen-ia32.h ('k') | src/ia32/debug-ia32.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698