Diff: src/arm/codegen-arm.cc

Issue 6685088: Merge isolates to bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
 // Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 114 matching lines...)
 }
 
 
 TypeInfoCodeGenState::~TypeInfoCodeGenState() {
   owner()->set_type_info(slot_, old_type_info_);
 }
 
 // -------------------------------------------------------------------------
 // CodeGenerator implementation
 
-int CodeGenerator::inlined_write_barrier_size_ = -1;
-
 CodeGenerator::CodeGenerator(MacroAssembler* masm)
     : deferred_(8),
       masm_(masm),
       info_(NULL),
       frame_(NULL),
       allocator_(NULL),
       cc_reg_(al),
       state_(NULL),
       loop_nesting_(0),
       type_info_(NULL),
(...skipping 153 matching lines...)
     frame_->CallRuntime(Runtime::kTraceEnter, 0);
     // Ignore the return value.
   }
 
   // Compile the body of the function in a vanilla state. Don't
   // bother compiling all the code if the scope has an illegal
   // redeclaration.
   if (!scope()->HasIllegalRedeclaration()) {
     Comment cmnt(masm_, "[ function body");
 #ifdef DEBUG
-    bool is_builtin = Bootstrapper::IsActive();
+    bool is_builtin = Isolate::Current()->bootstrapper()->IsActive();
     bool should_trace =
         is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
     if (should_trace) {
       frame_->CallRuntime(Runtime::kDebugTrace, 0);
       // Ignore the return value.
     }
 #endif
     VisitStatements(info->function()->body());
   }
 }
(...skipping 444 matching lines...)
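
The Bootstrapper hunk above is the template for almost every change in this file: services that used to be process-wide statics (Bootstrapper, CpuFeatures, Factory, Heap, Counters, StubCache, Builtins, the Top handler slot) become members reachable through the current Isolate. A minimal sketch of that shape, with illustrative class bodies (only the accessor names come from the diff):

    // Before the merge: one process-global service.
    //   bool is_builtin = Bootstrapper::IsActive();
    // After the merge: the service hangs off the thread's current isolate.
    //   bool is_builtin = Isolate::Current()->bootstrapper()->IsActive();
    class Bootstrapper {
     public:
      bool IsActive() const { return nesting_ > 0; }  // illustrative body
     private:
      int nesting_ = 0;
    };

    class Isolate {
     public:
      static Isolate* Current();  // per-thread lookup, e.g. via TLS
      Bootstrapper* bootstrapper() { return &bootstrapper_; }
     private:
      Bootstrapper bootstrapper_;
    };
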
 
   // Check if the value is a smi.
   __ cmp(tos, Operand(Smi::FromInt(0)));
 
   if (!known_smi) {
     false_target->Branch(eq);
     __ tst(tos, Operand(kSmiTagMask));
     true_target->Branch(eq);
 
     // Slow case.
-    if (CpuFeatures::IsSupported(VFP3)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
       CpuFeatures::Scope scope(VFP3);
       // Implements the slow case by using ToBooleanStub.
       // The ToBooleanStub takes a single argument, and
       // returns a non-zero value for true, or zero for false.
       // Both the argument value and the return value use the
       // register assigned to tos_
       ToBooleanStub stub(tos);
       frame_->CallStub(&stub, 0);
       // Convert the result in "tos" to a condition code.
       __ cmp(tos, Operand(0, RelocInfo::NONE));
(...skipping 176 matching lines...)
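
The smi checks in this hunk (`cmp tos, Smi::FromInt(0)` followed by `tst tos, kSmiTagMask`) lean on V8's 32-bit pointer tagging. A hedged sketch of the encoding, with constants as in 32-bit V8 and helper names invented here:

    #include <cstdint>

    // Small integers ("smis") are stored shifted left one bit with a 0 tag;
    // heap object pointers carry a 1 in that bit.
    constexpr intptr_t kSmiTagSize = 1;
    constexpr intptr_t kSmiTag = 0;
    constexpr intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;

    inline intptr_t SmiFromInt(int value) {   // Smi::FromInt analogue
      return static_cast<intptr_t>(value) << kSmiTagSize;
    }

    inline bool IsSmi(intptr_t tagged) {      // what `tst tos, #kSmiTagMask` tests
      return (tagged & kSmiTagMask) == kSmiTag;
    }
    // SmiFromInt(0) is the all-zero bit pattern, so the initial `cmp` against
    // it recognizes the one falsy smi before the generic smi test runs.
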
 
   __ b(cond, &non_smi_input_);
 }
 
 
 // For bit operations the result is always 32bits so we handle the case where
 // the result does not fit in a Smi without calling the generic stub.
 void DeferredInlineSmiOperation::JumpToAnswerOutOfRange(Condition cond) {
   ASSERT(Token::IsBitOp(op_));
 
-  if ((op_ == Token::SHR) && !CpuFeatures::IsSupported(VFP3)) {
+  if ((op_ == Token::SHR) &&
+      !Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     // >>> requires an unsigned to double conversion and the non VFP code
     // does not support this conversion.
     __ b(cond, entry_label());
   } else {
     __ b(cond, &answer_out_of_range_);
   }
 }
 
 
 // On entry the non-constant side of the binary operation is in tos_register_
(...skipping 83 matching lines...)
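
The `Token::SHR` carve-out exists because JavaScript's `>>>` is defined on unsigned 32-bit values: a result with the top bit set fits neither in a smi nor in an int32, so it must be boxed as a heap-number double, and only the VFP path has an unsigned-to-double conversion (the `vcvt_f64_u32` below). A standalone illustration of the range problem:

    #include <cstdint>
    #include <cstdio>

    int main() {
      int32_t input = -1;
      // JavaScript: -1 >>> 0 === 4294967295
      uint32_t shifted = static_cast<uint32_t>(input) >> 0;
      // A signed int32->double conversion would read this bit pattern as -1
      // again, so the value has to travel through an unsigned conversion.
      printf("%u -> %.1f\n", shifted, static_cast<double>(shifted));
      return 0;
    }
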
   if (answer_out_of_range_.is_linked()) {
     GenerateAnswerOutOfRange();
   }
 }
 
 
 // Convert and write the integer answer into heap_number.
 void DeferredInlineSmiOperation::WriteNonSmiAnswer(Register answer,
                                                    Register heap_number,
                                                    Register scratch) {
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     __ vmov(s0, answer);
     if (op_ == Token::SHR) {
       __ vcvt_f64_u32(d0, s0);
     } else {
       __ vcvt_f64_s32(d0, s0);
     }
     __ sub(scratch, heap_number, Operand(kHeapObjectTag));
     __ vstr(d0, scratch, HeapNumber::kValueOffset);
   } else {
(...skipping 49 matching lines...)
       }
       break;
     case Token::SHR:
       ASSERT(!reversed_);
       if (shift_value != 0) {
         __ mov(int32, Operand(int32, LSR, shift_value), SetCC);
       } else {
         // SHR is special because it is required to produce a positive answer.
         __ cmp(int32, Operand(0, RelocInfo::NONE));
       }
-      if (CpuFeatures::IsSupported(VFP3)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
         __ b(mi, &result_not_a_smi);
       } else {
         // Non VFP code cannot convert from unsigned to double, so fall back
         // to GenericBinaryOpStub.
         __ b(mi, entry_label());
       }
       break;
     case Token::SHL:
       ASSERT(!reversed_);
       if (shift_value != 0) {
(...skipping 560 matching lines...)
   // stack, as receiver and arguments, and calls x.
   // In the implementation comments, we call x the applicand
   // and y the receiver.
 
   ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
   ASSERT(arguments->IsArguments());
 
   // Load applicand.apply onto the stack. This will usually
   // give us a megamorphic load site. Not super, but it works.
   Load(applicand);
-  Handle<String> name = Factory::LookupAsciiSymbol("apply");
+  Handle<String> name = FACTORY->LookupAsciiSymbol("apply");
   frame_->Dup();
   frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
   frame_->EmitPush(r0);
 
   // Load the receiver and the existing arguments object onto the
   // expression stack. Avoid allocating the arguments object here.
   Load(receiver);
   LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
 
   // At this point the top two stack elements are probably in registers
(...skipping 42 matching lines...)
   STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
   STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
   __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
   __ b(lt, &build_args);
 
   // Check that applicand.apply is Function.prototype.apply.
   __ ldr(r0, MemOperand(sp, kPointerSize));
   __ JumpIfSmi(r0, &build_args);
   __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
   __ b(ne, &build_args);
-  Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
+  Handle<Code> apply_code(
+      Isolate::Current()->builtins()->builtin(Builtins::FunctionApply));
   __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
   __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ cmp(r1, Operand(apply_code));
   __ b(ne, &build_args);
 
   // Check that applicand is a function.
   __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
   __ JumpIfSmi(r1, &build_args);
   __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE);
   __ b(ne, &build_args);
(...skipping 194 matching lines...)
 
     ASSERT(frame_->height() == original_height);
     return;
   }
 
   ASSERT(!var->is_global());
 
   // If we have a function or a constant, we need to initialize the variable.
   Expression* val = NULL;
   if (node->mode() == Variable::CONST) {
-    val = new Literal(Factory::the_hole_value());
+    val = new Literal(FACTORY->the_hole_value());
   } else {
     val = node->fun();  // NULL if we don't have a function
   }
 
 
   if (val != NULL) {
     WriteBarrierCharacter wb_info =
         val->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
     if (val->AsLiteral() != NULL) wb_info = NEVER_NEWSPACE;
     // Set initial value.
(...skipping 836 matching lines...)
   // After shadowing stops, the original labels are unshadowed and the
   // LabelShadows represent the formerly shadowing labels.
   bool has_unlinks = false;
   for (int i = 0; i < shadows.length(); i++) {
     shadows[i]->StopShadowing();
     has_unlinks = has_unlinks || shadows[i]->is_linked();
   }
   function_return_is_shadowed_ = function_return_was_shadowed;
 
   // Get an external reference to the handler address.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
 
   // If we can fall off the end of the try block, unlink from try chain.
   if (has_valid_frame()) {
     // The next handler address is on top of the frame. Unlink from
     // the handler list and drop the rest of this handler from the
     // frame.
     STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
     frame_->EmitPop(r1);  // r0 can contain the return value.
     __ mov(r3, Operand(handler_address));
     __ str(r1, MemOperand(r3));
(...skipping 95 matching lines...)
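
Both try-block exits in this file do the same unlink: V8 keeps the active try-handlers as a singly linked list threaded through the machine stack, with the list head stored at a per-isolate handler address (the `ExternalReference` just changed from `Top::` to `Isolate::`). Popping the top-of-frame word back into that slot removes the newest handler. A hedged sketch, with an illustrative layout rather than the real `StackHandler`:

    // kNextOffset == 0 in the STATIC_ASSERT above: the link to the previous
    // handler is the first word of each handler record on the stack.
    struct StackHandler {
      StackHandler* next;
      // ...state, code, and frame-pointer words follow (kSize bytes total)
    };

    inline void UnlinkTopHandler(StackHandler** handler_address) {
      // Equivalent of `EmitPop(r1); str r1, [r3]`: the popped word is the
      // saved next pointer, which becomes the new list head.
      *handler_address = (*handler_address)->next;
    }
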
   // After shadowing stops, the original labels are unshadowed and the
   // LabelShadows represent the formerly shadowing labels.
   int nof_unlinks = 0;
   for (int i = 0; i < shadows.length(); i++) {
     shadows[i]->StopShadowing();
     if (shadows[i]->is_linked()) nof_unlinks++;
   }
   function_return_is_shadowed_ = function_return_was_shadowed;
 
   // Get an external reference to the handler address.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);
 
   // If we can fall off the end of the try block, unlink from the try
   // chain and set the state on the frame to FALLING.
   if (has_valid_frame()) {
     // The next handler address is on top of the frame.
     STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
     frame_->EmitPop(r1);
     __ mov(r3, Operand(handler_address));
     __ str(r1, MemOperand(r3));
     frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
(...skipping 134 matching lines...)
         function_info->strict_mode() ? kStrictMode : kNonStrictMode);
     frame_->EmitPush(Operand(function_info));
     frame_->SpillAll();
     frame_->CallStub(&stub, 1);
     frame_->EmitPush(r0);
   } else {
     // Create a new closure.
     frame_->EmitPush(cp);
     frame_->EmitPush(Operand(function_info));
     frame_->EmitPush(Operand(pretenure
-                             ? Factory::true_value()
-                             : Factory::false_value()));
+                             ? FACTORY->true_value()
+                             : FACTORY->false_value()));
     frame_->CallRuntime(Runtime::kNewClosure, 3);
     frame_->EmitPush(r0);
   }
 }
 
 
 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
(...skipping 479 matching lines...)
     Literal* key = property->key();
     Expression* value = property->value();
     switch (property->kind()) {
       case ObjectLiteral::Property::CONSTANT:
         break;
       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
         if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
         // else fall through
       case ObjectLiteral::Property::COMPUTED:
         if (key->handle()->IsSymbol()) {
-          Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+          Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+              Builtins::StoreIC_Initialize));
           Load(value);
           if (property->emit_store()) {
             frame_->PopToR0();
             // Fetch the object literal.
             frame_->SpillAllButCopyTOSToR1();
             __ mov(r2, Operand(key->handle()));
             frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
           } else {
             frame_->Drop();
           }
(...skipping 42 matching lines...)
 
   Register tos = frame_->GetTOSRegister();
   // Load the function of this activation.
   __ ldr(tos, frame_->Function());
   // Load the literals array of the function.
   __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset));
   frame_->EmitPush(tos);
   frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
   frame_->EmitPush(Operand(node->constant_elements()));
   int length = node->values()->length();
-  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+  if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
     frame_->CallStub(&stub, 3);
-    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r1, r2);
+    __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1, r1, r2);
   } else if (node->depth() > 1) {
     frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
     frame_->CallStub(&stub, 3);
   }
   frame_->EmitPush(r0);  // save the result
(...skipping 535 matching lines...)
     // Load the arguments.
     int arg_count = args->length();
     for (int i = 0; i < arg_count; i++) {
       Load(args->at(i));
     }
 
     VirtualFrame::SpilledScope spilled_scope(frame_);
     // Setup the name register and call the IC initialization code.
     __ mov(r2, Operand(var->name()));
     InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
-    Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
+    Handle<Code> stub =
+        ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
     CodeForSourcePosition(node->position());
     frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
                            arg_count + 1);
     __ ldr(cp, frame_->Context());
     frame_->EmitPush(r0);
 
   } else if (var != NULL && var->AsSlot() != NULL &&
              var->AsSlot()->type() == Slot::LOOKUP) {
     // ----------------------------------
     // JavaScript examples:
(...skipping 74 matching lines...)
       int arg_count = args->length();
       for (int i = 0; i < arg_count; i++) {
         Load(args->at(i));
       }
 
       VirtualFrame::SpilledScope spilled_scope(frame_);
       // Set the name register and call the IC initialization code.
       __ mov(r2, Operand(name));
       InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
       Handle<Code> stub =
-          StubCache::ComputeCallInitialize(arg_count, in_loop);
+          ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
       CodeForSourcePosition(node->position());
       frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
       __ ldr(cp, frame_->Context());
       frame_->EmitPush(r0);
     }
 
   } else {
     // -------------------------------------------
     // JavaScript example: 'array[index](1, 2, 3)'
     // -------------------------------------------
(...skipping 21 matching lines...)
 
       // Load the arguments.
       int arg_count = args->length();
       for (int i = 0; i < arg_count; i++) {
         Load(args->at(i));
       }
 
       // Load the key into r2 and call the IC initialization code.
       InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
       Handle<Code> stub =
-          StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+          ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count,
+                                                            in_loop);
       CodeForSourcePosition(node->position());
       frame_->SpillAll();
       __ ldr(r2, frame_->ElementAt(arg_count + 1));
       frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
       frame_->Drop();  // Drop the key still on the stack.
       __ ldr(cp, frame_->Context());
       frame_->EmitPush(r0);
     }
   }
 
(...skipping 44 matching lines...)
   VirtualFrame::SpilledScope spilled_scope(frame_);
 
   // Load the argument count into r0 and the function into r1 as per
   // calling convention.
   __ mov(r0, Operand(arg_count));
   __ ldr(r1, frame_->ElementAt(arg_count));
 
   // Call the construct call builtin that handles allocation and
   // constructor invocation.
   CodeForSourcePosition(node->position());
-  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructCall));
   frame_->CallCodeObject(ic, RelocInfo::CONSTRUCT_CALL, arg_count + 1);
   frame_->EmitPush(r0);
 
   ASSERT_EQ(original_height + 1, frame_->height());
 }
 
 
 void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
   Register scratch = VirtualFrame::scratch0();
   JumpTarget null, function, leave, non_function_constructor;
(...skipping 28 matching lines...)
   // The tos register now contains the constructor function. Grab the
   // instance class name from there.
   __ ldr(tos, FieldMemOperand(tos, JSFunction::kSharedFunctionInfoOffset));
   __ ldr(tos,
          FieldMemOperand(tos, SharedFunctionInfo::kInstanceClassNameOffset));
   frame_->EmitPush(tos);
   leave.Jump();
 
   // Functions have class 'Function'.
   function.Bind();
-  __ mov(tos, Operand(Factory::function_class_symbol()));
+  __ mov(tos, Operand(FACTORY->function_class_symbol()));
   frame_->EmitPush(tos);
   leave.Jump();
 
   // Objects with a non-function constructor have class 'Object'.
   non_function_constructor.Bind();
-  __ mov(tos, Operand(Factory::Object_symbol()));
+  __ mov(tos, Operand(FACTORY->Object_symbol()));
   frame_->EmitPush(tos);
   leave.Jump();
 
   // Non-JS objects have class null.
   null.Bind();
   __ LoadRoot(tos, Heap::kNullValueRootIndex);
   frame_->EmitPush(tos);
 
   // All done.
   leave.Bind();
(...skipping 80 matching lines...)
   cc_reg_ = eq;
 }
 
 
 // Generates the Math.pow method.
 void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 2);
   Load(args->at(0));
   Load(args->at(1));
 
-  if (!CpuFeatures::IsSupported(VFP3)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     frame_->CallRuntime(Runtime::kMath_pow, 2);
     frame_->EmitPush(r0);
   } else {
     CpuFeatures::Scope scope(VFP3);
     JumpTarget runtime, done;
     Label exponent_nonsmi, base_nonsmi, powi, not_minus_half, allocate_return;
 
     Register scratch1 = VirtualFrame::scratch0();
     Register scratch2 = VirtualFrame::scratch1();
 
(...skipping 133 matching lines...)
     frame_->EmitPush(base);
   }
 }
 
 
 // Generates the Math.sqrt method.
 void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 1);
   Load(args->at(0));
 
-  if (!CpuFeatures::IsSupported(VFP3)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     frame_->CallRuntime(Runtime::kMath_sqrt, 1);
     frame_->EmitPush(r0);
   } else {
     CpuFeatures::Scope scope(VFP3);
     JumpTarget runtime, done;
 
     Register scratch1 = VirtualFrame::scratch0();
     Register scratch2 = VirtualFrame::scratch1();
 
     // Get the value from the frame.
(...skipping 365 matching lines...)
   // Calculate location of the first key name.
   __ add(map_result_,
          map_result_,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag +
                  DescriptorArray::kFirstIndex * kPointerSize));
   // Loop through all the keys in the descriptor array. If one of these is the
   // symbol valueOf the result is false.
   Label entry, loop;
   // The use of ip to store the valueOf symbol asumes that it is not otherwise
   // used in the loop below.
-  __ mov(ip, Operand(Factory::value_of_symbol()));
+  __ mov(ip, Operand(FACTORY->value_of_symbol()));
   __ jmp(&entry);
   __ bind(&loop);
   __ ldr(scratch2_, MemOperand(map_result_, 0));
   __ cmp(scratch2_, ip);
   __ b(eq, &false_result);
   __ add(map_result_, map_result_, Operand(kPointerSize));
   __ bind(&entry);
   __ cmp(map_result_, Operand(scratch1_));
   __ b(ne, &loop);
 
(...skipping 182 matching lines...)
   __ bind(&slow_allocate_heapnumber);
   // Allocate a heap number.
   __ CallRuntime(Runtime::kNumberAlloc, 0);
   __ mov(r4, Operand(r0));
 
   __ bind(&heapnumber_allocated);
 
   // Convert 32 random bits in r0 to 0.(32 random bits) in a double
   // by computing:
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     __ PrepareCallCFunction(0, r1);
     __ CallCFunction(ExternalReference::random_uint32_function(), 0);
 
     CpuFeatures::Scope scope(VFP3);
     // 0x41300000 is the top half of 1.0 x 2^20 as a double.
     // Create this constant using mov/orr to avoid PC relative load.
     __ mov(r1, Operand(0x41000000));
     __ orr(r1, r1, Operand(0x300000));
     // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
     __ vmov(d7, r0, r1);
(...skipping 103 matching lines...)
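
The comment above compresses a nice trick: with high word 0x41300000, the 32 random bits land in the low mantissa of a double whose value is 2^20 + r * 2^-32, so subtracting 1.0 * 2^20 leaves a uniform value in [0, 1) with no integer conversion at all. A self-contained sketch of the same arithmetic:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    double RandomBitsToDouble(uint32_t random_bits) {
      // 0x41300000'xxxxxxxx encodes (1 + r/2^52) * 2^20 = 2^20 + r * 2^-32.
      uint64_t bits = (UINT64_C(0x41300000) << 32) | random_bits;
      double d;
      std::memcpy(&d, &bits, sizeof d);  // bit-cast, as the vmov does
      return d - 1048576.0;              // subtract 1.0 * 2^20
    }

    int main() {
      printf("%.10f\n", RandomBitsToDouble(0u));           // 0.0000000000
      printf("%.10f\n", RandomBitsToDouble(0xFFFFFFFFu));  // just under 1.0
      return 0;
    }
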
 }
 
 
 void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
   ASSERT_EQ(2, args->length());
 
   ASSERT_NE(NULL, args->at(0)->AsLiteral());
   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
 
   Handle<FixedArray> jsfunction_result_caches(
-      Top::global_context()->jsfunction_result_caches());
+      Isolate::Current()->global_context()->jsfunction_result_caches());
   if (jsfunction_result_caches->length() <= cache_id) {
     __ Abort("Attempt to use undefined cache.");
     frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
     return;
   }
 
   Load(args->at(1));
 
   frame_->PopToR1();
   frame_->SpillAll();
(...skipping 169 matching lines...)
   }
   Load(args->at(n_args + 1));  // function
   frame_->CallJSFunction(n_args);
   frame_->EmitPush(r0);
 }
 
 
 void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
   Load(args->at(0));
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     TranscendentalCacheStub stub(TranscendentalCache::SIN,
                                  TranscendentalCacheStub::TAGGED);
     frame_->SpillAllButCopyTOSToR0();
     frame_->CallStub(&stub, 1);
   } else {
     frame_->CallRuntime(Runtime::kMath_sin, 1);
   }
   frame_->EmitPush(r0);
 }
 
 
 void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
   Load(args->at(0));
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     TranscendentalCacheStub stub(TranscendentalCache::COS,
                                  TranscendentalCacheStub::TAGGED);
     frame_->SpillAllButCopyTOSToR0();
     frame_->CallStub(&stub, 1);
   } else {
     frame_->CallRuntime(Runtime::kMath_cos, 1);
   }
   frame_->EmitPush(r0);
 }
 
 
 void CodeGenerator::GenerateMathLog(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
   Load(args->at(0));
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     TranscendentalCacheStub stub(TranscendentalCache::LOG,
                                  TranscendentalCacheStub::TAGGED);
     frame_->SpillAllButCopyTOSToR0();
     frame_->CallStub(&stub, 1);
   } else {
     frame_->CallRuntime(Runtime::kMath_log, 1);
   }
   frame_->EmitPush(r0);
 }
 
(...skipping 84 matching lines...)
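
All three Math functions route through TranscendentalCacheStub when VFP3 is available. The stub's idea is a small cache keyed by the raw bits of the input double, so repeated sin/cos/log calls on the same value skip the expensive computation. A hedged sketch of that idea only; table size, hash, and names here are assumptions, not V8's actual layout:

    #include <cmath>
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    class TranscendentalCache {
     public:
      double Sin(double x) {
        uint64_t bits;
        std::memcpy(&bits, &x, sizeof bits);   // key on the exact bit pattern
        size_t slot = static_cast<size_t>(bits ^ (bits >> 32)) % kSize;
        if (!valid_[slot] || keys_[slot] != bits) {  // miss: compute and fill
          valid_[slot] = true;
          keys_[slot] = bits;
          values_[slot] = std::sin(x);
        }
        return values_[slot];
      }

     private:
      static const size_t kSize = 512;  // illustrative direct-mapped table
      bool valid_[kSize] = {};
      uint64_t keys_[kSize] = {};
      double values_[kSize] = {};
    };
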
   int original_height = frame_->height();
 #endif
   if (CheckForInlineRuntimeCall(node)) {
     ASSERT((has_cc() && frame_->height() == original_height) ||
            (!has_cc() && frame_->height() == original_height + 1));
     return;
   }
 
   ZoneList<Expression*>* args = node->arguments();
   Comment cmnt(masm_, "[ CallRuntime");
-  Runtime::Function* function = node->function();
+  const Runtime::Function* function = node->function();
 
   if (function == NULL) {
     // Prepare stack for calling JS runtime function.
     // Push the builtins object found in the current global object.
     Register scratch = VirtualFrame::scratch0();
     __ ldr(scratch, GlobalObjectOperand());
     Register builtins = frame_->GetTOSRegister();
     __ ldr(builtins, FieldMemOperand(scratch, GlobalObject::kBuiltinsOffset));
     frame_->EmitPush(builtins);
   }
 
   // Push the arguments ("left-to-right").
   int arg_count = args->length();
   for (int i = 0; i < arg_count; i++) {
     Load(args->at(i));
   }
 
   VirtualFrame::SpilledScope spilled_scope(frame_);
 
   if (function == NULL) {
     // Call the JS runtime function.
     __ mov(r2, Operand(node->name()));
     InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
-    Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
+    Handle<Code> stub =
+        ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
     frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
     __ ldr(cp, frame_->Context());
     frame_->EmitPush(r0);
   } else {
     // Call the C runtime function.
     frame_->CallRuntime(function, arg_count);
     frame_->EmitPush(r0);
   }
   ASSERT_EQ(original_height + 1, frame_->height());
 }
(...skipping 514 matching lines...)
       (right->AsLiteral() != NULL &&
        right->AsLiteral()->handle()->IsString())) {
     Handle<String> check(String::cast(*right->AsLiteral()->handle()));
 
     // Load the operand, move it to a register.
     LoadTypeofExpression(operation->expression());
     Register tos = frame_->PopToRegister();
 
     Register scratch = VirtualFrame::scratch0();
 
-    if (check->Equals(Heap::number_symbol())) {
+    if (check->Equals(HEAP->number_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       true_target()->Branch(eq);
       __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
       __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
       __ cmp(tos, ip);
       cc_reg_ = eq;
 
-    } else if (check->Equals(Heap::string_symbol())) {
+    } else if (check->Equals(HEAP->string_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);
 
       __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
 
       // It can be an undetectable string object.
       __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
       __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
       __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
       false_target()->Branch(eq);
 
       __ ldrb(scratch, FieldMemOperand(tos, Map::kInstanceTypeOffset));
       __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
       cc_reg_ = lt;
 
-    } else if (check->Equals(Heap::boolean_symbol())) {
+    } else if (check->Equals(HEAP->boolean_symbol())) {
       __ LoadRoot(ip, Heap::kTrueValueRootIndex);
       __ cmp(tos, ip);
       true_target()->Branch(eq);
       __ LoadRoot(ip, Heap::kFalseValueRootIndex);
       __ cmp(tos, ip);
       cc_reg_ = eq;
 
-    } else if (check->Equals(Heap::undefined_symbol())) {
+    } else if (check->Equals(HEAP->undefined_symbol())) {
       __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
       __ cmp(tos, ip);
       true_target()->Branch(eq);
 
       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);
 
       // It can be an undetectable object.
       __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
       __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
       __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
       __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
 
       cc_reg_ = eq;
 
-    } else if (check->Equals(Heap::function_symbol())) {
+    } else if (check->Equals(HEAP->function_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);
       Register map_reg = scratch;
       __ CompareObjectType(tos, map_reg, tos, JS_FUNCTION_TYPE);
       true_target()->Branch(eq);
       // Regular expressions are callable so typeof == 'function'.
       __ CompareInstanceType(map_reg, tos, JS_REGEXP_TYPE);
       cc_reg_ = eq;
 
-    } else if (check->Equals(Heap::object_symbol())) {
+    } else if (check->Equals(HEAP->object_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);
 
       __ LoadRoot(ip, Heap::kNullValueRootIndex);
       __ cmp(tos, ip);
       true_target()->Branch(eq);
 
       Register map_reg = scratch;
       __ CompareObjectType(tos, map_reg, tos, JS_REGEXP_TYPE);
       false_target()->Branch(eq);
(...skipping 141 matching lines...)
 void DeferredReferenceGetNamedValue::Generate() {
 #ifdef DEBUG
   int expected_height = frame_state()->frame()->height();
 #endif
   VirtualFrame copied_frame(*frame_state()->frame());
   copied_frame.SpillAll();
 
   Register scratch1 = VirtualFrame::scratch0();
   Register scratch2 = VirtualFrame::scratch1();
   ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
-  __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2);
-  __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2);
+  __ DecrementCounter(COUNTERS->named_load_inline(), 1, scratch1, scratch2);
+  __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1,
+                      scratch1, scratch2);
 
   // Ensure receiver in r0 and name in r2 to match load ic calling convention.
   __ Move(r0, receiver_);
   __ mov(r2, Operand(name_));
 
   // The rest of the instructions in the deferred code must be together.
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
-    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+        Builtins::LoadIC_Initialize));
     RelocInfo::Mode mode = is_contextual_
         ? RelocInfo::CODE_TARGET_CONTEXT
         : RelocInfo::CODE_TARGET;
     __ Call(ic, mode);
     // We must mark the code just after the call with the correct marker.
     MacroAssembler::NopMarkerTypes code_marker;
     if (is_contextual_) {
       code_marker = is_dont_delete_
           ? MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE
           : MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT;
(...skipping 41 matching lines...)
 // in r0.
 void DeferredReferenceGetKeyedValue::Generate() {
   ASSERT((key_.is(r0) && receiver_.is(r1)) ||
          (key_.is(r1) && receiver_.is(r0)));
 
   VirtualFrame copied_frame(*frame_state()->frame());
   copied_frame.SpillAll();
 
   Register scratch1 = VirtualFrame::scratch0();
   Register scratch2 = VirtualFrame::scratch1();
-  __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
-  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, scratch1, scratch2);
+  __ DecrementCounter(COUNTERS->keyed_load_inline(), 1, scratch1, scratch2);
+  __ IncrementCounter(COUNTERS->keyed_load_inline_miss(),
+                      1, scratch1, scratch2);
 
   // Ensure key in r0 and receiver in r1 to match keyed load ic calling
   // convention.
   if (key_.is(r1)) {
     __ Swap(r0, r1, ip);
   }
 
   // The rest of the instructions in the deferred code must be together.
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
     // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
-    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+        Builtins::KeyedLoadIC_Initialize));
     __ Call(ic, RelocInfo::CODE_TARGET);
     // The call must be followed by a nop instruction to indicate that the
     // keyed load has been inlined.
     __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);
 
     // Now go back to the frame that we entered with. This will not overwrite
     // the receiver or key registers since they were not in use when we came
     // in. The instructions emitted by this merge are skipped over by the
     // inline load patching mechanism when looking for the branch instruction
     // that tells it where the code to patch is.
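
The marker mentioned in the comments above is what lets the patcher classify a call site after the fact: the instruction slot immediately following the IC call encodes a marker rather than a plain nop, and the patching code reads it back. A toy model of the idea, with an invented encoding that is neither ARM's nor V8's:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    enum Marker { NORMAL_NOP = 0, PROPERTY_ACCESS_INLINED = 1 };

    struct ToyAssembler {
      std::vector<uint32_t> code;                  // one slot per instruction
      void EmitCall() { code.push_back(0xCA11u); }
      void MarkCode(Marker m) { code.push_back(0xF000u | m); }  // marker "nop"
    };

    // The patcher inspects the slot after the call to decide what was inlined.
    Marker MarkerAfterCall(const std::vector<uint32_t>& code,
                           std::size_t call_index) {
      return static_cast<Marker>(code[call_index + 1] & 0xFu);
    }

    int main() {
      ToyAssembler masm;
      masm.EmitCall();                             // the IC call
      masm.MarkCode(PROPERTY_ACCESS_INLINED);      // classifies this call site
      assert(MarkerAfterCall(masm.code, 0) == PROPERTY_ACCESS_INLINED);
    }
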
(...skipping 26 matching lines...)
   Register value_;
   Register key_;
   Register receiver_;
   StrictModeFlag strict_mode_;
 };
 
 
 void DeferredReferenceSetKeyedValue::Generate() {
   Register scratch1 = VirtualFrame::scratch0();
   Register scratch2 = VirtualFrame::scratch1();
-  __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2);
-  __ IncrementCounter(
-      &Counters::keyed_store_inline_miss, 1, scratch1, scratch2);
+  __ DecrementCounter(COUNTERS->keyed_store_inline(), 1, scratch1, scratch2);
+  __ IncrementCounter(COUNTERS->keyed_store_inline_miss(),
+                      1, scratch1, scratch2);
 
   // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
   // calling convention.
   if (value_.is(r1)) {
     __ Swap(r0, r1, ip);
   }
   ASSERT(receiver_.is(r2));
 
   // The rest of the instructions in the deferred code must be together.
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
     // Call keyed store IC. It has the arguments value, key and receiver in r0,
     // r1 and r2.
-    Handle<Code> ic(Builtins::builtin(
+    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
         (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
                                       : Builtins::KeyedStoreIC_Initialize));
     __ Call(ic, RelocInfo::CODE_TARGET);
     // The call must be followed by a nop instruction to indicate that the
     // keyed store has been inlined.
     __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);
 
     // Block the constant pool for one more instruction after leaving this
     // constant pool block scope to include the branch instruction ending the
     // deferred code.
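
The comment above describes the one subtlety of BlockConstPoolScope: the ARM assembler may dump pending pool constants into the instruction stream between any two instructions, and a dump inside a fixed-layout region would shift the instructions the patcher expects to find. The scope suppresses dumps while it is open, and BlockConstPoolFor(1) extends the suppression over the branch that the deferred-code machinery emits just after the scope closes. A toy model of that interaction (invented types, not V8's assembler):

    #include <cassert>
    #include <string>
    #include <vector>

    struct ToyAssembler {
      std::vector<std::string> stream;
      int pending_constants = 0;
      int blocked_for = 0;  // instructions during which no pool may be dumped

      void Emit(const std::string& instr) {
        stream.push_back(instr);
        if (blocked_for > 0) {
          --blocked_for;
        } else if (pending_constants > 0) {
          stream.push_back("<constant pool>");  // would break a patched region
          pending_constants = 0;
        }
      }
      void BlockConstPoolFor(int n) { blocked_for += n; }
    };

    int main() {
      ToyAssembler masm;
      masm.pending_constants = 1;
      masm.BlockConstPoolFor(2);     // cover the last fixed instruction + branch
      masm.Emit("ldr");              // no pool dumped here...
      masm.Emit("b deferred_exit");  // ...nor after the closing branch
      masm.Emit("nop");              // first unprotected slot: pool may follow
      assert(masm.stream.back() == "<constant pool>");
    }
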
(...skipping 34 matching lines...)
 
   // Ensure value in r0, receiver in r1 to match store ic calling
   // convention.
   ASSERT(value_.is(r0) && receiver_.is(r1));
   __ mov(r2, Operand(name_));
 
   // The rest of the instructions in the deferred code must be together.
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
     // Call keyed store IC. It has the arguments value, key and receiver in r0,
     // r1 and r2.
-    Handle<Code> ic(Builtins::builtin(
+    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
         (strict_mode_ == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
                                       : Builtins::StoreIC_Initialize));
     __ Call(ic, RelocInfo::CODE_TARGET);
     // The call must be followed by a nop instruction to indicate that the
     // named store has been inlined.
     __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);
 
     // Go back to the frame we entered with. The instructions
     // generated by this merge are skipped over by the inline store
     // patching mechanism when looking for the branch instruction that
     // tells it where the code to patch is.
     copied_frame.MergeTo(frame_state()->frame());
 
     // Block the constant pool for one more instruction after leaving this
     // constant pool block scope to include the branch instruction ending the
     // deferred code.
     __ BlockConstPoolFor(1);
   }
 }
 
 
 // Consumes the top of stack (the receiver) and pushes the result instead.
 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
   bool contextual_load_in_builtin =
       is_contextual &&
-      (Bootstrapper::IsActive() ||
+      (ISOLATE->bootstrapper()->IsActive() ||
        (!info_->closure().is_null() && info_->closure()->IsBuiltin()));
 
   if (scope()->is_global_scope() ||
       loop_nesting() == 0 ||
       contextual_load_in_builtin) {
     Comment cmnt(masm(), "[ Load from named Property");
     // Setup the name register and call load IC.
     frame_->CallLoadIC(name,
                        is_contextual
                            ? RelocInfo::CODE_TARGET_CONTEXT
                            : RelocInfo::CODE_TARGET);
     frame_->EmitPush(r0);  // Push answer.
   } else {
     // Inline the in-object property case.
     Comment cmnt(masm(), is_contextual
                              ? "[ Inlined contextual property load"
                              : "[ Inlined named property load");
 
     // Counter will be decremented in the deferred code. Placed here to avoid
     // having it in the instruction stream below where patching will occur.
     if (is_contextual) {
-      __ IncrementCounter(&Counters::named_load_global_inline, 1,
+      __ IncrementCounter(COUNTERS->named_load_global_inline(), 1,
                           frame_->scratch0(), frame_->scratch1());
     } else {
-      __ IncrementCounter(&Counters::named_load_inline, 1,
+      __ IncrementCounter(COUNTERS->named_load_inline(), 1,
                           frame_->scratch0(), frame_->scratch1());
     }
 
     // The following instructions are the inlined load of an in-object property.
     // Parts of this code is patched, so the exact instructions generated needs
     // to be fixed. Therefore the instruction pool is blocked when generating
     // this code
 
     // Load the receiver from the stack.
     Register receiver = frame_->PopToRegister();
(...skipping 12 matching lines...)
         LookupResult lookup;
         global_object->LocalLookupRealNamedProperty(*name, &lookup);
         if (lookup.IsProperty() && lookup.type() == NORMAL) {
           ASSERT(lookup.holder() == global_object);
           ASSERT(global_object->property_dictionary()->ValueAt(
               lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
           is_dont_delete = lookup.IsDontDelete();
         }
       }
       if (is_dont_delete) {
-        __ IncrementCounter(&Counters::dont_delete_hint_hit, 1,
+        __ IncrementCounter(COUNTERS->dont_delete_hint_hit(), 1,
                             frame_->scratch0(), frame_->scratch1());
       }
     }
 
     { Assembler::BlockConstPoolScope block_const_pool(masm_);
       if (!is_contextual) {
         // Check that the receiver is a heap object.
         __ tst(receiver, Operand(kSmiTagMask));
         deferred->Branch(eq);
       }
(...skipping 16 matching lines...)
       Label check_inlined_codesize;
       masm_->bind(&check_inlined_codesize);
 #endif
 
       Register scratch = VirtualFrame::scratch0();
       Register scratch2 = VirtualFrame::scratch1();
 
       // Check the map. The null map used below is patched by the inline cache
       // code. Therefore we can't use a LoadRoot call.
       __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
-      __ mov(scratch2, Operand(Factory::null_value()));
+      __ mov(scratch2, Operand(FACTORY->null_value()));
       __ cmp(scratch, scratch2);
       deferred->Branch(ne);
 
       if (is_contextual) {
 #ifdef DEBUG
         InlinedNamedLoadInstructions += 1;
 #endif
         // Load the (initially invalid) cell and get its value.
-        masm()->mov(receiver, Operand(Factory::null_value()));
+        masm()->mov(receiver, Operand(FACTORY->null_value()));
         __ ldr(receiver,
                FieldMemOperand(receiver, JSGlobalPropertyCell::kValueOffset));
 
         deferred->set_is_dont_delete(is_dont_delete);
 
         if (!is_dont_delete) {
 #ifdef DEBUG
           InlinedNamedLoadInstructions += 3;
 #endif
-          __ cmp(receiver, Operand(Factory::the_hole_value()));
+          __ cmp(receiver, Operand(FACTORY->the_hole_value()));
           deferred->Branch(eq);
         } else if (FLAG_debug_code) {
 #ifdef DEBUG
           InlinedNamedLoadInstructions += 3;
 #endif
-          __ cmp(receiver, Operand(Factory::the_hole_value()));
+          __ cmp(receiver, Operand(FACTORY->the_hole_value()));
           __ b(&check_the_hole, eq);
           __ bind(&cont);
         }
       } else {
         // Initially use an invalid index. The index will be patched by the
         // inline cache code.
         __ ldr(receiver, MemOperand(receiver, 0));
       }
 
       // Make sure that the expected number of instructions are generated.
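
Every DEBUG block above that binds check_inlined_codesize guards the same invariant: the inline-cache patcher assumes an exact instruction count for the inlined sequence, so debug builds bind a label before the sequence and assert the count afterwards. A self-contained toy version of that pattern (invented types):

    #include <cassert>
    #include <cstddef>
    #include <vector>

    struct Label { std::size_t pos = 0; };

    struct ToyMasm {
      std::vector<int> instructions;
      void bind(Label* l) { l->pos = instructions.size(); }
      void Emit() { instructions.push_back(0); }
      std::size_t InstructionsGeneratedSince(const Label* l) const {
        return instructions.size() - l->pos;
      }
    };

    int main() {
      const std::size_t kExpectedInstructions = 3;  // what the patcher assumes
      ToyMasm masm;
      Label check_inlined_codesize;
      masm.bind(&check_inlined_codesize);
      masm.Emit(); masm.Emit(); masm.Emit();        // the patchable sequence
      assert(masm.InstructionsGeneratedSince(&check_inlined_codesize) ==
             kExpectedInstructions);
    }
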
(...skipping 47 matching lines...)
       Register scratch1 = VirtualFrame::scratch1();
 
       // Check the map. Initially use an invalid map to force a
       // failure. The map check will be patched in the runtime system.
       __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
 
 #ifdef DEBUG
       Label check_inlined_codesize;
       masm_->bind(&check_inlined_codesize);
 #endif
-      __ mov(scratch0, Operand(Factory::null_value()));
+      __ mov(scratch0, Operand(FACTORY->null_value()));
       __ cmp(scratch0, scratch1);
       deferred->Branch(ne);
 
       int offset = 0;
       __ str(value, MemOperand(receiver, offset));
 
       // Update the write barrier and record its size. We do not use
       // the RecordWrite macro here because we want the offset
       // addition instruction first to make it easy to patch.
       Label record_write_start, record_write_done;
       __ bind(&record_write_start);
       // Add offset into the object.
       __ add(scratch0, receiver, Operand(offset));
       // Test that the object is not in the new space. We cannot set
       // region marks for new space pages.
       __ InNewSpace(receiver, scratch1, eq, &record_write_done);
       // Record the actual write.
       __ RecordWriteHelper(receiver, scratch0, scratch1);
       __ bind(&record_write_done);
       // Clobber all input registers when running with the debug-code flag
       // turned on to provoke errors.
       if (FLAG_debug_code) {
         __ mov(receiver, Operand(BitCast<int32_t>(kZapValue)));
         __ mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
         __ mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
       }
       // Check that this is the first inlined write barrier or that
       // this inlined write barrier has the same size as all the other
       // inlined write barriers.
-      ASSERT((inlined_write_barrier_size_ == -1) ||
-             (inlined_write_barrier_size_ ==
+      ASSERT((Isolate::Current()->inlined_write_barrier_size() == -1) ||
+             (Isolate::Current()->inlined_write_barrier_size() ==
               masm()->InstructionsGeneratedSince(&record_write_start)));
-      inlined_write_barrier_size_ =
-          masm()->InstructionsGeneratedSince(&record_write_start);
+      Isolate::Current()->set_inlined_write_barrier_size(
+          masm()->InstructionsGeneratedSince(&record_write_start));
 
       // Make sure that the expected number of instructions are generated.
       ASSERT_EQ(GetInlinedNamedStoreInstructionsAfterPatch(),
                 masm()->InstructionsGeneratedSince(&check_inlined_codesize));
     }
     deferred->BindExit();
   }
   ASSERT_EQ(expected_height, frame()->height());
 }
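
The write-barrier bookkeeping in this hunk shows why the static CodeGenerator::inlined_write_barrier_size_ had to go: the "all inlined write barriers have the same size" invariant belongs to one isolate's generated code, so it moves behind Isolate accessors. A compilable sketch of the record-once, assert-equal-thereafter logic, with invented stand-ins mirroring the accessors above:

    #include <cassert>

    struct ToyIsolate {
      int inlined_write_barrier_size_ = -1;  // -1 means "not recorded yet"
      int inlined_write_barrier_size() const {
        return inlined_write_barrier_size_;
      }
      void set_inlined_write_barrier_size(int size) {
        inlined_write_barrier_size_ = size;
      }
    };

    void RecordBarrierSize(ToyIsolate* isolate, int generated) {
      // The first barrier records its size; every later one must match,
      // since later code treats the size as a constant (see the ASSERT above).
      assert(isolate->inlined_write_barrier_size() == -1 ||
             isolate->inlined_write_barrier_size() == generated);
      isolate->set_inlined_write_barrier_size(generated);
    }

    int main() {
      ToyIsolate isolate;
      RecordBarrierSize(&isolate, 5);
      RecordBarrierSize(&isolate, 5);  // OK: same size as the first one
    }
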
 
 
 void CodeGenerator::EmitKeyedLoad() {
   if (loop_nesting() == 0) {
     Comment cmnt(masm_, "[ Load from keyed property");
     frame_->CallKeyedLoadIC();
   } else {
     // Inline the keyed load.
     Comment cmnt(masm_, "[ Inlined load from keyed property");
 
     // Counter will be decremented in the deferred code. Placed here to avoid
     // having it in the instruction stream below where patching will occur.
-    __ IncrementCounter(&Counters::keyed_load_inline, 1,
+    __ IncrementCounter(COUNTERS->keyed_load_inline(), 1,
                         frame_->scratch0(), frame_->scratch1());
 
     // Load the key and receiver from the stack.
     bool key_is_known_smi = frame_->KnownSmiAt(0);
     Register key = frame_->PopToRegister();
     Register receiver = frame_->PopToRegister(key);
 
     // The deferred code expects key and receiver in registers.
     DeferredReferenceGetKeyedValue* deferred =
         new DeferredReferenceGetKeyedValue(key, receiver);
(...skipping 16 matching lines...)
     // Check that the key is a smi.
     if (!key_is_known_smi) {
       __ tst(key, Operand(kSmiTagMask));
       deferred->Branch(ne);
     }
 
 #ifdef DEBUG
     Label check_inlined_codesize;
     masm_->bind(&check_inlined_codesize);
 #endif
-    __ mov(scratch2, Operand(Factory::null_value()));
+    __ mov(scratch2, Operand(FACTORY->null_value()));
     __ cmp(scratch1, scratch2);
     deferred->Branch(ne);
 
     // Get the elements array from the receiver.
     __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
     __ AssertFastElements(scratch1);
 
     // Check that key is within bounds. Use unsigned comparison to handle
     // negative keys.
     __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
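
The "unsigned comparison to handle negative keys" trick above performs two checks with one compare: reinterpreted as an unsigned word, any negative key is larger than any valid non-negative length, so a single unsigned compare rejects both negative and too-large keys. A worked example:

    #include <cassert>
    #include <cstdint>

    bool InBounds(int32_t key, int32_t length) {
      // Single unsigned compare: negative keys wrap to huge values and fail.
      return static_cast<uint32_t>(key) < static_cast<uint32_t>(length);
    }

    int main() {
      assert(InBounds(3, 10));    // ordinary in-range key
      assert(!InBounds(10, 10));  // one past the end
      assert(!InBounds(-1, 10));  // -1 wraps to 0xFFFFFFFF and is rejected
    }
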
(...skipping 29 matching lines...)
   if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
     // Inline the keyed store.
     Comment cmnt(masm_, "[ Inlined store to keyed property");
 
     Register scratch1 = VirtualFrame::scratch0();
     Register scratch2 = VirtualFrame::scratch1();
     Register scratch3 = r3;
 
     // Counter will be decremented in the deferred code. Placed here to avoid
     // having it in the instruction stream below where patching will occur.
-    __ IncrementCounter(&Counters::keyed_store_inline, 1,
+    __ IncrementCounter(COUNTERS->keyed_store_inline(), 1,
                         scratch1, scratch2);
 
 
     // Load the value, key and receiver from the stack.
     bool value_is_harmless = frame_->KnownSmiAt(0);
     if (wb_info == NEVER_NEWSPACE) value_is_harmless = true;
     bool key_is_smi = frame_->KnownSmiAt(1);
     Register value = frame_->PopToRegister();
     Register key = frame_->PopToRegister(value);
     VirtualFrame::SpilledScope spilled(frame_);
(...skipping 60 matching lines...)
 #ifdef DEBUG
     Label check_inlined_codesize;
     masm_->bind(&check_inlined_codesize);
 #endif
 
     // Read the fixed array map from the constant pool (not from the root
     // array) so that the value can be patched. When debugging, we patch this
     // comparison to always fail so that we will hit the IC call in the
     // deferred code which will allow the debugger to break for fast case
     // stores.
-    __ mov(scratch3, Operand(Factory::fixed_array_map()));
+    __ mov(scratch3, Operand(FACTORY->fixed_array_map()));
     __ cmp(scratch2, scratch3);
     deferred->Branch(ne);
 
     // Check that the key is within bounds. Both the key and the length of
     // the JSArray are smis (because the fixed array check above ensures the
     // elements are in fast case). Use unsigned comparison to handle negative
     // keys.
     __ ldr(scratch3, FieldMemOperand(receiver, JSArray::kLengthOffset));
     __ cmp(scratch3, key);
     deferred->Branch(ls);  // Unsigned less equal.
(...skipping 149 matching lines...)
 
     default:
       UNREACHABLE();
   }
 }
 
 
 const char* GenericBinaryOpStub::GetName() {
   if (name_ != NULL) return name_;
   const int len = 100;
-  name_ = Bootstrapper::AllocateAutoDeletedArray(len);
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(len);
   if (name_ == NULL) return "OOM";
   const char* op_name = Token::Name(op_);
   const char* overwrite_name;
   switch (mode_) {
     case NO_OVERWRITE: overwrite_name = "Alloc"; break;
     case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
     case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
     default: overwrite_name = "UnknownOverwrite"; break;
   }
 
   OS::SNPrintF(Vector<char>(name_, len),
                "GenericBinaryOpStub_%s_%s%s_%s",
                op_name,
                overwrite_name,
                specialized_on_rhs_ ? "_ConstantRhs" : "",
                BinaryOpIC::GetName(runtime_operands_type_));
   return name_;
 }
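
For reference, the OS::SNPrintF call above assembles stub names of the following shape. The exact op and operand-type spellings come from Token::Name and BinaryOpIC::GetName, so the strings in this standalone illustration are placeholders, not values taken from V8:

    #include <cassert>
    #include <cstdio>
    #include <cstring>

    int main() {
      char name[100];
      std::snprintf(name, sizeof(name), "GenericBinaryOpStub_%s_%s%s_%s",
                    "ADD",           // op_name (placeholder)
                    "Alloc",         // overwrite_name for NO_OVERWRITE
                    "_ConstantRhs",  // present only when specialized on the rhs
                    "Uninit");       // runtime operand type (placeholder)
      assert(std::strcmp(name,
                         "GenericBinaryOpStub_ADD_Alloc_ConstantRhs_Uninit")
             == 0);
    }
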
 
-
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM