Chromium Code Reviews

Unified Diff: src/arm/codegen-arm.cc

Issue 6880010: Merge (7265, 7271] from bleeding_edge to experimental/gc branch.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 8 months ago
@@ -1 +1 @@
 // Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 293 matching lines...)
@@ -304 +304 @@
     frame_->CallRuntime(Runtime::kTraceEnter, 0);
     // Ignore the return value.
   }

   // Compile the body of the function in a vanilla state. Don't
   // bother compiling all the code if the scope has an illegal
   // redeclaration.
   if (!scope()->HasIllegalRedeclaration()) {
     Comment cmnt(masm_, "[ function body");
 #ifdef DEBUG
-    bool is_builtin = Bootstrapper::IsActive();
+    bool is_builtin = Isolate::Current()->bootstrapper()->IsActive();
     bool should_trace =
         is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
     if (should_trace) {
       frame_->CallRuntime(Runtime::kDebugTrace, 0);
       // Ignore the return value.
     }
 #endif
     VisitStatements(info->function()->body());
   }
 }
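Note: the one-line change in this hunk is the pattern that recurs through the whole merge: services that used to be static singletons (Bootstrapper, Factory, Heap, Top, Counters, CpuFeatures, StubCache, Builtins) become members of an Isolate, reached via Isolate::Current() or via the FACTORY/HEAP/ISOLATE/COUNTERS convenience macros, so that several independent V8 instances can coexist in one process. A minimal sketch of the accessor shape, with hypothetical names rather than V8's real declarations:

#include <cassert>

// Hypothetical sketch of the per-isolate accessor pattern; names and
// members are illustrative, not V8's actual API.
class Bootstrapper {
 public:
  bool IsActive() const { return active_; }
 private:
  bool active_ = false;
};

class Isolate {
 public:
  static Isolate* Current() { return &current_; }  // thread-local in practice
  Bootstrapper* bootstrapper() { return &bootstrapper_; }
 private:
  Bootstrapper bootstrapper_;
  static Isolate current_;
};

Isolate Isolate::current_;

int main() {
  // Old: Bootstrapper::IsActive();  New, as in the hunk above:
  bool is_builtin = Isolate::Current()->bootstrapper()->IsActive();
  assert(!is_builtin);
  return 0;
}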
(...skipping 444 matching lines...)
@@ -769 +769 @@

   // Check if the value is a smi.
   __ cmp(tos, Operand(Smi::FromInt(0)));

   if (!known_smi) {
     false_target->Branch(eq);
     __ tst(tos, Operand(kSmiTagMask));
     true_target->Branch(eq);

     // Slow case.
-    if (CpuFeatures::IsSupported(VFP3)) {
+    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
       CpuFeatures::Scope scope(VFP3);
       // Implements the slow case by using ToBooleanStub.
       // The ToBooleanStub takes a single argument, and
       // returns a non-zero value for true, or zero for false.
       // Both the argument value and the return value use the
       // register assigned to tos_
       ToBooleanStub stub(tos);
       frame_->CallStub(&stub, 0);
       // Convert the result in "tos" to a condition code.
       __ cmp(tos, Operand(0, RelocInfo::NONE));
(...skipping 176 matching lines...)
@@ -966 +966 @@

   __ b(cond, &non_smi_input_);
 }


 // For bit operations the result is always 32bits so we handle the case where
 // the result does not fit in a Smi without calling the generic stub.
 void DeferredInlineSmiOperation::JumpToAnswerOutOfRange(Condition cond) {
   ASSERT(Token::IsBitOp(op_));

-  if ((op_ == Token::SHR) && !CpuFeatures::IsSupported(VFP3)) {
+  if ((op_ == Token::SHR) &&
+      !Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     // >>> requires an unsigned to double conversion and the non VFP code
     // does not support this conversion.
     __ b(cond, entry_label());
   } else {
     __ b(cond, &answer_out_of_range_);
   }
 }


 // On entry the non-constant side of the binary operation is in tos_register_
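Note: the comment in this hunk states the constraint precisely: >>> produces an unsigned 32-bit result, and boxing one that does not fit in a Smi needs an unsigned-to-double conversion that only the VFP3 path implements. A standalone illustration of why the range matters (the 31-bit smi payload of 32-bit V8 is an assumption stated here, not taken from the diff):

#include <cstdint>
#include <cstdio>

// On 32-bit V8 a smi carries a 31-bit signed payload, so the largest smi
// is 2^30 - 1, while x >>> y can be as large as 2^32 - 1; anything above
// the smi limit must be boxed as a heap number, i.e. stored as a double.
const uint32_t kMaxSmi = (1u << 30) - 1;

bool FitsInSmi(uint32_t shr_result) { return shr_result <= kMaxSmi; }

int main() {
  uint32_t v = UINT32_C(0xFFFFFFFF);  // JS: (-1) >>> 0 == 4294967295
  std::printf("fits in smi: %d, boxed as double: %.0f\n",
              FitsInSmi(v) ? 1 : 0, static_cast<double>(v));
  return 0;
}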
(...skipping 83 matching lines...)
@@ -1070 +1071 @@
   if (answer_out_of_range_.is_linked()) {
     GenerateAnswerOutOfRange();
   }
 }


 // Convert and write the integer answer into heap_number.
 void DeferredInlineSmiOperation::WriteNonSmiAnswer(Register answer,
                                                    Register heap_number,
                                                    Register scratch) {
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     CpuFeatures::Scope scope(VFP3);
     __ vmov(s0, answer);
     if (op_ == Token::SHR) {
       __ vcvt_f64_u32(d0, s0);
     } else {
       __ vcvt_f64_s32(d0, s0);
     }
     __ sub(scratch, heap_number, Operand(kHeapObjectTag));
     __ vstr(d0, scratch, HeapNumber::kValueOffset);
   } else {
(...skipping 49 matching lines...)
@@ -1140 +1141 @@
       }
       break;
     case Token::SHR:
       ASSERT(!reversed_);
       if (shift_value != 0) {
         __ mov(int32, Operand(int32, LSR, shift_value), SetCC);
       } else {
         // SHR is special because it is required to produce a positive answer.
         __ cmp(int32, Operand(0, RelocInfo::NONE));
       }
-      if (CpuFeatures::IsSupported(VFP3)) {
+      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
         __ b(mi, &result_not_a_smi);
       } else {
         // Non VFP code cannot convert from unsigned to double, so fall back
         // to GenericBinaryOpStub.
         __ b(mi, entry_label());
       }
       break;
     case Token::SHL:
       ASSERT(!reversed_);
       if (shift_value != 0) {
(...skipping 560 matching lines...)
@@ -1721 +1722 @@
   // stack, as receiver and arguments, and calls x.
   // In the implementation comments, we call x the applicand
   // and y the receiver.

   ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
   ASSERT(arguments->IsArguments());

   // Load applicand.apply onto the stack. This will usually
   // give us a megamorphic load site. Not super, but it works.
   Load(applicand);
-  Handle<String> name = Factory::LookupAsciiSymbol("apply");
+  Handle<String> name = FACTORY->LookupAsciiSymbol("apply");
   frame_->Dup();
   frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
   frame_->EmitPush(r0);

   // Load the receiver and the existing arguments object onto the
   // expression stack. Avoid allocating the arguments object here.
   Load(receiver);
   LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);

   // At this point the top two stack elements are probably in registers
(...skipping 42 matching lines...)
@@ -1784 +1785 @@
   STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
   STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
   __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
   __ b(lt, &build_args);

   // Check that applicand.apply is Function.prototype.apply.
   __ ldr(r0, MemOperand(sp, kPointerSize));
   __ JumpIfSmi(r0, &build_args);
   __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
   __ b(ne, &build_args);
-  Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
+  Handle<Code> apply_code(
+      Isolate::Current()->builtins()->builtin(Builtins::FunctionApply));
   __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
   __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ cmp(r1, Operand(apply_code));
   __ b(ne, &build_args);

   // Check that applicand is a function.
   __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
   __ JumpIfSmi(r1, &build_args);
   __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE);
   __ b(ne, &build_args);
(...skipping 194 matching lines...)
@@ -1999 +2001 @@

     ASSERT(frame_->height() == original_height);
     return;
   }

   ASSERT(!var->is_global());

   // If we have a function or a constant, we need to initialize the variable.
   Expression* val = NULL;
   if (node->mode() == Variable::CONST) {
-    val = new Literal(Factory::the_hole_value());
+    val = new Literal(FACTORY->the_hole_value());
   } else {
     val = node->fun();  // NULL if we don't have a function
   }


   if (val != NULL) {
     WriteBarrierCharacter wb_info =
         val->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
     if (val->AsLiteral() != NULL) wb_info = NEVER_NEWSPACE;
     // Set initial value.
(...skipping 836 matching lines...)
@@ -2856 +2858 @@
   // After shadowing stops, the original labels are unshadowed and the
   // LabelShadows represent the formerly shadowing labels.
   bool has_unlinks = false;
   for (int i = 0; i < shadows.length(); i++) {
     shadows[i]->StopShadowing();
     has_unlinks = has_unlinks || shadows[i]->is_linked();
   }
   function_return_is_shadowed_ = function_return_was_shadowed;

   // Get an external reference to the handler address.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);

   // If we can fall off the end of the try block, unlink from try chain.
   if (has_valid_frame()) {
     // The next handler address is on top of the frame. Unlink from
     // the handler list and drop the rest of this handler from the
     // frame.
     STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
     frame_->EmitPop(r1);  // r0 can contain the return value.
     __ mov(r3, Operand(handler_address));
     __ str(r1, MemOperand(r3));
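Note: the unlink sequence above leans on the STATIC_ASSERT that the handler's next pointer is the first word of the handler on the frame: one pop yields the next handler's address, it is stored back through handler_address, and the rest of the handler is simply dropped. A sketch of the layout being assumed (field names hypothetical):

#include <cstddef>

// Hypothetical mirror of the stack handler layout the unlink code relies
// on; with next at offset 0 a single pop retrieves the chain link, and the
// remaining kSize - kPointerSize bytes can be dropped wholesale.
struct StackHandler {
  StackHandler* next;  // kNextOffset == 0: topmost word on the frame
  void* state;         // remaining fields are illustrative
  void* context;
  void* fp;
  void* pc;
};

static_assert(offsetof(StackHandler, next) == 0,
              "the unlink code pops the next pointer straight off the frame");

int main() { return 0; }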
(...skipping 95 matching lines...)
@@ -2972 +2974 @@
   // After shadowing stops, the original labels are unshadowed and the
   // LabelShadows represent the formerly shadowing labels.
   int nof_unlinks = 0;
   for (int i = 0; i < shadows.length(); i++) {
     shadows[i]->StopShadowing();
     if (shadows[i]->is_linked()) nof_unlinks++;
   }
   function_return_is_shadowed_ = function_return_was_shadowed;

   // Get an external reference to the handler address.
-  ExternalReference handler_address(Top::k_handler_address);
+  ExternalReference handler_address(Isolate::k_handler_address);

   // If we can fall off the end of the try block, unlink from the try
   // chain and set the state on the frame to FALLING.
   if (has_valid_frame()) {
     // The next handler address is on top of the frame.
     STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
     frame_->EmitPop(r1);
     __ mov(r3, Operand(handler_address));
     __ str(r1, MemOperand(r3));
     frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
(...skipping 134 matching lines...)
@@ -3127 +3129 @@
         function_info->strict_mode() ? kStrictMode : kNonStrictMode);
     frame_->EmitPush(Operand(function_info));
     frame_->SpillAll();
     frame_->CallStub(&stub, 1);
     frame_->EmitPush(r0);
   } else {
     // Create a new closure.
     frame_->EmitPush(cp);
     frame_->EmitPush(Operand(function_info));
     frame_->EmitPush(Operand(pretenure
-                             ? Factory::true_value()
-                             : Factory::false_value()));
+                             ? FACTORY->true_value()
+                             : FACTORY->false_value()));
     frame_->CallRuntime(Runtime::kNewClosure, 3);
     frame_->EmitPush(r0);
   }
 }


 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
(...skipping 481 matching lines...)
@@ -3630 +3632 @@
     Literal* key = property->key();
     Expression* value = property->value();
     switch (property->kind()) {
       case ObjectLiteral::Property::CONSTANT:
         break;
       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
         if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
         // else fall through
       case ObjectLiteral::Property::COMPUTED:
         if (key->handle()->IsSymbol()) {
-          Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+          Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+              Builtins::StoreIC_Initialize));
           Load(value);
           if (property->emit_store()) {
             frame_->PopToR0();
             // Fetch the object literal.
             frame_->SpillAllButCopyTOSToR1();
             __ mov(r2, Operand(key->handle()));
             frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
           } else {
             frame_->Drop();
           }
(...skipping 42 matching lines...)
@@ -3693 +3696 @@

   Register tos = frame_->GetTOSRegister();
   // Load the function of this activation.
   __ ldr(tos, frame_->Function());
   // Load the literals array of the function.
   __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset));
   frame_->EmitPush(tos);
   frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
   frame_->EmitPush(Operand(node->constant_elements()));
   int length = node->values()->length();
-  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+  if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
     frame_->CallStub(&stub, 3);
-    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r1, r2);
+    __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1, r1, r2);
   } else if (node->depth() > 1) {
     frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
     frame_->CallStub(&stub, 3);
   }
   frame_->EmitPush(r0);  // save the result
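Note: this hunk chooses among four cloning strategies for an array literal: boilerplate with a copy-on-write map takes the COW stub (and bumps the cow_arrays_created_stub counter), nested literals and oversized ones go to the runtime, and small flat ones take the element-copying stub. The same decision tree restated as a plain function (the constant is a stand-in for FastCloneShallowArrayStub::kMaximumClonedLength, whose actual value is not shown in this diff):

#include <cstdio>

const int kMaximumClonedLength = 8;  // stand-in value, see note above

enum Strategy { COW_STUB, RUNTIME_DEEP, RUNTIME_SHALLOW, CLONE_STUB };

Strategy ChooseArrayLiteralStrategy(bool has_cow_map, int depth, int length) {
  if (has_cow_map) return COW_STUB;    // share the COW backing store
  if (depth > 1) return RUNTIME_DEEP;  // nested literal: full runtime call
  if (length > kMaximumClonedLength) return RUNTIME_SHALLOW;
  return CLONE_STUB;                   // small, flat literal: fast stub
}

int main() {
  std::printf("%d\n", ChooseArrayLiteralStrategy(false, 1, 4));  // CLONE_STUB
  return 0;
}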
(...skipping 537 matching lines...)
@@ -4255 +4258 @@
     // Load the arguments.
     int arg_count = args->length();
     for (int i = 0; i < arg_count; i++) {
       Load(args->at(i));
     }

     VirtualFrame::SpilledScope spilled_scope(frame_);
     // Setup the name register and call the IC initialization code.
     __ mov(r2, Operand(var->name()));
     InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
-    Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
+    Handle<Code> stub =
+        ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
     CodeForSourcePosition(node->position());
     frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
                            arg_count + 1);
     __ ldr(cp, frame_->Context());
     frame_->EmitPush(r0);

   } else if (var != NULL && var->AsSlot() != NULL &&
              var->AsSlot()->type() == Slot::LOOKUP) {
     // ----------------------------------
     // JavaScript examples:
(...skipping 74 matching lines...)
@@ -4350 +4354 @@
       int arg_count = args->length();
       for (int i = 0; i < arg_count; i++) {
         Load(args->at(i));
       }

       VirtualFrame::SpilledScope spilled_scope(frame_);
       // Set the name register and call the IC initialization code.
       __ mov(r2, Operand(name));
       InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
       Handle<Code> stub =
-          StubCache::ComputeCallInitialize(arg_count, in_loop);
+          ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
       CodeForSourcePosition(node->position());
       frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
       __ ldr(cp, frame_->Context());
       frame_->EmitPush(r0);
     }

   } else {
     // -------------------------------------------
     // JavaScript example: 'array[index](1, 2, 3)'
     // -------------------------------------------
(...skipping 21 matching lines...)
@@ -4392 +4396 @@

       // Load the arguments.
       int arg_count = args->length();
       for (int i = 0; i < arg_count; i++) {
         Load(args->at(i));
       }

       // Load the key into r2 and call the IC initialization code.
       InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
       Handle<Code> stub =
-          StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+          ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count,
+                                                            in_loop);
       CodeForSourcePosition(node->position());
       frame_->SpillAll();
       __ ldr(r2, frame_->ElementAt(arg_count + 1));
       frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
       frame_->Drop();  // Drop the key still on the stack.
       __ ldr(cp, frame_->Context());
       frame_->EmitPush(r0);
     }
   }

(...skipping 44 matching lines...)
@@ -4457 +4462 @@
   VirtualFrame::SpilledScope spilled_scope(frame_);

   // Load the argument count into r0 and the function into r1 as per
   // calling convention.
   __ mov(r0, Operand(arg_count));
   __ ldr(r1, frame_->ElementAt(arg_count));

   // Call the construct call builtin that handles allocation and
   // constructor invocation.
   CodeForSourcePosition(node->position());
-  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
+  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+      Builtins::JSConstructCall));
   frame_->CallCodeObject(ic, RelocInfo::CONSTRUCT_CALL, arg_count + 1);
   frame_->EmitPush(r0);

   ASSERT_EQ(original_height + 1, frame_->height());
 }


 void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
   Register scratch = VirtualFrame::scratch0();
   JumpTarget null, function, leave, non_function_constructor;
(...skipping 28 matching lines...)
@@ -4506 +4512 @@
   // The tos register now contains the constructor function. Grab the
   // instance class name from there.
   __ ldr(tos, FieldMemOperand(tos, JSFunction::kSharedFunctionInfoOffset));
   __ ldr(tos,
          FieldMemOperand(tos, SharedFunctionInfo::kInstanceClassNameOffset));
   frame_->EmitPush(tos);
   leave.Jump();

   // Functions have class 'Function'.
   function.Bind();
-  __ mov(tos, Operand(Factory::function_class_symbol()));
+  __ mov(tos, Operand(FACTORY->function_class_symbol()));
   frame_->EmitPush(tos);
   leave.Jump();

   // Objects with a non-function constructor have class 'Object'.
   non_function_constructor.Bind();
-  __ mov(tos, Operand(Factory::Object_symbol()));
+  __ mov(tos, Operand(FACTORY->Object_symbol()));
   frame_->EmitPush(tos);
   leave.Jump();

   // Non-JS objects have class null.
   null.Bind();
   __ LoadRoot(tos, Heap::kNullValueRootIndex);
   frame_->EmitPush(tos);

   // All done.
   leave.Bind();
(...skipping 82 matching lines...)
@@ -4615 +4621 @@
   cc_reg_ = eq;
 }


 // Generates the Math.pow method.
 void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 2);
   Load(args->at(0));
   Load(args->at(1));

-  if (!CpuFeatures::IsSupported(VFP3)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     frame_->CallRuntime(Runtime::kMath_pow, 2);
     frame_->EmitPush(r0);
   } else {
     CpuFeatures::Scope scope(VFP3);
     JumpTarget runtime, done;
     Label exponent_nonsmi, base_nonsmi, powi, not_minus_half, allocate_return;

     Register scratch1 = VirtualFrame::scratch0();
     Register scratch2 = VirtualFrame::scratch1();

(...skipping 133 matching lines...)
@@ -4769 +4775 @@
     frame_->EmitPush(base);
   }
 }


 // Generates the Math.sqrt method.
 void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 1);
   Load(args->at(0));

-  if (!CpuFeatures::IsSupported(VFP3)) {
+  if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     frame_->CallRuntime(Runtime::kMath_sqrt, 1);
     frame_->EmitPush(r0);
   } else {
     CpuFeatures::Scope scope(VFP3);
     JumpTarget runtime, done;

     Register scratch1 = VirtualFrame::scratch0();
     Register scratch2 = VirtualFrame::scratch1();

     // Get the value from the frame.
(...skipping 365 matching lines...)
@@ -5155 +5161 @@
   // Calculate location of the first key name.
   __ add(map_result_,
          map_result_,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag +
                  DescriptorArray::kFirstIndex * kPointerSize));
   // Loop through all the keys in the descriptor array. If one of these is the
   // symbol valueOf the result is false.
   Label entry, loop;
   // The use of ip to store the valueOf symbol asumes that it is not otherwise
   // used in the loop below.
-  __ mov(ip, Operand(Factory::value_of_symbol()));
+  __ mov(ip, Operand(FACTORY->value_of_symbol()));
   __ jmp(&entry);
   __ bind(&loop);
   __ ldr(scratch2_, MemOperand(map_result_, 0));
   __ cmp(scratch2_, ip);
   __ b(eq, &false_result);
   __ add(map_result_, map_result_, Operand(kPointerSize));
   __ bind(&entry);
   __ cmp(map_result_, Operand(scratch1_));
   __ b(ne, &loop);

(...skipping 182 matching lines...)
@@ -5358 +5364 @@
   __ bind(&slow_allocate_heapnumber);
   // Allocate a heap number.
   __ CallRuntime(Runtime::kNumberAlloc, 0);
   __ mov(r4, Operand(r0));

   __ bind(&heapnumber_allocated);

   // Convert 32 random bits in r0 to 0.(32 random bits) in a double
   // by computing:
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     __ PrepareCallCFunction(0, r1);
     __ CallCFunction(ExternalReference::random_uint32_function(), 0);

     CpuFeatures::Scope scope(VFP3);
     // 0x41300000 is the top half of 1.0 x 2^20 as a double.
     // Create this constant using mov/orr to avoid PC relative load.
     __ mov(r1, Operand(0x41000000));
     __ orr(r1, r1, Operand(0x300000));
     // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
     __ vmov(d7, r0, r1);
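Note: the comments in this hunk fully determine the arithmetic: 0x41300000 in the high word sets the exponent to 2^20 with a zero upper mantissa, the 32 random bits fill the low mantissa, so the double equals 2^20 + random * 2^-32, and subtracting 1.0 x 2^20 leaves a uniform value in [0, 1). A host-side check of that identity (standalone, not V8 code):

#include <cstdint>
#include <cstdio>
#include <cstring>

// Builds the same bit pattern the emitted code assembles in d7 and applies
// the subtraction: 1.(20 zeros)(32 random bits) x 2^20 - 1.0 x 2^20.
double RandomBitsToDouble(uint32_t random_bits) {
  uint64_t bits = (static_cast<uint64_t>(0x41300000u) << 32) | random_bits;
  double d;
  std::memcpy(&d, &bits, sizeof d);  // type-pun without aliasing issues
  return d - 1048576.0;              // 1048576.0 == 1.0 x 2^20
}

int main() {
  std::printf("%.10f\n", RandomBitsToDouble(0u));           // 0.0000000000
  std::printf("%.10f\n", RandomBitsToDouble(0xFFFFFFFFu));  // just below 1.0
  return 0;
}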
(...skipping 103 matching lines...)
@@ -5482 +5488 @@
 }


 void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
   ASSERT_EQ(2, args->length());

   ASSERT_NE(NULL, args->at(0)->AsLiteral());
   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

   Handle<FixedArray> jsfunction_result_caches(
-      Top::global_context()->jsfunction_result_caches());
+      Isolate::Current()->global_context()->jsfunction_result_caches());
   if (jsfunction_result_caches->length() <= cache_id) {
     __ Abort("Attempt to use undefined cache.");
     frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
     return;
   }

   Load(args->at(1));

   frame_->PopToR1();
   frame_->SpillAll();
(...skipping 171 matching lines...)
@@ -5674 +5680 @@
   }
   Load(args->at(n_args + 1));  // function
   frame_->CallJSFunction(n_args);
   frame_->EmitPush(r0);
 }


 void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
   Load(args->at(0));
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     TranscendentalCacheStub stub(TranscendentalCache::SIN,
                                  TranscendentalCacheStub::TAGGED);
     frame_->SpillAllButCopyTOSToR0();
     frame_->CallStub(&stub, 1);
   } else {
     frame_->CallRuntime(Runtime::kMath_sin, 1);
   }
   frame_->EmitPush(r0);
 }


 void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
   Load(args->at(0));
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     TranscendentalCacheStub stub(TranscendentalCache::COS,
                                  TranscendentalCacheStub::TAGGED);
     frame_->SpillAllButCopyTOSToR0();
     frame_->CallStub(&stub, 1);
   } else {
     frame_->CallRuntime(Runtime::kMath_cos, 1);
   }
   frame_->EmitPush(r0);
 }


 void CodeGenerator::GenerateMathLog(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
   Load(args->at(0));
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
     TranscendentalCacheStub stub(TranscendentalCache::LOG,
                                  TranscendentalCacheStub::TAGGED);
     frame_->SpillAllButCopyTOSToR0();
     frame_->CallStub(&stub, 1);
   } else {
     frame_->CallRuntime(Runtime::kMath_log, 1);
   }
   frame_->EmitPush(r0);
 }

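Note: GenerateMathSin, GenerateMathCos, and GenerateMathLog in this hunk differ only in the TranscendentalCache entry and runtime function they name; each takes the tagged TranscendentalCacheStub when VFP3 is available and the runtime call otherwise. The shared shape, factored purely for illustration (the printf stands in for code emission; this is not a proposed V8 refactor):

#include <cstdio>

enum CacheKind { SIN, COS, LOG };

// One dispatch shared by all three generators; only the cache kind and
// the runtime fallback differ per math function.
void GenerateMathUnary(CacheKind kind, bool vfp3_supported) {
  if (vfp3_supported) {
    std::printf("emit TranscendentalCacheStub(%d, TAGGED) call\n", kind);
  } else {
    std::printf("emit runtime call for kind %d\n", kind);
  }
}

int main() {
  GenerateMathUnary(SIN, true);   // VFP3: cached stub path
  GenerateMathUnary(LOG, false);  // no VFP3: runtime path
  return 0;
}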
(...skipping 84 matching lines...)
@@ -5809 +5815 @@
   int original_height = frame_->height();
 #endif
   if (CheckForInlineRuntimeCall(node)) {
     ASSERT((has_cc() && frame_->height() == original_height) ||
            (!has_cc() && frame_->height() == original_height + 1));
     return;
   }

   ZoneList<Expression*>* args = node->arguments();
   Comment cmnt(masm_, "[ CallRuntime");
-  Runtime::Function* function = node->function();
+  const Runtime::Function* function = node->function();

   if (function == NULL) {
     // Prepare stack for calling JS runtime function.
     // Push the builtins object found in the current global object.
     Register scratch = VirtualFrame::scratch0();
     __ ldr(scratch, GlobalObjectOperand());
     Register builtins = frame_->GetTOSRegister();
     __ ldr(builtins, FieldMemOperand(scratch, GlobalObject::kBuiltinsOffset));
     frame_->EmitPush(builtins);
   }

   // Push the arguments ("left-to-right").
   int arg_count = args->length();
   for (int i = 0; i < arg_count; i++) {
     Load(args->at(i));
   }

   VirtualFrame::SpilledScope spilled_scope(frame_);

   if (function == NULL) {
     // Call the JS runtime function.
     __ mov(r2, Operand(node->name()));
     InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
-    Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
+    Handle<Code> stub =
+        ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
     frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
     __ ldr(cp, frame_->Context());
     frame_->EmitPush(r0);
   } else {
     // Call the C runtime function.
     frame_->CallRuntime(function, arg_count);
     frame_->EmitPush(r0);
   }
   ASSERT_EQ(original_height + 1, frame_->height());
 }
(...skipping 514 matching lines...)
@@ -6368 +6375 @@
       (right->AsLiteral() != NULL &&
        right->AsLiteral()->handle()->IsString())) {
     Handle<String> check(String::cast(*right->AsLiteral()->handle()));

     // Load the operand, move it to a register.
     LoadTypeofExpression(operation->expression());
     Register tos = frame_->PopToRegister();

     Register scratch = VirtualFrame::scratch0();

-    if (check->Equals(Heap::number_symbol())) {
+    if (check->Equals(HEAP->number_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       true_target()->Branch(eq);
       __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
       __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
       __ cmp(tos, ip);
       cc_reg_ = eq;

-    } else if (check->Equals(Heap::string_symbol())) {
+    } else if (check->Equals(HEAP->string_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);

       __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));

       // It can be an undetectable string object.
       __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
       __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
       __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
       false_target()->Branch(eq);

       __ ldrb(scratch, FieldMemOperand(tos, Map::kInstanceTypeOffset));
       __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
       cc_reg_ = lt;

-    } else if (check->Equals(Heap::boolean_symbol())) {
+    } else if (check->Equals(HEAP->boolean_symbol())) {
       __ LoadRoot(ip, Heap::kTrueValueRootIndex);
       __ cmp(tos, ip);
       true_target()->Branch(eq);
       __ LoadRoot(ip, Heap::kFalseValueRootIndex);
       __ cmp(tos, ip);
       cc_reg_ = eq;

-    } else if (check->Equals(Heap::undefined_symbol())) {
+    } else if (check->Equals(HEAP->undefined_symbol())) {
       __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
       __ cmp(tos, ip);
       true_target()->Branch(eq);

       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);

       // It can be an undetectable object.
       __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
       __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
       __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
       __ cmp(scratch, Operand(1 << Map::kIsUndetectable));

       cc_reg_ = eq;

-    } else if (check->Equals(Heap::function_symbol())) {
+    } else if (check->Equals(HEAP->function_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);
       Register map_reg = scratch;
       __ CompareObjectType(tos, map_reg, tos, JS_FUNCTION_TYPE);
       true_target()->Branch(eq);
       // Regular expressions are callable so typeof == 'function'.
       __ CompareInstanceType(map_reg, tos, JS_REGEXP_TYPE);
       cc_reg_ = eq;

-    } else if (check->Equals(Heap::object_symbol())) {
+    } else if (check->Equals(HEAP->object_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       false_target()->Branch(eq);

       __ LoadRoot(ip, Heap::kNullValueRootIndex);
       __ cmp(tos, ip);
       true_target()->Branch(eq);

       Register map_reg = scratch;
       __ CompareObjectType(tos, map_reg, tos, JS_REGEXP_TYPE);
       false_target()->Branch(eq);
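Note: this chain hand-codes JavaScript's typeof table, quirks included: undetectable objects report "undefined", regular expressions are callable and therefore report "function", and null lands in "object". Condensed into a plain function (the enum is an illustrative classification, not V8's instance-type lattice):

#include <cstdio>

enum Kind { SMI, HEAP_NUMBER, STRING, BOOLEAN, UNDEFINED, UNDETECTABLE,
            FUNCTION, REGEXP, NULL_VALUE, OTHER_OBJECT };

const char* TypeofResult(Kind k) {
  switch (k) {
    case SMI:
    case HEAP_NUMBER:   return "number";
    case STRING:        return "string";
    case BOOLEAN:       return "boolean";
    case UNDEFINED:
    case UNDETECTABLE:  return "undefined";  // undetectable objects lie
    case FUNCTION:
    case REGEXP:        return "function";   // regexps are callable
    default:            return "object";     // includes null
  }
}

int main() {
  std::printf("typeof /re/ is %s\n", TypeofResult(REGEXP));
  return 0;
}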
(...skipping 141 matching lines...)
@@ -6588 +6595 @@
 void DeferredReferenceGetNamedValue::Generate() {
 #ifdef DEBUG
   int expected_height = frame_state()->frame()->height();
 #endif
   VirtualFrame copied_frame(*frame_state()->frame());
   copied_frame.SpillAll();

   Register scratch1 = VirtualFrame::scratch0();
   Register scratch2 = VirtualFrame::scratch1();
   ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
-  __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2);
-  __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2);
+  __ DecrementCounter(COUNTERS->named_load_inline(), 1, scratch1, scratch2);
+  __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1,
+                      scratch1, scratch2);

   // Ensure receiver in r0 and name in r2 to match load ic calling convention.
   __ Move(r0, receiver_);
   __ mov(r2, Operand(name_));

   // The rest of the instructions in the deferred code must be together.
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
-    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+        Builtins::LoadIC_Initialize));
     RelocInfo::Mode mode = is_contextual_
         ? RelocInfo::CODE_TARGET_CONTEXT
         : RelocInfo::CODE_TARGET;
     __ Call(ic, mode);
     // We must mark the code just after the call with the correct marker.
     MacroAssembler::NopMarkerTypes code_marker;
     if (is_contextual_) {
       code_marker = is_dont_delete_
           ? MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE
           : MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT;
(...skipping 41 matching lines...)
@@ -6659 +6668 @@
 // in r0.
 void DeferredReferenceGetKeyedValue::Generate() {
   ASSERT((key_.is(r0) && receiver_.is(r1)) ||
          (key_.is(r1) && receiver_.is(r0)));

   VirtualFrame copied_frame(*frame_state()->frame());
   copied_frame.SpillAll();

   Register scratch1 = VirtualFrame::scratch0();
   Register scratch2 = VirtualFrame::scratch1();
-  __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
-  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, scratch1, scratch2);
+  __ DecrementCounter(COUNTERS->keyed_load_inline(), 1, scratch1, scratch2);
+  __ IncrementCounter(COUNTERS->keyed_load_inline_miss(),
+                      1, scratch1, scratch2);

   // Ensure key in r0 and receiver in r1 to match keyed load ic calling
   // convention.
   if (key_.is(r1)) {
     __ Swap(r0, r1, ip);
   }

   // The rest of the instructions in the deferred code must be together.
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
     // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
-    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+        Builtins::KeyedLoadIC_Initialize));
     __ Call(ic, RelocInfo::CODE_TARGET);
     // The call must be followed by a nop instruction to indicate that the
     // keyed load has been inlined.
     __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);

     // Now go back to the frame that we entered with. This will not overwrite
     // the receiver or key registers since they were not in use when we came
     // in. The instructions emitted by this merge are skipped over by the
     // inline load patching mechanism when looking for the branch instruction
     // that tells it where the code to patch is.
(...skipping 26 matching lines...)
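The deferred loads above follow their IC call with MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED): a nop whose encoding doubles as a tag, harmless to execute but recognizable to the code that later patches the inlined access. A toy sketch of that convention; the encodings and helper names here are invented for illustration, not V8's actual values:

#include <cstdint>
#include <vector>

// Invented marker value; the real ones are MacroAssembler::NopMarkerTypes.
enum NopMarker : uint32_t { kPropertyAccessInlined = 1 };

// A toy "assembler" that appends 32-bit words to a code buffer.
struct ToyAssembler {
  std::vector<uint32_t> code;
  void EmitCall(uint32_t target) { code.push_back(0xCA110000u | target); }
  // To the CPU this word is a nop; to the patcher it is a tag.
  void MarkCode(NopMarker m) { code.push_back(0x90900000u | m); }
};

// The patching machinery inspects the word just after a call site to
// decide whether an inlined property access lives there.
bool CallSiteWasInlined(const std::vector<uint32_t>& code, size_t call_index) {
  if (call_index + 1 >= code.size()) return false;
  const uint32_t next = code[call_index + 1];
  return (next & 0xFFFF0000u) == 0x90900000u &&
         (next & 0xFFFFu) == kPropertyAccessInlined;
}

int main() {
  ToyAssembler masm;
  masm.EmitCall(42);                      // the IC call
  masm.MarkCode(kPropertyAccessInlined);  // the mandatory marker nop
  return CallSiteWasInlined(masm.code, 0) ? 0 : 1;
}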
6718 Register value_; 6729 Register value_;
6719 Register key_; 6730 Register key_;
6720 Register receiver_; 6731 Register receiver_;
6721 StrictModeFlag strict_mode_; 6732 StrictModeFlag strict_mode_;
6722 }; 6733 };
6723 6734
6724 6735
6725 void DeferredReferenceSetKeyedValue::Generate() { 6736 void DeferredReferenceSetKeyedValue::Generate() {
6726 Register scratch1 = VirtualFrame::scratch0(); 6737 Register scratch1 = VirtualFrame::scratch0();
6727 Register scratch2 = VirtualFrame::scratch1(); 6738 Register scratch2 = VirtualFrame::scratch1();
6728 __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2); 6739 __ DecrementCounter(COUNTERS->keyed_store_inline(), 1, scratch1, scratch2);
6729 __ IncrementCounter( 6740 __ IncrementCounter(COUNTERS->keyed_store_inline_miss(),
6730 &Counters::keyed_store_inline_miss, 1, scratch1, scratch2); 6741 1, scratch1, scratch2);
6731 6742
6732 // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic 6743 // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
6733 // calling convention. 6744 // calling convention.
6734 if (value_.is(r1)) { 6745 if (value_.is(r1)) {
6735 __ Swap(r0, r1, ip); 6746 __ Swap(r0, r1, ip);
6736 } 6747 }
6737 ASSERT(receiver_.is(r2)); 6748 ASSERT(receiver_.is(r2));
6738 6749
6739 // The rest of the instructions in the deferred code must be together. 6750 // The rest of the instructions in the deferred code must be together.
6740 { Assembler::BlockConstPoolScope block_const_pool(masm_); 6751 { Assembler::BlockConstPoolScope block_const_pool(masm_);
6741 // Call keyed store IC. It has the arguments value, key and receiver in r0, 6752 // Call keyed store IC. It has the arguments value, key and receiver in r0,
6742 // r1 and r2. 6753 // r1 and r2.
6743 Handle<Code> ic(Builtins::builtin( 6754 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
6744 (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict 6755 (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
6745 : Builtins::KeyedStoreIC_Initialize)); 6756 : Builtins::KeyedStoreIC_Initialize));
6746 __ Call(ic, RelocInfo::CODE_TARGET); 6757 __ Call(ic, RelocInfo::CODE_TARGET);
6747 // The call must be followed by a nop instruction to indicate that the 6758 // The call must be followed by a nop instruction to indicate that the
6748 // keyed store has been inlined. 6759 // keyed store has been inlined.
6749 __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED); 6760 __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);
6750 6761
6751 // Block the constant pool for one more instruction after leaving this 6762 // Block the constant pool for one more instruction after leaving this
6752 // constant pool block scope to include the branch instruction ending the 6763 // constant pool block scope to include the branch instruction ending the
6753 // deferred code. 6764 // deferred code.
(...skipping 34 matching lines...)
6788 6799
6789 // Ensure value in r0, receiver in r1 to match store ic calling 6800 // Ensure value in r0, receiver in r1 to match store ic calling
6790 // convention. 6801 // convention.
6791 ASSERT(value_.is(r0) && receiver_.is(r1)); 6802 ASSERT(value_.is(r0) && receiver_.is(r1));
6792 __ mov(r2, Operand(name_)); 6803 __ mov(r2, Operand(name_));
6793 6804
6794 // The rest of the instructions in the deferred code must be together. 6805 // The rest of the instructions in the deferred code must be together.
6795 { Assembler::BlockConstPoolScope block_const_pool(masm_); 6806 { Assembler::BlockConstPoolScope block_const_pool(masm_);
6796 // Call named store IC. It has the arguments value, receiver and name in r0, 6807 // Call named store IC. It has the arguments value, receiver and name in r0,
6797 // r1 and r2. 6808 // r1 and r2.
6798 Handle<Code> ic(Builtins::builtin( 6809 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
6799 (strict_mode_ == kStrictMode) ? Builtins::StoreIC_Initialize_Strict 6810 (strict_mode_ == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
6800 : Builtins::StoreIC_Initialize)); 6811 : Builtins::StoreIC_Initialize));
6801 __ Call(ic, RelocInfo::CODE_TARGET); 6812 __ Call(ic, RelocInfo::CODE_TARGET);
6802 // The call must be followed by a nop instruction to indicate that the 6813 // The call must be followed by a nop instruction to indicate that the
6803 // named store has been inlined. 6814 // named store has been inlined.
6804 __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED); 6815 __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);
6805 6816
6806 // Go back to the frame we entered with. The instructions 6817 // Go back to the frame we entered with. The instructions
6807 // generated by this merge are skipped over by the inline store 6818 // generated by this merge are skipped over by the inline store
6808 // patching mechanism when looking for the branch instruction that 6819 // patching mechanism when looking for the branch instruction that
6809 // tells it where the code to patch is. 6820 // tells it where the code to patch is.
6810 copied_frame.MergeTo(frame_state()->frame()); 6821 copied_frame.MergeTo(frame_state()->frame());
6811 6822
6812 // Block the constant pool for one more instruction after leaving this 6823 // Block the constant pool for one more instruction after leaving this
6813 // constant pool block scope to include the branch instruction ending the 6824 // constant pool block scope to include the branch instruction ending the
6814 // deferred code. 6825 // deferred code.
6815 __ BlockConstPoolFor(1); 6826 __ BlockConstPoolFor(1);
6816 } 6827 }
6817 } 6828 }
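The deferred stores above wrap their tails in Assembler::BlockConstPoolScope and finish with BlockConstPoolFor(1). The point, per the comments, is that a constant pool dumped mid-sequence would change the code layout that the inline patcher walks. A minimal sketch of the RAII idea, using an invented toy assembler rather than the real Assembler interface:

#include <cassert>

// Toy model of an assembler that may dump a "constant pool" into the
// instruction stream between any two instructions unless blocked.
class ToyAssembler {
 public:
  // RAII scope mirroring Assembler::BlockConstPoolScope used above.
  class BlockConstPoolScope {
   public:
    explicit BlockConstPoolScope(ToyAssembler* masm) : masm_(masm) {
      ++masm_->scope_depth_;
    }
    ~BlockConstPoolScope() { --masm_->scope_depth_; }
   private:
    ToyAssembler* masm_;
  };

  // Mirrors BlockConstPoolFor(n): keep the pool blocked for the next n
  // instructions even after the scope has closed.
  void BlockConstPoolFor(int instructions) { blocked_for_ += instructions; }

  void Emit() {
    // In this demo every instruction belongs to the patched sequence, so
    // a pool flush here would split it; assert that it cannot happen.
    assert(ConstPoolBlocked() && "pool could split a patched sequence");
    if (blocked_for_ > 0) --blocked_for_;
  }

 private:
  bool ConstPoolBlocked() const {
    return scope_depth_ > 0 || blocked_for_ > 0;
  }
  int scope_depth_ = 0;
  int blocked_for_ = 0;
};

int main() {
  ToyAssembler masm;
  {
    ToyAssembler::BlockConstPoolScope scope(&masm);
    masm.Emit();                // IC call
    masm.Emit();                // marker nop
    masm.BlockConstPoolFor(1);  // cover the branch emitted after the scope
  }
  masm.Emit();                  // the deferred code's closing branch
  return 0;
}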
6818 6829
6819 6830
6820 // Consumes the top of stack (the receiver) and pushes the result instead. 6831 // Consumes the top of stack (the receiver) and pushes the result instead.
6821 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { 6832 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
6822 bool contextual_load_in_builtin = 6833 bool contextual_load_in_builtin =
6823 is_contextual && 6834 is_contextual &&
6824 (Bootstrapper::IsActive() || 6835 (ISOLATE->bootstrapper()->IsActive() ||
6825 (!info_->closure().is_null() && info_->closure()->IsBuiltin())); 6836 (!info_->closure().is_null() && info_->closure()->IsBuiltin()));
6826 6837
6827 if (scope()->is_global_scope() || 6838 if (scope()->is_global_scope() ||
6828 loop_nesting() == 0 || 6839 loop_nesting() == 0 ||
6829 contextual_load_in_builtin) { 6840 contextual_load_in_builtin) {
6830 Comment cmnt(masm(), "[ Load from named Property"); 6841 Comment cmnt(masm(), "[ Load from named Property");
6831 // Set up the name register and call the load IC. 6842 // Set up the name register and call the load IC.
6832 frame_->CallLoadIC(name, 6843 frame_->CallLoadIC(name,
6833 is_contextual 6844 is_contextual
6834 ? RelocInfo::CODE_TARGET_CONTEXT 6845 ? RelocInfo::CODE_TARGET_CONTEXT
6835 : RelocInfo::CODE_TARGET); 6846 : RelocInfo::CODE_TARGET);
6836 frame_->EmitPush(r0); // Push answer. 6847 frame_->EmitPush(r0); // Push answer.
6837 } else { 6848 } else {
6838 // Inline the in-object property case. 6849 // Inline the in-object property case.
6839 Comment cmnt(masm(), is_contextual 6850 Comment cmnt(masm(), is_contextual
6840 ? "[ Inlined contextual property load" 6851 ? "[ Inlined contextual property load"
6841 : "[ Inlined named property load"); 6852 : "[ Inlined named property load");
6842 6853
6843 // The counter will be decremented in the deferred code. It is placed here 6854 // The counter will be decremented in the deferred code. It is placed here
6844 // to avoid having it in the instruction stream below, where patching occurs. 6855 // to avoid having it in the instruction stream below, where patching occurs.
6845 if (is_contextual) { 6856 if (is_contextual) {
6846 __ IncrementCounter(&Counters::named_load_global_inline, 1, 6857 __ IncrementCounter(COUNTERS->named_load_global_inline(), 1,
6847 frame_->scratch0(), frame_->scratch1()); 6858 frame_->scratch0(), frame_->scratch1());
6848 } else { 6859 } else {
6849 __ IncrementCounter(&Counters::named_load_inline, 1, 6860 __ IncrementCounter(COUNTERS->named_load_inline(), 1,
6850 frame_->scratch0(), frame_->scratch1()); 6861 frame_->scratch0(), frame_->scratch1());
6851 } 6862 }
6852 6863
6853 // The following instructions are the inlined load of an in-object property. 6864 // The following instructions are the inlined load of an in-object property.
6854 // Parts of this code are patched, so the exact instructions generated need 6865 // Parts of this code are patched, so the exact instructions generated need
6855 // to be fixed. Therefore the constant pool is blocked when generating 6866 // to be fixed. Therefore the constant pool is blocked when generating
6856 // this code. 6867 // this code.
6857 6868
6858 // Load the receiver from the stack. 6869 // Load the receiver from the stack.
6859 Register receiver = frame_->PopToRegister(); 6870 Register receiver = frame_->PopToRegister();
(...skipping 12 matching lines...)
6872 LookupResult lookup; 6883 LookupResult lookup;
6873 global_object->LocalLookupRealNamedProperty(*name, &lookup); 6884 global_object->LocalLookupRealNamedProperty(*name, &lookup);
6874 if (lookup.IsProperty() && lookup.type() == NORMAL) { 6885 if (lookup.IsProperty() && lookup.type() == NORMAL) {
6875 ASSERT(lookup.holder() == global_object); 6886 ASSERT(lookup.holder() == global_object);
6876 ASSERT(global_object->property_dictionary()->ValueAt( 6887 ASSERT(global_object->property_dictionary()->ValueAt(
6877 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell()); 6888 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
6878 is_dont_delete = lookup.IsDontDelete(); 6889 is_dont_delete = lookup.IsDontDelete();
6879 } 6890 }
6880 } 6891 }
6881 if (is_dont_delete) { 6892 if (is_dont_delete) {
6882 __ IncrementCounter(&Counters::dont_delete_hint_hit, 1, 6893 __ IncrementCounter(COUNTERS->dont_delete_hint_hit(), 1,
6883 frame_->scratch0(), frame_->scratch1()); 6894 frame_->scratch0(), frame_->scratch1());
6884 } 6895 }
6885 } 6896 }
6886 6897
6887 { Assembler::BlockConstPoolScope block_const_pool(masm_); 6898 { Assembler::BlockConstPoolScope block_const_pool(masm_);
6888 if (!is_contextual) { 6899 if (!is_contextual) {
6889 // Check that the receiver is a heap object. 6900 // Check that the receiver is a heap object.
6890 __ tst(receiver, Operand(kSmiTagMask)); 6901 __ tst(receiver, Operand(kSmiTagMask));
6891 deferred->Branch(eq); 6902 deferred->Branch(eq);
6892 } 6903 }
(...skipping 16 matching lines...)
6909 Label check_inlined_codesize; 6920 Label check_inlined_codesize;
6910 masm_->bind(&check_inlined_codesize); 6921 masm_->bind(&check_inlined_codesize);
6911 #endif 6922 #endif
6912 6923
6913 Register scratch = VirtualFrame::scratch0(); 6924 Register scratch = VirtualFrame::scratch0();
6914 Register scratch2 = VirtualFrame::scratch1(); 6925 Register scratch2 = VirtualFrame::scratch1();
6915 6926
6916 // Check the map. The null map used below is patched by the inline cache 6927 // Check the map. The null map used below is patched by the inline cache
6917 // code. Therefore we can't use a LoadRoot call. 6928 // code. Therefore we can't use a LoadRoot call.
6918 __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset)); 6929 __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
6919 __ mov(scratch2, Operand(Factory::null_value())); 6930 __ mov(scratch2, Operand(FACTORY->null_value()));
6920 __ cmp(scratch, scratch2); 6931 __ cmp(scratch, scratch2);
6921 deferred->Branch(ne); 6932 deferred->Branch(ne);
6922 6933
6923 if (is_contextual) { 6934 if (is_contextual) {
6924 #ifdef DEBUG 6935 #ifdef DEBUG
6925 InlinedNamedLoadInstructions += 1; 6936 InlinedNamedLoadInstructions += 1;
6926 #endif 6937 #endif
6927 // Load the (initially invalid) cell and get its value. 6938 // Load the (initially invalid) cell and get its value.
6928 masm()->mov(receiver, Operand(Factory::null_value())); 6939 masm()->mov(receiver, Operand(FACTORY->null_value()));
6929 __ ldr(receiver, 6940 __ ldr(receiver,
6930 FieldMemOperand(receiver, JSGlobalPropertyCell::kValueOffset)); 6941 FieldMemOperand(receiver, JSGlobalPropertyCell::kValueOffset));
6931 6942
6932 deferred->set_is_dont_delete(is_dont_delete); 6943 deferred->set_is_dont_delete(is_dont_delete);
6933 6944
6934 if (!is_dont_delete) { 6945 if (!is_dont_delete) {
6935 #ifdef DEBUG 6946 #ifdef DEBUG
6936 InlinedNamedLoadInstructions += 3; 6947 InlinedNamedLoadInstructions += 3;
6937 #endif 6948 #endif
6938 __ cmp(receiver, Operand(Factory::the_hole_value())); 6949 __ cmp(receiver, Operand(FACTORY->the_hole_value()));
6939 deferred->Branch(eq); 6950 deferred->Branch(eq);
6940 } else if (FLAG_debug_code) { 6951 } else if (FLAG_debug_code) {
6941 #ifdef DEBUG 6952 #ifdef DEBUG
6942 InlinedNamedLoadInstructions += 3; 6953 InlinedNamedLoadInstructions += 3;
6943 #endif 6954 #endif
6944 __ cmp(receiver, Operand(Factory::the_hole_value())); 6955 __ cmp(receiver, Operand(FACTORY->the_hole_value()));
6945 __ b(&check_the_hole, eq); 6956 __ b(&check_the_hole, eq);
6946 __ bind(&cont); 6957 __ bind(&cont);
6947 } 6958 }
6948 } else { 6959 } else {
6949 // Initially use an invalid index. The index will be patched by the 6960 // Initially use an invalid index. The index will be patched by the
6950 // inline cache code. 6961 // inline cache code.
6951 __ ldr(receiver, MemOperand(receiver, 0)); 6962 __ ldr(receiver, MemOperand(receiver, 0));
6952 } 6963 }
6953 6964
6954 // Make sure that the expected number of instructions is generated. 6965 // Make sure that the expected number of instructions is generated.
(...skipping 47 matching lines...)
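The inlined load above deliberately materializes FACTORY->null_value() (and, in the non-contextual case, a zero offset) as placeholders that can never match; the IC runtime later patches the real map and field offset into those slots, and the instruction-count asserts keep the patchable layout stable. A standalone sketch of the patch-in-place idea, with made-up structure (InlineSite and PatchSite are not V8 names):

#include <cassert>
#include <cstdint>
#include <vector>

// A toy "inline cache" site: code compares against a constant in a known
// slot. The slot starts as a placeholder that can never match (like the
// null_value above), so the first run always takes the miss path; the
// runtime then patches the slot in place.
struct InlineSite {
  uint32_t expected_map;  // placeholder until patched
  int field_offset;       // invalid (0) until patched
};

constexpr uint32_t kNullPlaceholder = 0;

bool TryInlineLoad(const InlineSite& site, uint32_t object_map,
                   const std::vector<int>& fields, int* out) {
  if (object_map != site.expected_map) return false;  // miss: deferred code
  *out = fields[site.field_offset];
  return true;
}

void PatchSite(InlineSite* site, uint32_t real_map, int real_offset) {
  // What the IC runtime does once the first miss resolves the property.
  site->expected_map = real_map;
  site->field_offset = real_offset;
}

int main() {
  InlineSite site{kNullPlaceholder, 0};
  std::vector<int> fields = {7, 11, 13};
  int value = 0;
  assert(!TryInlineLoad(site, /*object_map=*/5, fields, &value));  // miss
  PatchSite(&site, /*real_map=*/5, /*real_offset=*/2);
  assert(TryInlineLoad(site, 5, fields, &value) && value == 13);   // hit
  return 0;
}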
7002 Register scratch1 = VirtualFrame::scratch1(); 7013 Register scratch1 = VirtualFrame::scratch1();
7003 7014
7004 // Check the map. Initially use an invalid map to force a 7015 // Check the map. Initially use an invalid map to force a
7005 // failure. The map check will be patched in the runtime system. 7016 // failure. The map check will be patched in the runtime system.
7006 __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); 7017 __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
7007 7018
7008 #ifdef DEBUG 7019 #ifdef DEBUG
7009 Label check_inlined_codesize; 7020 Label check_inlined_codesize;
7010 masm_->bind(&check_inlined_codesize); 7021 masm_->bind(&check_inlined_codesize);
7011 #endif 7022 #endif
7012 __ mov(scratch0, Operand(Factory::null_value())); 7023 __ mov(scratch0, Operand(FACTORY->null_value()));
7013 __ cmp(scratch0, scratch1); 7024 __ cmp(scratch0, scratch1);
7014 deferred->Branch(ne); 7025 deferred->Branch(ne);
7015 7026
7016 int offset = 0; 7027 int offset = 0;
7017 __ str(value, MemOperand(receiver, offset)); 7028 __ str(value, MemOperand(receiver, offset));
7018 7029
7019 // Update the write barrier and record its size. We do not use 7030 // Update the write barrier and record its size. We do not use
7020 // the RecordWrite macro here because we want the offset 7031 // the RecordWrite macro here because we want the offset
7021 // addition instruction first to make it easy to patch. 7032 // addition instruction first to make it easy to patch.
7022 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER 7033 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER
(...skipping 10 matching lines...)
7033 // Clobber all input registers when running with the debug-code flag 7044 // Clobber all input registers when running with the debug-code flag
7034 // turned on to provoke errors. 7045 // turned on to provoke errors.
7035 if (FLAG_debug_code) { 7046 if (FLAG_debug_code) {
7036 __ mov(receiver, Operand(BitCast<int32_t>(kZapValue))); 7047 __ mov(receiver, Operand(BitCast<int32_t>(kZapValue)));
7037 __ mov(scratch0, Operand(BitCast<int32_t>(kZapValue))); 7048 __ mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
7038 __ mov(scratch1, Operand(BitCast<int32_t>(kZapValue))); 7049 __ mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
7039 } 7050 }
7040 // Check that this is the first inlined write barrier or that 7051 // Check that this is the first inlined write barrier or that
7041 // this inlined write barrier has the same size as all the other 7052 // this inlined write barrier has the same size as all the other
7042 // inlined write barriers. 7053 // inlined write barriers.
7043 ASSERT((inlined_write_barrier_size_ == -1) || 7054 ASSERT((Isolate::Current()->inlined_write_barrier_size() == -1) ||
7044 (inlined_write_barrier_size_ == 7055 (Isolate::Current()->inlined_write_barrier_size() ==
7045 masm()->InstructionsGeneratedSince(&record_write_start))); 7056 masm()->InstructionsGeneratedSince(&record_write_start)));
7046 inlined_write_barrier_size_ = 7057 Isolate::Current()->set_inlined_write_barrier_size(
7047 masm()->InstructionsGeneratedSince(&record_write_start); 7058 masm()->InstructionsGeneratedSince(&record_write_start));
7048 #endif 7059 #endif
7049 7060
7050 // Make sure that the expected number of instructions is generated. 7061 // Make sure that the expected number of instructions is generated.
7051 ASSERT_EQ(GetInlinedNamedStoreInstructionsAfterPatch(), 7062 ASSERT_EQ(GetInlinedNamedStoreInstructionsAfterPatch(),
7052 masm()->InstructionsGeneratedSince(&check_inlined_codesize)); 7063 masm()->InstructionsGeneratedSince(&check_inlined_codesize));
7053 } 7064 }
7054 deferred->BindExit(); 7065 deferred->BindExit();
7055 } 7066 }
7056 ASSERT_EQ(expected_height, frame()->height()); 7067 ASSERT_EQ(expected_height, frame()->height());
7057 } 7068 }
7058 7069
7059 7070
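The store path above emits its write barrier by hand rather than through the RecordWrite macro, so the patchable offset addition comes first, and (under ENABLE_CARDMARKING_WRITE_BARRIER) the isolate records inlined_write_barrier_size so every inlined barrier is provably the same length. A small sketch of that "first emission defines the canonical size" check, with invented names:

#include <cassert>

// Mirrors the DEBUG-only bookkeeping above: the first inlined write
// barrier records its instruction count; every later one must match,
// because the patcher skips over the barrier by a fixed distance.
class BarrierSizeChecker {
 public:
  void RecordBarrier(int instructions_generated) {
    assert(canonical_size_ == -1 ||
           canonical_size_ == instructions_generated);
    canonical_size_ = instructions_generated;
  }

 private:
  int canonical_size_ = -1;  // -1 means "no barrier emitted yet"
};

int main() {
  BarrierSizeChecker checker;
  checker.RecordBarrier(6);  // first barrier defines the expected size
  checker.RecordBarrier(6);  // OK: same size
  // checker.RecordBarrier(7);  // would assert: all sizes must match
  return 0;
}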
7060 void CodeGenerator::EmitKeyedLoad() { 7071 void CodeGenerator::EmitKeyedLoad() {
7061 if (loop_nesting() == 0) { 7072 if (loop_nesting() == 0) {
7062 Comment cmnt(masm_, "[ Load from keyed property"); 7073 Comment cmnt(masm_, "[ Load from keyed property");
7063 frame_->CallKeyedLoadIC(); 7074 frame_->CallKeyedLoadIC();
7064 } else { 7075 } else {
7065 // Inline the keyed load. 7076 // Inline the keyed load.
7066 Comment cmnt(masm_, "[ Inlined load from keyed property"); 7077 Comment cmnt(masm_, "[ Inlined load from keyed property");
7067 7078
7068 // The counter will be decremented in the deferred code. It is placed here 7079 // The counter will be decremented in the deferred code. It is placed here
7069 // to avoid having it in the instruction stream below, where patching occurs. 7080 // to avoid having it in the instruction stream below, where patching occurs.
7070 __ IncrementCounter(&Counters::keyed_load_inline, 1, 7081 __ IncrementCounter(COUNTERS->keyed_load_inline(), 1,
7071 frame_->scratch0(), frame_->scratch1()); 7082 frame_->scratch0(), frame_->scratch1());
7072 7083
7073 // Load the key and receiver from the stack. 7084 // Load the key and receiver from the stack.
7074 bool key_is_known_smi = frame_->KnownSmiAt(0); 7085 bool key_is_known_smi = frame_->KnownSmiAt(0);
7075 Register key = frame_->PopToRegister(); 7086 Register key = frame_->PopToRegister();
7076 Register receiver = frame_->PopToRegister(key); 7087 Register receiver = frame_->PopToRegister(key);
7077 7088
7078 // The deferred code expects key and receiver in registers. 7089 // The deferred code expects key and receiver in registers.
7079 DeferredReferenceGetKeyedValue* deferred = 7090 DeferredReferenceGetKeyedValue* deferred =
7080 new DeferredReferenceGetKeyedValue(key, receiver); 7091 new DeferredReferenceGetKeyedValue(key, receiver);
(...skipping 16 matching lines...)
7097 // Check that the key is a smi. 7108 // Check that the key is a smi.
7098 if (!key_is_known_smi) { 7109 if (!key_is_known_smi) {
7099 __ tst(key, Operand(kSmiTagMask)); 7110 __ tst(key, Operand(kSmiTagMask));
7100 deferred->Branch(ne); 7111 deferred->Branch(ne);
7101 } 7112 }
7102 7113
7103 #ifdef DEBUG 7114 #ifdef DEBUG
7104 Label check_inlined_codesize; 7115 Label check_inlined_codesize;
7105 masm_->bind(&check_inlined_codesize); 7116 masm_->bind(&check_inlined_codesize);
7106 #endif 7117 #endif
7107 __ mov(scratch2, Operand(Factory::null_value())); 7118 __ mov(scratch2, Operand(FACTORY->null_value()));
7108 __ cmp(scratch1, scratch2); 7119 __ cmp(scratch1, scratch2);
7109 deferred->Branch(ne); 7120 deferred->Branch(ne);
7110 7121
7111 // Get the elements array from the receiver. 7122 // Get the elements array from the receiver.
7112 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset)); 7123 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
7113 __ AssertFastElements(scratch1); 7124 __ AssertFastElements(scratch1);
7114 7125
7115 // Check that key is within bounds. Use unsigned comparison to handle 7126 // Check that key is within bounds. Use unsigned comparison to handle
7116 // negative keys. 7127 // negative keys.
7117 __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset)); 7128 __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
(...skipping 29 matching lines...)
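Two idioms in the chunk above deserve a note: tst key, #kSmiTagMask works because valid smis carry a zero low tag bit, and the bounds check uses a single unsigned comparison to reject both too-large and negative keys. A worked sketch under V8's 32-bit smi convention; the helper names are mine:

#include <cassert>
#include <cstdint>

// 32-bit smi model: an integer is stored shifted up by one bit with a zero
// tag bit, so "is this a smi?" is a single mask test.
const int kSmiTagSize = 1;
const int32_t kSmiTagMask = (1 << kSmiTagSize) - 1;

int32_t ToSmi(int32_t value) { return value * 2; }  // value << 1, tag bit 0
bool IsSmi(int32_t x) { return (x & kSmiTagMask) == 0; }

// One unsigned compare does both halves of the bounds check: "key >= 0"
// and "key < length". A negative smi reinterpreted as unsigned is huge,
// so it fails key_u < length_u. Both operands stay in smi form, exactly
// as in the cmp sequence above.
bool KeyInBounds(int32_t key_smi, int32_t length_smi) {
  return static_cast<uint32_t>(key_smi) < static_cast<uint32_t>(length_smi);
}

int main() {
  int32_t length = ToSmi(10);
  assert(IsSmi(ToSmi(3)) && !IsSmi(ToSmi(3) | 1));
  assert(KeyInBounds(ToSmi(3), length));
  assert(!KeyInBounds(ToSmi(10), length));  // key == length: out of range
  assert(!KeyInBounds(ToSmi(-1), length));  // negative key: huge unsigned
  return 0;
}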
7147 if (loop_nesting() > 0 && key_type->IsLikelySmi()) { 7158 if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
7148 // Inline the keyed store. 7159 // Inline the keyed store.
7149 Comment cmnt(masm_, "[ Inlined store to keyed property"); 7160 Comment cmnt(masm_, "[ Inlined store to keyed property");
7150 7161
7151 Register scratch1 = VirtualFrame::scratch0(); 7162 Register scratch1 = VirtualFrame::scratch0();
7152 Register scratch2 = VirtualFrame::scratch1(); 7163 Register scratch2 = VirtualFrame::scratch1();
7153 Register scratch3 = r3; 7164 Register scratch3 = r3;
7154 7165
7155 // The counter will be decremented in the deferred code. It is placed here 7166 // The counter will be decremented in the deferred code. It is placed here
7156 // to avoid having it in the instruction stream below, where patching occurs. 7167 // to avoid having it in the instruction stream below, where patching occurs.
7157 __ IncrementCounter(&Counters::keyed_store_inline, 1, 7168 __ IncrementCounter(COUNTERS->keyed_store_inline(), 1,
7158 scratch1, scratch2); 7169 scratch1, scratch2);
7159 7170
7160 7171
7161 // Load the value, key and receiver from the stack. 7172 // Load the value, key and receiver from the stack.
7162 bool value_is_harmless = frame_->KnownSmiAt(0); 7173 bool value_is_harmless = frame_->KnownSmiAt(0);
7163 if (wb_info == NEVER_NEWSPACE) value_is_harmless = true; 7174 if (wb_info == NEVER_NEWSPACE) value_is_harmless = true;
7164 bool key_is_smi = frame_->KnownSmiAt(1); 7175 bool key_is_smi = frame_->KnownSmiAt(1);
7165 Register value = frame_->PopToRegister(); 7176 Register value = frame_->PopToRegister();
7166 Register key = frame_->PopToRegister(value); 7177 Register key = frame_->PopToRegister(value);
7167 VirtualFrame::SpilledScope spilled(frame_); 7178 VirtualFrame::SpilledScope spilled(frame_);
(...skipping 60 matching lines...)
7228 #ifdef DEBUG 7239 #ifdef DEBUG
7229 Label check_inlined_codesize; 7240 Label check_inlined_codesize;
7230 masm_->bind(&check_inlined_codesize); 7241 masm_->bind(&check_inlined_codesize);
7231 #endif 7242 #endif
7232 7243
7233 // Read the fixed array map from the constant pool (not from the root 7244 // Read the fixed array map from the constant pool (not from the root
7234 // array) so that the value can be patched. When debugging, we patch this 7245 // array) so that the value can be patched. When debugging, we patch this
7235 // comparison to always fail so that we will hit the IC call in the 7246 // comparison to always fail so that we will hit the IC call in the
7236 // deferred code which will allow the debugger to break for fast case 7247 // deferred code which will allow the debugger to break for fast case
7237 // stores. 7248 // stores.
7238 __ mov(scratch3, Operand(Factory::fixed_array_map())); 7249 __ mov(scratch3, Operand(FACTORY->fixed_array_map()));
7239 __ cmp(scratch2, scratch3); 7250 __ cmp(scratch2, scratch3);
7240 deferred->Branch(ne); 7251 deferred->Branch(ne);
7241 7252
7242 // Check that the key is within bounds. Both the key and the length of 7253 // Check that the key is within bounds. Both the key and the length of
7243 // the JSArray are smis (because the fixed array check above ensures the 7254 // the JSArray are smis (because the fixed array check above ensures the
7244 // elements are in fast case). Use unsigned comparison to handle negative 7255 // elements are in fast case). Use unsigned comparison to handle negative
7245 // keys. 7256 // keys.
7246 __ ldr(scratch3, FieldMemOperand(receiver, JSArray::kLengthOffset)); 7257 __ ldr(scratch3, FieldMemOperand(receiver, JSArray::kLengthOffset));
7247 __ cmp(scratch3, key); 7258 __ cmp(scratch3, key);
7248 deferred->Branch(ls); // Unsigned less than or equal. 7259 deferred->Branch(ls); // Unsigned less than or equal.
(...skipping 149 matching lines...)
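Worth pausing on the comment above about reading the fixed array map from the constant pool "so that the value can be patched": when the debugger wants to observe every store, it rewrites that patchable constant to a value no heap object can match, so the inline fast path always fails and control lands in the IC call, where a break can be taken. A toy model of the trick; the names and values are invented:

#include <cassert>
#include <cstdint>

// The map word the inlined store compares against lives in the code (a
// constant-pool slot in the diff above), so tooling can rewrite it.
struct PatchableCheck {
  uint32_t expected;  // read from "code", hence patchable
};

const uint32_t kAlwaysFail = 0xFFFFFFFFu;  // illustrative impossible value

bool FastPathTaken(const PatchableCheck& check, uint32_t actual_map) {
  return actual_map == check.expected;
}

int main() {
  PatchableCheck check{0x1234u};
  assert(FastPathTaken(check, 0x1234u));   // normal: fast inline store
  check.expected = kAlwaysFail;            // debugger attached: force miss
  assert(!FastPathTaken(check, 0x1234u));  // every store now hits the IC
  return 0;
}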
7398 7409
7399 default: 7410 default:
7400 UNREACHABLE(); 7411 UNREACHABLE();
7401 } 7412 }
7402 } 7413 }
7403 7414
7404 7415
7405 const char* GenericBinaryOpStub::GetName() { 7416 const char* GenericBinaryOpStub::GetName() {
7406 if (name_ != NULL) return name_; 7417 if (name_ != NULL) return name_;
7407 const int len = 100; 7418 const int len = 100;
7408 name_ = Bootstrapper::AllocateAutoDeletedArray(len); 7419 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(len);
7409 if (name_ == NULL) return "OOM"; 7420 if (name_ == NULL) return "OOM";
7410 const char* op_name = Token::Name(op_); 7421 const char* op_name = Token::Name(op_);
7411 const char* overwrite_name; 7422 const char* overwrite_name;
7412 switch (mode_) { 7423 switch (mode_) {
7413 case NO_OVERWRITE: overwrite_name = "Alloc"; break; 7424 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
7414 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; 7425 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
7415 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; 7426 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
7416 default: overwrite_name = "UnknownOverwrite"; break; 7427 default: overwrite_name = "UnknownOverwrite"; break;
7417 } 7428 }
7418 7429
7419 OS::SNPrintF(Vector<char>(name_, len), 7430 OS::SNPrintF(Vector<char>(name_, len),
7420 "GenericBinaryOpStub_%s_%s%s_%s", 7431 "GenericBinaryOpStub_%s_%s%s_%s",
7421 op_name, 7432 op_name,
7422 overwrite_name, 7433 overwrite_name,
7423 specialized_on_rhs_ ? "_ConstantRhs" : "", 7434 specialized_on_rhs_ ? "_ConstantRhs" : "",
7424 BinaryOpIC::GetName(runtime_operands_type_)); 7435 BinaryOpIC::GetName(runtime_operands_type_));
7425 return name_; 7436 return name_;
7426 } 7437 }
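GetName() above builds the stub's debug name once, caches it in name_, and leans on the bootstrapper to own (and eventually free) the buffer. The same lazy-cache shape in self-contained form; the buffer is simplified to a member array and the operand strings are illustrative (the real name also embeds the ConstantRhs and BinaryOpIC type fragments):

#include <cstdio>

// Same shape as GetName() above: compute once, cache, reuse. The real code
// allocates from the bootstrapper so the string dies with the VM; here a
// fixed member buffer stands in for that.
class ToyStub {
 public:
  ToyStub(const char* op, const char* mode) : op_(op), mode_(mode) {}

  const char* GetName() {
    if (named_) return name_;
    std::snprintf(name_, sizeof(name_), "GenericBinaryOpStub_%s_%s",
                  op_, mode_);
    named_ = true;
    return name_;
  }

 private:
  const char* op_;
  const char* mode_;
  char name_[100] = {};
  bool named_ = false;
};

int main() {
  ToyStub stub("ADD", "OverwriteLeft");
  // Prints something like: GenericBinaryOpStub_ADD_OverwriteLeft
  std::printf("%s\n", stub.GetName());
  return 0;
}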
7427 7438
7428
7429 #undef __ 7439 #undef __
7430 7440
7431 } } // namespace v8::internal 7441 } } // namespace v8::internal
7432 7442
7433 #endif // V8_TARGET_ARCH_ARM 7443 #endif // V8_TARGET_ARCH_ARM