| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 147 matching lines...) |
| 158 void KeyedLoadFieldStub::InitializeInterfaceDescriptor( | 158 void KeyedLoadFieldStub::InitializeInterfaceDescriptor( |
| 159 Isolate* isolate, | 159 Isolate* isolate, |
| 160 CodeStubInterfaceDescriptor* descriptor) { | 160 CodeStubInterfaceDescriptor* descriptor) { |
| 161 static Register registers[] = { r1 }; | 161 static Register registers[] = { r1 }; |
| 162 descriptor->register_param_count_ = 1; | 162 descriptor->register_param_count_ = 1; |
| 163 descriptor->register_params_ = registers; | 163 descriptor->register_params_ = registers; |
| 164 descriptor->deoptimization_handler_ = NULL; | 164 descriptor->deoptimization_handler_ = NULL; |
| 165 } | 165 } |
| 166 | 166 |
| 167 | 167 |
| 168 void KeyedArrayCallStub::InitializeInterfaceDescriptor( | |
| 169 Isolate* isolate, | |
| 170 CodeStubInterfaceDescriptor* descriptor) { | |
| 171 static Register registers[] = { r2 }; | |
| 172 descriptor->register_param_count_ = 1; | |
| 173 descriptor->register_params_ = registers; | |
| 174 descriptor->continuation_type_ = TAIL_CALL_CONTINUATION; | |
| 175 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS; | |
| 176 descriptor->deoptimization_handler_ = | |
| 177 FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure); | |
| 178 } | |
| 179 | |
| 180 | |
| 181 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( | 168 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( |
| 182 Isolate* isolate, | 169 Isolate* isolate, |
| 183 CodeStubInterfaceDescriptor* descriptor) { | 170 CodeStubInterfaceDescriptor* descriptor) { |
| 184 static Register registers[] = { r2, r1, r0 }; | 171 static Register registers[] = { r2, r1, r0 }; |
| 185 descriptor->register_param_count_ = 3; | 172 descriptor->register_param_count_ = 3; |
| 186 descriptor->register_params_ = registers; | 173 descriptor->register_params_ = registers; |
| 187 descriptor->deoptimization_handler_ = | 174 descriptor->deoptimization_handler_ = |
| 188 FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure); | 175 FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure); |
| 189 } | 176 } |
| 190 | 177 |
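The descriptor hunks above all follow the same pattern: a static register list, a register parameter count, and an optional miss/deoptimization handler address. Below is a minimal self-contained sketch of that pattern; the struct is a simplified stand-in for V8's CodeStubInterfaceDescriptor (not the real class), and the register encoding and handler address are purely illustrative.

    #include <cstddef>

    // Simplified stand-ins for V8-internal types, for illustration only.
    typedef int Register;              // V8 uses a Register value type (r0, r1, ...).
    typedef void* Address;

    struct CodeStubInterfaceDescriptor {
      int register_param_count_;
      Register* register_params_;
      Address deoptimization_handler_;  // NULL when the stub has no miss handler.
    };

    // Mirrors KeyedStoreFastElementStub above: three register parameters
    // (r2, r1, r0) plus a miss handler supplied by the caller.
    static Register kStoreRegs[] = { /*r2*/ 2, /*r1*/ 1, /*r0*/ 0 };

    void InitializeKeyedStoreDescriptor(CodeStubInterfaceDescriptor* d,
                                        Address miss_handler) {
      d->register_param_count_ =
          static_cast<int>(sizeof(kStoreRegs) / sizeof(kStoreRegs[0]));
      d->register_params_ = kStoreRegs;
      d->deoptimization_handler_ = miss_handler;  // e.g. the KeyedStoreIC miss entry
    }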
| (...skipping 2903 matching lines...) |
| 3094 __ str(r1, FieldMemOperand(r2, Cell::kValueOffset)); | 3081 __ str(r1, FieldMemOperand(r2, Cell::kValueOffset)); |
| 3095 // No need for a write barrier here - cells are rescanned. | 3082 // No need for a write barrier here - cells are rescanned. |
| 3096 | 3083 |
| 3097 __ bind(&done); | 3084 __ bind(&done); |
| 3098 } | 3085 } |
| 3099 | 3086 |
| 3100 | 3087 |
| 3101 void CallFunctionStub::Generate(MacroAssembler* masm) { | 3088 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 3102 // r1 : the function to call | 3089 // r1 : the function to call |
| 3103 // r2 : cache cell for call target | 3090 // r2 : cache cell for call target |
| 3104 Label slow, non_function; | 3091 Label slow, non_function, wrap, cont; |
| 3105 | 3092 |
| 3106 // Check that the function is really a JavaScript function. | 3093 if (NeedsChecks()) { |
| 3107 // r1: pushed function (to be verified) | 3094 // Check that the function is really a JavaScript function. |
| 3108 __ JumpIfSmi(r1, &non_function); | 3095 // r1: pushed function (to be verified) |
| 3096 __ JumpIfSmi(r1, &non_function); |
| 3109 | 3097 |
| 3110 // Goto slow case if we do not have a function. | 3098 // Goto slow case if we do not have a function. |
| 3111 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); | 3099 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); |
| 3112 __ b(ne, &slow); | 3100 __ b(ne, &slow); |
| 3113 | 3101 |
| 3114 if (RecordCallTarget()) { | 3102 if (RecordCallTarget()) { |
| 3115 GenerateRecordCallTarget(masm); | 3103 GenerateRecordCallTarget(masm); |
| 3104 } |
| 3116 } | 3105 } |
| 3117 | 3106 |
| 3118 // Fast-case: Invoke the function now. | 3107 // Fast-case: Invoke the function now. |
| 3119 // r1: pushed function | 3108 // r1: pushed function |
| 3120 ParameterCount actual(argc_); | 3109 ParameterCount actual(argc_); |
| 3121 | 3110 |
| 3111 if (CallAsMethod()) { |
| 3112 if (NeedsChecks()) { |
| 3113 // Do not transform the receiver for strict mode functions. |
| 3114 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| 3115 __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); |
| 3116 __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + |
| 3117 kSmiTagSize))); |
| 3118 __ b(ne, &cont); |
| 3119 |
| 3120 // Do not transform the receiver for native (CompilerHints already in r3). |
| 3121 __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); |
| 3122 __ b(ne, &cont); |
| 3123 } |
| 3124 |
| 3125 // Compute the receiver in non-strict mode. |
| 3126 __ ldr(r2, MemOperand(sp, argc_ * kPointerSize)); |
| 3127 |
| 3128 if (NeedsChecks()) { |
| 3129 // r0: actual number of arguments |
| 3130 // r1: function |
| 3131 // r2: first argument |
| 3132 __ JumpIfSmi(r2, &wrap); |
| 3133 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); |
| 3134 __ b(lt, &wrap); |
| 3135 } else { |
| 3136 __ jmp(&wrap); |
| 3137 } |
| 3138 |
| 3139 __ bind(&cont); |
| 3140 } |
| 3122 __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper()); | 3141 __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper()); |
| 3123 | 3142 |
| 3124 // Slow-case: Non-function called. | 3143 if (NeedsChecks()) { |
| 3125 __ bind(&slow); | 3144 // Slow-case: Non-function called. |
| 3126 if (RecordCallTarget()) { | 3145 __ bind(&slow); |
| 3127 // If there is a call target cache, mark it megamorphic in the | 3146 if (RecordCallTarget()) { |
| 3128 // non-function case. MegamorphicSentinel is an immortal immovable | 3147 // If there is a call target cache, mark it megamorphic in the |
| 3129 // object (undefined) so no write barrier is needed. | 3148 // non-function case. MegamorphicSentinel is an immortal immovable |
| 3130 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), | 3149 // object (undefined) so no write barrier is needed. |
| 3131 masm->isolate()->heap()->undefined_value()); | 3150 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), |
| 3132 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 3151 masm->isolate()->heap()->undefined_value()); |
| 3133 __ str(ip, FieldMemOperand(r2, Cell::kValueOffset)); | 3152 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 3134 } | 3153 __ str(ip, FieldMemOperand(r2, Cell::kValueOffset)); |
| 3135 // Check for function proxy. | 3154 } |
| 3136 __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE)); | 3155 // Check for function proxy. |
| 3137 __ b(ne, &non_function); | 3156 __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 3138 __ push(r1); // put proxy as additional argument | 3157 __ b(ne, &non_function); |
| 3139 __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32)); | 3158 __ push(r1); // put proxy as additional argument |
| 3140 __ mov(r2, Operand::Zero()); | 3159 __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32)); |
| 3141 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); | 3160 __ mov(r2, Operand::Zero()); |
| 3142 { | 3161 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); |
| 3143 Handle<Code> adaptor = | 3162 { |
| 3144 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 3163 Handle<Code> adaptor = |
| 3145 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 3164 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 3165 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
| 3166 } |
| 3167 |
| 3168 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
| 3169 // of the original receiver from the call site). |
| 3170 __ bind(&non_function); |
| 3171 __ str(r1, MemOperand(sp, argc_ * kPointerSize)); |
| 3172 __ mov(r0, Operand(argc_)); // Set up the number of arguments. |
| 3173 __ mov(r2, Operand::Zero()); |
| 3174 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION); |
| 3175 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 3176 RelocInfo::CODE_TARGET); |
| 3146 } | 3177 } |
| 3147 | 3178 |
| 3148 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 3179 if (CallAsMethod()) { |
| 3149 // of the original receiver from the call site). | 3180 __ bind(&wrap); |
| 3150 __ bind(&non_function); | 3181 // Wrap the receiver and patch it back onto the stack. |
| 3151 __ str(r1, MemOperand(sp, argc_ * kPointerSize)); | 3182 { FrameScope frame_scope(masm, StackFrame::INTERNAL); |
| 3152 __ mov(r0, Operand(argc_)); // Set up the number of arguments. | 3183 __ push(r1); |
| 3153 __ mov(r2, Operand::Zero()); | 3184 __ push(r2); |
| 3154 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION); | 3185 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 3155 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 3186 __ pop(r1); |
| 3156 RelocInfo::CODE_TARGET); | 3187 } |
| 3188 __ str(r0, MemOperand(sp, argc_ * kPointerSize)); |
| 3189 __ jmp(&cont); |
| 3190 } |
| 3157 } | 3191 } |
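In the new CallAsMethod() path above, the generated code inspects the callee's SharedFunctionInfo compiler hints and only boxes a primitive receiver for sloppy (non-strict, non-native) functions, jumping to &wrap to invoke Builtins::TO_OBJECT and patching the result back into the receiver slot. The following is an illustrative C++ model of that decision only, not V8 code; the flag names mirror the bits tested in the hunk above.

    #include <cstdio>

    // Illustrative model (not V8 code) of the receiver fix-up performed by the
    // CallAsMethod() path in the generated ARM code above.
    enum ReceiverKind { PRIMITIVE, SPEC_OBJECT };

    struct Callee {
      bool is_strict;   // SharedFunctionInfo::kStrictModeFunction bit
      bool is_native;   // SharedFunctionInfo::kNative bit
    };

    // Returns true when the receiver must be boxed via ToObject before the call,
    // which is what the &wrap block does by calling Builtins::TO_OBJECT.
    bool NeedsReceiverWrapping(const Callee& f, ReceiverKind receiver) {
      if (f.is_strict || f.is_native) return false;  // receiver passed through as-is
      return receiver == PRIMITIVE;                  // sloppy mode wraps primitives
    }

    int main() {
      Callee sloppy = { false, false };
      Callee strict = { true, false };
      std::printf("%d %d\n",
                  NeedsReceiverWrapping(sloppy, PRIMITIVE),   // 1: wrap
                  NeedsReceiverWrapping(strict, PRIMITIVE));  // 0: leave unchanged
      return 0;
    }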
| 3158 | 3192 |
| 3159 | 3193 |
| 3160 void CallConstructStub::Generate(MacroAssembler* masm) { | 3194 void CallConstructStub::Generate(MacroAssembler* masm) { |
| 3161 // r0 : number of arguments | 3195 // r0 : number of arguments |
| 3162 // r1 : the function to call | 3196 // r1 : the function to call |
| 3163 // r2 : cache cell for call target | 3197 // r2 : cache cell for call target |
| 3164 Label slow, non_function_call; | 3198 Label slow, non_function_call; |
| 3165 | 3199 |
| 3166 // Check that the function is not a smi. | 3200 // Check that the function is not a smi. |
| (...skipping 1833 matching lines...) |
| 5000 if (function_mode_ == JS_FUNCTION_STUB_MODE) { | 5034 if (function_mode_ == JS_FUNCTION_STUB_MODE) { |
| 5001 __ add(r1, r1, Operand(1)); | 5035 __ add(r1, r1, Operand(1)); |
| 5002 } | 5036 } |
| 5003 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 5037 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| 5004 __ mov(r1, Operand(r1, LSL, kPointerSizeLog2)); | 5038 __ mov(r1, Operand(r1, LSL, kPointerSizeLog2)); |
| 5005 __ add(sp, sp, r1); | 5039 __ add(sp, sp, r1); |
| 5006 __ Ret(); | 5040 __ Ret(); |
| 5007 } | 5041 } |
| 5008 | 5042 |
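The trampoline tail above restores the caller's stack by converting the saved parameter count in r1 into bytes (a left shift by kPointerSizeLog2) and adding it to sp; in JS_FUNCTION_STUB_MODE one extra slot is popped for the receiver. A small standalone sketch of that arithmetic, assuming the 32-bit ARM pointer size of 4 bytes:

    #include <cstdint>
    #include <cassert>

    // Illustrative arithmetic for the trampoline tail above: converting a
    // caller stack-parameter count into a byte adjustment for sp.
    const int kPointerSize = 4;
    const int kPointerSizeLog2 = 2;   // 1 << 2 == kPointerSize

    uintptr_t PopCallerParameters(uintptr_t sp, int param_count,
                                  bool js_function_stub_mode) {
      if (js_function_stub_mode) {
        param_count += 1;             // also drop the receiver slot
      }
      // Equivalent of: mov r1, Operand(r1, LSL, kPointerSizeLog2); add sp, sp, r1
      return sp + (static_cast<uintptr_t>(param_count) << kPointerSizeLog2);
    }

    int main() {
      assert(PopCallerParameters(0x1000, 3, false) == 0x1000 + 3 * kPointerSize);
      assert(PopCallerParameters(0x1000, 3, true)  == 0x1000 + 4 * kPointerSize);
      return 0;
    }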
| 5009 | 5043 |
| 5010 void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) { | |
| 5011 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); | |
| 5012 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | |
| 5013 __ mov(r1, r0); | |
| 5014 int parameter_count_offset = | |
| 5015 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | |
| 5016 __ ldr(r0, MemOperand(fp, parameter_count_offset)); | |
| 5017 // The parameter count above includes the receiver for the arguments passed to | |
| 5018 // the deoptimization handler. Subtract the receiver for the parameter count | |
| 5019 // for the call. | |
| 5020 __ sub(r0, r0, Operand(1)); | |
| 5021 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | |
| 5022 ParameterCount argument_count(r0); | |
| 5023 __ InvokeFunction(r1, argument_count, JUMP_FUNCTION, NullCallWrapper()); | |
| 5024 } | |
| 5025 | |
| 5026 | |
| 5027 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 5044 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
| 5028 if (masm->isolate()->function_entry_hook() != NULL) { | 5045 if (masm->isolate()->function_entry_hook() != NULL) { |
| 5029 PredictableCodeSizeScope predictable(masm, 4 * Assembler::kInstrSize); | 5046 PredictableCodeSizeScope predictable(masm, 4 * Assembler::kInstrSize); |
| 5030 ProfileEntryHookStub stub; | 5047 ProfileEntryHookStub stub; |
| 5031 __ push(lr); | 5048 __ push(lr); |
| 5032 __ CallStub(&stub); | 5049 __ CallStub(&stub); |
| 5033 __ pop(lr); | 5050 __ pop(lr); |
| 5034 } | 5051 } |
| 5035 } | 5052 } |
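ProfileEntryHookStub::MaybeCallEntryHook only emits the push lr / CallStub / pop lr sequence when an entry hook has been installed on the isolate. A minimal model of that guard is below; the hook type and globals are illustrative stand-ins, not the V8 embedder API.

    #include <cstdint>
    #include <cstddef>
    #include <cstdio>

    // Illustrative model of the NULL check above: the hook call is skipped
    // entirely unless a hook has been installed.
    typedef void (*EntryHook)(uintptr_t function_address);

    static EntryHook g_entry_hook = NULL;  // analogous to isolate->function_entry_hook()

    void MaybeCallEntryHook(uintptr_t function_address) {
      if (g_entry_hook != NULL) {          // mirrors the != NULL guard in the stub
        g_entry_hook(function_address);
      }
    }

    static void MyHook(uintptr_t fn) { std::printf("entered %p\n", (void*)fn); }

    int main() {
      MaybeCallEntryHook(0x1234);          // no hook installed: nothing happens
      g_entry_hook = MyHook;
      MaybeCallEntryHook(0x1234);          // hook installed: called at "function entry"
      return 0;
    }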
| 5036 | 5053 |
| (...skipping 505 matching lines...) |
| 5542 MemOperand(fp, 6 * kPointerSize), | 5559 MemOperand(fp, 6 * kPointerSize), |
| 5543 NULL); | 5560 NULL); |
| 5544 } | 5561 } |
| 5545 | 5562 |
| 5546 | 5563 |
| 5547 #undef __ | 5564 #undef __ |
| 5548 | 5565 |
| 5549 } } // namespace v8::internal | 5566 } } // namespace v8::internal |
| 5550 | 5567 |
| 5551 #endif // V8_TARGET_ARCH_ARM | 5568 #endif // V8_TARGET_ARCH_ARM |