Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(307)

Side by Side Diff: src/arm/code-stubs-arm.cc

Issue 226233002: Revert "Reland of https://codereview.chromium.org/172523002/" (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm/builtins-arm.cc ('k') | src/arm/debug-arm.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 2959 matching lines...) Expand 10 before | Expand all | Expand 10 after
2970 2970
2971 __ Push(r4, r2, r1); 2971 __ Push(r4, r2, r1);
2972 __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs, 2972 __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs,
2973 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 2973 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2974 __ Pop(r4, r2, r1); 2974 __ Pop(r4, r2, r1);
2975 2975
2976 __ bind(&done); 2976 __ bind(&done);
2977 } 2977 }
2978 2978
2979 2979
2980 static void GenericCallHelper(MacroAssembler* masm, 2980 void CallFunctionStub::Generate(MacroAssembler* masm) {
2981 const CallIC::State& state,
2982 bool wrap_and_call = false) {
2983 // r1 : the function to call 2981 // r1 : the function to call
2984 2982 // r2 : feedback vector
2985 // wrap_and_call can only be true if we are compiling a monomorphic method. 2983 // r3 : (only if r2 is not the megamorphic symbol) slot in feedback
2986 ASSERT(!(wrap_and_call && state.IsGeneric())); 2984 // vector (Smi)
2987 ASSERT(!wrap_and_call || state.CallAsMethod());
2988 Label slow, non_function, wrap, cont; 2985 Label slow, non_function, wrap, cont;
2989 2986
2990 if (state.IsGeneric()) { 2987 if (NeedsChecks()) {
2991 // Check that the function is really a JavaScript function. 2988 // Check that the function is really a JavaScript function.
2992 // r1: pushed function (to be verified) 2989 // r1: pushed function (to be verified)
2993 __ JumpIfSmi(r1, &non_function); 2990 __ JumpIfSmi(r1, &non_function);
2994 2991
2995 // Goto slow case if we do not have a function. 2992 // Goto slow case if we do not have a function.
2996 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); 2993 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
2997 __ b(ne, &slow); 2994 __ b(ne, &slow);
2995
2996 if (RecordCallTarget()) {
2997 GenerateRecordCallTarget(masm);
2998 // Type information was updated. Because we may call Array, which
 2999 // expects either undefined or an AllocationSite in r2, we need
 3000 // to set r2 to undefined.
3001 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
3002 }
2998 } 3003 }
2999 3004
3000 // Fast-case: Invoke the function now. 3005 // Fast-case: Invoke the function now.
3001 // r1: pushed function 3006 // r1: pushed function
3002 int argc = state.arg_count(); 3007 ParameterCount actual(argc_);
3003 ParameterCount actual(argc);
3004 3008
3005 if (state.CallAsMethod()) { 3009 if (CallAsMethod()) {
3006 if (state.IsGeneric()) { 3010 if (NeedsChecks()) {
3007 // Do not transform the receiver for strict mode functions. 3011 // Do not transform the receiver for strict mode functions.
3008 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 3012 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
3009 __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset)); 3013 __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset));
3010 __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + 3014 __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
3011 kSmiTagSize))); 3015 kSmiTagSize)));
3012 __ b(ne, &cont); 3016 __ b(ne, &cont);
3013 3017
3014 // Do not transform the receiver for native (Compilerhints already in r3). 3018 // Do not transform the receiver for native (Compilerhints already in r3).
3015 __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); 3019 __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
3016 __ b(ne, &cont); 3020 __ b(ne, &cont);
3017 } 3021 }
3018 3022
3019 if (state.IsGeneric() || state.IsSloppy() || wrap_and_call) { 3023 // Compute the receiver in sloppy mode.
3020 // Compute the receiver in sloppy mode. 3024 __ ldr(r3, MemOperand(sp, argc_ * kPointerSize));
3021 __ ldr(r3, MemOperand(sp, argc * kPointerSize));
3022 3025
3023 if (state.IsGeneric()) { 3026 if (NeedsChecks()) {
3024 __ JumpIfSmi(r3, &wrap); 3027 __ JumpIfSmi(r3, &wrap);
3025 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE); 3028 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
3026 __ b(lt, &wrap); 3029 __ b(lt, &wrap);
3027 } else { 3030 } else {
3028 __ jmp(&wrap); 3031 __ jmp(&wrap);
3029 }
3030 } 3032 }
3031 3033
3032 __ bind(&cont); 3034 __ bind(&cont);
3033 } 3035 }
3036 __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
3034 3037
3035 if (state.ArgumentsMustMatch()) { 3038 if (NeedsChecks()) {
3036 __ InvokeFunction(r1, actual, actual, JUMP_FUNCTION, NullCallWrapper());
3037 } else {
3038 __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
3039 }
3040
3041 if (state.IsGeneric()) {
3042 // Slow-case: Non-function called. 3039 // Slow-case: Non-function called.
3043 __ bind(&slow); 3040 __ bind(&slow);
3041 if (RecordCallTarget()) {
3042 // If there is a call target cache, mark it megamorphic in the
3043 // non-function case. MegamorphicSentinel is an immortal immovable
3044 // object (megamorphic symbol) so no write barrier is needed.
3045 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
3046 masm->isolate()->heap()->megamorphic_symbol());
3047 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
3048 __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
3049 __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
3050 }
3044 // Check for function proxy. 3051 // Check for function proxy.
3045 __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE)); 3052 __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE));
3046 __ b(ne, &non_function); 3053 __ b(ne, &non_function);
3047 __ push(r1); // put proxy as additional argument 3054 __ push(r1); // put proxy as additional argument
3048 __ mov(r0, Operand(argc + 1, RelocInfo::NONE32)); 3055 __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32));
3049 __ mov(r2, Operand::Zero()); 3056 __ mov(r2, Operand::Zero());
3050 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); 3057 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
3051 { 3058 {
3052 Handle<Code> adaptor = 3059 Handle<Code> adaptor =
3053 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 3060 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
3054 __ Jump(adaptor, RelocInfo::CODE_TARGET); 3061 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3055 } 3062 }
3056 3063
3057 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 3064 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
3058 // of the original receiver from the call site). 3065 // of the original receiver from the call site).
3059 __ bind(&non_function); 3066 __ bind(&non_function);
3060 __ str(r1, MemOperand(sp, argc * kPointerSize)); 3067 __ str(r1, MemOperand(sp, argc_ * kPointerSize));
3061 __ mov(r0, Operand(argc)); // Set up the number of arguments. 3068 __ mov(r0, Operand(argc_)); // Set up the number of arguments.
3062 __ mov(r2, Operand::Zero()); 3069 __ mov(r2, Operand::Zero());
3063 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION); 3070 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
3064 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 3071 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3065 RelocInfo::CODE_TARGET); 3072 RelocInfo::CODE_TARGET);
3066 } 3073 }
3067 3074
3068 if (state.CallAsMethod()) { 3075 if (CallAsMethod()) {
3069 __ bind(&wrap); 3076 __ bind(&wrap);
3070
3071 if (!state.IsGeneric() && !wrap_and_call) {
3072 __ ldr(r5, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
3073 __ ldr(r4, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
3074
3075 // Do not transform the receiver for native
3076 __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
3077 __ b(ne, &cont);
3078 }
3079
3080 // Wrap the receiver and patch it back onto the stack. 3077 // Wrap the receiver and patch it back onto the stack.
3081 { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); 3078 { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
3082 __ Push(r1, r3); 3079 __ Push(r1, r3);
3083 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 3080 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
3084 __ pop(r1); 3081 __ pop(r1);
3085 } 3082 }
3086 __ str(r0, MemOperand(sp, argc * kPointerSize)); 3083 __ str(r0, MemOperand(sp, argc_ * kPointerSize));
3087 __ jmp(&cont); 3084 __ jmp(&cont);
3088 } 3085 }
3089 } 3086 }
3090 3087
3091 3088
3092 void CallFunctionStub::Generate(MacroAssembler* masm) {
3093 // r1 : the function to call
3094
3095 // GenericCallHelper expresses its options in terms of CallIC::State.
3096 CallIC::CallType call_type = CallAsMethod() ?
3097 CallIC::METHOD : CallIC::FUNCTION;
3098
3099 if (NeedsChecks()) {
3100 GenericCallHelper(masm,
3101 CallIC::State::SlowCallState(
3102 argc_,
3103 call_type));
3104 } else {
3105 GenericCallHelper(masm,
3106 CallIC::State::MonomorphicCallState(
3107 argc_,
3108 call_type,
3109 CallIC::ARGUMENTS_COUNT_UNKNOWN,
3110 SLOPPY),
3111 true);
3112 }
3113 }
3114
3115
3116 void CallConstructStub::Generate(MacroAssembler* masm) { 3089 void CallConstructStub::Generate(MacroAssembler* masm) {
3117 // r0 : number of arguments 3090 // r0 : number of arguments
3118 // r1 : the function to call 3091 // r1 : the function to call
3119 // r2 : feedback vector 3092 // r2 : feedback vector
3120 // r3 : (only if r2 is not the megamorphic symbol) slot in feedback 3093 // r3 : (only if r2 is not the megamorphic symbol) slot in feedback
3121 // vector (Smi) 3094 // vector (Smi)
3122 Label slow, non_function_call; 3095 Label slow, non_function_call;
3123 3096
3124 // Check that the function is not a smi. 3097 // Check that the function is not a smi.
3125 __ JumpIfSmi(r1, &non_function_call); 3098 __ JumpIfSmi(r1, &non_function_call);
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
3170 __ bind(&non_function_call); 3143 __ bind(&non_function_call);
3171 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); 3144 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
3172 __ bind(&do_call); 3145 __ bind(&do_call);
3173 // Set expected number of arguments to zero (not changing r0). 3146 // Set expected number of arguments to zero (not changing r0).
3174 __ mov(r2, Operand::Zero()); 3147 __ mov(r2, Operand::Zero());
3175 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 3148 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3176 RelocInfo::CODE_TARGET); 3149 RelocInfo::CODE_TARGET);
3177 } 3150 }
3178 3151
3179 3152
3180 void CallICStub::GenerateMonomorphicCall(MacroAssembler* masm) {
3181 GenericCallHelper(masm,
3182 CallIC::State::MonomorphicCallState(
3183 state_.arg_count(),
3184 state_.call_type(),
3185 state_.argument_check(),
3186 state_.strict_mode()));
3187 }
3188
3189
3190 void CallICStub::GenerateSlowCall(MacroAssembler* masm) {
3191 GenericCallHelper(masm,
3192 CallIC::State::SlowCallState(
3193 state_.arg_count(),
3194 state_.call_type()));
3195 }
3196
3197
3198 void CallICStub::Generate(MacroAssembler* masm) {
3199 // r1 - function
3200 // r2 - vector
3201 // r3 - slot id (Smi)
3202 Label extra_checks_or_miss, slow;
3203
3204 // The checks. First, does r1 match the recorded monomorphic target?
3205 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
3206 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
3207 __ cmp(r1, r4);
3208 __ b(ne, &extra_checks_or_miss);
3209
3210 GenerateMonomorphicCall(masm);
3211
3212 __ bind(&extra_checks_or_miss);
3213 if (IsGeneric()) {
3214 Label miss_uninit;
3215
3216 __ CompareRoot(r4, Heap::kMegamorphicSymbolRootIndex);
3217 __ b(eq, &slow);
3218 __ CompareRoot(r4, Heap::kUninitializedSymbolRootIndex);
3219 __ b(eq, &miss_uninit);
3220 // If we get here, go from monomorphic to megamorphic. Don't bother missing,
3221 // just update.
3222 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
3223 __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
3224 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
3225 __ jmp(&slow);
3226
3227 __ bind(&miss_uninit);
3228 }
3229
3230 GenerateMiss(masm);
3231
3232 // the slow case
3233 __ bind(&slow);
3234 GenerateSlowCall(masm);
3235 }
3236
3237
3238 void CallICStub::GenerateMiss(MacroAssembler* masm) {
3239 // Get the receiver of the function from the stack; 1 ~ return address.
3240 __ ldr(r4, MemOperand(sp, (state_.arg_count() + 1) * kPointerSize));
3241
3242 {
3243 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
3244
3245 // Push the receiver and the function and feedback info.
3246 __ Push(r4, r1, r2, r3);
3247
3248 // Call the entry.
3249 ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
3250 masm->isolate());
3251 __ CallExternalReference(miss, 4);
3252
3253 // Move result to r1 and exit the internal frame.
3254 __ mov(r1, r0);
3255 }
3256 }
3257
3258
3259 // StringCharCodeAtGenerator 3153 // StringCharCodeAtGenerator
3260 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 3154 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3261 Label flat_string; 3155 Label flat_string;
3262 Label ascii_string; 3156 Label ascii_string;
3263 Label got_char_code; 3157 Label got_char_code;
3264 Label sliced_string; 3158 Label sliced_string;
3265 3159
3266 // If the receiver is a smi trigger the non-string case. 3160 // If the receiver is a smi trigger the non-string case.
3267 __ JumpIfSmi(object_, receiver_not_string_); 3161 __ JumpIfSmi(object_, receiver_not_string_);
3268 3162
(...skipping 2296 matching lines...) Expand 10 before | Expand all | Expand 10 after
5565 MemOperand(fp, 6 * kPointerSize), 5459 MemOperand(fp, 6 * kPointerSize),
5566 NULL); 5460 NULL);
5567 } 5461 }
5568 5462
5569 5463
5570 #undef __ 5464 #undef __
5571 5465
5572 } } // namespace v8::internal 5466 } } // namespace v8::internal
5573 5467
5574 #endif // V8_TARGET_ARCH_ARM 5468 #endif // V8_TARGET_ARCH_ARM
OLDNEW
« no previous file with comments | « src/arm/builtins-arm.cc ('k') | src/arm/debug-arm.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698