| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
| 8 | 8 |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 3093 matching lines...) | |
| 3104 // The slow case; we need this no matter what to complete a call after a miss. | 3104 // The slow case; we need this no matter what to complete a call after a miss. |
| 3105 CallFunctionNoFeedback(masm, | 3105 CallFunctionNoFeedback(masm, |
| 3106 arg_count(), | 3106 arg_count(), |
| 3107 true, | 3107 true, |
| 3108 CallAsMethod()); | 3108 CallAsMethod()); |
| 3109 | 3109 |
| 3110 __ Unreachable(); | 3110 __ Unreachable(); |
| 3111 } | 3111 } |
| 3112 | 3112 |
| 3113 | 3113 |
| 3114 void CallIC_RoundStub::Generate(MacroAssembler* masm) { | |
| 3115 Register function = x1; | |
| 3116 Register vector = x2; | |
| 3117 Register slot = x3; | |
| 3118 | |
| 3119 Register temp1 = x0; | |
| 3120 Register temp2 = x4; | |
| 3121 DoubleRegister double_temp1 = d1; | |
| 3122 DoubleRegister double_temp2 = d2; | |
| 3123 Label tail, miss; | |
| 3124 | |
| 3125 // Ensure nobody has snuck in another function. | |
| 3126 __ BranchIfNotBuiltin(function, temp1, kMathRound, &miss); | |
| 3127 | |
| 3128 if (arg_count() > 0) { | |
| 3129 __ Ldr(temp1, MemOperand(jssp, (arg_count() - 1) * kPointerSize)); | |
| 3130 Handle<Map> map = isolate()->factory()->heap_number_map(); | |
| 3131 __ CheckMap(temp1, temp2, map, &tail, DO_SMI_CHECK); | |
| 3132 __ Sub(temp1, temp1, Operand(kHeapObjectTag)); | |
| 3133 __ Ldr(double_temp1, MemOperand(temp1, HeapNumber::kValueOffset)); | |
| 3134 | |
| 3135 // If the number is > 0, it doesn't round to -0. | |
| 3136 __ Fmov(double_temp2, 0); | |
| 3137 __ Fcmp(double_temp1, double_temp2); | |
| 3138 __ B(gt, &tail); | |
| 3139 | |
| 3140 // If the number is < -0.5, it doesn't round to -0. | |
| 3141 __ Fmov(double_temp2, -.5); | |
| 3142 __ Fcmp(double_temp1, double_temp2); | |
| 3143 __ B(lt, &tail); | |
| 3144 | |
| 3145 __ Fmov(temp1, double_temp1); | |
| 3146 __ Cmp(temp1, Operand(0x8000000000000000)); | |
| 3147 __ B(ne, &tail); | |
| 3148 | |
| 3149 __ SmiUntag(temp1, slot); | |
| 3150 __ Mov(temp1, Operand(temp1, LSL, kPointerSizeLog2)); | |
| 3151 __ Add(temp1, temp1, vector); | |
| 3152 __ Mov(temp2, Smi::FromInt(kHasReturnedMinusZeroSentinel)); | |
| 3153 __ Str(temp2, | |
| 3154 FieldMemOperand(temp1, FixedArray::kHeaderSize + kPointerSize)); | |
| 3155 } | |
| 3156 | |
| 3157 __ bind(&tail); | |
| 3158 // The slow case; we need this no matter what to complete a call after a miss. | |
| 3159 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); | |
| 3160 | |
| 3161 __ Unreachable(); | |
| 3162 | |
| 3163 __ bind(&miss); | |
| 3164 GenerateMiss(masm); | |
| 3165 __ b(&tail); | |
| 3166 } | |
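The Fmov/Cmp pair in the deleted stub tests the untagged HeapNumber payload against 0x8000000000000000, the raw IEEE-754 bit pattern of -0. A minimal standalone C++ sketch of the same test (the helper name is illustrative, not V8 API):

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>

// True iff d is IEEE-754 negative zero: sign bit set, every other bit
// clear, i.e. exactly the 0x8000000000000000 pattern the stub tests
// with Fmov/Cmp after loading the HeapNumber's value field.
bool IsMinusZero(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));  // well-defined type pun
  return bits == 0x8000000000000000ULL;
}

int main() {
  assert(IsMinusZero(-0.0));
  assert(!IsMinusZero(0.0));   // +0 is all bits clear
  assert(!IsMinusZero(-1.0));
  return 0;
}
```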
| 3167 | |
| 3168 | |
| 3169 void CallIC_FloorStub::Generate(MacroAssembler* masm) { | |
| 3170 Register function = x1; | |
| 3171 Register vector = x2; | |
| 3172 Register slot = x3; | |
| 3173 | |
| 3174 Register temp1 = x0; | |
| 3175 Register temp2 = x4; | |
| 3176 DoubleRegister double_temp = d1; | |
| 3177 Label tail, miss; | |
| 3178 | |
| 3179 // Ensure nobody has snuck in another function. | |
| 3180 __ BranchIfNotBuiltin(function, temp1, kMathFloor, &miss); | |
| 3181 | |
| 3182 if (arg_count() > 0) { | |
| 3183 __ Ldr(temp1, MemOperand(jssp, (arg_count() - 1) * kPointerSize)); | |
| 3184 Handle<Map> map = isolate()->factory()->heap_number_map(); | |
| 3185 __ CheckMap(temp1, temp2, map, &tail, DO_SMI_CHECK); | |
| 3186 __ Sub(temp1, temp1, Operand(kHeapObjectTag)); | |
| 3187 __ Ldr(double_temp, MemOperand(temp1, HeapNumber::kValueOffset)); | |
| 3188 | |
| 3189 // Only -0 floors to -0. | |
| 3190 __ Fmov(temp1, double_temp); | |
| 3191 __ Cmp(temp1, Operand(0x8000000000000000)); | |
| 3192 __ B(ne, &tail); | |
| 3193 | |
| 3194 __ SmiUntag(temp1, slot); | |
| 3195 __ Mov(temp1, Operand(temp1, LSL, kPointerSizeLog2)); | |
| 3196 __ Add(temp1, temp1, vector); | |
| 3197 __ Mov(temp2, Operand(Smi::FromInt(kHasReturnedMinusZeroSentinel))); | |
| 3198 __ Str(temp2, | |
| 3199 FieldMemOperand(temp1, FixedArray::kHeaderSize + kPointerSize)); | |
| 3200 } | |
| 3201 | |
| 3202 __ bind(&tail); | |
| 3203 // The slow case; we need this no matter what to complete a call after a miss. | |
| 3204 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); | |
| 3205 | |
| 3206 __ Unreachable(); | |
| 3207 | |
| 3208 __ bind(&miss); | |
| 3209 GenerateMiss(masm); | |
| 3210 __ b(&tail); | |
| 3211 } | |
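In all three deleted stubs, the SmiUntag/Mov/Add/Str sequence records kHasReturnedMinusZeroSentinel one element past the call's feedback slot. A sketch of the effective-address arithmetic, assuming the 64-bit layout of this era (kPointerSize == 8, kHeapObjectTag == 1, a two-word FixedArray header; constants assumed, not pulled from V8 headers):

```cpp
#include <cstdint>

// Assumed constants for 64-bit V8 of this vintage.
constexpr intptr_t kPointerSize = 8;
constexpr int kPointerSizeLog2 = 3;
constexpr intptr_t kHeapObjectTag = 1;                        // heap pointers carry a tag bit
constexpr intptr_t kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length

// Address the final Str writes to: FieldMemOperand subtracts the heap
// tag, and the extra kPointerSize lands one element past slot_index,
// where the minus-zero sentinel lives.
intptr_t SentinelAddress(intptr_t tagged_vector, intptr_t slot_index) {
  intptr_t scaled = slot_index << kPointerSizeLog2;  // Mov(temp1, Operand(temp1, LSL, ...))
  return tagged_vector + scaled                      // Add(temp1, temp1, vector)
         + kFixedArrayHeaderSize + kPointerSize - kHeapObjectTag;
}

int main() {
  // Slot 0 of a vector at tagged address 0x1001: header (16) plus one
  // element (8) minus the tag (1) past the base.
  return SentinelAddress(0x1001, 0) == 0x1001 + 16 + 8 - 1 ? 0 : 1;
}
```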
| 3212 | |
| 3213 | |
| 3214 void CallIC_CeilStub::Generate(MacroAssembler* masm) { | |
| 3215 Register function = x1; | |
| 3216 Register vector = x2; | |
| 3217 Register slot = x3; | |
| 3218 | |
| 3219 Register temp1 = x0; | |
| 3220 Register temp2 = x4; | |
| 3221 DoubleRegister double_temp1 = d1; | |
| 3222 DoubleRegister double_temp2 = d2; | |
| 3223 Label tail, miss; | |
| 3224 | |
| 3225 // Ensure nobody has snuck in another function. | |
| 3226 __ BranchIfNotBuiltin(function, temp1, kMathCeil, &miss); | |
| 3227 | |
| 3228 if (arg_count() > 0) { | |
| 3229 __ Ldr(temp1, MemOperand(jssp, (arg_count() - 1) * kPointerSize)); | |
| 3230 Handle<Map> map = isolate()->factory()->heap_number_map(); | |
| 3231 __ CheckMap(temp1, temp2, map, &tail, DO_SMI_CHECK); | |
| 3232 __ Sub(temp1, temp1, Operand(kHeapObjectTag)); | |
| 3233 __ Ldr(double_temp1, MemOperand(temp1, HeapNumber::kValueOffset)); | |
| 3234 | |
| 3235 // If the number is positive, it doesn't ceil to -0. | |
| 3236 __ Fmov(double_temp2, 0); | |
| 3237 __ Fcmp(double_temp1, double_temp2); | |
| 3238 __ B(gt, &tail); | |
| 3239 | |
| 3240 // If it's less than or equal to -1, it doesn't ceil to -0. | |
| 3241 __ Fmov(double_temp2, -1); | |
| 3242 __ Fcmp(double_temp1, double_temp2); | |
| 3243 __ B(le, &tail); | |
| 3244 | |
| 3245 // +Zero doesn't ceil to -0. | |
| 3246 __ Fmov(temp1, double_temp1); | |
| 3247 __ Cmp(temp1, Operand(0x8000000000000000)); | |
| 3248 __ B(ne, &tail); | |
| 3249 | |
| 3250 __ SmiUntag(temp1, slot); | |
| 3251 __ Mov(temp1, Operand(temp1, LSL, kPointerSizeLog2)); | |
| 3252 __ Add(temp1, temp1, vector); | |
| 3253 __ Mov(temp2, Operand(Smi::FromInt(kHasReturnedMinusZeroSentinel))); | |
| 3254 __ Str(temp2, | |
| 3255 FieldMemOperand(temp1, FixedArray::kHeaderSize + kPointerSize)); | |
| 3256 } | |
| 3257 | |
| 3258 __ bind(&tail); | |
| 3259 // The slow case; we need this no matter what to complete a call after a miss. | |
| 3260 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); | |
| 3261 | |
| 3262 __ Unreachable(); | |
| 3263 | |
| 3264 __ bind(&miss); | |
| 3265 GenerateMiss(masm); | |
| 3266 __ b(&tail); | |
| 3267 } | |
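The range guards across the three stubs encode which inputs can produce -0: only -0 itself floors to -0, inputs in (-1, -0] ceil to -0, and inputs in [-0.5, -0] round to -0 under JS semantics. A runnable C++ check of those ranges (JsRound is a hand-written stand-in for JS Math.round, which rounds halfway cases toward +infinity, unlike C's round):

```cpp
#include <cassert>
#include <cmath>

bool IsMinusZero(double d) { return d == 0.0 && std::signbit(d); }

// Stand-in for JS Math.round semantics (assumption: halfway cases go
// toward +infinity, and a zero result keeps the sign of the input).
double JsRound(double x) {
  double r = std::floor(x + 0.5);
  return (r == 0.0 && std::signbit(x)) ? -0.0 : r;
}

int main() {
  assert(IsMinusZero(std::floor(-0.0)));   // only -0 floors to -0
  assert(!IsMinusZero(std::floor(-0.5)));  // floors to -1
  assert(IsMinusZero(std::ceil(-0.5)));    // (-1, -0] ceils to -0
  assert(!IsMinusZero(std::ceil(-1.0)));   // stays -1
  assert(IsMinusZero(JsRound(-0.25)));     // [-0.5, -0] rounds to -0
  assert(!IsMinusZero(JsRound(-0.75)));    // rounds to -1
  return 0;
}
```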
| 3268 | |
| 3269 | |
| 3270 void CallICStub::Generate(MacroAssembler* masm) { | 3114 void CallICStub::Generate(MacroAssembler* masm) { |
| 3271 ASM_LOCATION("CallICStub"); | 3115 ASM_LOCATION("CallICStub"); |
| 3272 | 3116 |
| 3273 // x1 - function | 3117 // x1 - function |
| 3274 // x3 - slot id (Smi) | 3118 // x3 - slot id (Smi) |
| 3275 // x2 - vector | 3119 // x2 - vector |
| 3276 const int with_types_offset = | 3120 const int with_types_offset = |
| 3277 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 3121 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
| 3278 const int generic_offset = | 3122 const int generic_offset = |
| 3279 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 3123 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
| (...skipping 98 matching lines...) | |
| 3378 | 3222 |
| 3379 // Goto miss case if we do not have a function. | 3223 // Goto miss case if we do not have a function. |
| 3380 __ JumpIfNotObjectType(function, x5, x5, JS_FUNCTION_TYPE, &miss); | 3224 __ JumpIfNotObjectType(function, x5, x5, JS_FUNCTION_TYPE, &miss); |
| 3381 | 3225 |
| 3382 // Make sure the function is not the Array() function, which requires special | 3226 // Make sure the function is not the Array() function, which requires special |
| 3383 // behavior on MISS. | 3227 // behavior on MISS. |
| 3384 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x5); | 3228 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x5); |
| 3385 __ Cmp(function, x5); | 3229 __ Cmp(function, x5); |
| 3386 __ B(eq, &miss); | 3230 __ B(eq, &miss); |
| 3387 | 3231 |
| 3388 // Some builtin functions require special handling; miss to the runtime. | |
| 3389 __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); | |
| 3390 __ Ldr(x0, FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset)); | |
| 3391 __ Cmp(x0, Operand(Smi::FromInt(0))); | |
| 3392 __ B(ne, &miss); | |
| 3393 | |
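The removed guard compares the SharedFunctionInfo's function-data field against Smi::FromInt(0) and misses to the runtime for anything else, presumably so the miss handler can install the specialized ICs above. For reference, a sketch of the 64-bit Smi encoding that this comparison and the stubs' SmiUntag rely on (kSmiShift == 32 is an assumption about this era's 64-bit ports):

```cpp
#include <cassert>
#include <cstdint>

// Assumed 64-bit Smi encoding: 32-bit payload in the upper half of
// the word, tag bits in the lower half all zero.
constexpr int kSmiShift = 32;

intptr_t SmiFromInt(int32_t value) {  // Smi::FromInt analogue
  return static_cast<intptr_t>(value) << kSmiShift;
}

int32_t SmiUntag(intptr_t smi) {      // SmiUntag analogue
  return static_cast<int32_t>(smi >> kSmiShift);
}

int main() {
  assert(SmiFromInt(0) == 0);  // the value the removed guard tested for
  assert(SmiUntag(SmiFromInt(7)) == 7);
  return 0;
}
```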
| 3394 // Update stats. | 3232 // Update stats. |
| 3395 __ Ldr(x4, FieldMemOperand(feedback_vector, with_types_offset)); | 3233 __ Ldr(x4, FieldMemOperand(feedback_vector, with_types_offset)); |
| 3396 __ Adds(x4, x4, Operand(Smi::FromInt(1))); | 3234 __ Adds(x4, x4, Operand(Smi::FromInt(1))); |
| 3397 __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset)); | 3235 __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset)); |
| 3398 | 3236 |
| 3399 // Store the function. Use a stub since we need a frame for allocation. | 3237 // Store the function. Use a stub since we need a frame for allocation. |
| 3400 // x2 - vector | 3238 // x2 - vector |
| 3401 // x3 - slot | 3239 // x3 - slot |
| 3402 // x1 - function | 3240 // x1 - function |
| 3403 { | 3241 { |
| (...skipping 1248 matching lines...) | |
| 4652 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4490 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 4653 } | 4491 } |
| 4654 | 4492 |
| 4655 | 4493 |
| 4656 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { | 4494 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { |
| 4657 EmitLoadTypeFeedbackVector(masm, x2); | 4495 EmitLoadTypeFeedbackVector(masm, x2); |
| 4658 CallIC_ArrayStub stub(isolate(), state()); | 4496 CallIC_ArrayStub stub(isolate(), state()); |
| 4659 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4497 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 4660 } | 4498 } |
| 4661 | 4499 |
| 4662 | |
| 4663 void CallIC_RoundTrampolineStub::Generate(MacroAssembler* masm) { | |
| 4664 EmitLoadTypeFeedbackVector(masm, x2); | |
| 4665 CallIC_RoundStub stub(isolate(), state()); | |
| 4666 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | |
| 4667 } | |
| 4668 | |
| 4669 | |
| 4670 void CallIC_FloorTrampolineStub::Generate(MacroAssembler* masm) { | |
| 4671 EmitLoadTypeFeedbackVector(masm, x2); | |
| 4672 CallIC_FloorStub stub(isolate(), state()); | |
| 4673 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | |
| 4674 } | |
| 4675 | |
| 4676 | |
| 4677 void CallIC_CeilTrampolineStub::Generate(MacroAssembler* masm) { | |
| 4678 EmitLoadTypeFeedbackVector(masm, x2); | |
| 4679 CallIC_CeilStub stub(isolate(), state()); | |
| 4680 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | |
| 4681 } | |
| 4682 | |
| 4683 | 4500 |
| 4684 void VectorRawLoadStub::Generate(MacroAssembler* masm) { | 4501 void VectorRawLoadStub::Generate(MacroAssembler* masm) { |
| 4685 GenerateImpl(masm, false); | 4502 GenerateImpl(masm, false); |
| 4686 } | 4503 } |
| 4687 | 4504 |
| 4688 | 4505 |
| 4689 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4506 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { |
| 4690 GenerateImpl(masm, true); | 4507 GenerateImpl(masm, true); |
| 4691 } | 4508 } |
| 4692 | 4509 |
| (...skipping 1241 matching lines...) | |
| 5934 kStackUnwindSpace, NULL, spill_offset, | 5751 kStackUnwindSpace, NULL, spill_offset, |
| 5935 MemOperand(fp, 6 * kPointerSize), NULL); | 5752 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5936 } | 5753 } |
| 5937 | 5754 |
| 5938 | 5755 |
| 5939 #undef __ | 5756 #undef __ |
| 5940 | 5757 |
| 5941 } } // namespace v8::internal | 5758 } } // namespace v8::internal |
| 5942 | 5759 |
| 5943 #endif // V8_TARGET_ARCH_ARM64 | 5760 #endif // V8_TARGET_ARCH_ARM64 |