Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
| 8 | 8 |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 3093 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3104 // The slow case, we need this no matter what to complete a call after a miss. | 3104 // The slow case, we need this no matter what to complete a call after a miss. |
| 3105 CallFunctionNoFeedback(masm, | 3105 CallFunctionNoFeedback(masm, |
| 3106 arg_count(), | 3106 arg_count(), |
| 3107 true, | 3107 true, |
| 3108 CallAsMethod()); | 3108 CallAsMethod()); |
| 3109 | 3109 |
| 3110 __ Unreachable(); | 3110 __ Unreachable(); |
| 3111 } | 3111 } |
| 3112 | 3112 |
| 3113 | 3113 |
| 3114 void CallIC_RoundStub::Generate(MacroAssembler* masm) { | |
| 3115 Register function = x1; | |
| 3116 Register vector = x2; | |
| 3117 Register slot = x3; | |
| 3118 | |
| 3119 Register temp1 = x0; | |
| 3120 Register temp2 = x4; | |
| 3121 DoubleRegister double_temp1 = d1; | |
| 3122 DoubleRegister double_temp2 = d2; | |
| 3123 Label tail, miss; | |
| 3124 | |
| 3125 // Ensure nobody has snuck in another function. | |
| 3126 __ BranchIfNotBuiltin(function, temp1, kMathRound, &miss); | |
| 3127 | |
| 3128 if (arg_count() > 0) { | |
| 3129 __ Ldr(temp1, MemOperand(jssp, (arg_count() - 1) * kPointerSize)); | |
| 3130 Handle<Map> map = isolate()->factory()->heap_number_map(); | |
| 3131 __ CheckMap(temp1, temp2, map, &tail, DO_SMI_CHECK); | |
| 3132 __ Sub(temp1, temp1, Operand(kHeapObjectTag)); | |
| 3133 __ Ldr(double_temp1, MemOperand(temp1, HeapNumber::kValueOffset)); | |
| 3134 | |
| 3135 // If the number is >0, it doesn't round to -0 | |
| 3136 __ Fmov(double_temp2, 0); | |
| 3137 __ Fcmp(double_temp1, double_temp2); | |
| 3138 __ B(gt, &tail); | |
| 3139 | |
| 3140 // If the number is <-.5, it doesn't round to -0 | |
| 3141 __ Fmov(double_temp2, -.5); | |
| 3142 __ Fcmp(double_temp1, double_temp2); | |
| 3143 __ B(lt, &tail); | |
| 3144 | |
| 3145 __ Fmov(temp1, double_temp1); | |
| 3146 __ Cmp(temp1, Operand(0x8000000000000000)); | |
| 3147 __ B(ne, &tail); | |
| 3148 | |
| 3149 __ SmiUntag(temp1, slot); | |
| 3150 __ Mov(temp1, Operand(temp1, LSL, kPointerSizeLog2)); | |
| 3151 __ Add(temp1, temp1, vector); | |
| 3152 __ Mov(temp2, Smi::FromInt(kHasReturnedMinusZeroSentinel)); | |
| 3153 __ Str(temp2, | |
| 3154 FieldMemOperand(temp1, FixedArray::kHeaderSize + kPointerSize)); | |
| 3155 } | |
| 3156 | |
| 3157 __ bind(&tail); | |
| 3158 // The slow case, we need this no matter what to complete a call after a miss. | |
| 3159 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); | |
| 3160 | |
| 3161 __ Unreachable(); | |
| 3162 | |
| 3163 __ bind(&miss); | |
| 3164 GenerateMiss(masm); | |
| 3165 __ b(&tail); | |
| 3166 } | |
| 3167 | |
| 3168 | |
| 3169 void CallIC_FloorStub::Generate(MacroAssembler* masm) { | |
| 3170 Register function = x1; | |
| 3171 Register vector = x2; | |
| 3172 Register slot = x3; | |
| 3173 | |
| 3174 Register temp1 = x0; | |
| 3175 Register temp2 = x4; | |
| 3176 DoubleRegister double_temp = d1; | |
| 3177 Label tail, miss; | |
| 3178 | |
| 3179 // Ensure nobody has snuck in another function. | |
| 3180 __ BranchIfNotBuiltin(function, temp1, kMathFloor, &miss); | |
| 3181 | |
| 3182 if (arg_count() > 0) { | |
| 3183 __ Ldr(temp1, MemOperand(jssp, (arg_count() - 1) * kPointerSize)); | |
| 3184 Handle<Map> map = isolate()->factory()->heap_number_map(); | |
| 3185 __ CheckMap(temp1, temp2, map, &tail, DO_SMI_CHECK); | |
| 3186 __ Sub(temp1, temp1, Operand(kHeapObjectTag)); | |
| 3187 __ Ldr(double_temp, MemOperand(temp1, HeapNumber::kValueOffset)); | |
| 3188 | |
| 3189 // Only -0 floors to -0. | |
| 3190 __ Fmov(temp1, double_temp); | |
| 3191 __ Cmp(temp1, Operand(0x8000000000000000)); | |
| 3192 __ B(ne, &tail); | |
| 3193 | |
| 3194 __ SmiUntag(temp1, slot); | |
| 3195 __ Mov(temp1, Operand(temp1, LSL, kPointerSizeLog2)); | |
| 3196 __ Add(temp1, temp1, vector); | |
| 3197 __ Mov(temp2, Operand(Smi::FromInt(kHasReturnedMinusZeroSentinel))); | |
| 3198 __ Str(temp2, | |
| 3199 FieldMemOperand(temp1, FixedArray::kHeaderSize + kPointerSize)); | |
| 3200 } | |
| 3201 | |
| 3202 __ bind(&tail); | |
| 3203 // The slow case, we need this no matter what to complete a call after a miss. | |
| 3204 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); | |
| 3205 | |
| 3206 __ Unreachable(); | |
| 3207 | |
| 3208 __ bind(&miss); | |
| 3209 GenerateMiss(masm); | |
| 3210 __ b(&tail); | |
| 3211 } | |
| 3212 | |
| 3213 | |
| 3214 void CallIC_CeilStub::Generate(MacroAssembler* masm) { | |
| 3215 Register function = x1; | |
| 3216 Register vector = x2; | |
| 3217 Register slot = x3; | |
| 3218 | |
| 3219 Register temp1 = x0; | |
| 3220 Register temp2 = x4; | |
| 3221 DoubleRegister double_temp1 = d1; | |
| 3222 DoubleRegister double_temp2 = d2; | |
| 3223 Label tail, miss; | |
| 3224 | |
| 3225 // Ensure nobody has snuck in another function. | |
| 3226 __ BranchIfNotBuiltin(function, temp1, kMathCeil, &miss); | |
| 3227 | |
| 3228 if (arg_count() > 0) { | |
| 3229 __ Ldr(temp1, MemOperand(jssp, (arg_count() - 1) * kPointerSize)); | |
| 3230 Handle<Map> map = isolate()->factory()->heap_number_map(); | |
| 3231 __ CheckMap(temp1, temp2, map, &tail, DO_SMI_CHECK); | |
| 3232 __ Sub(temp1, temp1, Operand(kHeapObjectTag)); | |
| 3233 __ Ldr(double_temp1, MemOperand(temp1, HeapNumber::kValueOffset)); | |
| 3234 | |
| 3235 // If the number is positive, it doesn't ceil to -0 | |
| 3236 __ Fmov(double_temp2, 0); | |
| 3237 __ Fcmp(double_temp1, double_temp2); | |
| 3238 __ B(gt, &tail); | |
| 3239 | |
| 3240 // If it's less than or equal to -1, it doesn't ceil to -0 | |
| 3241 __ Fmov(double_temp2, -1); | |
| 3242 __ Fcmp(double_temp1, double_temp2); | |
| 3243 __ B(le, &tail); | |
| 3244 | |
| 3245 // +Zero doesn't round to -0 | |
| 3246 __ Fmov(temp1, double_temp1); | |
| 3247 __ Cmp(temp1, Operand(0x8000000000000000)); | |
| 3248 __ B(ne, &tail); | |
| 3249 | |
| 3250 __ SmiUntag(temp1, slot); | |
| 3251 __ Mov(temp1, Operand(temp1, LSL, kPointerSizeLog2)); | |
| 3252 __ Add(temp1, temp1, vector); | |
| 3253 __ Mov(temp2, Operand(Smi::FromInt(kHasReturnedMinusZeroSentinel))); | |
| 3254 __ Str(temp2, | |
| 3255 FieldMemOperand(temp1, FixedArray::kHeaderSize + kPointerSize)); | |
| 3256 } | |
| 3257 | |
| 3258 __ bind(&tail); | |
| 3259 // The slow case, we need this no matter what to complete a call after a miss. | |
| 3260 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); | |
| 3261 | |
| 3262 __ Unreachable(); | |
| 3263 | |
| 3264 __ bind(&miss); | |
| 3265 GenerateMiss(masm); | |
| 3266 __ b(&tail); | |
| 3267 } | |
| 3268 | |
| 3269 | |
| 3114 void CallICStub::Generate(MacroAssembler* masm) { | 3270 void CallICStub::Generate(MacroAssembler* masm) { |
| 3115 ASM_LOCATION("CallICStub"); | 3271 ASM_LOCATION("CallICStub"); |
| 3116 | 3272 |
| 3117 // x1 - function | 3273 // x1 - function |
| 3118 // x3 - slot id (Smi) | 3274 // x3 - slot id (Smi) |
| 3119 // x2 - vector | 3275 // x2 - vector |
| 3120 const int with_types_offset = | 3276 const int with_types_offset = |
| 3121 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 3277 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
| 3122 const int generic_offset = | 3278 const int generic_offset = |
| 3123 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 3279 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
| (...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3222 | 3378 |
| 3223 // Goto miss case if we do not have a function. | 3379 // Goto miss case if we do not have a function. |
| 3224 __ JumpIfNotObjectType(function, x5, x5, JS_FUNCTION_TYPE, &miss); | 3380 __ JumpIfNotObjectType(function, x5, x5, JS_FUNCTION_TYPE, &miss); |
| 3225 | 3381 |
| 3226 // Make sure the function is not the Array() function, which requires special | 3382 // Make sure the function is not the Array() function, which requires special |
| 3227 // behavior on MISS. | 3383 // behavior on MISS. |
| 3228 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x5); | 3384 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x5); |
| 3229 __ Cmp(function, x5); | 3385 __ Cmp(function, x5); |
| 3230 __ B(eq, &miss); | 3386 __ B(eq, &miss); |
| 3231 | 3387 |
| 3388 #if 0 | |
|
mvstanton
2015/05/04 09:41:21
Why is this commented out? I guess it should not be checked in like this.
danno
2015/05/04 14:31:56
Done.
| |
| 3389 // Some builtin functions require special handling, miss to the runtime. | |
| 3390 __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); | |
| 3391 __ Ldr(x0, FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset)); | |
| 3392 __ Cmp(x0, Operand(Smi::FromInt(0))); | |
| 3393 __ B(ne, &miss); | |
| 3394 #endif | |
| 3395 | |
| 3232 // Update stats. | 3396 // Update stats. |
| 3233 __ Ldr(x4, FieldMemOperand(feedback_vector, with_types_offset)); | 3397 __ Ldr(x4, FieldMemOperand(feedback_vector, with_types_offset)); |
| 3234 __ Adds(x4, x4, Operand(Smi::FromInt(1))); | 3398 __ Adds(x4, x4, Operand(Smi::FromInt(1))); |
| 3235 __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset)); | 3399 __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset)); |
| 3236 | 3400 |
| 3237 // Store the function. Use a stub since we need a frame for allocation. | 3401 // Store the function. Use a stub since we need a frame for allocation. |
| 3238 // x2 - vector | 3402 // x2 - vector |
| 3239 // x3 - slot | 3403 // x3 - slot |
| 3240 // x1 - function | 3404 // x1 - function |
| 3241 { | 3405 { |
| (...skipping 1249 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 4491 } | 4655 } |
| 4492 | 4656 |
| 4493 | 4657 |
| 4494 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { | 4658 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { |
| 4495 EmitLoadTypeFeedbackVector(masm, x2); | 4659 EmitLoadTypeFeedbackVector(masm, x2); |
| 4496 CallIC_ArrayStub stub(isolate(), state()); | 4660 CallIC_ArrayStub stub(isolate(), state()); |
| 4497 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4661 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 4498 } | 4662 } |
| 4499 | 4663 |
| 4500 | 4664 |
| 4665 void CallIC_RoundTrampolineStub::Generate(MacroAssembler* masm) { | |
| 4666 EmitLoadTypeFeedbackVector(masm, x2); | |
| 4667 CallIC_RoundStub stub(isolate(), state()); | |
| 4668 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | |
| 4669 } | |
| 4670 | |
| 4671 | |
| 4672 void CallIC_FloorTrampolineStub::Generate(MacroAssembler* masm) { | |
| 4673 EmitLoadTypeFeedbackVector(masm, x2); | |
| 4674 CallIC_FloorStub stub(isolate(), state()); | |
| 4675 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | |
| 4676 } | |
| 4677 | |
| 4678 | |
| 4679 void CallIC_CeilTrampolineStub::Generate(MacroAssembler* masm) { | |
| 4680 EmitLoadTypeFeedbackVector(masm, x2); | |
| 4681 CallIC_CeilStub stub(isolate(), state()); | |
| 4682 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | |
| 4683 } | |
| 4684 | |
| 4685 | |
| 4501 void VectorRawLoadStub::Generate(MacroAssembler* masm) { | 4686 void VectorRawLoadStub::Generate(MacroAssembler* masm) { |
| 4502 GenerateImpl(masm, false); | 4687 GenerateImpl(masm, false); |
| 4503 } | 4688 } |
| 4504 | 4689 |
| 4505 | 4690 |
| 4506 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4691 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { |
| 4507 GenerateImpl(masm, true); | 4692 GenerateImpl(masm, true); |
| 4508 } | 4693 } |
| 4509 | 4694 |
| 4510 | 4695 |
| (...skipping 1240 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 5751 kStackUnwindSpace, NULL, spill_offset, | 5936 kStackUnwindSpace, NULL, spill_offset, |
| 5752 MemOperand(fp, 6 * kPointerSize), NULL); | 5937 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5753 } | 5938 } |
| 5754 | 5939 |
| 5755 | 5940 |
| 5756 #undef __ | 5941 #undef __ |
| 5757 | 5942 |
| 5758 } } // namespace v8::internal | 5943 } } // namespace v8::internal |
| 5759 | 5944 |
| 5760 #endif // V8_TARGET_ARCH_ARM64 | 5945 #endif // V8_TARGET_ARCH_ARM64 |
| OLD | NEW |