| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3184 matching lines...) |
| 3195 return Runtime::kAbort; | 3195 return Runtime::kAbort; |
| 3196 } | 3196 } |
| 3197 } | 3197 } |
| 3198 | 3198 |
| 3199 | 3199 |
| 3200 void StackCheckStub::Generate(MacroAssembler* masm) { | 3200 void StackCheckStub::Generate(MacroAssembler* masm) { |
| 3201 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); | 3201 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); |
| 3202 } | 3202 } |
| 3203 | 3203 |
| 3204 | 3204 |
| 3205 void GenericUnaryOpStub::Generate(MacroAssembler* masm) { | |
| 3206 Label slow, done; | |
| 3207 | |
| 3208 Register heap_number_map = r6; | |
| 3209 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | |
| 3210 | |
| 3211 if (op_ == Token::SUB) { | |
| 3212 if (include_smi_code_) { | |
| 3213 // Check whether the value is a smi. | |
| 3214 Label try_float; | |
| 3215 __ tst(r0, Operand(kSmiTagMask)); | |
| 3216 __ b(ne, &try_float); | |
| 3217 | |
| 3218 // Go slow case if the value of the expression is zero | |
| 3219 // to make sure that we switch between 0 and -0. | |
| 3220 if (negative_zero_ == kStrictNegativeZero) { | |
| 3221 // If we have to check for zero, then we can check for the max negative | |
| 3222 // smi while we are at it. | |
| 3223 __ bic(ip, r0, Operand(0x80000000), SetCC); | |
| 3224 __ b(eq, &slow); | |
| 3225 __ rsb(r0, r0, Operand(0, RelocInfo::NONE)); | |
| 3226 __ Ret(); | |
| 3227 } else { | |
| 3228 // The value of the expression is a smi and 0 is OK for -0. Try | |
| 3229 // optimistic subtraction '0 - value'. | |
| 3230 __ rsb(r0, r0, Operand(0, RelocInfo::NONE), SetCC); | |
| 3231 __ Ret(vc); | |
| 3232 // We don't have to reverse the optimistic neg since the only case | |
| 3233 // where we fall through is the minimum negative Smi, which is the case | |
| 3234 // where the neg leaves the register unchanged. | |
| 3235 __ jmp(&slow); // Go slow on max negative Smi. | |
| 3236 } | |
| 3237 __ bind(&try_float); | |
| 3238 } else if (FLAG_debug_code) { | |
| 3239 __ tst(r0, Operand(kSmiTagMask)); | |
| 3240 __ Assert(ne, "Unexpected smi operand."); | |
| 3241 } | |
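
The smi fast path above leans on the tagged encoding: with a one-bit zero
tag, only two inputs ever need the slow path for unary minus, +0 (whose
result must be the heap number -0) and the minimum negative smi (whose
negation overflows the 31-bit payload), and the single
"bic ip, r0, #0x80000000, SetCC" catches both. A minimal C++ sketch of the
same test, assuming 32-bit smis tagged by a left shift of one as on ARM here:

    #include <cstdint>

    // Clearing the sign bit leaves zero only for tagged +0 (0x00000000)
    // and the tagged minimum smi -2^30 (0x80000000).
    static bool NeedsSlowNegation(int32_t tagged_smi) {
      return (tagged_smi & 0x7FFFFFFF) == 0;
    }
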
| 3242 | |
| 3243 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | |
| 3244 __ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | |
| 3245 __ cmp(r1, heap_number_map); | |
| 3246 __ b(ne, &slow); | |
| 3247 // r0 is a heap number. Get a new heap number in r1. | |
| 3248 if (overwrite_ == UNARY_OVERWRITE) { | |
| 3249 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | |
| 3250 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. | |
| 3251 __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | |
| 3252 } else { | |
| 3253 __ AllocateHeapNumber(r1, r2, r3, r6, &slow); | |
| 3254 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); | |
| 3255 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | |
| 3256 __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset)); | |
| 3257 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. | |
| 3258 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); | |
| 3259 __ mov(r0, Operand(r1)); | |
| 3260 } | |
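
The overwrite branch negates in place without any floating-point
arithmetic: an IEEE-754 double is negated by toggling its sign bit, which
lives in the word at HeapNumber::kExponentOffset alongside the exponent and
high mantissa bits. A hedged C++ equivalent (the function name is
illustrative, not V8's):

    #include <cstdint>
    #include <cstring>

    // Flipping bit 63 negates any double, including 0.0 -> -0.0,
    // infinities and NaNs; this is what the eor with kSignMask does.
    static double NegateBySignFlip(double v) {
      uint64_t bits;
      std::memcpy(&bits, &v, sizeof bits);
      bits ^= uint64_t{1} << 63;
      std::memcpy(&v, &bits, sizeof v);
      return v;
    }
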
| 3261 } else if (op_ == Token::BIT_NOT) { | |
| 3262 if (include_smi_code_) { | |
| 3263 Label non_smi; | |
| 3264 __ JumpIfNotSmi(r0, &non_smi); | |
| 3265 __ mvn(r0, Operand(r0)); | |
| 3266 // Bit-clear inverted smi-tag. | |
| 3267 __ bic(r0, r0, Operand(kSmiTagMask)); | |
| 3268 __ Ret(); | |
| 3269 __ bind(&non_smi); | |
| 3270 } else if (FLAG_debug_code) { | |
| 3271 __ tst(r0, Operand(kSmiTagMask)); | |
| 3272 __ Assert(ne, "Unexpected smi operand."); | |
| 3273 } | |
| 3274 | |
| 3275 // Check if the operand is a heap number. | |
| 3276 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | |
| 3277 __ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | |
| 3278 __ cmp(r1, heap_number_map); | |
| 3279 __ b(ne, &slow); | |
| 3280 | |
| 3281 // Convert the heap number in r0 to an untagged integer in r1. | |
| 3282 __ ConvertToInt32(r0, r1, r2, r3, d0, &slow); | |
| 3283 | |
| 3284 // Do the bitwise operation (move negated) and check if the result | |
| 3285 // fits in a smi. | |
| 3286 Label try_float; | |
| 3287 __ mvn(r1, Operand(r1)); | |
| 3288 __ add(r2, r1, Operand(0x40000000), SetCC); | |
| 3289 __ b(mi, &try_float); | |
| 3290 __ mov(r0, Operand(r1, LSL, kSmiTagSize)); | |
| 3291 __ b(&done); | |
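
The add/branch pair above is a branchless range test: a 32-bit integer fits
the 31-bit smi payload range [-2^30, 2^30 - 1] exactly when adding 2^30
leaves a non-negative 32-bit result, so the mi (negative) branch catches
every value that needs a heap number instead. The same test as a C++ sketch:

    #include <cstdint>

    // Equivalent of "add r2, r1, #0x40000000, SetCC; b mi, &try_float".
    static bool FitsInSmi(int32_t v) {
      return static_cast<int32_t>(static_cast<uint32_t>(v) + 0x40000000u) >= 0;
    }
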
| 3292 | |
| 3293 __ bind(&try_float); | |
| 3294 if (overwrite_ != UNARY_OVERWRITE) { | |
| 3295 // Allocate a fresh heap number, but don't overwrite r0 until | |
| 3296 // we're sure we can do it without going through the slow case | |
| 3297 // that needs the value in r0. | |
| 3298 __ AllocateHeapNumber(r2, r3, r4, r6, &slow); | |
| 3299 __ mov(r0, Operand(r2)); | |
| 3300 } | |
| 3301 | |
| 3302 if (CpuFeatures::IsSupported(VFP3)) { | |
| 3303 // Convert the int32 in r1 to the heap number in r0. r2 is corrupted. | |
| 3304 CpuFeatures::Scope scope(VFP3); | |
| 3305 __ vmov(s0, r1); | |
| 3306 __ vcvt_f64_s32(d0, s0); | |
| 3307 __ sub(r2, r0, Operand(kHeapObjectTag)); | |
| 3308 __ vstr(d0, r2, HeapNumber::kValueOffset); | |
| 3309 } else { | |
| 3310 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not | |
| 3311 // have to set up a frame. | |
| 3312 WriteInt32ToHeapNumberStub stub(r1, r0, r2); | |
| 3313 __ push(lr); | |
| 3314 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET); | |
| 3315 __ pop(lr); | |
| 3316 } | |
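
Both conversion paths end by storing the double into the heap number's
value field; the VFP3 path first subtracts kHeapObjectTag because vstr
addresses raw memory, not tagged pointers. A sketch of that untag-then-store
idiom, with an assumed tag of 1 and a stand-in field offset rather than
V8's actual HeapNumber layout:

    #include <cstdint>
    #include <cstring>

    constexpr uintptr_t kAssumedHeapObjectTag = 1;  // stand-in for kHeapObjectTag
    constexpr uintptr_t kAssumedValueOffset = 4;    // illustrative offset only

    // Equivalent of "sub r2, r0, #kHeapObjectTag; vstr d0, [r2, #offset]".
    static void StoreHeapNumberValue(uintptr_t tagged_ptr, double value) {
      void* slot = reinterpret_cast<void*>(
          tagged_ptr - kAssumedHeapObjectTag + kAssumedValueOffset);
      std::memcpy(slot, &value, sizeof value);
    }
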
| 3317 } else { | |
| 3318 UNIMPLEMENTED(); | |
| 3319 } | |
| 3320 | |
| 3321 __ bind(&done); | |
| 3322 __ Ret(); | |
| 3323 | |
| 3324 // Handle the slow case by jumping to the JavaScript builtin. | |
| 3325 __ bind(&slow); | |
| 3326 __ push(r0); | |
| 3327 switch (op_) { | |
| 3328 case Token::SUB: | |
| 3329 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS); | |
| 3330 break; | |
| 3331 case Token::BIT_NOT: | |
| 3332 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_JS); | |
| 3333 break; | |
| 3334 default: | |
| 3335 UNREACHABLE(); | |
| 3336 } | |
| 3337 } | |
| 3338 | |
| 3339 | |
| 3340 void MathPowStub::Generate(MacroAssembler* masm) { | 3205 void MathPowStub::Generate(MacroAssembler* masm) { |
| 3341 Label call_runtime; | 3206 Label call_runtime; |
| 3342 | 3207 |
| 3343 if (CpuFeatures::IsSupported(VFP3)) { | 3208 if (CpuFeatures::IsSupported(VFP3)) { |
| 3344 CpuFeatures::Scope scope(VFP3); | 3209 CpuFeatures::Scope scope(VFP3); |
| 3345 | 3210 |
| 3346 Label base_not_smi; | 3211 Label base_not_smi; |
| 3347 Label exponent_not_smi; | 3212 Label exponent_not_smi; |
| 3348 Label convert_exponent; | 3213 Label convert_exponent; |
| 3349 | 3214 |
| (...skipping 2758 matching lines...) |
| 6108 __ str(pc, MemOperand(sp, 0)); | 5973 __ str(pc, MemOperand(sp, 0)); |
| 6109 __ Jump(target); // Call the C++ function. | 5974 __ Jump(target); // Call the C++ function. |
| 6110 } | 5975 } |
| 6111 | 5976 |
| 6112 | 5977 |
| 6113 #undef __ | 5978 #undef __ |
| 6114 | 5979 |
| 6115 } } // namespace v8::internal | 5980 } } // namespace v8::internal |
| 6116 | 5981 |
| 6117 #endif // V8_TARGET_ARCH_ARM | 5982 #endif // V8_TARGET_ARCH_ARM |