OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 216 matching lines...)
227 __ eor(result_reg, result_reg, Operand(scratch_high, ASR, 31)); | 227 __ eor(result_reg, result_reg, Operand(scratch_high, ASR, 31)); |
228 __ add(result_reg, result_reg, Operand(scratch_high, LSR, 31)); | 228 __ add(result_reg, result_reg, Operand(scratch_high, LSR, 31)); |
229 | 229 |
230 __ bind(&done); | 230 __ bind(&done); |
231 | 231 |
232 __ Pop(scratch_high, scratch_low, scratch); | 232 __ Pop(scratch_high, scratch_low, scratch); |
233 __ Ret(); | 233 __ Ret(); |
234 } | 234 } |
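A side note on the eor/add pair at the top of this hunk: it is the classic branchless conditional negate. An arithmetic shift of the sign word by 31 yields a mask that is all ones for a negative value and all zeros otherwise; XORing with that mask and then adding the logically shifted sign bit performs two's-complement negation exactly when the sign was set. A minimal standalone C++ sketch (hypothetical helper name, not part of this file):

    #include <cstdint>

    // Mirrors "eor result, result, (hi ASR 31); add result, result, (hi LSR 31)".
    // When the top bit of sign_word is set, flip every bit and add one.
    int32_t ConditionalNegate(int32_t value, int32_t sign_word) {
      int32_t mask = sign_word >> 31;  // arithmetic shift: 0 or -1 on common ABIs
      uint32_t one = static_cast<uint32_t>(sign_word) >> 31;  // logical: 0 or 1
      return (value ^ mask) + static_cast<int32_t>(one);
    }

Negating without a branch keeps the stub's fast path as straight-line code.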
235 | 235 |
236 | 236 |
237 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime( | |
238 Isolate* isolate) { | |
239 WriteInt32ToHeapNumberStub stub1(isolate, r1, r0, r2); | |
240 WriteInt32ToHeapNumberStub stub2(isolate, r2, r0, r3); | |
241 stub1.GetCode(); | |
242 stub2.GetCode(); | |
243 } | |
244 | |
245 | |
246 // See comment for class. | |
247 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) { | |
248 Label max_negative_int; | |
249 // the_int_ has the answer, which is a signed int32 but not a Smi. |
250 // We test for the special value that has a different exponent. This test | |
251 // has the neat side effect of setting the flags according to the sign. | |
252 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u); | |
253 __ cmp(the_int(), Operand(0x80000000u)); | |
254 __ b(eq, &max_negative_int); | |
255 // Set up the correct exponent in scratch_. All non-Smi int32s share it: |
256 // a non-Smi integer is 1.xxx * 2^30, so the exponent is 30 (biased). |
257 uint32_t non_smi_exponent = | |
258 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift; | |
259 __ mov(scratch(), Operand(non_smi_exponent)); | |
260 // Set the sign bit in scratch_ if the value was negative. | |
261 __ orr(scratch(), scratch(), Operand(HeapNumber::kSignMask), LeaveCC, cs); | |
262 // Subtract from 0 if the value was negative. | |
263 __ rsb(the_int(), the_int(), Operand::Zero(), LeaveCC, cs); | |
264 // We should be masking the implicit first digit of the mantissa away here, |
265 // but it just ends up combining harmlessly with the last digit of the | |
266 // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get | |
267 // the most significant 1 to hit the last bit of the 12-bit sign and exponent. |
268 DCHECK(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0); | |
269 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2; | |
270 __ orr(scratch(), scratch(), Operand(the_int(), LSR, shift_distance)); | |
271 __ str(scratch(), | |
272 FieldMemOperand(the_heap_number(), HeapNumber::kExponentOffset)); | |
273 __ mov(scratch(), Operand(the_int(), LSL, 32 - shift_distance)); | |
274 __ str(scratch(), | |
275 FieldMemOperand(the_heap_number(), HeapNumber::kMantissaOffset)); | |
276 __ Ret(); | |
277 | |
278 __ bind(&max_negative_int); | |
279 // The max negative int32 is stored as a positive number in the mantissa of | |
280 // a double because it uses a sign bit instead of using two's complement. | |
281 // The actual mantissa bits stored are all 0 because the implicit most | |
282 // significant 1 bit is not stored. | |
283 non_smi_exponent += 1 << HeapNumber::kExponentShift; | |
284 __ mov(ip, Operand(HeapNumber::kSignMask | non_smi_exponent)); | |
285 __ str(ip, FieldMemOperand(the_heap_number(), HeapNumber::kExponentOffset)); | |
286 __ mov(ip, Operand::Zero()); | |
287 __ str(ip, FieldMemOperand(the_heap_number(), HeapNumber::kMantissaOffset)); | |
288 __ Ret(); | |
289 } | |
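The comment block above compresses a small derivation: every int32 outside the Smi range has magnitude in [2^30, 2^31], so it is 1.xxx * 2^30 and its double encoding always carries a biased exponent of 1023 + 30; the lone exception, 0x80000000, is exactly 1.0 * 2^31, so its exponent is one higher and its stored mantissa bits are all zeros (the leading 1 is implicit). A portable C++ sketch of the same bit construction, assuming the standard IEEE-754 layout behind the HeapNumber constants (an illustration of the idea, not the stub itself):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Hypothetical standalone helper (not V8 code): build the two 32-bit
    // words of an IEEE-754 double from an int32 outside the Smi range,
    // mirroring the shifts and masks in the stub above.
    void EncodeNonSmiInt32(int32_t value, uint32_t* hi, uint32_t* lo) {
      const uint32_t kSignMask = 0x80000000u;
      const int kExponentBias = 1023;
      const int kExponentShift = 20;             // exponent field in top word
      const int kNonMantissaBitsInTopWord = 12;  // 1 sign bit + 11 exponent bits
      uint32_t exponent = (kExponentBias + 30) << kExponentShift;
      if (value == INT32_MIN) {
        // 0x80000000 is 1.0 * 2^31: bump the exponent by one, mantissa all zero.
        *hi = kSignMask | (exponent + (1u << kExponentShift));
        *lo = 0;
        return;
      }
      uint32_t sign = 0;
      uint32_t magnitude = static_cast<uint32_t>(value);
      if (value < 0) {
        sign = kSignMask;
        magnitude = 0u - magnitude;  // like the rsb: subtract from zero
      }
      // magnitude is in [2^30, 2^31), so its leading 1 sits at bit 30; shifting
      // right by 10 drops it onto the exponent's lowest bit, which is already 1.
      const int shift = kNonMantissaBitsInTopWord - 2;  // 10
      *hi = sign | exponent | (magnitude >> shift);
      *lo = magnitude << (32 - shift);
    }

    // Sanity check: the two words must match the native double encoding.
    void CheckEncode(int32_t value) {
      uint32_t hi, lo;
      EncodeNonSmiInt32(value, &hi, &lo);
      uint64_t bits = (static_cast<uint64_t>(hi) << 32) | lo;
      double expected = static_cast<double>(value);
      uint64_t expected_bits;
      std::memcpy(&expected_bits, &expected, sizeof(expected_bits));
      assert(bits == expected_bits);
    }

Running CheckEncode over the non-Smi range (for example INT32_MIN, -(1 << 30), 1 << 30, and INT32_MAX) should reproduce the native encoding bit for bit, including the max_negative_int special case that the stub branches to first.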
290 | |
291 | |
292 // Handle the case where the lhs and rhs are the same object. | 237 // Handle the case where the lhs and rhs are the same object. |
293 // Equality is almost reflexive (everything but NaN), so this is a test | 238 // Equality is almost reflexive (everything but NaN), so this is a test |
294 // for "identity and not NaN". | 239 // for "identity and not NaN". |
295 static void EmitIdenticalObjectComparison(MacroAssembler* masm, | 240 static void EmitIdenticalObjectComparison(MacroAssembler* masm, |
296 Label* slow, | 241 Label* slow, |
297 Condition cond) { | 242 Condition cond) { |
298 Label not_identical; | 243 Label not_identical; |
299 Label heap_number, return_equal; | 244 Label heap_number, return_equal; |
300 __ cmp(r0, r1); | 245 __ cmp(r0, r1); |
301 __ b(ne, &not_identical); | 246 __ b(ne, &not_identical); |
(...skipping 658 matching lines...)
960 } | 905 } |
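Back to the comment above EmitIdenticalObjectComparison: under IEEE 754, NaN is the only value for which x == x is false, which is exactly why an identical-object fast path still has to filter NaN out. A two-assertion illustration:

    #include <cassert>
    #include <cmath>

    int main() {
      double nan = std::nan("");
      assert(!(nan == nan));  // equality is not reflexive for NaN...
      assert(1.25 == 1.25);   // ...but holds for every other value
      return 0;
    }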
961 | 906 |
962 | 907 |
963 bool CEntryStub::NeedsImmovableCode() { | 908 bool CEntryStub::NeedsImmovableCode() { |
964 return true; | 909 return true; |
965 } | 910 } |
966 | 911 |
967 | 912 |
968 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 913 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
969 CEntryStub::GenerateAheadOfTime(isolate); | 914 CEntryStub::GenerateAheadOfTime(isolate); |
970 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); | |
971 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 915 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
972 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 916 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
973 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 917 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
974 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 918 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
975 BinaryOpICStub::GenerateAheadOfTime(isolate); | 919 BinaryOpICStub::GenerateAheadOfTime(isolate); |
976 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 920 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
977 } | 921 } |
978 | 922 |
979 | 923 |
980 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 924 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
(...skipping 3793 matching lines...)
4774 MemOperand(fp, 6 * kPointerSize), | 4718 MemOperand(fp, 6 * kPointerSize), |
4775 NULL); | 4719 NULL); |
4776 } | 4720 } |
4777 | 4721 |
4778 | 4722 |
4779 #undef __ | 4723 #undef __ |
4780 | 4724 |
4781 } } // namespace v8::internal | 4725 } } // namespace v8::internal |
4782 | 4726 |
4783 #endif // V8_TARGET_ARCH_ARM | 4727 #endif // V8_TARGET_ARCH_ARM |