OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 250 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
261 __ Subu(result_reg, zero_reg, input_high); | 261 __ Subu(result_reg, zero_reg, input_high); |
262 __ Movz(result_reg, input_high, scratch); | 262 __ Movz(result_reg, input_high, scratch); |
263 | 263 |
264 __ bind(&done); | 264 __ bind(&done); |
265 | 265 |
266 __ Pop(scratch, scratch2, scratch3); | 266 __ Pop(scratch, scratch2, scratch3); |
267 __ Ret(); | 267 __ Ret(); |
268 } | 268 } |
269 | 269 |
270 | 270 |
271 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime( | |
272 Isolate* isolate) { | |
273 WriteInt32ToHeapNumberStub stub1(isolate, a1, v0, a2, a3); | |
274 WriteInt32ToHeapNumberStub stub2(isolate, a2, v0, a3, a0); | |
275 stub1.GetCode(); | |
276 stub2.GetCode(); | |
277 } | |
278 | |
279 | |
// See comment for class, this does NOT work for int32's that are in Smi range.
// Converts the signed int32 in the_int() to an IEEE-754 double and stores its
// two 32-bit words (exponent word, mantissa word) directly into the HeapNumber
// object in the_heap_number(), using only integer instructions (no FPU).
void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
  Label max_negative_int;
  // the_int_ has the answer which is a signed int32 but not a Smi.
  // We test for the special value that has a different exponent.
  STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
  // Test sign, and save for later conditionals.
  __ And(sign(), the_int(), Operand(0x80000000u));
  // kMinInt (0x80000000) cannot be negated below, so handle it separately.
  __ Branch(&max_negative_int, eq, the_int(), Operand(0x80000000u));

  // Set up the correct exponent in scratch_. All non-Smi int32s have the same.
  // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased).
  uint32_t non_smi_exponent =
      (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
  __ li(scratch(), Operand(non_smi_exponent));
  // Set the sign bit in scratch_ if the value was negative.
  __ or_(scratch(), scratch(), sign());
  // Subtract from 0 if the value was negative.
  __ subu(at, zero_reg, the_int());
  // Keep the negated value only when the sign register is non-zero.
  __ Movn(the_int(), at, sign());
  // We should be masking the implicit first digit of the mantissa away here,
  // but it just ends up combining harmlessly with the last digit of the
  // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
  // the most significant 1 to hit the last bit of the 12 bit sign and exponent.
  DCHECK(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
  const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
  __ srl(at, the_int(), shift_distance);
  __ or_(scratch(), scratch(), at);
  // Store the sign/exponent/high-mantissa word of the double.
  __ sw(scratch(), FieldMemOperand(the_heap_number(),
                                   HeapNumber::kExponentOffset));
  // The remaining low bits of the value form the low mantissa word.
  __ sll(scratch(), the_int(), 32 - shift_distance);
  __ Ret(USE_DELAY_SLOT);
  // This store executes in the branch delay slot of the Ret above.
  __ sw(scratch(), FieldMemOperand(the_heap_number(),
                                   HeapNumber::kMantissaOffset));

  __ bind(&max_negative_int);
  // The max negative int32 is stored as a positive number in the mantissa of
  // a double because it uses a sign bit instead of using two's complement.
  // The actual mantissa bits stored are all 0 because the implicit most
  // significant 1 bit is not stored.
  // -2^31 = -1.0 * 2^31, so bump the exponent from 30 to 31.
  non_smi_exponent += 1 << HeapNumber::kExponentShift;
  __ li(scratch(), Operand(HeapNumber::kSignMask | non_smi_exponent));
  __ sw(scratch(),
        FieldMemOperand(the_heap_number(), HeapNumber::kExponentOffset));
  // Mantissa word is all zeros for -2^31.
  __ mov(scratch(), zero_reg);
  __ Ret(USE_DELAY_SLOT);
  // This store executes in the branch delay slot of the Ret above.
  __ sw(scratch(),
        FieldMemOperand(the_heap_number(), HeapNumber::kMantissaOffset));
}
329 | |
330 | |
331 // Handle the case where the lhs and rhs are the same object. | 271 // Handle the case where the lhs and rhs are the same object. |
332 // Equality is almost reflexive (everything but NaN), so this is a test | 272 // Equality is almost reflexive (everything but NaN), so this is a test |
333 // for "identity and not NaN". | 273 // for "identity and not NaN". |
334 static void EmitIdenticalObjectComparison(MacroAssembler* masm, | 274 static void EmitIdenticalObjectComparison(MacroAssembler* masm, |
335 Label* slow, | 275 Label* slow, |
336 Condition cc) { | 276 Condition cc) { |
337 Label not_identical; | 277 Label not_identical; |
338 Label heap_number, return_equal; | 278 Label heap_number, return_equal; |
339 Register exp_mask_reg = t1; | 279 Register exp_mask_reg = t1; |
340 | 280 |
(...skipping 705 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1046 } | 986 } |
1047 | 987 |
1048 | 988 |
// NOTE(review): the CEntry stub's generated code must stay at a fixed address
// on this port — presumably because its address is embedded in generated
// frames/code that the GC does not relocate; confirm against the CEntryStub
// generation code before relying on this.
bool CEntryStub::NeedsImmovableCode() {
  return true;
}
1052 | 992 |
1053 | 993 |
// Eagerly generates (ahead of time) the code stubs that must already exist
// before other code generation can run on this isolate. The relative order of
// these calls is preserved as-is; some stubs may depend on earlier ones
// (e.g. CEntry) already being generated — do not reorder casually.
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  StoreRegistersStateStub::GenerateAheadOfTime(isolate);
  RestoreRegistersStateStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
}
1066 | 1005 |
(...skipping 3952 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5019 MemOperand(fp, 6 * kPointerSize), | 4958 MemOperand(fp, 6 * kPointerSize), |
5020 NULL); | 4959 NULL); |
5021 } | 4960 } |
5022 | 4961 |
5023 | 4962 |
5024 #undef __ | 4963 #undef __ |
5025 | 4964 |
5026 } } // namespace v8::internal | 4965 } } // namespace v8::internal |
5027 | 4966 |
5028 #endif // V8_TARGET_ARCH_MIPS64 | 4967 #endif // V8_TARGET_ARCH_MIPS64 |
OLD | NEW |