| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 75 matching lines...) |
| 86 Register scratch2 = | 86 Register scratch2 = |
| 87 GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch); | 87 GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch); |
| 88 Register scratch3 = | 88 Register scratch3 = |
| 89 GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch2); | 89 GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch2); |
| 90 DoubleRegister double_scratch = kLithiumScratchDouble; | 90 DoubleRegister double_scratch = kLithiumScratchDouble; |
| 91 | 91 |
| 92 __ Push(scratch, scratch2, scratch3); | 92 __ Push(scratch, scratch2, scratch3); |
| 93 | 93 |
| 94 if (!skip_fastpath()) { | 94 if (!skip_fastpath()) { |
| 95 // Load double input. | 95 // Load double input. |
| 96 __ ldc1(double_scratch, MemOperand(input_reg, double_offset)); | 96 __ Ldc1(double_scratch, MemOperand(input_reg, double_offset)); |
| 97 | 97 |
| 98 // Clear cumulative exception flags and save the FCSR. | 98 // Clear cumulative exception flags and save the FCSR. |
| 99 __ cfc1(scratch2, FCSR); | 99 __ cfc1(scratch2, FCSR); |
| 100 __ ctc1(zero_reg, FCSR); | 100 __ ctc1(zero_reg, FCSR); |
| 101 | 101 |
| 102 // Try a conversion to a signed integer. | 102 // Try a conversion to a signed integer. |
| 103 __ Trunc_w_d(double_scratch, double_scratch); | 103 __ Trunc_w_d(double_scratch, double_scratch); |
| 104 // Move the converted value into the result register. | 104 // Move the converted value into the result register. |
| 105 __ mfc1(scratch3, double_scratch); | 105 __ mfc1(scratch3, double_scratch); |
| 106 | 106 |
| (...skipping 233 matching lines...) |
| 340 // Smi compared non-strictly with a non-Smi non-heap-number. Call | 340 // Smi compared non-strictly with a non-Smi non-heap-number. Call |
| 341 // the runtime. | 341 // the runtime. |
| 342 __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE)); | 342 __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE)); |
| 343 } | 343 } |
| 344 | 344 |
| 345 // Rhs is a smi, lhs is a number. | 345 // Rhs is a smi, lhs is a number. |
| 346 // Convert smi rhs to double. | 346 // Convert smi rhs to double. |
| 347 __ sra(at, rhs, kSmiTagSize); | 347 __ sra(at, rhs, kSmiTagSize); |
| 348 __ mtc1(at, f14); | 348 __ mtc1(at, f14); |
| 349 __ cvt_d_w(f14, f14); | 349 __ cvt_d_w(f14, f14); |
| 350 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); | 350 __ Ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); |
| 351 | 351 |
| 352 // We now have both loaded as doubles. | 352 // We now have both loaded as doubles. |
| 353 __ jmp(both_loaded_as_doubles); | 353 __ jmp(both_loaded_as_doubles); |
| 354 | 354 |
| 355 __ bind(&lhs_is_smi); | 355 __ bind(&lhs_is_smi); |
| 356 // Lhs is a Smi. Check whether the non-smi is a heap number. | 356 // Lhs is a Smi. Check whether the non-smi is a heap number. |
| 357 __ GetObjectType(rhs, t4, t4); | 357 __ GetObjectType(rhs, t4, t4); |
| 358 if (strict) { | 358 if (strict) { |
| 359 // If lhs was not a number and rhs was a Smi then strict equality cannot | 359 // If lhs was not a number and rhs was a Smi then strict equality cannot |
| 360 // succeed. Return non-equal. | 360 // succeed. Return non-equal. |
| 361 __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE)); | 361 __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE)); |
| 362 __ li(v0, Operand(1)); | 362 __ li(v0, Operand(1)); |
| 363 } else { | 363 } else { |
| 364 // Smi compared non-strictly with a non-Smi non-heap-number. Call | 364 // Smi compared non-strictly with a non-Smi non-heap-number. Call |
| 365 // the runtime. | 365 // the runtime. |
| 366 __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE)); | 366 __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE)); |
| 367 } | 367 } |
| 368 | 368 |
| 369 // Lhs is a smi, rhs is a number. | 369 // Lhs is a smi, rhs is a number. |
| 370 // Convert smi lhs to double. | 370 // Convert smi lhs to double. |
| 371 __ sra(at, lhs, kSmiTagSize); | 371 __ sra(at, lhs, kSmiTagSize); |
| 372 __ mtc1(at, f12); | 372 __ mtc1(at, f12); |
| 373 __ cvt_d_w(f12, f12); | 373 __ cvt_d_w(f12, f12); |
| 374 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); | 374 __ Ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); |
| 375 // Fall through to both_loaded_as_doubles. | 375 // Fall through to both_loaded_as_doubles. |
| 376 } | 376 } |
| 377 | 377 |
| 378 | 378 |
| 379 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, | 379 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, |
| 380 Register lhs, | 380 Register lhs, |
| 381 Register rhs) { | 381 Register rhs) { |
| 382 // If either operand is a JS object or an oddball value, then they are | 382 // If either operand is a JS object or an oddball value, then they are |
| 383 // not equal since their pointers are different. | 383 // not equal since their pointers are different. |
| 384 // There is no test for undetectability in strict equality. | 384 // There is no test for undetectability in strict equality. |
| (...skipping 36 matching lines...) |
| 421 Label* not_heap_numbers, | 421 Label* not_heap_numbers, |
| 422 Label* slow) { | 422 Label* slow) { |
| 423 __ GetObjectType(lhs, a3, a2); | 423 __ GetObjectType(lhs, a3, a2); |
| 424 __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE)); | 424 __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE)); |
| 425 __ lw(a2, FieldMemOperand(rhs, HeapObject::kMapOffset)); | 425 __ lw(a2, FieldMemOperand(rhs, HeapObject::kMapOffset)); |
| 426 // If first was a heap number & second wasn't, go to slow case. | 426 // If first was a heap number & second wasn't, go to slow case. |
| 427 __ Branch(slow, ne, a3, Operand(a2)); | 427 __ Branch(slow, ne, a3, Operand(a2)); |
| 428 | 428 |
| 429 // Both are heap numbers. Load them up then jump to the code we have | 429 // Both are heap numbers. Load them up then jump to the code we have |
| 430 // for that. | 430 // for that. |
| 431 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); | 431 __ Ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); |
| 432 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); | 432 __ Ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); |
| 433 | 433 |
| 434 __ jmp(both_loaded_as_doubles); | 434 __ jmp(both_loaded_as_doubles); |
| 435 } | 435 } |
| 436 | 436 |
| 437 | 437 |
| 438 // Fast negative check for internalized-to-internalized equality. | 438 // Fast negative check for internalized-to-internalized equality. |
| 439 static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm, | 439 static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm, |
| 440 Register lhs, Register rhs, | 440 Register lhs, Register rhs, |
| 441 Label* possible_strings, | 441 Label* possible_strings, |
| 442 Label* runtime_call) { | 442 Label* runtime_call) { |
| (...skipping 313 matching lines...) |
| 756 const DoubleRegister double_scratch = f6; | 756 const DoubleRegister double_scratch = f6; |
| 757 const FPURegister single_scratch = f8; | 757 const FPURegister single_scratch = f8; |
| 758 const Register scratch = t5; | 758 const Register scratch = t5; |
| 759 const Register scratch2 = t3; | 759 const Register scratch2 = t3; |
| 760 | 760 |
| 761 Label call_runtime, done, int_exponent; | 761 Label call_runtime, done, int_exponent; |
| 762 if (exponent_type() == TAGGED) { | 762 if (exponent_type() == TAGGED) { |
| 763 // Base is already in double_base. | 763 // Base is already in double_base. |
| 764 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); | 764 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); |
| 765 | 765 |
| 766 __ ldc1(double_exponent, | 766 __ Ldc1(double_exponent, |
| 767 FieldMemOperand(exponent, HeapNumber::kValueOffset)); | 767 FieldMemOperand(exponent, HeapNumber::kValueOffset)); |
| 768 } | 768 } |
| 769 | 769 |
| 770 if (exponent_type() != INTEGER) { | 770 if (exponent_type() != INTEGER) { |
| 771 Label int_exponent_convert; | 771 Label int_exponent_convert; |
| 772 // Detect integer exponents stored as double. | 772 // Detect integer exponents stored as double. |
| 773 __ EmitFPUTruncate(kRoundToMinusInf, | 773 __ EmitFPUTruncate(kRoundToMinusInf, |
| 774 scratch, | 774 scratch, |
| 775 double_exponent, | 775 double_exponent, |
| 776 at, | 776 at, |
| (...skipping 1354 matching lines...) |
| 2131 } | 2131 } |
| 2132 | 2132 |
| 2133 // Inlining the double comparison and falling back to the general compare | 2133 // Inlining the double comparison and falling back to the general compare |
| 2134 // stub if NaN is involved. | 2134 // stub if NaN is involved. |
| 2135 // Load left and right operand. | 2135 // Load left and right operand. |
| 2136 Label done, left, left_smi, right_smi; | 2136 Label done, left, left_smi, right_smi; |
| 2137 __ JumpIfSmi(a0, &right_smi); | 2137 __ JumpIfSmi(a0, &right_smi); |
| 2138 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, | 2138 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, |
| 2139 DONT_DO_SMI_CHECK); | 2139 DONT_DO_SMI_CHECK); |
| 2140 __ Subu(a2, a0, Operand(kHeapObjectTag)); | 2140 __ Subu(a2, a0, Operand(kHeapObjectTag)); |
| 2141 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset)); | 2141 __ Ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset)); |
| 2142 __ Branch(&left); | 2142 __ Branch(&left); |
| 2143 __ bind(&right_smi); | 2143 __ bind(&right_smi); |
| 2144 __ SmiUntag(a2, a0); // Can't clobber a0 yet. | 2144 __ SmiUntag(a2, a0); // Can't clobber a0 yet. |
| 2145 FPURegister single_scratch = f6; | 2145 FPURegister single_scratch = f6; |
| 2146 __ mtc1(a2, single_scratch); | 2146 __ mtc1(a2, single_scratch); |
| 2147 __ cvt_d_w(f2, single_scratch); | 2147 __ cvt_d_w(f2, single_scratch); |
| 2148 | 2148 |
| 2149 __ bind(&left); | 2149 __ bind(&left); |
| 2150 __ JumpIfSmi(a1, &left_smi); | 2150 __ JumpIfSmi(a1, &left_smi); |
| 2151 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, | 2151 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, |
| 2152 DONT_DO_SMI_CHECK); | 2152 DONT_DO_SMI_CHECK); |
| 2153 __ Subu(a2, a1, Operand(kHeapObjectTag)); | 2153 __ Subu(a2, a1, Operand(kHeapObjectTag)); |
| 2154 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset)); | 2154 __ Ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset)); |
| 2155 __ Branch(&done); | 2155 __ Branch(&done); |
| 2156 __ bind(&left_smi); | 2156 __ bind(&left_smi); |
| 2157 __ SmiUntag(a2, a1); // Can't clobber a1 yet. | 2157 __ SmiUntag(a2, a1); // Can't clobber a1 yet. |
| 2158 single_scratch = f8; | 2158 single_scratch = f8; |
| 2159 __ mtc1(a2, single_scratch); | 2159 __ mtc1(a2, single_scratch); |
| 2160 __ cvt_d_w(f0, single_scratch); | 2160 __ cvt_d_w(f0, single_scratch); |
| 2161 | 2161 |
| 2162 __ bind(&done); | 2162 __ bind(&done); |
| 2163 | 2163 |
| 2164 // Return a result of -1, 0, or 1, or use CompareStub for NaNs. | 2164 // Return a result of -1, 0, or 1, or use CompareStub for NaNs. |
| (...skipping 1352 matching lines...) |
| 3517 kStackUnwindSpace, kInvalidStackOffset, | 3517 kStackUnwindSpace, kInvalidStackOffset, |
| 3518 return_value_operand, NULL); | 3518 return_value_operand, NULL); |
| 3519 } | 3519 } |
| 3520 | 3520 |
| 3521 #undef __ | 3521 #undef __ |
| 3522 | 3522 |
| 3523 } // namespace internal | 3523 } // namespace internal |
| 3524 } // namespace v8 | 3524 } // namespace v8 |
| 3525 | 3525 |
| 3526 #endif // V8_TARGET_ARCH_MIPS | 3526 #endif // V8_TARGET_ARCH_MIPS |
| OLD | NEW |
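
For readers skimming the diff: the NEW column swaps the raw `ldc1` FPU load for the capitalized `Ldc1` macro-assembler helper. As a rough illustration of why such capitalized wrappers exist in general — this is a minimal hypothetical sketch, not V8's actual `MacroAssembler::Ldc1` (the real helper may additionally handle details such as FPU register-pair modes), and the `Emitter`/`emit_*` names are invented for the example — a wrapper can accept any 32-bit offset and fall back to computing the address in a scratch register when the raw instruction's signed 16-bit immediate cannot encode it:

```cpp
// Hypothetical sketch of a macro-assembler-level double-load wrapper.
// The raw MIPS ldc1 instruction only encodes a signed 16-bit offset;
// the wrapper picks an encoding so callers never have to care.
#include <cstdint>
#include <cstdio>

struct Emitter {
  // Raw instruction: offset must fit in a signed 16-bit field.
  void emit_ldc1(int fd, int base, int16_t offset) {
    std::printf("ldc1  $f%d, %d($r%d)\n", fd, offset, base);
  }
  // Materialize base + offset in a scratch register (lui/ori/addu sequence).
  void emit_lui_ori_addu(int rd, int rs, int32_t imm) {
    std::printf("lui/ori/addu $r%d <- $r%d + %d\n", rd, rs, imm);
  }

  // Wrapper in the spirit of the capitalized helpers: any offset is accepted.
  void Ldc1(int fd, int base, int32_t offset, int scratch) {
    if (offset >= INT16_MIN && offset <= INT16_MAX) {
      emit_ldc1(fd, base, static_cast<int16_t>(offset));  // fast path
    } else {
      emit_lui_ori_addu(scratch, base, offset);  // compute address first
      emit_ldc1(fd, scratch, 0);
    }
  }
};

int main() {
  Emitter masm;
  masm.Ldc1(/*fd=*/12, /*base=*/4, /*offset=*/8, /*scratch=*/1);        // in range
  masm.Ldc1(/*fd=*/12, /*base=*/4, /*offset=*/0x12345, /*scratch=*/1);  // out of range
}
```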