OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 663 matching lines...) |
674 } else { | 674 } else { |
675 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r2, r3, r4, | 675 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r2, r3, r4, |
676 r5); | 676 r5); |
677 } | 677 } |
678 // Never falls through to here. | 678 // Never falls through to here. |
679 | 679 |
680 __ bind(&slow); | 680 __ bind(&slow); |
681 | 681 |
682 __ Push(lhs, rhs); | 682 __ Push(lhs, rhs); |
683 // Figure out which native to call and setup the arguments. | 683 // Figure out which native to call and setup the arguments. |
684 Builtins::JavaScript native; | 684 if (cc == eq && strict()) { |
685 if (cc == eq) { | 685 __ TailCallRuntime(Runtime::kStrictEquals, 2, 1); |
686 native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; | |
687 } else { | 686 } else { |
688 native = | 687 Builtins::JavaScript native; |
689 is_strong(strength()) ? Builtins::COMPARE_STRONG : Builtins::COMPARE; | 688 if (cc == eq) { |
690 int ncr; // NaN compare result | 689 native = Builtins::EQUALS; |
691 if (cc == lt || cc == le) { | |
692 ncr = GREATER; | |
693 } else { | 690 } else { |
694 DCHECK(cc == gt || cc == ge); // remaining cases | 691 native = |
695 ncr = LESS; | 692 is_strong(strength()) ? Builtins::COMPARE_STRONG : Builtins::COMPARE; |
| 693 int ncr; // NaN compare result |
| 694 if (cc == lt || cc == le) { |
| 695 ncr = GREATER; |
| 696 } else { |
| 697 DCHECK(cc == gt || cc == ge); // remaining cases |
| 698 ncr = LESS; |
| 699 } |
| 700 __ mov(r0, Operand(Smi::FromInt(ncr))); |
| 701 __ push(r0); |
696 } | 702 } |
697 __ mov(r0, Operand(Smi::FromInt(ncr))); | 703 |
698 __ push(r0); | 704 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 705 // tagged as a small integer. |
| 706 __ InvokeBuiltin(native, JUMP_FUNCTION); |
699 } | 707 } |
700 | 708 |
701 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | |
702 // tagged as a small integer. | |
703 __ InvokeBuiltin(native, JUMP_FUNCTION); | |
704 | |
705 __ bind(&miss); | 709 __ bind(&miss); |
706 GenerateMiss(masm); | 710 GenerateMiss(masm); |
707 } | 711 } |
708 | 712 |
709 | 713 |
710 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 714 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
711 // We don't allow a GC during a store buffer overflow so there is no need to | 715 // We don't allow a GC during a store buffer overflow so there is no need to |
712 // store the registers in any particular way, but we do have to store and | 716 // store the registers in any particular way, but we do have to store and |
713 // restore them. | 717 // restore them. |
714 __ stm(db_w, sp, kCallerSaved | lr.bit()); | 718 __ stm(db_w, sp, kCallerSaved | lr.bit()); |
(...skipping 4811 matching lines...) |
5526 MemOperand(fp, 6 * kPointerSize), NULL); | 5530 MemOperand(fp, 6 * kPointerSize), NULL); |
5527 } | 5531 } |
5528 | 5532 |
5529 | 5533 |
5530 #undef __ | 5534 #undef __ |
5531 | 5535 |
5532 } // namespace internal | 5536 } // namespace internal |
5533 } // namespace v8 | 5537 } // namespace v8 |
5534 | 5538 |
5535 #endif // V8_TARGET_ARCH_ARM | 5539 #endif // V8_TARGET_ARCH_ARM |
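
The hunk above rewrites only the slow path of the generic compare stub: strict equality (cc == eq with strict()) now tail-calls Runtime::kStrictEquals directly, while every other comparison still resolves a Builtins::JavaScript target (EQUALS, COMPARE, or COMPARE_STRONG) and, for ordering comparisons, pushes a NaN-compare hint before InvokeBuiltin. As a reading aid only, here is a minimal standalone C++ sketch of that dispatch decision; SlowPathTarget, its string return values, and the local enums are illustrative assumptions for this sketch, not V8 API.

// Illustrative sketch only, not V8 code: mirrors the slow-path dispatch
// introduced by the diff above.
#include <cstdio>

enum Condition { eq, lt, le, gt, ge };
// Stand-ins for the NaN-compare hint values pushed before the builtin call.
enum NanCompareResult { LESS = -1, GREATER = 1 };

// Returns the name of the target the slow path would jump to. Strict `eq`
// takes the new runtime fast path; other cases pick a builtin, and ordering
// comparisons also record the result NaN operands should produce (lt/le ->
// GREATER, gt/ge -> LESS) so the comparison comes out false.
const char* SlowPathTarget(Condition cc, bool is_strict, bool is_strong,
                           int* ncr_out) {
  if (cc == eq && is_strict) return "Runtime::kStrictEquals";
  if (cc == eq) return "Builtins::EQUALS";
  *ncr_out = (cc == lt || cc == le) ? GREATER : LESS;
  return is_strong ? "Builtins::COMPARE_STRONG" : "Builtins::COMPARE";
}

int main() {
  int ncr = 0;
  const char* strict_eq =
      SlowPathTarget(eq, /*is_strict=*/true, /*is_strong=*/false, &ncr);
  std::printf("strict eq -> %s\n", strict_eq);
  const char* less_equal =
      SlowPathTarget(le, /*is_strict=*/false, /*is_strong=*/false, &ncr);
  std::printf("le        -> %s, NaN hint ncr=%d\n", less_equal, ncr);
  return 0;
}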