| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
| 8 | 8 |
| 9 #include "src/arm64/frames-arm64.h" | 9 #include "src/arm64/frames-arm64.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 634 matching lines...) | |
| 645 | 645 |
| 646 // Never fall through to here. | 646 // Never fall through to here. |
| 647 if (FLAG_debug_code) { | 647 if (FLAG_debug_code) { |
| 648 __ Unreachable(); | 648 __ Unreachable(); |
| 649 } | 649 } |
| 650 | 650 |
| 651 __ Bind(&slow); | 651 __ Bind(&slow); |
| 652 | 652 |
| 653 __ Push(lhs, rhs); | 653 __ Push(lhs, rhs); |
| 654 // Figure out which native to call and setup the arguments. | 654 // Figure out which native to call and setup the arguments. |
| 655 Builtins::JavaScript native; | 655 if (cond == eq && strict()) { |
| 656 if (cond == eq) { | 656 __ TailCallRuntime(Runtime::kStrictEquals, 2, 1); |
| 657 native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; | |
| 658 } else { | 657 } else { |
| 659 native = | 658 Builtins::JavaScript native; |
| 660 is_strong(strength()) ? Builtins::COMPARE_STRONG : Builtins::COMPARE; | 659 if (cond == eq) { |
| 661 int ncr; // NaN compare result | 660 native = Builtins::EQUALS; |
| 662 if ((cond == lt) || (cond == le)) { | |
| 663 ncr = GREATER; | |
| 664 } else { | 661 } else { |
| 665 DCHECK((cond == gt) || (cond == ge)); // remaining cases | 662 native = |
| 666 ncr = LESS; | 663 is_strong(strength()) ? Builtins::COMPARE_STRONG : Builtins::COMPARE; |
| | 664 int ncr; // NaN compare result |
| | 665 if ((cond == lt) || (cond == le)) { |
| | 666 ncr = GREATER; |
| | 667 } else { |
| | 668 DCHECK((cond == gt) || (cond == ge)); // remaining cases |
| | 669 ncr = LESS; |
| | 670 } |
| | 671 __ Mov(x10, Smi::FromInt(ncr)); |
| | 672 __ Push(x10); |
| 667 } | 673 } |
| 668 __ Mov(x10, Smi::FromInt(ncr)); | 674 |
| 669 __ Push(x10); | 675 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| | 676 // tagged as a small integer. |
| | 677 __ InvokeBuiltin(native, JUMP_FUNCTION); |
| 670 } | 678 } |
| 671 | 679 |
| 672 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | |
| 673 // tagged as a small integer. | |
| 674 __ InvokeBuiltin(native, JUMP_FUNCTION); | |
| 675 | |
| 676 __ Bind(&miss); | 680 __ Bind(&miss); |
| 677 GenerateMiss(masm); | 681 GenerateMiss(masm); |
| 678 } | 682 } |
| 679 | 683 |
| 680 | 684 |
| 681 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 685 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 682 CPURegList saved_regs = kCallerSaved; | 686 CPURegList saved_regs = kCallerSaved; |
| 683 CPURegList saved_fp_regs = kCallerSavedFP; | 687 CPURegList saved_fp_regs = kCallerSavedFP; |
| 684 | 688 |
| 685 // We don't allow a GC during a store buffer overflow so there is no need to | 689 // We don't allow a GC during a store buffer overflow so there is no need to |
| (...skipping 5304 matching lines...) | |
| 5990 MemOperand(fp, 6 * kPointerSize), NULL); | 5994 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5991 } | 5995 } |
| 5992 | 5996 |
| 5993 | 5997 |
| 5994 #undef __ | 5998 #undef __ |
| 5995 | 5999 |
| 5996 } // namespace internal | 6000 } // namespace internal |
| 5997 } // namespace v8 | 6001 } // namespace v8 |
| 5998 | 6002 |
| 5999 #endif // V8_TARGET_ARCH_ARM64 | 6003 #endif // V8_TARGET_ARCH_ARM64 |