| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
| 6 | 6 |
| 7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
| (...skipping 635 matching lines...) |
| 646 // Never fall through to here. | 646 // Never fall through to here. |
| 647 if (FLAG_debug_code) { | 647 if (FLAG_debug_code) { |
| 648 __ Unreachable(); | 648 __ Unreachable(); |
| 649 } | 649 } |
| 650 | 650 |
| 651 __ Bind(&slow); | 651 __ Bind(&slow); |
| 652 | 652 |
| 653 __ Push(lhs, rhs); | 653 __ Push(lhs, rhs); |
| 654 // Figure out which native to call and setup the arguments. | 654 // Figure out which native to call and setup the arguments. |
| 655 if (cond == eq) { | 655 if (cond == eq) { |
| 656 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2, | 656 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2); |
| 657 1); | |
| 658 } else { | 657 } else { |
| 659 int ncr; // NaN compare result | 658 int ncr; // NaN compare result |
| 660 if ((cond == lt) || (cond == le)) { | 659 if ((cond == lt) || (cond == le)) { |
| 661 ncr = GREATER; | 660 ncr = GREATER; |
| 662 } else { | 661 } else { |
| 663 DCHECK((cond == gt) || (cond == ge)); // remaining cases | 662 DCHECK((cond == gt) || (cond == ge)); // remaining cases |
| 664 ncr = LESS; | 663 ncr = LESS; |
| 665 } | 664 } |
| 666 __ Mov(x10, Smi::FromInt(ncr)); | 665 __ Mov(x10, Smi::FromInt(ncr)); |
| 667 __ Push(x10); | 666 __ Push(x10); |
| 668 | 667 |
| 669 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 668 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 670 // tagged as a small integer. | 669 // tagged as a small integer. |
| 671 __ TailCallRuntime( | 670 __ TailCallRuntime( |
| 672 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 3, | 671 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, |
| 673 1); | 672 3); |
| 674 } | 673 } |
| 675 | 674 |
| 676 __ Bind(&miss); | 675 __ Bind(&miss); |
| 677 GenerateMiss(masm); | 676 GenerateMiss(masm); |
| 678 } | 677 } |
| 679 | 678 |
| 680 | 679 |
| 681 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 680 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 682 CPURegList saved_regs = kCallerSaved; | 681 CPURegList saved_regs = kCallerSaved; |
| 683 CPURegList saved_fp_regs = kCallerSavedFP; | 682 CPURegList saved_fp_regs = kCallerSavedFP; |
| (...skipping 275 matching lines...) |
| 959 // catch this corner-case, we bail out if the result was 0. (This can only | 958 // catch this corner-case, we bail out if the result was 0. (This can only |
| 960 // occur if the divisor is infinity or the base is zero.) | 959 // occur if the divisor is infinity or the base is zero.) |
| 961 __ Fcmp(result_double, 0.0); | 960 __ Fcmp(result_double, 0.0); |
| 962 __ B(&done, ne); | 961 __ B(&done, ne); |
| 963 | 962 |
| 964 if (exponent_type() == ON_STACK) { | 963 if (exponent_type() == ON_STACK) { |
| 965 // Bail out to runtime code. | 964 // Bail out to runtime code. |
| 966 __ Bind(&call_runtime); | 965 __ Bind(&call_runtime); |
| 967 // Put the arguments back on the stack. | 966 // Put the arguments back on the stack. |
| 968 __ Push(base_tagged, exponent_tagged); | 967 __ Push(base_tagged, exponent_tagged); |
| 969 __ TailCallRuntime(Runtime::kMathPowRT, 2, 1); | 968 __ TailCallRuntime(Runtime::kMathPowRT, 2); |
| 970 | 969 |
| 971 // Return. | 970 // Return. |
| 972 __ Bind(&done); | 971 __ Bind(&done); |
| 973 __ AllocateHeapNumber(result_tagged, &call_runtime, scratch0, scratch1, | 972 __ AllocateHeapNumber(result_tagged, &call_runtime, scratch0, scratch1, |
| 974 result_double); | 973 result_double); |
| 975 DCHECK(result_tagged.is(x0)); | 974 DCHECK(result_tagged.is(x0)); |
| 976 __ IncrementCounter( | 975 __ IncrementCounter( |
| 977 isolate()->counters()->math_pow(), 1, scratch0, scratch1); | 976 isolate()->counters()->math_pow(), 1, scratch0, scratch1); |
| 978 __ Ret(); | 977 __ Ret(); |
| 979 } else { | 978 } else { |
| (...skipping 622 matching lines...) |
| 1602 __ Bind(&done); | 1601 __ Bind(&done); |
| 1603 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex); | 1602 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex); |
| 1604 __ Ret(); | 1603 __ Ret(); |
| 1605 | 1604 |
| 1606 // Found Proxy or access check needed: Call the runtime | 1605 // Found Proxy or access check needed: Call the runtime |
| 1607 __ Bind(&fast_runtime_fallback); | 1606 __ Bind(&fast_runtime_fallback); |
| 1608 __ Push(object, function_prototype); | 1607 __ Push(object, function_prototype); |
| 1609 // Invalidate the instanceof cache. | 1608 // Invalidate the instanceof cache. |
| 1610 __ Move(scratch, Smi::FromInt(0)); | 1609 __ Move(scratch, Smi::FromInt(0)); |
| 1611 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex); | 1610 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex); |
| 1612 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2, 1); | 1611 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2); |
| 1613 | 1612 |
| 1614 // Slow-case: Call the %InstanceOf runtime function. | 1613 // Slow-case: Call the %InstanceOf runtime function. |
| 1615 __ bind(&slow_case); | 1614 __ bind(&slow_case); |
| 1616 __ Push(object, function); | 1615 __ Push(object, function); |
| 1617 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1); | 1616 __ TailCallRuntime(Runtime::kInstanceOf, 2); |
| 1618 } | 1617 } |
| 1619 | 1618 |
| 1620 | 1619 |
| 1621 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { | 1620 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { |
| 1622 Register arg_count = ArgumentsAccessReadDescriptor::parameter_count(); | 1621 Register arg_count = ArgumentsAccessReadDescriptor::parameter_count(); |
| 1623 Register key = ArgumentsAccessReadDescriptor::index(); | 1622 Register key = ArgumentsAccessReadDescriptor::index(); |
| 1624 DCHECK(arg_count.is(x0)); | 1623 DCHECK(arg_count.is(x0)); |
| 1625 DCHECK(key.is(x1)); | 1624 DCHECK(key.is(x1)); |
| 1626 | 1625 |
| 1627 // The displacement is the offset of the last parameter (if any) relative | 1626 // The displacement is the offset of the last parameter (if any) relative |
| (...skipping 30 matching lines...) |
| 1658 // Read the argument from the stack and return it. | 1657 // Read the argument from the stack and return it. |
| 1659 __ Sub(x10, arg_count, key); | 1658 __ Sub(x10, arg_count, key); |
| 1660 __ Add(x10, local_fp, Operand::UntagSmiAndScale(x10, kPointerSizeLog2)); | 1659 __ Add(x10, local_fp, Operand::UntagSmiAndScale(x10, kPointerSizeLog2)); |
| 1661 __ Ldr(x0, MemOperand(x10, kDisplacement)); | 1660 __ Ldr(x0, MemOperand(x10, kDisplacement)); |
| 1662 __ Ret(); | 1661 __ Ret(); |
| 1663 | 1662 |
| 1664 // Slow case: handle non-smi or out-of-bounds access to arguments by calling | 1663 // Slow case: handle non-smi or out-of-bounds access to arguments by calling |
| 1665 // the runtime system. | 1664 // the runtime system. |
| 1666 __ Bind(&slow); | 1665 __ Bind(&slow); |
| 1667 __ Push(key); | 1666 __ Push(key); |
| 1668 __ TailCallRuntime(Runtime::kArguments, 1, 1); | 1667 __ TailCallRuntime(Runtime::kArguments, 1); |
| 1669 } | 1668 } |
| 1670 | 1669 |
| 1671 | 1670 |
| 1672 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { | 1671 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { |
| 1673 // x1 : function | 1672 // x1 : function |
| 1674 // x2 : number of parameters (tagged) | 1673 // x2 : number of parameters (tagged) |
| 1675 // x3 : parameters pointer | 1674 // x3 : parameters pointer |
| 1676 | 1675 |
| 1677 DCHECK(x1.is(ArgumentsAccessNewDescriptor::function())); | 1676 DCHECK(x1.is(ArgumentsAccessNewDescriptor::function())); |
| 1678 DCHECK(x2.is(ArgumentsAccessNewDescriptor::parameter_count())); | 1677 DCHECK(x2.is(ArgumentsAccessNewDescriptor::parameter_count())); |
| (...skipping 10 matching lines...) |
| 1689 __ B(ne, &runtime); | 1688 __ B(ne, &runtime); |
| 1690 | 1689 |
| 1691 // Patch the arguments.length and parameters pointer in the current frame. | 1690 // Patch the arguments.length and parameters pointer in the current frame. |
| 1692 __ Ldr(x2, | 1691 __ Ldr(x2, |
| 1693 MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1692 MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1694 __ Add(x3, caller_fp, Operand::UntagSmiAndScale(x2, kPointerSizeLog2)); | 1693 __ Add(x3, caller_fp, Operand::UntagSmiAndScale(x2, kPointerSizeLog2)); |
| 1695 __ Add(x3, x3, StandardFrameConstants::kCallerSPOffset); | 1694 __ Add(x3, x3, StandardFrameConstants::kCallerSPOffset); |
| 1696 | 1695 |
| 1697 __ Bind(&runtime); | 1696 __ Bind(&runtime); |
| 1698 __ Push(x1, x3, x2); | 1697 __ Push(x1, x3, x2); |
| 1699 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); | 1698 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); |
| 1700 } | 1699 } |
| 1701 | 1700 |
| 1702 | 1701 |
| 1703 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { | 1702 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
| 1704 // x1 : function | 1703 // x1 : function |
| 1705 // x2 : number of parameters (tagged) | 1704 // x2 : number of parameters (tagged) |
| 1706 // x3 : parameters pointer | 1705 // x3 : parameters pointer |
| 1707 // | 1706 // |
| 1708 // Returns pointer to result object in x0. | 1707 // Returns pointer to result object in x0. |
| 1709 | 1708 |
| (...skipping 254 matching lines...) |
| 1964 | 1963 |
| 1965 __ Bind(&arguments_test); | 1964 __ Bind(&arguments_test); |
| 1966 __ Cmp(x10, arg_count); | 1965 __ Cmp(x10, arg_count); |
| 1967 __ B(lt, &arguments_loop); | 1966 __ B(lt, &arguments_loop); |
| 1968 | 1967 |
| 1969 __ Ret(); | 1968 __ Ret(); |
| 1970 | 1969 |
| 1971 // Do the runtime call to allocate the arguments object. | 1970 // Do the runtime call to allocate the arguments object. |
| 1972 __ Bind(&runtime); | 1971 __ Bind(&runtime); |
| 1973 __ Push(function, recv_arg, arg_count_smi); | 1972 __ Push(function, recv_arg, arg_count_smi); |
| 1974 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); | 1973 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); |
| 1975 } | 1974 } |
| 1976 | 1975 |
| 1977 | 1976 |
| 1978 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { | 1977 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { |
| 1979 // Return address is in lr. | 1978 // Return address is in lr. |
| 1980 Label slow; | 1979 Label slow; |
| 1981 | 1980 |
| 1982 Register receiver = LoadDescriptor::ReceiverRegister(); | 1981 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 1983 Register key = LoadDescriptor::NameRegister(); | 1982 Register key = LoadDescriptor::NameRegister(); |
| 1984 | 1983 |
| 1985 // Check that the key is an array index, that is Uint32. | 1984 // Check that the key is an array index, that is Uint32. |
| 1986 __ TestAndBranchIfAnySet(key, kSmiTagMask | kSmiSignMask, &slow); | 1985 __ TestAndBranchIfAnySet(key, kSmiTagMask | kSmiSignMask, &slow); |
| 1987 | 1986 |
| 1988 // Everything is fine, call runtime. | 1987 // Everything is fine, call runtime. |
| 1989 __ Push(receiver, key); | 1988 __ Push(receiver, key); |
| 1990 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1); | 1989 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2); |
| 1991 | 1990 |
| 1992 __ Bind(&slow); | 1991 __ Bind(&slow); |
| 1993 PropertyAccessCompiler::TailCallBuiltin( | 1992 PropertyAccessCompiler::TailCallBuiltin( |
| 1994 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1993 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
| 1995 } | 1994 } |
| 1996 | 1995 |
| 1997 | 1996 |
| 1998 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 1997 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
| 1999 // x1 : function | 1998 // x1 : function |
| 2000 // x2 : number of parameters (tagged) | 1999 // x2 : number of parameters (tagged) |
| (...skipping 111 matching lines...) |
| 2112 __ Sub(param_count, param_count, 1); | 2111 __ Sub(param_count, param_count, 1); |
| 2113 __ Cbnz(param_count, &loop); | 2112 __ Cbnz(param_count, &loop); |
| 2114 | 2113 |
| 2115 // Return from stub. | 2114 // Return from stub. |
| 2116 __ Bind(&done); | 2115 __ Bind(&done); |
| 2117 __ Ret(); | 2116 __ Ret(); |
| 2118 | 2117 |
| 2119 // Do the runtime call to allocate the arguments object. | 2118 // Do the runtime call to allocate the arguments object. |
| 2120 __ Bind(&runtime); | 2119 __ Bind(&runtime); |
| 2121 __ Push(function, params, param_count_smi); | 2120 __ Push(function, params, param_count_smi); |
| 2122 __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1); | 2121 __ TailCallRuntime(Runtime::kNewStrictArguments, 3); |
| 2123 } | 2122 } |
| 2124 | 2123 |
| 2125 | 2124 |
| 2126 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { | 2125 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { |
| 2127 // Stack layout on entry. | 2126 // Stack layout on entry. |
| 2128 // jssp[0]: language mode (tagged) | 2127 // jssp[0]: language mode (tagged) |
| 2129 // jssp[8]: index of rest parameter (tagged) | 2128 // jssp[8]: index of rest parameter (tagged) |
| 2130 // jssp[16]: number of parameters (tagged) | 2129 // jssp[16]: number of parameters (tagged) |
| 2131 // jssp[24]: address of receiver argument | 2130 // jssp[24]: address of receiver argument |
| 2132 // | 2131 // |
| (...skipping 28 matching lines...) |
| 2161 | 2160 |
| 2162 // Patch the argument length and parameters pointer. | 2161 // Patch the argument length and parameters pointer. |
| 2163 __ Ldr(param_count_smi, | 2162 __ Ldr(param_count_smi, |
| 2164 MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2163 MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 2165 __ SmiUntag(param_count, param_count_smi); | 2164 __ SmiUntag(param_count, param_count_smi); |
| 2166 __ Add(x10, caller_fp, Operand(param_count, LSL, kPointerSizeLog2)); | 2165 __ Add(x10, caller_fp, Operand(param_count, LSL, kPointerSizeLog2)); |
| 2167 __ Add(params, x10, StandardFrameConstants::kCallerSPOffset); | 2166 __ Add(params, x10, StandardFrameConstants::kCallerSPOffset); |
| 2168 | 2167 |
| 2169 __ Bind(&runtime); | 2168 __ Bind(&runtime); |
| 2170 __ Push(params, param_count_smi, rest_index_smi, language_mode_smi); | 2169 __ Push(params, param_count_smi, rest_index_smi, language_mode_smi); |
| 2171 __ TailCallRuntime(Runtime::kNewRestParam, 4, 1); | 2170 __ TailCallRuntime(Runtime::kNewRestParam, 4); |
| 2172 } | 2171 } |
| 2173 | 2172 |
| 2174 | 2173 |
| 2175 void RegExpExecStub::Generate(MacroAssembler* masm) { | 2174 void RegExpExecStub::Generate(MacroAssembler* masm) { |
| 2176 #ifdef V8_INTERPRETED_REGEXP | 2175 #ifdef V8_INTERPRETED_REGEXP |
| 2177 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 2176 __ TailCallRuntime(Runtime::kRegExpExec, 4); |
| 2178 #else // V8_INTERPRETED_REGEXP | 2177 #else // V8_INTERPRETED_REGEXP |
| 2179 | 2178 |
| 2180 // Stack frame on entry. | 2179 // Stack frame on entry. |
| 2181 // jssp[0]: last_match_info (expected JSArray) | 2180 // jssp[0]: last_match_info (expected JSArray) |
| 2182 // jssp[8]: previous index | 2181 // jssp[8]: previous index |
| 2183 // jssp[16]: subject string | 2182 // jssp[16]: subject string |
| 2184 // jssp[24]: JSRegExp object | 2183 // jssp[24]: JSRegExp object |
| 2185 Label runtime; | 2184 Label runtime; |
| 2186 | 2185 |
| 2187 // Use of registers for this function. | 2186 // Use of registers for this function. |
| (...skipping 420 matching lines...) |
| 2608 // If there is no pending exception, handle that in the runtime system. | 2607 // If there is no pending exception, handle that in the runtime system. |
| 2609 __ Mov(x10, Operand(isolate()->factory()->the_hole_value())); | 2608 __ Mov(x10, Operand(isolate()->factory()->the_hole_value())); |
| 2610 __ Mov(x11, | 2609 __ Mov(x11, |
| 2611 Operand(ExternalReference(Isolate::kPendingExceptionAddress, | 2610 Operand(ExternalReference(Isolate::kPendingExceptionAddress, |
| 2612 isolate()))); | 2611 isolate()))); |
| 2613 __ Ldr(exception_value, MemOperand(x11)); | 2612 __ Ldr(exception_value, MemOperand(x11)); |
| 2614 __ Cmp(x10, exception_value); | 2613 __ Cmp(x10, exception_value); |
| 2615 __ B(eq, &runtime); | 2614 __ B(eq, &runtime); |
| 2616 | 2615 |
| 2617 // For exception, throw the exception again. | 2616 // For exception, throw the exception again. |
| 2618 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4, 1); | 2617 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4); |
| 2619 | 2618 |
| 2620 __ Bind(&failure); | 2619 __ Bind(&failure); |
| 2621 __ Mov(x0, Operand(isolate()->factory()->null_value())); | 2620 __ Mov(x0, Operand(isolate()->factory()->null_value())); |
| 2622 // Drop the 4 arguments of the stub from the stack. | 2621 // Drop the 4 arguments of the stub from the stack. |
| 2623 __ Drop(4); | 2622 __ Drop(4); |
| 2624 __ Ret(); | 2623 __ Ret(); |
| 2625 | 2624 |
| 2626 __ Bind(&runtime); | 2625 __ Bind(&runtime); |
| 2627 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 2626 __ TailCallRuntime(Runtime::kRegExpExec, 4); |
| 2628 | 2627 |
| 2629 // Deferred code for string handling. | 2628 // Deferred code for string handling. |
| 2630 // (6) Not a long external string? If yes, go to (8). | 2629 // (6) Not a long external string? If yes, go to (8). |
| 2631 __ Bind(&not_seq_nor_cons); | 2630 __ Bind(&not_seq_nor_cons); |
| 2632 // Compare flags are still set. | 2631 // Compare flags are still set. |
| 2633 __ B(ne, &not_long_external); // Go to (8). | 2632 __ B(ne, &not_long_external); // Go to (8). |
| 2634 | 2633 |
| 2635 // (7) External string. Make it, offset-wise, look like a sequential string. | 2634 // (7) External string. Make it, offset-wise, look like a sequential string. |
| 2636 __ Bind(&external_string); | 2635 __ Bind(&external_string); |
| 2637 if (masm->emit_debug_code()) { | 2636 if (masm->emit_debug_code()) { |
| (...skipping 510 matching lines...) |
| 3148 | 3147 |
| 3149 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { | 3148 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { |
| 3150 // Inputs are in x0 (lhs) and x1 (rhs). | 3149 // Inputs are in x0 (lhs) and x1 (rhs). |
| 3151 DCHECK_EQ(CompareICState::BOOLEAN, state()); | 3150 DCHECK_EQ(CompareICState::BOOLEAN, state()); |
| 3152 ASM_LOCATION("CompareICStub[Booleans]"); | 3151 ASM_LOCATION("CompareICStub[Booleans]"); |
| 3153 Label miss; | 3152 Label miss; |
| 3154 | 3153 |
| 3155 __ CheckMap(x1, x2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); | 3154 __ CheckMap(x1, x2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); |
| 3156 __ CheckMap(x0, x3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); | 3155 __ CheckMap(x0, x3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); |
| 3157 if (op() != Token::EQ_STRICT && is_strong(strength())) { | 3156 if (op() != Token::EQ_STRICT && is_strong(strength())) { |
| 3158 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1); | 3157 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); |
| 3159 } else { | 3158 } else { |
| 3160 if (!Token::IsEqualityOp(op())) { | 3159 if (!Token::IsEqualityOp(op())) { |
| 3161 __ Ldr(x1, FieldMemOperand(x1, Oddball::kToNumberOffset)); | 3160 __ Ldr(x1, FieldMemOperand(x1, Oddball::kToNumberOffset)); |
| 3162 __ AssertSmi(x1); | 3161 __ AssertSmi(x1); |
| 3163 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset)); | 3162 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset)); |
| 3164 __ AssertSmi(x0); | 3163 __ AssertSmi(x0); |
| 3165 } | 3164 } |
| 3166 __ Sub(x0, x1, x0); | 3165 __ Sub(x0, x1, x0); |
| 3167 __ Ret(); | 3166 __ Ret(); |
| 3168 } | 3167 } |
| (...skipping 233 matching lines...) |
| 3402 x12); | 3401 x12); |
| 3403 } else { | 3402 } else { |
| 3404 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, x10, x11, | 3403 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, x10, x11, |
| 3405 x12, x13); | 3404 x12, x13); |
| 3406 } | 3405 } |
| 3407 | 3406 |
| 3408 // Handle more complex cases in runtime. | 3407 // Handle more complex cases in runtime. |
| 3409 __ Bind(&runtime); | 3408 __ Bind(&runtime); |
| 3410 __ Push(lhs, rhs); | 3409 __ Push(lhs, rhs); |
| 3411 if (equality) { | 3410 if (equality) { |
| 3412 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); | 3411 __ TailCallRuntime(Runtime::kStringEquals, 2); |
| 3413 } else { | 3412 } else { |
| 3414 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 3413 __ TailCallRuntime(Runtime::kStringCompare, 2); |
| 3415 } | 3414 } |
| 3416 | 3415 |
| 3417 __ Bind(&miss); | 3416 __ Bind(&miss); |
| 3418 GenerateMiss(masm); | 3417 GenerateMiss(masm); |
| 3419 } | 3418 } |
| 3420 | 3419 |
| 3421 | 3420 |
| 3422 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { | 3421 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { |
| 3423 DCHECK_EQ(CompareICState::RECEIVER, state()); | 3422 DCHECK_EQ(CompareICState::RECEIVER, state()); |
| 3424 ASM_LOCATION("CompareICStub[Receivers]"); | 3423 ASM_LOCATION("CompareICStub[Receivers]"); |
| (...skipping 39 matching lines...) |
| 3464 __ Ldr(lhs_map, FieldMemOperand(lhs, HeapObject::kMapOffset)); | 3463 __ Ldr(lhs_map, FieldMemOperand(lhs, HeapObject::kMapOffset)); |
| 3465 __ Cmp(rhs_map, map); | 3464 __ Cmp(rhs_map, map); |
| 3466 __ B(ne, &miss); | 3465 __ B(ne, &miss); |
| 3467 __ Cmp(lhs_map, map); | 3466 __ Cmp(lhs_map, map); |
| 3468 __ B(ne, &miss); | 3467 __ B(ne, &miss); |
| 3469 | 3468 |
| 3470 if (Token::IsEqualityOp(op())) { | 3469 if (Token::IsEqualityOp(op())) { |
| 3471 __ Sub(result, rhs, lhs); | 3470 __ Sub(result, rhs, lhs); |
| 3472 __ Ret(); | 3471 __ Ret(); |
| 3473 } else if (is_strong(strength())) { | 3472 } else if (is_strong(strength())) { |
| 3474 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1); | 3473 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); |
| 3475 } else { | 3474 } else { |
| 3476 Register ncr = x2; | 3475 Register ncr = x2; |
| 3477 if (op() == Token::LT || op() == Token::LTE) { | 3476 if (op() == Token::LT || op() == Token::LTE) { |
| 3478 __ Mov(ncr, Smi::FromInt(GREATER)); | 3477 __ Mov(ncr, Smi::FromInt(GREATER)); |
| 3479 } else { | 3478 } else { |
| 3480 __ Mov(ncr, Smi::FromInt(LESS)); | 3479 __ Mov(ncr, Smi::FromInt(LESS)); |
| 3481 } | 3480 } |
| 3482 __ Push(lhs, rhs, ncr); | 3481 __ Push(lhs, rhs, ncr); |
| 3483 __ TailCallRuntime(Runtime::kCompare, 3, 1); | 3482 __ TailCallRuntime(Runtime::kCompare, 3); |
| 3484 } | 3483 } |
| 3485 | 3484 |
| 3486 __ Bind(&miss); | 3485 __ Bind(&miss); |
| 3487 GenerateMiss(masm); | 3486 GenerateMiss(masm); |
| 3488 } | 3487 } |
| 3489 | 3488 |
| 3490 | 3489 |
| 3491 // This method handles the case where a compare stub had the wrong | 3490 // This method handles the case where a compare stub had the wrong |
| 3492 // implementation. It calls a miss handler, which re-writes the stub. All other | 3491 // implementation. It calls a miss handler, which re-writes the stub. All other |
| 3493 // CompareICStub::Generate* methods should fall back into this one if their | 3492 // CompareICStub::Generate* methods should fall back into this one if their |
| (...skipping 253 matching lines...) |
| 3747 __ Add(result_length, result_length, result_length); | 3746 __ Add(result_length, result_length, result_length); |
| 3748 __ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong); | 3747 __ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong); |
| 3749 | 3748 |
| 3750 __ Bind(&return_x0); | 3749 __ Bind(&return_x0); |
| 3751 Counters* counters = isolate()->counters(); | 3750 Counters* counters = isolate()->counters(); |
| 3752 __ IncrementCounter(counters->sub_string_native(), 1, x3, x4); | 3751 __ IncrementCounter(counters->sub_string_native(), 1, x3, x4); |
| 3753 __ Drop(3); | 3752 __ Drop(3); |
| 3754 __ Ret(); | 3753 __ Ret(); |
| 3755 | 3754 |
| 3756 __ Bind(&runtime); | 3755 __ Bind(&runtime); |
| 3757 __ TailCallRuntime(Runtime::kSubString, 3, 1); | 3756 __ TailCallRuntime(Runtime::kSubString, 3); |
| 3758 | 3757 |
| 3759 __ bind(&single_char); | 3758 __ bind(&single_char); |
| 3760 // x1: result_length | 3759 // x1: result_length |
| 3761 // x10: input_string | 3760 // x10: input_string |
| 3762 // x12: input_type | 3761 // x12: input_type |
| 3763 // x15: from (untagged) | 3762 // x15: from (untagged) |
| 3764 __ SmiTag(from); | 3763 __ SmiTag(from); |
| 3765 StringCharAtGenerator generator(input_string, from, result_length, x0, | 3764 StringCharAtGenerator generator(input_string, from, result_length, x0, |
| 3766 &runtime, &runtime, &runtime, | 3765 &runtime, &runtime, &runtime, |
| 3767 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); | 3766 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); |
| (...skipping 25 matching lines...) |
| 3793 __ Cmp(x1, FIRST_NONSTRING_TYPE); | 3792 __ Cmp(x1, FIRST_NONSTRING_TYPE); |
| 3794 __ B(hs, &not_string); | 3793 __ B(hs, &not_string); |
| 3795 // Check if string has a cached array index. | 3794 // Check if string has a cached array index. |
| 3796 __ Ldr(x2, FieldMemOperand(x0, String::kHashFieldOffset)); | 3795 __ Ldr(x2, FieldMemOperand(x0, String::kHashFieldOffset)); |
| 3797 __ Tst(x2, Operand(String::kContainsCachedArrayIndexMask)); | 3796 __ Tst(x2, Operand(String::kContainsCachedArrayIndexMask)); |
| 3798 __ B(ne, &slow_string); | 3797 __ B(ne, &slow_string); |
| 3799 __ IndexFromHash(x2, x0); | 3798 __ IndexFromHash(x2, x0); |
| 3800 __ Ret(); | 3799 __ Ret(); |
| 3801 __ Bind(&slow_string); | 3800 __ Bind(&slow_string); |
| 3802 __ Push(x0); // Push argument. | 3801 __ Push(x0); // Push argument. |
| 3803 __ TailCallRuntime(Runtime::kStringToNumber, 1, 1); | 3802 __ TailCallRuntime(Runtime::kStringToNumber, 1); |
| 3804 __ Bind(&not_string); | 3803 __ Bind(&not_string); |
| 3805 | 3804 |
| 3806 Label not_oddball; | 3805 Label not_oddball; |
| 3807 __ Cmp(x1, ODDBALL_TYPE); | 3806 __ Cmp(x1, ODDBALL_TYPE); |
| 3808 __ B(ne, &not_oddball); | 3807 __ B(ne, &not_oddball); |
| 3809 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset)); | 3808 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset)); |
| 3810 __ Ret(); | 3809 __ Ret(); |
| 3811 __ Bind(&not_oddball); | 3810 __ Bind(&not_oddball); |
| 3812 | 3811 |
| 3813 __ Push(x0); // Push argument. | 3812 __ Push(x0); // Push argument. |
| 3814 __ TailCallRuntime(Runtime::kToNumber, 1, 1); | 3813 __ TailCallRuntime(Runtime::kToNumber, 1); |
| 3815 } | 3814 } |
| 3816 | 3815 |
| 3817 | 3816 |
| 3818 void ToLengthStub::Generate(MacroAssembler* masm) { | 3817 void ToLengthStub::Generate(MacroAssembler* masm) { |
| 3819 // The ToLength stub takes one argument in x0. | 3818 // The ToLength stub takes one argument in x0. |
| 3820 Label not_smi; | 3819 Label not_smi; |
| 3821 __ JumpIfNotSmi(x0, &not_smi); | 3820 __ JumpIfNotSmi(x0, &not_smi); |
| 3822 STATIC_ASSERT(kSmiTag == 0); | 3821 STATIC_ASSERT(kSmiTag == 0); |
| 3823 __ Tst(x0, x0); | 3822 __ Tst(x0, x0); |
| 3824 __ Csel(x0, x0, Operand(0), ge); | 3823 __ Csel(x0, x0, Operand(0), ge); |
| 3825 __ Ret(); | 3824 __ Ret(); |
| 3826 __ Bind(&not_smi); | 3825 __ Bind(&not_smi); |
| 3827 | 3826 |
| 3828 __ Push(x0); // Push argument. | 3827 __ Push(x0); // Push argument. |
| 3829 __ TailCallRuntime(Runtime::kToLength, 1, 1); | 3828 __ TailCallRuntime(Runtime::kToLength, 1); |
| 3830 } | 3829 } |
| 3831 | 3830 |
| 3832 | 3831 |
| 3833 void ToStringStub::Generate(MacroAssembler* masm) { | 3832 void ToStringStub::Generate(MacroAssembler* masm) { |
| 3834 // The ToString stub takes one argument in x0. | 3833 // The ToString stub takes one argument in x0. |
| 3835 Label is_number; | 3834 Label is_number; |
| 3836 __ JumpIfSmi(x0, &is_number); | 3835 __ JumpIfSmi(x0, &is_number); |
| 3837 | 3836 |
| 3838 Label not_string; | 3837 Label not_string; |
| 3839 __ JumpIfObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE, &not_string, hs); | 3838 __ JumpIfObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE, &not_string, hs); |
| (...skipping 11 matching lines...) |
| 3851 __ Bind(&not_heap_number); | 3850 __ Bind(&not_heap_number); |
| 3852 | 3851 |
| 3853 Label not_oddball; | 3852 Label not_oddball; |
| 3854 __ Cmp(x1, ODDBALL_TYPE); | 3853 __ Cmp(x1, ODDBALL_TYPE); |
| 3855 __ B(ne, &not_oddball); | 3854 __ B(ne, &not_oddball); |
| 3856 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToStringOffset)); | 3855 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToStringOffset)); |
| 3857 __ Ret(); | 3856 __ Ret(); |
| 3858 __ Bind(&not_oddball); | 3857 __ Bind(&not_oddball); |
| 3859 | 3858 |
| 3860 __ Push(x0); // Push argument. | 3859 __ Push(x0); // Push argument. |
| 3861 __ TailCallRuntime(Runtime::kToString, 1, 1); | 3860 __ TailCallRuntime(Runtime::kToString, 1); |
| 3862 } | 3861 } |
| 3863 | 3862 |
| 3864 | 3863 |
| 3865 void StringHelper::GenerateFlatOneByteStringEquals( | 3864 void StringHelper::GenerateFlatOneByteStringEquals( |
| 3866 MacroAssembler* masm, Register left, Register right, Register scratch1, | 3865 MacroAssembler* masm, Register left, Register right, Register scratch1, |
| 3867 Register scratch2, Register scratch3) { | 3866 Register scratch2, Register scratch3) { |
| 3868 DCHECK(!AreAliased(left, right, scratch1, scratch2, scratch3)); | 3867 DCHECK(!AreAliased(left, right, scratch1, scratch2, scratch3)); |
| 3869 Register result = x0; | 3868 Register result = x0; |
| 3870 Register left_length = scratch1; | 3869 Register left_length = scratch1; |
| 3871 Register right_length = scratch2; | 3870 Register right_length = scratch2; |
| (...skipping 123 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3995 // Compare flat one-byte strings natively. | 3994 // Compare flat one-byte strings natively. |
| 3996 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, x3, | 3995 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, x3, |
| 3997 x4); | 3996 x4); |
| 3998 StringHelper::GenerateCompareFlatOneByteStrings(masm, x1, x0, x12, x13, x14, | 3997 StringHelper::GenerateCompareFlatOneByteStrings(masm, x1, x0, x12, x13, x14, |
| 3999 x15); | 3998 x15); |
| 4000 | 3999 |
| 4001 // Call the runtime. | 4000 // Call the runtime. |
| 4002 // Returns -1 (less), 0 (equal), or 1 (greater) tagged as a small integer. | 4001 // Returns -1 (less), 0 (equal), or 1 (greater) tagged as a small integer. |
| 4003 __ Bind(&runtime); | 4002 __ Bind(&runtime); |
| 4004 __ Push(x1, x0); | 4003 __ Push(x1, x0); |
| 4005 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 4004 __ TailCallRuntime(Runtime::kStringCompare, 2); |
| 4006 } | 4005 } |
| 4007 | 4006 |
| 4008 | 4007 |
| 4009 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { | 4008 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { |
| 4010 // ----------- S t a t e ------------- | 4009 // ----------- S t a t e ------------- |
| 4011 // -- x1 : left | 4010 // -- x1 : left |
| 4012 // -- x0 : right | 4011 // -- x0 : right |
| 4013 // -- lr : return address | 4012 // -- lr : return address |
| 4014 // ----------------------------------- | 4013 // ----------------------------------- |
| 4015 | 4014 |
| (...skipping 1348 matching lines...) |
| 5364 __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); | 5363 __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); |
| 5365 | 5364 |
| 5366 // If the result is not the_hole, return. Otherwise, handle in the runtime. | 5365 // If the result is not the_hole, return. Otherwise, handle in the runtime. |
| 5367 __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case); | 5366 __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case); |
| 5368 __ Ret(); | 5367 __ Ret(); |
| 5369 | 5368 |
| 5370 // Fallback to runtime. | 5369 // Fallback to runtime. |
| 5371 __ Bind(&slow_case); | 5370 __ Bind(&slow_case); |
| 5372 __ SmiTag(slot); | 5371 __ SmiTag(slot); |
| 5373 __ Push(slot); | 5372 __ Push(slot); |
| 5374 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1); | 5373 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1); |
| 5375 } | 5374 } |
| 5376 | 5375 |
| 5377 | 5376 |
| 5378 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5377 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { |
| 5379 Register context = cp; | 5378 Register context = cp; |
| 5380 Register value = x0; | 5379 Register value = x0; |
| 5381 Register slot = x2; | 5380 Register slot = x2; |
| 5382 Register context_temp = x10; | 5381 Register context_temp = x10; |
| 5383 Register cell = x10; | 5382 Register cell = x10; |
| 5384 Register cell_details = x11; | 5383 Register cell_details = x11; |
| (...skipping 100 matching lines...) |
| 5485 __ Cmp(cell_value_map, value_map); | 5484 __ Cmp(cell_value_map, value_map); |
| 5486 __ B(eq, &fast_heapobject_case); | 5485 __ B(eq, &fast_heapobject_case); |
| 5487 | 5486 |
| 5488 // Fall back to the runtime. | 5487 // Fall back to the runtime. |
| 5489 __ Bind(&slow_case); | 5488 __ Bind(&slow_case); |
| 5490 __ SmiTag(slot); | 5489 __ SmiTag(slot); |
| 5491 __ Push(slot, value); | 5490 __ Push(slot, value); |
| 5492 __ TailCallRuntime(is_strict(language_mode()) | 5491 __ TailCallRuntime(is_strict(language_mode()) |
| 5493 ? Runtime::kStoreGlobalViaContext_Strict | 5492 ? Runtime::kStoreGlobalViaContext_Strict |
| 5494 : Runtime::kStoreGlobalViaContext_Sloppy, | 5493 : Runtime::kStoreGlobalViaContext_Sloppy, |
| 5495 2, 1); | 5494 2); |
| 5496 } | 5495 } |
| 5497 | 5496 |
| 5498 | 5497 |
| 5499 // The number of registers that CallApiFunctionAndReturn will need to save on | 5498 // The number of registers that CallApiFunctionAndReturn will need to save on |
| 5500 // the stack. The space for these registers needs to be allocated in the | 5499 // the stack. The space for these registers needs to be allocated in the |
| 5501 // ExitFrame before calling CallApiFunctionAndReturn. | 5500 // ExitFrame before calling CallApiFunctionAndReturn. |
| 5502 static const int kCallApiFunctionSpillSpace = 4; | 5501 static const int kCallApiFunctionSpillSpace = 4; |
| 5503 | 5502 |
| 5504 | 5503 |
| 5505 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 5504 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
| (...skipping 132 matching lines...) |
| 5638 | 5637 |
| 5639 if (stack_space_operand != NULL) { | 5638 if (stack_space_operand != NULL) { |
| 5640 __ Drop(x2, 1); | 5639 __ Drop(x2, 1); |
| 5641 } else { | 5640 } else { |
| 5642 __ Drop(stack_space); | 5641 __ Drop(stack_space); |
| 5643 } | 5642 } |
| 5644 __ Ret(); | 5643 __ Ret(); |
| 5645 | 5644 |
| 5646 // Re-throw by promoting a scheduled exception. | 5645 // Re-throw by promoting a scheduled exception. |
| 5647 __ Bind(&promote_scheduled_exception); | 5646 __ Bind(&promote_scheduled_exception); |
| 5648 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); | 5647 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0); |
| 5649 | 5648 |
| 5650 // HandleScope limit has changed. Delete allocated extensions. | 5649 // HandleScope limit has changed. Delete allocated extensions. |
| 5651 __ Bind(&delete_allocated_handles); | 5650 __ Bind(&delete_allocated_handles); |
| 5652 __ Str(limit_reg, MemOperand(handle_scope_base, kLimitOffset)); | 5651 __ Str(limit_reg, MemOperand(handle_scope_base, kLimitOffset)); |
| 5653 // Save the return value in a callee-save register. | 5652 // Save the return value in a callee-save register. |
| 5654 Register saved_result = x19; | 5653 Register saved_result = x19; |
| 5655 __ Mov(saved_result, x0); | 5654 __ Mov(saved_result, x0); |
| 5656 __ Mov(x0, ExternalReference::isolate_address(isolate)); | 5655 __ Mov(x0, ExternalReference::isolate_address(isolate)); |
| 5657 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), | 5656 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), |
| 5658 1); | 5657 1); |
| (...skipping 180 matching lines...) |
| 5839 MemOperand(fp, 6 * kPointerSize), NULL); | 5838 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5840 } | 5839 } |
| 5841 | 5840 |
| 5842 | 5841 |
| 5843 #undef __ | 5842 #undef __ |
| 5844 | 5843 |
| 5845 } // namespace internal | 5844 } // namespace internal |
| 5846 } // namespace v8 | 5845 } // namespace v8 |
| 5847 | 5846 |
| 5848 #endif // V8_TARGET_ARCH_ARM64 | 5847 #endif // V8_TARGET_ARCH_ARM64 |
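
Every hunk in this diff follows the same mechanical pattern: the trailing result-size argument (always 1 in this file) is dropped from __ TailCallRuntime, leaving only the Runtime function id and the argument count. A minimal sketch of a call site after the change, for orientation only — ExampleStub is a hypothetical name, and the __ macro is assumed to expand to ACCESS_MASM(masm) as elsewhere in V8's code stubs:

    // Hypothetical stub body showing only the new two-argument form.
    void ExampleStub::Generate(MacroAssembler* masm) {
      __ Push(x1, x0);                                 // push the runtime call's arguments
      __ TailCallRuntime(Runtime::kStringCompare, 2);  // function id + argument count; no result size
    }

The same two-argument shape appears verbatim in the NEW column at the kStringCompare, kSubString, and kRegExpExec call sites above.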