OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 635 matching lines...)
646 // Never fall through to here. | 646 // Never fall through to here. |
647 if (FLAG_debug_code) { | 647 if (FLAG_debug_code) { |
648 __ Unreachable(); | 648 __ Unreachable(); |
649 } | 649 } |
650 | 650 |
651 __ Bind(&slow); | 651 __ Bind(&slow); |
652 | 652 |
653 __ Push(lhs, rhs); | 653 __ Push(lhs, rhs); |
654 // Figure out which native to call and setup the arguments. | 654 // Figure out which native to call and setup the arguments. |
655 if (cond == eq) { | 655 if (cond == eq) { |
656 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2); | 656 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals); |
657 } else { | 657 } else { |
658 int ncr; // NaN compare result | 658 int ncr; // NaN compare result |
659 if ((cond == lt) || (cond == le)) { | 659 if ((cond == lt) || (cond == le)) { |
660 ncr = GREATER; | 660 ncr = GREATER; |
661 } else { | 661 } else { |
662 DCHECK((cond == gt) || (cond == ge)); // remaining cases | 662 DCHECK((cond == gt) || (cond == ge)); // remaining cases |
663 ncr = LESS; | 663 ncr = LESS; |
664 } | 664 } |
665 __ Mov(x10, Smi::FromInt(ncr)); | 665 __ Mov(x10, Smi::FromInt(ncr)); |
666 __ Push(x10); | 666 __ Push(x10); |
667 | 667 |
668 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 668 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
669 // tagged as a small integer. | 669 // tagged as a small integer. |
670 __ TailCallRuntime( | 670 __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong |
671 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, | 671 : Runtime::kCompare); |
672 3); | |
673 } | 672 } |
674 | 673 |
675 __ Bind(&miss); | 674 __ Bind(&miss); |
676 GenerateMiss(masm); | 675 GenerateMiss(masm); |
677 } | 676 } |
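The recurring change in this CL is visible above: TailCallRuntime no longer takes an explicit argument count (kStrictEquals/kEquals lose the "2", kCompare/kCompare_Strong lose the "3"). A minimal sketch of the count-free overload, assuming the arity is now read from the runtime function's descriptor — the body below is an assumption based on this diff, not copied from macro-assembler-arm64.cc; only Runtime::FunctionForId, Runtime::Function::nargs and TailCallExternalReference are taken as existing API:

  // Hypothetical sketch: the argument count is declared once with the
  // runtime function, so call sites only name the function id.
  void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
    const Runtime::Function* function = Runtime::FunctionForId(fid);
    // nargs must match what the stub pushed before tail-calling
    // (-1 marks variable-arity runtime functions).
    TailCallExternalReference(ExternalReference(function, isolate()),
                              function->nargs, 1);
  }

With the count gone, the pushed arguments (__ Push(lhs, rhs) above) and the runtime arity can no longer drift apart at individual call sites.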
678 | 677 |
679 | 678 |
680 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 679 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
681 CPURegList saved_regs = kCallerSaved; | 680 CPURegList saved_regs = kCallerSaved; |
682 CPURegList saved_fp_regs = kCallerSavedFP; | 681 CPURegList saved_fp_regs = kCallerSavedFP; |
(...skipping 275 matching lines...)
958 // catch this corner-case, we bail out if the result was 0. (This can only | 957 // catch this corner-case, we bail out if the result was 0. (This can only |
959 // occur if the divisor is infinity or the base is zero.) | 958 // occur if the divisor is infinity or the base is zero.) |
960 __ Fcmp(result_double, 0.0); | 959 __ Fcmp(result_double, 0.0); |
961 __ B(&done, ne); | 960 __ B(&done, ne); |
962 | 961 |
963 if (exponent_type() == ON_STACK) { | 962 if (exponent_type() == ON_STACK) { |
964 // Bail out to runtime code. | 963 // Bail out to runtime code. |
965 __ Bind(&call_runtime); | 964 __ Bind(&call_runtime); |
966 // Put the arguments back on the stack. | 965 // Put the arguments back on the stack. |
967 __ Push(base_tagged, exponent_tagged); | 966 __ Push(base_tagged, exponent_tagged); |
968 __ TailCallRuntime(Runtime::kMathPowRT, 2); | 967 __ TailCallRuntime(Runtime::kMathPowRT); |
969 | 968 |
970 // Return. | 969 // Return. |
971 __ Bind(&done); | 970 __ Bind(&done); |
972 __ AllocateHeapNumber(result_tagged, &call_runtime, scratch0, scratch1, | 971 __ AllocateHeapNumber(result_tagged, &call_runtime, scratch0, scratch1, |
973 result_double); | 972 result_double); |
974 DCHECK(result_tagged.is(x0)); | 973 DCHECK(result_tagged.is(x0)); |
975 __ IncrementCounter( | 974 __ IncrementCounter( |
976 isolate()->counters()->math_pow(), 1, scratch0, scratch1); | 975 isolate()->counters()->math_pow(), 1, scratch0, scratch1); |
977 __ Ret(); | 976 __ Ret(); |
978 } else { | 977 } else { |
(...skipping 622 matching lines...)
1601 __ Bind(&done); | 1600 __ Bind(&done); |
1602 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex); | 1601 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex); |
1603 __ Ret(); | 1602 __ Ret(); |
1604 | 1603 |
1605 // Found Proxy or access check needed: Call the runtime | 1604 // Found Proxy or access check needed: Call the runtime |
1606 __ Bind(&fast_runtime_fallback); | 1605 __ Bind(&fast_runtime_fallback); |
1607 __ Push(object, function_prototype); | 1606 __ Push(object, function_prototype); |
1608 // Invalidate the instanceof cache. | 1607 // Invalidate the instanceof cache. |
1609 __ Move(scratch, Smi::FromInt(0)); | 1608 __ Move(scratch, Smi::FromInt(0)); |
1610 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex); | 1609 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex); |
1611 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2); | 1610 __ TailCallRuntime(Runtime::kHasInPrototypeChain); |
1612 | 1611 |
1613 // Slow-case: Call the %InstanceOf runtime function. | 1612 // Slow-case: Call the %InstanceOf runtime function. |
1614 __ bind(&slow_case); | 1613 __ bind(&slow_case); |
1615 __ Push(object, function); | 1614 __ Push(object, function); |
1616 __ TailCallRuntime(Runtime::kInstanceOf, 2); | 1615 __ TailCallRuntime(Runtime::kInstanceOf); |
1617 } | 1616 } |
1618 | 1617 |
1619 | 1618 |
1620 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { | 1619 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { |
1621 Register arg_count = ArgumentsAccessReadDescriptor::parameter_count(); | 1620 Register arg_count = ArgumentsAccessReadDescriptor::parameter_count(); |
1622 Register key = ArgumentsAccessReadDescriptor::index(); | 1621 Register key = ArgumentsAccessReadDescriptor::index(); |
1623 DCHECK(arg_count.is(x0)); | 1622 DCHECK(arg_count.is(x0)); |
1624 DCHECK(key.is(x1)); | 1623 DCHECK(key.is(x1)); |
1625 | 1624 |
1626 // The displacement is the offset of the last parameter (if any) relative | 1625 // The displacement is the offset of the last parameter (if any) relative |
(...skipping 30 matching lines...)
1657 // Read the argument from the stack and return it. | 1656 // Read the argument from the stack and return it. |
1658 __ Sub(x10, arg_count, key); | 1657 __ Sub(x10, arg_count, key); |
1659 __ Add(x10, local_fp, Operand::UntagSmiAndScale(x10, kPointerSizeLog2)); | 1658 __ Add(x10, local_fp, Operand::UntagSmiAndScale(x10, kPointerSizeLog2)); |
1660 __ Ldr(x0, MemOperand(x10, kDisplacement)); | 1659 __ Ldr(x0, MemOperand(x10, kDisplacement)); |
1661 __ Ret(); | 1660 __ Ret(); |
1662 | 1661 |
1663 // Slow case: handle non-smi or out-of-bounds access to arguments by calling | 1662 // Slow case: handle non-smi or out-of-bounds access to arguments by calling |
1664 // the runtime system. | 1663 // the runtime system. |
1665 __ Bind(&slow); | 1664 __ Bind(&slow); |
1666 __ Push(key); | 1665 __ Push(key); |
1667 __ TailCallRuntime(Runtime::kArguments, 1); | 1666 __ TailCallRuntime(Runtime::kArguments); |
1668 } | 1667 } |
1669 | 1668 |
1670 | 1669 |
1671 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { | 1670 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { |
1672 // x1 : function | 1671 // x1 : function |
1673 // x2 : number of parameters (tagged) | 1672 // x2 : number of parameters (tagged) |
1674 // x3 : parameters pointer | 1673 // x3 : parameters pointer |
1675 | 1674 |
1676 DCHECK(x1.is(ArgumentsAccessNewDescriptor::function())); | 1675 DCHECK(x1.is(ArgumentsAccessNewDescriptor::function())); |
1677 DCHECK(x2.is(ArgumentsAccessNewDescriptor::parameter_count())); | 1676 DCHECK(x2.is(ArgumentsAccessNewDescriptor::parameter_count())); |
(...skipping 10 matching lines...)
1688 __ B(ne, &runtime); | 1687 __ B(ne, &runtime); |
1689 | 1688 |
1690 // Patch the arguments.length and parameters pointer in the current frame. | 1689 // Patch the arguments.length and parameters pointer in the current frame. |
1691 __ Ldr(x2, | 1690 __ Ldr(x2, |
1692 MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1691 MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
1693 __ Add(x3, caller_fp, Operand::UntagSmiAndScale(x2, kPointerSizeLog2)); | 1692 __ Add(x3, caller_fp, Operand::UntagSmiAndScale(x2, kPointerSizeLog2)); |
1694 __ Add(x3, x3, StandardFrameConstants::kCallerSPOffset); | 1693 __ Add(x3, x3, StandardFrameConstants::kCallerSPOffset); |
1695 | 1694 |
1696 __ Bind(&runtime); | 1695 __ Bind(&runtime); |
1697 __ Push(x1, x3, x2); | 1696 __ Push(x1, x3, x2); |
1698 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); | 1697 __ TailCallRuntime(Runtime::kNewSloppyArguments); |
1699 } | 1698 } |
1700 | 1699 |
1701 | 1700 |
1702 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { | 1701 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
1703 // x1 : function | 1702 // x1 : function |
1704 // x2 : number of parameters (tagged) | 1703 // x2 : number of parameters (tagged) |
1705 // x3 : parameters pointer | 1704 // x3 : parameters pointer |
1706 // | 1705 // |
1707 // Returns pointer to result object in x0. | 1706 // Returns pointer to result object in x0. |
1708 | 1707 |
(...skipping 254 matching lines...)
1963 | 1962 |
1964 __ Bind(&arguments_test); | 1963 __ Bind(&arguments_test); |
1965 __ Cmp(x10, arg_count); | 1964 __ Cmp(x10, arg_count); |
1966 __ B(lt, &arguments_loop); | 1965 __ B(lt, &arguments_loop); |
1967 | 1966 |
1968 __ Ret(); | 1967 __ Ret(); |
1969 | 1968 |
1970 // Do the runtime call to allocate the arguments object. | 1969 // Do the runtime call to allocate the arguments object. |
1971 __ Bind(&runtime); | 1970 __ Bind(&runtime); |
1972 __ Push(function, recv_arg, arg_count_smi); | 1971 __ Push(function, recv_arg, arg_count_smi); |
1973 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); | 1972 __ TailCallRuntime(Runtime::kNewSloppyArguments); |
1974 } | 1973 } |
1975 | 1974 |
1976 | 1975 |
1977 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { | 1976 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { |
1978 // Return address is in lr. | 1977 // Return address is in lr. |
1979 Label slow; | 1978 Label slow; |
1980 | 1979 |
1981 Register receiver = LoadDescriptor::ReceiverRegister(); | 1980 Register receiver = LoadDescriptor::ReceiverRegister(); |
1982 Register key = LoadDescriptor::NameRegister(); | 1981 Register key = LoadDescriptor::NameRegister(); |
1983 | 1982 |
1984 // Check that the key is an array index, that is Uint32. | 1983 // Check that the key is an array index, that is Uint32. |
1985 __ TestAndBranchIfAnySet(key, kSmiTagMask | kSmiSignMask, &slow); | 1984 __ TestAndBranchIfAnySet(key, kSmiTagMask | kSmiSignMask, &slow); |
1986 | 1985 |
1987 // Everything is fine, call runtime. | 1986 // Everything is fine, call runtime. |
1988 __ Push(receiver, key); | 1987 __ Push(receiver, key); |
1989 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2); | 1988 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor); |
1990 | 1989 |
1991 __ Bind(&slow); | 1990 __ Bind(&slow); |
1992 PropertyAccessCompiler::TailCallBuiltin( | 1991 PropertyAccessCompiler::TailCallBuiltin( |
1993 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1992 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
1994 } | 1993 } |
1995 | 1994 |
1996 | 1995 |
1997 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 1996 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
1998 // x1 : function | 1997 // x1 : function |
1999 // x2 : number of parameters (tagged) | 1998 // x2 : number of parameters (tagged) |
(...skipping 111 matching lines...)
2111 __ Sub(param_count, param_count, 1); | 2110 __ Sub(param_count, param_count, 1); |
2112 __ Cbnz(param_count, &loop); | 2111 __ Cbnz(param_count, &loop); |
2113 | 2112 |
2114 // Return from stub. | 2113 // Return from stub. |
2115 __ Bind(&done); | 2114 __ Bind(&done); |
2116 __ Ret(); | 2115 __ Ret(); |
2117 | 2116 |
2118 // Do the runtime call to allocate the arguments object. | 2117 // Do the runtime call to allocate the arguments object. |
2119 __ Bind(&runtime); | 2118 __ Bind(&runtime); |
2120 __ Push(function, params, param_count_smi); | 2119 __ Push(function, params, param_count_smi); |
2121 __ TailCallRuntime(Runtime::kNewStrictArguments, 3); | 2120 __ TailCallRuntime(Runtime::kNewStrictArguments); |
2122 } | 2121 } |
2123 | 2122 |
2124 | 2123 |
2125 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { | 2124 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { |
2126 // Stack layout on entry. | 2125 // Stack layout on entry. |
2127 // jssp[0]: language mode (tagged) | 2126 // jssp[0]: language mode (tagged) |
2128 // jssp[8]: index of rest parameter (tagged) | 2127 // jssp[8]: index of rest parameter (tagged) |
2129 // jssp[16]: number of parameters (tagged) | 2128 // jssp[16]: number of parameters (tagged) |
2130 // jssp[24]: address of receiver argument | 2129 // jssp[24]: address of receiver argument |
2131 // | 2130 // |
(...skipping 28 matching lines...)
2160 | 2159 |
2161 // Patch the argument length and parameters pointer. | 2160 // Patch the argument length and parameters pointer. |
2162 __ Ldr(param_count_smi, | 2161 __ Ldr(param_count_smi, |
2163 MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2162 MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
2164 __ SmiUntag(param_count, param_count_smi); | 2163 __ SmiUntag(param_count, param_count_smi); |
2165 __ Add(x10, caller_fp, Operand(param_count, LSL, kPointerSizeLog2)); | 2164 __ Add(x10, caller_fp, Operand(param_count, LSL, kPointerSizeLog2)); |
2166 __ Add(params, x10, StandardFrameConstants::kCallerSPOffset); | 2165 __ Add(params, x10, StandardFrameConstants::kCallerSPOffset); |
2167 | 2166 |
2168 __ Bind(&runtime); | 2167 __ Bind(&runtime); |
2169 __ Push(params, param_count_smi, rest_index_smi, language_mode_smi); | 2168 __ Push(params, param_count_smi, rest_index_smi, language_mode_smi); |
2170 __ TailCallRuntime(Runtime::kNewRestParam, 4); | 2169 __ TailCallRuntime(Runtime::kNewRestParam); |
2171 } | 2170 } |
2172 | 2171 |
2173 | 2172 |
2174 void RegExpExecStub::Generate(MacroAssembler* masm) { | 2173 void RegExpExecStub::Generate(MacroAssembler* masm) { |
2175 #ifdef V8_INTERPRETED_REGEXP | 2174 #ifdef V8_INTERPRETED_REGEXP |
2176 __ TailCallRuntime(Runtime::kRegExpExec, 4); | 2175 __ TailCallRuntime(Runtime::kRegExpExec); |
2177 #else // V8_INTERPRETED_REGEXP | 2176 #else // V8_INTERPRETED_REGEXP |
2178 | 2177 |
2179 // Stack frame on entry. | 2178 // Stack frame on entry. |
2180 // jssp[0]: last_match_info (expected JSArray) | 2179 // jssp[0]: last_match_info (expected JSArray) |
2181 // jssp[8]: previous index | 2180 // jssp[8]: previous index |
2182 // jssp[16]: subject string | 2181 // jssp[16]: subject string |
2183 // jssp[24]: JSRegExp object | 2182 // jssp[24]: JSRegExp object |
2184 Label runtime; | 2183 Label runtime; |
2185 | 2184 |
2186 // Use of registers for this function. | 2185 // Use of registers for this function. |
(...skipping 420 matching lines...)
2607 // If there is no pending exception, handle that in the runtime system. | 2606 // If there is no pending exception, handle that in the runtime system. |
2608 __ Mov(x10, Operand(isolate()->factory()->the_hole_value())); | 2607 __ Mov(x10, Operand(isolate()->factory()->the_hole_value())); |
2609 __ Mov(x11, | 2608 __ Mov(x11, |
2610 Operand(ExternalReference(Isolate::kPendingExceptionAddress, | 2609 Operand(ExternalReference(Isolate::kPendingExceptionAddress, |
2611 isolate()))); | 2610 isolate()))); |
2612 __ Ldr(exception_value, MemOperand(x11)); | 2611 __ Ldr(exception_value, MemOperand(x11)); |
2613 __ Cmp(x10, exception_value); | 2612 __ Cmp(x10, exception_value); |
2614 __ B(eq, &runtime); | 2613 __ B(eq, &runtime); |
2615 | 2614 |
2616 // For exception, throw the exception again. | 2615 // For exception, throw the exception again. |
2617 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4); | 2616 __ TailCallRuntime(Runtime::kRegExpExecReThrow); |
2618 | 2617 |
2619 __ Bind(&failure); | 2618 __ Bind(&failure); |
2620 __ Mov(x0, Operand(isolate()->factory()->null_value())); | 2619 __ Mov(x0, Operand(isolate()->factory()->null_value())); |
2621 // Drop the 4 arguments of the stub from the stack. | 2620 // Drop the 4 arguments of the stub from the stack. |
2622 __ Drop(4); | 2621 __ Drop(4); |
2623 __ Ret(); | 2622 __ Ret(); |
2624 | 2623 |
2625 __ Bind(&runtime); | 2624 __ Bind(&runtime); |
2626 __ TailCallRuntime(Runtime::kRegExpExec, 4); | 2625 __ TailCallRuntime(Runtime::kRegExpExec); |
2627 | 2626 |
2628 // Deferred code for string handling. | 2627 // Deferred code for string handling. |
2629 // (6) Not a long external string? If yes, go to (8). | 2628 // (6) Not a long external string? If yes, go to (8). |
2630 __ Bind(&not_seq_nor_cons); | 2629 __ Bind(&not_seq_nor_cons); |
2631 // Compare flags are still set. | 2630 // Compare flags are still set. |
2632 __ B(ne, &not_long_external); // Go to (8). | 2631 __ B(ne, &not_long_external); // Go to (8). |
2633 | 2632 |
2634 // (7) External string. Make it, offset-wise, look like a sequential string. | 2633 // (7) External string. Make it, offset-wise, look like a sequential string. |
2635 __ Bind(&external_string); | 2634 __ Bind(&external_string); |
2636 if (masm->emit_debug_code()) { | 2635 if (masm->emit_debug_code()) { |
(...skipping 371 matching lines...)
3008 | 3007 |
3009 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 3008 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
3010 ASM_LOCATION("CallICStub[Miss]"); | 3009 ASM_LOCATION("CallICStub[Miss]"); |
3011 | 3010 |
3012 FrameScope scope(masm, StackFrame::INTERNAL); | 3011 FrameScope scope(masm, StackFrame::INTERNAL); |
3013 | 3012 |
3014 // Push the receiver and the function and feedback info. | 3013 // Push the receiver and the function and feedback info. |
3015 __ Push(x1, x2, x3); | 3014 __ Push(x1, x2, x3); |
3016 | 3015 |
3017 // Call the entry. | 3016 // Call the entry. |
3018 __ CallRuntime(Runtime::kCallIC_Miss, 3); | 3017 __ CallRuntime(Runtime::kCallIC_Miss); |
3019 | 3018 |
3020 // Move result to edi and exit the internal frame. | 3019 // Move result to edi and exit the internal frame. |
3021 __ Mov(x1, x0); | 3020 __ Mov(x1, x0); |
3022 } | 3021 } |
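CallRuntime gets the same treatment as TailCallRuntime in this CL; a before/after sketch of the call site above (the literal "3" is exactly what the CL deletes):

  // Before: the count had to stay in sync with the three values pushed
  // just above (receiver, function, feedback info).
  __ Push(x1, x2, x3);
  __ CallRuntime(Runtime::kCallIC_Miss, 3);

  // After: only the function id is named; the arity is assumed to come
  // from the runtime function's declaration.
  __ Push(x1, x2, x3);
  __ CallRuntime(Runtime::kCallIC_Miss);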
3023 | 3022 |
3024 | 3023 |
3025 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 3024 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
3026 // If the receiver is a smi trigger the non-string case. | 3025 // If the receiver is a smi trigger the non-string case. |
3027 if (check_mode_ == RECEIVER_IS_UNKNOWN) { | 3026 if (check_mode_ == RECEIVER_IS_UNKNOWN) { |
3028 __ JumpIfSmi(object_, receiver_not_string_); | 3027 __ JumpIfSmi(object_, receiver_not_string_); |
(...skipping 37 matching lines...)
3066 __ JumpIfNotHeapNumber(index_, index_not_number_); | 3065 __ JumpIfNotHeapNumber(index_, index_not_number_); |
3067 call_helper.BeforeCall(masm); | 3066 call_helper.BeforeCall(masm); |
3068 if (embed_mode == PART_OF_IC_HANDLER) { | 3067 if (embed_mode == PART_OF_IC_HANDLER) { |
3069 __ Push(LoadWithVectorDescriptor::VectorRegister(), | 3068 __ Push(LoadWithVectorDescriptor::VectorRegister(), |
3070 LoadWithVectorDescriptor::SlotRegister(), object_, index_); | 3069 LoadWithVectorDescriptor::SlotRegister(), object_, index_); |
3071 } else { | 3070 } else { |
3072 // Save object_ on the stack and pass index_ as argument for runtime call. | 3071 // Save object_ on the stack and pass index_ as argument for runtime call. |
3073 __ Push(object_, index_); | 3072 __ Push(object_, index_); |
3074 } | 3073 } |
3075 if (index_flags_ == STRING_INDEX_IS_NUMBER) { | 3074 if (index_flags_ == STRING_INDEX_IS_NUMBER) { |
3076 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); | 3075 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero); |
3077 } else { | 3076 } else { |
3078 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); | 3077 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); |
3079 // NumberToSmi discards numbers that are not exact integers. | 3078 // NumberToSmi discards numbers that are not exact integers. |
3080 __ CallRuntime(Runtime::kNumberToSmi, 1); | 3079 __ CallRuntime(Runtime::kNumberToSmi); |
3081 } | 3080 } |
3082 // Save the conversion result before the pop instructions below | 3081 // Save the conversion result before the pop instructions below |
3083 // have a chance to overwrite it. | 3082 // have a chance to overwrite it. |
3084 __ Mov(index_, x0); | 3083 __ Mov(index_, x0); |
3085 if (embed_mode == PART_OF_IC_HANDLER) { | 3084 if (embed_mode == PART_OF_IC_HANDLER) { |
3086 __ Pop(object_, LoadWithVectorDescriptor::SlotRegister(), | 3085 __ Pop(object_, LoadWithVectorDescriptor::SlotRegister(), |
3087 LoadWithVectorDescriptor::VectorRegister()); | 3086 LoadWithVectorDescriptor::VectorRegister()); |
3088 } else { | 3087 } else { |
3089 __ Pop(object_); | 3088 __ Pop(object_); |
3090 } | 3089 } |
3091 // Reload the instance type. | 3090 // Reload the instance type. |
3092 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); | 3091 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); |
3093 __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); | 3092 __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); |
3094 call_helper.AfterCall(masm); | 3093 call_helper.AfterCall(masm); |
3095 | 3094 |
3096 // If index is still not a smi, it must be out of range. | 3095 // If index is still not a smi, it must be out of range. |
3097 __ JumpIfNotSmi(index_, index_out_of_range_); | 3096 __ JumpIfNotSmi(index_, index_out_of_range_); |
3098 // Otherwise, return to the fast path. | 3097 // Otherwise, return to the fast path. |
3099 __ B(&got_smi_index_); | 3098 __ B(&got_smi_index_); |
3100 | 3099 |
3101 // Call runtime. We get here when the receiver is a string and the | 3100 // Call runtime. We get here when the receiver is a string and the |
3102 // index is a number, but the code of getting the actual character | 3101 // index is a number, but the code of getting the actual character |
3103 // is too complex (e.g., when the string needs to be flattened). | 3102 // is too complex (e.g., when the string needs to be flattened). |
3104 __ Bind(&call_runtime_); | 3103 __ Bind(&call_runtime_); |
3105 call_helper.BeforeCall(masm); | 3104 call_helper.BeforeCall(masm); |
3106 __ SmiTag(index_); | 3105 __ SmiTag(index_); |
3107 __ Push(object_, index_); | 3106 __ Push(object_, index_); |
3108 __ CallRuntime(Runtime::kStringCharCodeAtRT, 2); | 3107 __ CallRuntime(Runtime::kStringCharCodeAtRT); |
3109 __ Mov(result_, x0); | 3108 __ Mov(result_, x0); |
3110 call_helper.AfterCall(masm); | 3109 call_helper.AfterCall(masm); |
3111 __ B(&exit_); | 3110 __ B(&exit_); |
3112 | 3111 |
3113 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); | 3112 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); |
3114 } | 3113 } |
3115 | 3114 |
3116 | 3115 |
3117 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { | 3116 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { |
3118 __ JumpIfNotSmi(code_, &slow_case_); | 3117 __ JumpIfNotSmi(code_, &slow_case_); |
(...skipping 10 matching lines...)
3129 | 3128 |
3130 | 3129 |
3131 void StringCharFromCodeGenerator::GenerateSlow( | 3130 void StringCharFromCodeGenerator::GenerateSlow( |
3132 MacroAssembler* masm, | 3131 MacroAssembler* masm, |
3133 const RuntimeCallHelper& call_helper) { | 3132 const RuntimeCallHelper& call_helper) { |
3134 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); | 3133 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); |
3135 | 3134 |
3136 __ Bind(&slow_case_); | 3135 __ Bind(&slow_case_); |
3137 call_helper.BeforeCall(masm); | 3136 call_helper.BeforeCall(masm); |
3138 __ Push(code_); | 3137 __ Push(code_); |
3139 __ CallRuntime(Runtime::kStringCharFromCode, 1); | 3138 __ CallRuntime(Runtime::kStringCharFromCode); |
3140 __ Mov(result_, x0); | 3139 __ Mov(result_, x0); |
3141 call_helper.AfterCall(masm); | 3140 call_helper.AfterCall(masm); |
3142 __ B(&exit_); | 3141 __ B(&exit_); |
3143 | 3142 |
3144 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); | 3143 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); |
3145 } | 3144 } |
3146 | 3145 |
3147 | 3146 |
3148 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { | 3147 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { |
3149 // Inputs are in x0 (lhs) and x1 (rhs). | 3148 // Inputs are in x0 (lhs) and x1 (rhs). |
3150 DCHECK_EQ(CompareICState::BOOLEAN, state()); | 3149 DCHECK_EQ(CompareICState::BOOLEAN, state()); |
3151 ASM_LOCATION("CompareICStub[Booleans]"); | 3150 ASM_LOCATION("CompareICStub[Booleans]"); |
3152 Label miss; | 3151 Label miss; |
3153 | 3152 |
3154 __ CheckMap(x1, x2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); | 3153 __ CheckMap(x1, x2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); |
3155 __ CheckMap(x0, x3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); | 3154 __ CheckMap(x0, x3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); |
3156 if (op() != Token::EQ_STRICT && is_strong(strength())) { | 3155 if (op() != Token::EQ_STRICT && is_strong(strength())) { |
3157 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); | 3156 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion); |
3158 } else { | 3157 } else { |
3159 if (!Token::IsEqualityOp(op())) { | 3158 if (!Token::IsEqualityOp(op())) { |
3160 __ Ldr(x1, FieldMemOperand(x1, Oddball::kToNumberOffset)); | 3159 __ Ldr(x1, FieldMemOperand(x1, Oddball::kToNumberOffset)); |
3161 __ AssertSmi(x1); | 3160 __ AssertSmi(x1); |
3162 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset)); | 3161 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset)); |
3163 __ AssertSmi(x0); | 3162 __ AssertSmi(x0); |
3164 } | 3163 } |
3165 __ Sub(x0, x1, x0); | 3164 __ Sub(x0, x1, x0); |
3166 __ Ret(); | 3165 __ Ret(); |
3167 } | 3166 } |
(...skipping 233 matching lines...)
3401 x12); | 3400 x12); |
3402 } else { | 3401 } else { |
3403 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, x10, x11, | 3402 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, x10, x11, |
3404 x12, x13); | 3403 x12, x13); |
3405 } | 3404 } |
3406 | 3405 |
3407 // Handle more complex cases in runtime. | 3406 // Handle more complex cases in runtime. |
3408 __ Bind(&runtime); | 3407 __ Bind(&runtime); |
3409 __ Push(lhs, rhs); | 3408 __ Push(lhs, rhs); |
3410 if (equality) { | 3409 if (equality) { |
3411 __ TailCallRuntime(Runtime::kStringEquals, 2); | 3410 __ TailCallRuntime(Runtime::kStringEquals); |
3412 } else { | 3411 } else { |
3413 __ TailCallRuntime(Runtime::kStringCompare, 2); | 3412 __ TailCallRuntime(Runtime::kStringCompare); |
3414 } | 3413 } |
3415 | 3414 |
3416 __ Bind(&miss); | 3415 __ Bind(&miss); |
3417 GenerateMiss(masm); | 3416 GenerateMiss(masm); |
3418 } | 3417 } |
3419 | 3418 |
3420 | 3419 |
3421 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { | 3420 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { |
3422 DCHECK_EQ(CompareICState::RECEIVER, state()); | 3421 DCHECK_EQ(CompareICState::RECEIVER, state()); |
3423 ASM_LOCATION("CompareICStub[Receivers]"); | 3422 ASM_LOCATION("CompareICStub[Receivers]"); |
(...skipping 39 matching lines...)
3463 __ Ldr(lhs_map, FieldMemOperand(lhs, HeapObject::kMapOffset)); | 3462 __ Ldr(lhs_map, FieldMemOperand(lhs, HeapObject::kMapOffset)); |
3464 __ Cmp(rhs_map, map); | 3463 __ Cmp(rhs_map, map); |
3465 __ B(ne, &miss); | 3464 __ B(ne, &miss); |
3466 __ Cmp(lhs_map, map); | 3465 __ Cmp(lhs_map, map); |
3467 __ B(ne, &miss); | 3466 __ B(ne, &miss); |
3468 | 3467 |
3469 if (Token::IsEqualityOp(op())) { | 3468 if (Token::IsEqualityOp(op())) { |
3470 __ Sub(result, rhs, lhs); | 3469 __ Sub(result, rhs, lhs); |
3471 __ Ret(); | 3470 __ Ret(); |
3472 } else if (is_strong(strength())) { | 3471 } else if (is_strong(strength())) { |
3473 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); | 3472 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion); |
3474 } else { | 3473 } else { |
3475 Register ncr = x2; | 3474 Register ncr = x2; |
3476 if (op() == Token::LT || op() == Token::LTE) { | 3475 if (op() == Token::LT || op() == Token::LTE) { |
3477 __ Mov(ncr, Smi::FromInt(GREATER)); | 3476 __ Mov(ncr, Smi::FromInt(GREATER)); |
3478 } else { | 3477 } else { |
3479 __ Mov(ncr, Smi::FromInt(LESS)); | 3478 __ Mov(ncr, Smi::FromInt(LESS)); |
3480 } | 3479 } |
3481 __ Push(lhs, rhs, ncr); | 3480 __ Push(lhs, rhs, ncr); |
3482 __ TailCallRuntime(Runtime::kCompare, 3); | 3481 __ TailCallRuntime(Runtime::kCompare); |
3483 } | 3482 } |
3484 | 3483 |
3485 __ Bind(&miss); | 3484 __ Bind(&miss); |
3486 GenerateMiss(masm); | 3485 GenerateMiss(masm); |
3487 } | 3486 } |
3488 | 3487 |
3489 | 3488 |
3490 // This method handles the case where a compare stub had the wrong | 3489 // This method handles the case where a compare stub had the wrong |
3491 // implementation. It calls a miss handler, which re-writes the stub. All other | 3490 // implementation. It calls a miss handler, which re-writes the stub. All other |
3492 // CompareICStub::Generate* methods should fall back into this one if their | 3491 // CompareICStub::Generate* methods should fall back into this one if their |
3493 // operands were not the expected types. | 3492 // operands were not the expected types. |
3494 void CompareICStub::GenerateMiss(MacroAssembler* masm) { | 3493 void CompareICStub::GenerateMiss(MacroAssembler* masm) { |
3495 ASM_LOCATION("CompareICStub[Miss]"); | 3494 ASM_LOCATION("CompareICStub[Miss]"); |
3496 | 3495 |
3497 Register stub_entry = x11; | 3496 Register stub_entry = x11; |
3498 { | 3497 { |
3499 FrameScope scope(masm, StackFrame::INTERNAL); | 3498 FrameScope scope(masm, StackFrame::INTERNAL); |
3500 Register op = x10; | 3499 Register op = x10; |
3501 Register left = x1; | 3500 Register left = x1; |
3502 Register right = x0; | 3501 Register right = x0; |
3503 // Preserve some caller-saved registers. | 3502 // Preserve some caller-saved registers. |
3504 __ Push(x1, x0, lr); | 3503 __ Push(x1, x0, lr); |
3505 // Push the arguments. | 3504 // Push the arguments. |
3506 __ Mov(op, Smi::FromInt(this->op())); | 3505 __ Mov(op, Smi::FromInt(this->op())); |
3507 __ Push(left, right, op); | 3506 __ Push(left, right, op); |
3508 | 3507 |
3509 // Call the miss handler. This also pops the arguments. | 3508 // Call the miss handler. This also pops the arguments. |
3510 __ CallRuntime(Runtime::kCompareIC_Miss, 3); | 3509 __ CallRuntime(Runtime::kCompareIC_Miss); |
3511 | 3510 |
3512 // Compute the entry point of the rewritten stub. | 3511 // Compute the entry point of the rewritten stub. |
3513 __ Add(stub_entry, x0, Code::kHeaderSize - kHeapObjectTag); | 3512 __ Add(stub_entry, x0, Code::kHeaderSize - kHeapObjectTag); |
3514 // Restore caller-saved registers. | 3513 // Restore caller-saved registers. |
3515 __ Pop(lr, x0, x1); | 3514 __ Pop(lr, x0, x1); |
3516 } | 3515 } |
3517 | 3516 |
3518 // Tail-call to the new stub. | 3517 // Tail-call to the new stub. |
3519 __ Jump(stub_entry); | 3518 __ Jump(stub_entry); |
3520 } | 3519 } |
(...skipping 225 matching lines...)
3746 __ Add(result_length, result_length, result_length); | 3745 __ Add(result_length, result_length, result_length); |
3747 __ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong); | 3746 __ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong); |
3748 | 3747 |
3749 __ Bind(&return_x0); | 3748 __ Bind(&return_x0); |
3750 Counters* counters = isolate()->counters(); | 3749 Counters* counters = isolate()->counters(); |
3751 __ IncrementCounter(counters->sub_string_native(), 1, x3, x4); | 3750 __ IncrementCounter(counters->sub_string_native(), 1, x3, x4); |
3752 __ Drop(3); | 3751 __ Drop(3); |
3753 __ Ret(); | 3752 __ Ret(); |
3754 | 3753 |
3755 __ Bind(&runtime); | 3754 __ Bind(&runtime); |
3756 __ TailCallRuntime(Runtime::kSubString, 3); | 3755 __ TailCallRuntime(Runtime::kSubString); |
3757 | 3756 |
3758 __ bind(&single_char); | 3757 __ bind(&single_char); |
3759 // x1: result_length | 3758 // x1: result_length |
3760 // x10: input_string | 3759 // x10: input_string |
3761 // x12: input_type | 3760 // x12: input_type |
3762 // x15: from (untagged) | 3761 // x15: from (untagged) |
3763 __ SmiTag(from); | 3762 __ SmiTag(from); |
3764 StringCharAtGenerator generator(input_string, from, result_length, x0, | 3763 StringCharAtGenerator generator(input_string, from, result_length, x0, |
3765 &runtime, &runtime, &runtime, | 3764 &runtime, &runtime, &runtime, |
3766 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); | 3765 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); |
(...skipping 25 matching lines...)
3792 __ Cmp(x1, FIRST_NONSTRING_TYPE); | 3791 __ Cmp(x1, FIRST_NONSTRING_TYPE); |
3793 __ B(hs, &not_string); | 3792 __ B(hs, &not_string); |
3794 // Check if string has a cached array index. | 3793 // Check if string has a cached array index. |
3795 __ Ldr(x2, FieldMemOperand(x0, String::kHashFieldOffset)); | 3794 __ Ldr(x2, FieldMemOperand(x0, String::kHashFieldOffset)); |
3796 __ Tst(x2, Operand(String::kContainsCachedArrayIndexMask)); | 3795 __ Tst(x2, Operand(String::kContainsCachedArrayIndexMask)); |
3797 __ B(ne, &slow_string); | 3796 __ B(ne, &slow_string); |
3798 __ IndexFromHash(x2, x0); | 3797 __ IndexFromHash(x2, x0); |
3799 __ Ret(); | 3798 __ Ret(); |
3800 __ Bind(&slow_string); | 3799 __ Bind(&slow_string); |
3801 __ Push(x0); // Push argument. | 3800 __ Push(x0); // Push argument. |
3802 __ TailCallRuntime(Runtime::kStringToNumber, 1); | 3801 __ TailCallRuntime(Runtime::kStringToNumber); |
3803 __ Bind(&not_string); | 3802 __ Bind(&not_string); |
3804 | 3803 |
3805 Label not_oddball; | 3804 Label not_oddball; |
3806 __ Cmp(x1, ODDBALL_TYPE); | 3805 __ Cmp(x1, ODDBALL_TYPE); |
3807 __ B(ne, &not_oddball); | 3806 __ B(ne, &not_oddball); |
3808 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset)); | 3807 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset)); |
3809 __ Ret(); | 3808 __ Ret(); |
3810 __ Bind(&not_oddball); | 3809 __ Bind(&not_oddball); |
3811 | 3810 |
3812 __ Push(x0); // Push argument. | 3811 __ Push(x0); // Push argument. |
3813 __ TailCallRuntime(Runtime::kToNumber, 1); | 3812 __ TailCallRuntime(Runtime::kToNumber); |
3814 } | 3813 } |
3815 | 3814 |
3816 | 3815 |
3817 void ToLengthStub::Generate(MacroAssembler* masm) { | 3816 void ToLengthStub::Generate(MacroAssembler* masm) { |
3818 // The ToLength stub takes one argument in x0. | 3817 // The ToLength stub takes one argument in x0. |
3819 Label not_smi; | 3818 Label not_smi; |
3820 __ JumpIfNotSmi(x0, &not_smi); | 3819 __ JumpIfNotSmi(x0, &not_smi); |
3821 STATIC_ASSERT(kSmiTag == 0); | 3820 STATIC_ASSERT(kSmiTag == 0); |
3822 __ Tst(x0, x0); | 3821 __ Tst(x0, x0); |
3823 __ Csel(x0, x0, Operand(0), ge); | 3822 __ Csel(x0, x0, Operand(0), ge); |
3824 __ Ret(); | 3823 __ Ret(); |
3825 __ Bind(&not_smi); | 3824 __ Bind(&not_smi); |
3826 | 3825 |
3827 __ Push(x0); // Push argument. | 3826 __ Push(x0); // Push argument. |
3828 __ TailCallRuntime(Runtime::kToLength, 1); | 3827 __ TailCallRuntime(Runtime::kToLength); |
3829 } | 3828 } |
3830 | 3829 |
3831 | 3830 |
3832 void ToStringStub::Generate(MacroAssembler* masm) { | 3831 void ToStringStub::Generate(MacroAssembler* masm) { |
3833 // The ToString stub takes one argument in x0. | 3832 // The ToString stub takes one argument in x0. |
3834 Label is_number; | 3833 Label is_number; |
3835 __ JumpIfSmi(x0, &is_number); | 3834 __ JumpIfSmi(x0, &is_number); |
3836 | 3835 |
3837 Label not_string; | 3836 Label not_string; |
3838 __ JumpIfObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE, &not_string, hs); | 3837 __ JumpIfObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE, &not_string, hs); |
(...skipping 11 matching lines...)
3850 __ Bind(&not_heap_number); | 3849 __ Bind(&not_heap_number); |
3851 | 3850 |
3852 Label not_oddball; | 3851 Label not_oddball; |
3853 __ Cmp(x1, ODDBALL_TYPE); | 3852 __ Cmp(x1, ODDBALL_TYPE); |
3854 __ B(ne, &not_oddball); | 3853 __ B(ne, &not_oddball); |
3855 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToStringOffset)); | 3854 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToStringOffset)); |
3856 __ Ret(); | 3855 __ Ret(); |
3857 __ Bind(&not_oddball); | 3856 __ Bind(&not_oddball); |
3858 | 3857 |
3859 __ Push(x0); // Push argument. | 3858 __ Push(x0); // Push argument. |
3860 __ TailCallRuntime(Runtime::kToString, 1); | 3859 __ TailCallRuntime(Runtime::kToString); |
3861 } | 3860 } |
3862 | 3861 |
3863 | 3862 |
3864 void StringHelper::GenerateFlatOneByteStringEquals( | 3863 void StringHelper::GenerateFlatOneByteStringEquals( |
3865 MacroAssembler* masm, Register left, Register right, Register scratch1, | 3864 MacroAssembler* masm, Register left, Register right, Register scratch1, |
3866 Register scratch2, Register scratch3) { | 3865 Register scratch2, Register scratch3) { |
3867 DCHECK(!AreAliased(left, right, scratch1, scratch2, scratch3)); | 3866 DCHECK(!AreAliased(left, right, scratch1, scratch2, scratch3)); |
3868 Register result = x0; | 3867 Register result = x0; |
3869 Register left_length = scratch1; | 3868 Register left_length = scratch1; |
3870 Register right_length = scratch2; | 3869 Register right_length = scratch2; |
(...skipping 123 matching lines...)
3994 // Compare flat one-byte strings natively. | 3993 // Compare flat one-byte strings natively. |
3995 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, x3, | 3994 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, x3, |
3996 x4); | 3995 x4); |
3997 StringHelper::GenerateCompareFlatOneByteStrings(masm, x1, x0, x12, x13, x14, | 3996 StringHelper::GenerateCompareFlatOneByteStrings(masm, x1, x0, x12, x13, x14, |
3998 x15); | 3997 x15); |
3999 | 3998 |
4000 // Call the runtime. | 3999 // Call the runtime. |
4001 // Returns -1 (less), 0 (equal), or 1 (greater) tagged as a small integer. | 4000 // Returns -1 (less), 0 (equal), or 1 (greater) tagged as a small integer. |
4002 __ Bind(&runtime); | 4001 __ Bind(&runtime); |
4003 __ Push(x1, x0); | 4002 __ Push(x1, x0); |
4004 __ TailCallRuntime(Runtime::kStringCompare, 2); | 4003 __ TailCallRuntime(Runtime::kStringCompare); |
4005 } | 4004 } |
4006 | 4005 |
4007 | 4006 |
4008 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { | 4007 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { |
4009 // ----------- S t a t e ------------- | 4008 // ----------- S t a t e ------------- |
4010 // -- x1 : left | 4009 // -- x1 : left |
4011 // -- x0 : right | 4010 // -- x0 : right |
4012 // -- lr : return address | 4011 // -- lr : return address |
4013 // ----------------------------------- | 4012 // ----------------------------------- |
4014 | 4013 |
(...skipping 1348 matching lines...)
5363 __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); | 5362 __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); |
5364 | 5363 |
5365 // If the result is not the_hole, return. Otherwise, handle in the runtime. | 5364 // If the result is not the_hole, return. Otherwise, handle in the runtime. |
5366 __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case); | 5365 __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case); |
5367 __ Ret(); | 5366 __ Ret(); |
5368 | 5367 |
5369 // Fallback to runtime. | 5368 // Fallback to runtime. |
5370 __ Bind(&slow_case); | 5369 __ Bind(&slow_case); |
5371 __ SmiTag(slot); | 5370 __ SmiTag(slot); |
5372 __ Push(slot); | 5371 __ Push(slot); |
5373 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1); | 5372 __ TailCallRuntime(Runtime::kLoadGlobalViaContext); |
5374 } | 5373 } |
5375 | 5374 |
5376 | 5375 |
5377 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5376 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { |
5378 Register context = cp; | 5377 Register context = cp; |
5379 Register value = x0; | 5378 Register value = x0; |
5380 Register slot = x2; | 5379 Register slot = x2; |
5381 Register context_temp = x10; | 5380 Register context_temp = x10; |
5382 Register cell = x10; | 5381 Register cell = x10; |
5383 Register cell_details = x11; | 5382 Register cell_details = x11; |
(...skipping 99 matching lines...)
5483 __ Ldr(value_map, FieldMemOperand(value, HeapObject::kMapOffset)); | 5482 __ Ldr(value_map, FieldMemOperand(value, HeapObject::kMapOffset)); |
5484 __ Cmp(cell_value_map, value_map); | 5483 __ Cmp(cell_value_map, value_map); |
5485 __ B(eq, &fast_heapobject_case); | 5484 __ B(eq, &fast_heapobject_case); |
5486 | 5485 |
5487 // Fall back to the runtime. | 5486 // Fall back to the runtime. |
5488 __ Bind(&slow_case); | 5487 __ Bind(&slow_case); |
5489 __ SmiTag(slot); | 5488 __ SmiTag(slot); |
5490 __ Push(slot, value); | 5489 __ Push(slot, value); |
5491 __ TailCallRuntime(is_strict(language_mode()) | 5490 __ TailCallRuntime(is_strict(language_mode()) |
5492 ? Runtime::kStoreGlobalViaContext_Strict | 5491 ? Runtime::kStoreGlobalViaContext_Strict |
5493 : Runtime::kStoreGlobalViaContext_Sloppy, | 5492 : Runtime::kStoreGlobalViaContext_Sloppy); |
5494 2); | |
5495 } | 5493 } |
5496 | 5494 |
5497 | 5495 |
5498 // The number of register that CallApiFunctionAndReturn will need to save on | 5496 // The number of register that CallApiFunctionAndReturn will need to save on |
5499 // the stack. The space for these registers need to be allocated in the | 5497 // the stack. The space for these registers need to be allocated in the |
5500 // ExitFrame before calling CallApiFunctionAndReturn. | 5498 // ExitFrame before calling CallApiFunctionAndReturn. |
5501 static const int kCallApiFunctionSpillSpace = 4; | 5499 static const int kCallApiFunctionSpillSpace = 4; |
5502 | 5500 |
5503 | 5501 |
5504 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 5502 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
(...skipping 132 matching lines...)
5637 | 5635 |
5638 if (stack_space_operand != NULL) { | 5636 if (stack_space_operand != NULL) { |
5639 __ Drop(x2, 1); | 5637 __ Drop(x2, 1); |
5640 } else { | 5638 } else { |
5641 __ Drop(stack_space); | 5639 __ Drop(stack_space); |
5642 } | 5640 } |
5643 __ Ret(); | 5641 __ Ret(); |
5644 | 5642 |
5645 // Re-throw by promoting a scheduled exception. | 5643 // Re-throw by promoting a scheduled exception. |
5646 __ Bind(&promote_scheduled_exception); | 5644 __ Bind(&promote_scheduled_exception); |
5647 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0); | 5645 __ TailCallRuntime(Runtime::kPromoteScheduledException); |
5648 | 5646 |
5649 // HandleScope limit has changed. Delete allocated extensions. | 5647 // HandleScope limit has changed. Delete allocated extensions. |
5650 __ Bind(&delete_allocated_handles); | 5648 __ Bind(&delete_allocated_handles); |
5651 __ Str(limit_reg, MemOperand(handle_scope_base, kLimitOffset)); | 5649 __ Str(limit_reg, MemOperand(handle_scope_base, kLimitOffset)); |
5652 // Save the return value in a callee-save register. | 5650 // Save the return value in a callee-save register. |
5653 Register saved_result = x19; | 5651 Register saved_result = x19; |
5654 __ Mov(saved_result, x0); | 5652 __ Mov(saved_result, x0); |
5655 __ Mov(x0, ExternalReference::isolate_address(isolate)); | 5653 __ Mov(x0, ExternalReference::isolate_address(isolate)); |
5656 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), | 5654 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), |
5657 1); | 5655 1); |
(...skipping 180 matching lines...)
5838 MemOperand(fp, 6 * kPointerSize), NULL); | 5836 MemOperand(fp, 6 * kPointerSize), NULL); |
5839 } | 5837 } |
5840 | 5838 |
5841 | 5839 |
5842 #undef __ | 5840 #undef __ |
5843 | 5841 |
5844 } // namespace internal | 5842 } // namespace internal |
5845 } // namespace v8 | 5843 } // namespace v8 |
5846 | 5844 |
5847 #endif // V8_TARGET_ARCH_ARM64 | 5845 #endif // V8_TARGET_ARCH_ARM64 |