| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 710 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 721 t1); | 721 t1); |
| 722 } | 722 } |
| 723 // Never falls through to here. | 723 // Never falls through to here. |
| 724 | 724 |
| 725 __ bind(&slow); | 725 __ bind(&slow); |
| 726 // Prepare for call to builtin. Push object pointers, a0 (lhs) first, | 726 // Prepare for call to builtin. Push object pointers, a0 (lhs) first, |
| 727 // a1 (rhs) second. | 727 // a1 (rhs) second. |
| 728 __ Push(lhs, rhs); | 728 __ Push(lhs, rhs); |
| 729 // Figure out which native to call and setup the arguments. | 729 // Figure out which native to call and setup the arguments. |
| 730 if (cc == eq) { | 730 if (cc == eq) { |
| 731 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2); | 731 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals); |
| 732 } else { | 732 } else { |
| 733 int ncr; // NaN compare result. | 733 int ncr; // NaN compare result. |
| 734 if (cc == lt || cc == le) { | 734 if (cc == lt || cc == le) { |
| 735 ncr = GREATER; | 735 ncr = GREATER; |
| 736 } else { | 736 } else { |
| 737 DCHECK(cc == gt || cc == ge); // Remaining cases. | 737 DCHECK(cc == gt || cc == ge); // Remaining cases. |
| 738 ncr = LESS; | 738 ncr = LESS; |
| 739 } | 739 } |
| 740 __ li(a0, Operand(Smi::FromInt(ncr))); | 740 __ li(a0, Operand(Smi::FromInt(ncr))); |
| 741 __ push(a0); | 741 __ push(a0); |
| 742 | 742 |
| 743 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 743 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 744 // tagged as a small integer. | 744 // tagged as a small integer. |
| 745 __ TailCallRuntime( | 745 __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong |
| 746 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, | 746 : Runtime::kCompare); |
| 747 3); | |
| 748 } | 747 } |
| 749 | 748 |
| 750 __ bind(&miss); | 749 __ bind(&miss); |
| 751 GenerateMiss(masm); | 750 GenerateMiss(masm); |
| 752 } | 751 } |
| 753 | 752 |
| 754 | 753 |
| 755 void StoreRegistersStateStub::Generate(MacroAssembler* masm) { | 754 void StoreRegistersStateStub::Generate(MacroAssembler* masm) { |
| 756 __ mov(t9, ra); | 755 __ mov(t9, ra); |
| 757 __ pop(ra); | 756 __ pop(ra); |
| (...skipping 213 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 971 // double_exponent may not contain the exponent value if the input was a | 970 // double_exponent may not contain the exponent value if the input was a |
| 972 // smi. We set it with exponent value before bailing out. | 971 // smi. We set it with exponent value before bailing out. |
| 973 __ mtc1(exponent, single_scratch); | 972 __ mtc1(exponent, single_scratch); |
| 974 __ cvt_d_w(double_exponent, single_scratch); | 973 __ cvt_d_w(double_exponent, single_scratch); |
| 975 | 974 |
| 976 // Returning or bailing out. | 975 // Returning or bailing out. |
| 977 Counters* counters = isolate()->counters(); | 976 Counters* counters = isolate()->counters(); |
| 978 if (exponent_type() == ON_STACK) { | 977 if (exponent_type() == ON_STACK) { |
| 979 // The arguments are still on the stack. | 978 // The arguments are still on the stack. |
| 980 __ bind(&call_runtime); | 979 __ bind(&call_runtime); |
| 981 __ TailCallRuntime(Runtime::kMathPowRT, 2); | 980 __ TailCallRuntime(Runtime::kMathPowRT); |
| 982 | 981 |
| 983 // The stub is called from non-optimized code, which expects the result | 982 // The stub is called from non-optimized code, which expects the result |
| 984 // as heap number in exponent. | 983 // as heap number in exponent. |
| 985 __ bind(&done); | 984 __ bind(&done); |
| 986 __ AllocateHeapNumber( | 985 __ AllocateHeapNumber( |
| 987 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime); | 986 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime); |
| 988 __ sdc1(double_result, | 987 __ sdc1(double_result, |
| 989 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); | 988 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); |
| 990 DCHECK(heapnumber.is(v0)); | 989 DCHECK(heapnumber.is(v0)); |
| 991 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2); | 990 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2); |
| (...skipping 533 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1525 __ Ret(USE_DELAY_SLOT); | 1524 __ Ret(USE_DELAY_SLOT); |
| 1526 __ StoreRoot(result, | 1525 __ StoreRoot(result, |
| 1527 Heap::kInstanceofCacheAnswerRootIndex); // In delay slot. | 1526 Heap::kInstanceofCacheAnswerRootIndex); // In delay slot. |
| 1528 | 1527 |
| 1529 // Found Proxy or access check needed: Call the runtime | 1528 // Found Proxy or access check needed: Call the runtime |
| 1530 __ bind(&fast_runtime_fallback); | 1529 __ bind(&fast_runtime_fallback); |
| 1531 __ Push(object, function_prototype); | 1530 __ Push(object, function_prototype); |
| 1532 // Invalidate the instanceof cache. | 1531 // Invalidate the instanceof cache. |
| 1533 DCHECK(Smi::FromInt(0) == 0); | 1532 DCHECK(Smi::FromInt(0) == 0); |
| 1534 __ StoreRoot(zero_reg, Heap::kInstanceofCacheFunctionRootIndex); | 1533 __ StoreRoot(zero_reg, Heap::kInstanceofCacheFunctionRootIndex); |
| 1535 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2); | 1534 __ TailCallRuntime(Runtime::kHasInPrototypeChain); |
| 1536 | 1535 |
| 1537 // Slow-case: Call the %InstanceOf runtime function. | 1536 // Slow-case: Call the %InstanceOf runtime function. |
| 1538 __ bind(&slow_case); | 1537 __ bind(&slow_case); |
| 1539 __ Push(object, function); | 1538 __ Push(object, function); |
| 1540 __ TailCallRuntime(Runtime::kInstanceOf, 2); | 1539 __ TailCallRuntime(Runtime::kInstanceOf); |
| 1541 } | 1540 } |
| 1542 | 1541 |
| 1543 | 1542 |
| 1544 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { | 1543 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { |
| 1545 Label miss; | 1544 Label miss; |
| 1546 Register receiver = LoadDescriptor::ReceiverRegister(); | 1545 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 1547 // Ensure that the vector and slot registers won't be clobbered before | 1546 // Ensure that the vector and slot registers won't be clobbered before |
| 1548 // calling the miss handler. | 1547 // calling the miss handler. |
| 1549 DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::VectorRegister(), | 1548 DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::VectorRegister(), |
| 1550 LoadWithVectorDescriptor::SlotRegister())); | 1549 LoadWithVectorDescriptor::SlotRegister())); |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1601 __ subu(a3, a0, a1); | 1600 __ subu(a3, a0, a1); |
| 1602 __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize); | 1601 __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize); |
| 1603 __ Addu(a3, a2, Operand(t3)); | 1602 __ Addu(a3, a2, Operand(t3)); |
| 1604 __ Ret(USE_DELAY_SLOT); | 1603 __ Ret(USE_DELAY_SLOT); |
| 1605 __ lw(v0, MemOperand(a3, kDisplacement)); | 1604 __ lw(v0, MemOperand(a3, kDisplacement)); |
| 1606 | 1605 |
| 1607 // Slow-case: Handle non-smi or out-of-bounds access to arguments | 1606 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
| 1608 // by calling the runtime system. | 1607 // by calling the runtime system. |
| 1609 __ bind(&slow); | 1608 __ bind(&slow); |
| 1610 __ push(a1); | 1609 __ push(a1); |
| 1611 __ TailCallRuntime(Runtime::kArguments, 1); | 1610 __ TailCallRuntime(Runtime::kArguments); |
| 1612 } | 1611 } |
| 1613 | 1612 |
| 1614 | 1613 |
| 1615 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { | 1614 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { |
| 1616 // a1 : function | 1615 // a1 : function |
| 1617 // a2 : number of parameters (tagged) | 1616 // a2 : number of parameters (tagged) |
| 1618 // a3 : parameters pointer | 1617 // a3 : parameters pointer |
| 1619 | 1618 |
| 1620 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); | 1619 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); |
| 1621 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); | 1620 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); |
| 1622 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); | 1621 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); |
| 1623 | 1622 |
| 1624 // Check if the calling frame is an arguments adaptor frame. | 1623 // Check if the calling frame is an arguments adaptor frame. |
| 1625 Label runtime; | 1624 Label runtime; |
| 1626 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1625 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 1627 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); | 1626 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); |
| 1628 __ Branch(&runtime, ne, a0, | 1627 __ Branch(&runtime, ne, a0, |
| 1629 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1628 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1630 | 1629 |
| 1631 // Patch the arguments.length and the parameters pointer in the current frame. | 1630 // Patch the arguments.length and the parameters pointer in the current frame. |
| 1632 __ lw(a2, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1631 __ lw(a2, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1633 __ sll(t3, a2, 1); | 1632 __ sll(t3, a2, 1); |
| 1634 __ Addu(t0, t0, Operand(t3)); | 1633 __ Addu(t0, t0, Operand(t3)); |
| 1635 __ addiu(a3, t0, StandardFrameConstants::kCallerSPOffset); | 1634 __ addiu(a3, t0, StandardFrameConstants::kCallerSPOffset); |
| 1636 | 1635 |
| 1637 __ bind(&runtime); | 1636 __ bind(&runtime); |
| 1638 __ Push(a1, a3, a2); | 1637 __ Push(a1, a3, a2); |
| 1639 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); | 1638 __ TailCallRuntime(Runtime::kNewSloppyArguments); |
| 1640 } | 1639 } |
| 1641 | 1640 |
| 1642 | 1641 |
| 1643 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { | 1642 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
| 1644 // a1 : function | 1643 // a1 : function |
| 1645 // a2 : number of parameters (tagged) | 1644 // a2 : number of parameters (tagged) |
| 1646 // a3 : parameters pointer | 1645 // a3 : parameters pointer |
| 1647 // Registers used over whole function: | 1646 // Registers used over whole function: |
| 1648 // t1 : arguments count (tagged) | 1647 // t1 : arguments count (tagged) |
| 1649 // t2 : mapped parameter count (tagged) | 1648 // t2 : mapped parameter count (tagged) |
| (...skipping 193 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1843 __ bind(&arguments_test); | 1842 __ bind(&arguments_test); |
| 1844 __ Branch(&arguments_loop, lt, t2, Operand(t1)); | 1843 __ Branch(&arguments_loop, lt, t2, Operand(t1)); |
| 1845 | 1844 |
| 1846 // Return. | 1845 // Return. |
| 1847 __ Ret(); | 1846 __ Ret(); |
| 1848 | 1847 |
| 1849 // Do the runtime call to allocate the arguments object. | 1848 // Do the runtime call to allocate the arguments object. |
| 1850 // t1 = argument count (tagged) | 1849 // t1 = argument count (tagged) |
| 1851 __ bind(&runtime); | 1850 __ bind(&runtime); |
| 1852 __ Push(a1, a3, t1); | 1851 __ Push(a1, a3, t1); |
| 1853 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); | 1852 __ TailCallRuntime(Runtime::kNewSloppyArguments); |
| 1854 } | 1853 } |
| 1855 | 1854 |
| 1856 | 1855 |
| 1857 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { | 1856 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { |
| 1858 // Return address is in ra. | 1857 // Return address is in ra. |
| 1859 Label slow; | 1858 Label slow; |
| 1860 | 1859 |
| 1861 Register receiver = LoadDescriptor::ReceiverRegister(); | 1860 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 1862 Register key = LoadDescriptor::NameRegister(); | 1861 Register key = LoadDescriptor::NameRegister(); |
| 1863 | 1862 |
| 1864 // Check that the key is an array index, that is Uint32. | 1863 // Check that the key is an array index, that is Uint32. |
| 1865 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); | 1864 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); |
| 1866 __ Branch(&slow, ne, t0, Operand(zero_reg)); | 1865 __ Branch(&slow, ne, t0, Operand(zero_reg)); |
| 1867 | 1866 |
| 1868 // Everything is fine, call runtime. | 1867 // Everything is fine, call runtime. |
| 1869 __ Push(receiver, key); // Receiver, key. | 1868 __ Push(receiver, key); // Receiver, key. |
| 1870 | 1869 |
| 1871 // Perform tail call to the entry. | 1870 // Perform tail call to the entry. |
| 1872 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2); | 1871 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor); |
| 1873 | 1872 |
| 1874 __ bind(&slow); | 1873 __ bind(&slow); |
| 1875 PropertyAccessCompiler::TailCallBuiltin( | 1874 PropertyAccessCompiler::TailCallBuiltin( |
| 1876 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1875 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
| 1877 } | 1876 } |
| 1878 | 1877 |
| 1879 | 1878 |
| 1880 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 1879 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
| 1881 // a1 : function | 1880 // a1 : function |
| 1882 // a2 : number of parameters (tagged) | 1881 // a2 : number of parameters (tagged) |
| (...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1956 __ Subu(a2, a2, Operand(1)); | 1955 __ Subu(a2, a2, Operand(1)); |
| 1957 __ Branch(&loop, ne, a2, Operand(zero_reg)); | 1956 __ Branch(&loop, ne, a2, Operand(zero_reg)); |
| 1958 | 1957 |
| 1959 // Return. | 1958 // Return. |
| 1960 __ bind(&done); | 1959 __ bind(&done); |
| 1961 __ Ret(); | 1960 __ Ret(); |
| 1962 | 1961 |
| 1963 // Do the runtime call to allocate the arguments object. | 1962 // Do the runtime call to allocate the arguments object. |
| 1964 __ bind(&runtime); | 1963 __ bind(&runtime); |
| 1965 __ Push(a1, a3, a2); | 1964 __ Push(a1, a3, a2); |
| 1966 __ TailCallRuntime(Runtime::kNewStrictArguments, 3); | 1965 __ TailCallRuntime(Runtime::kNewStrictArguments); |
| 1967 } | 1966 } |
| 1968 | 1967 |
| 1969 | 1968 |
| 1970 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { | 1969 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { |
| 1971 // sp[0] : language mode | 1970 // sp[0] : language mode |
| 1972 // sp[4] : index of rest parameter | 1971 // sp[4] : index of rest parameter |
| 1973 // sp[8] : number of parameters | 1972 // sp[8] : number of parameters |
| 1974 // sp[12] : receiver displacement | 1973 // sp[12] : receiver displacement |
| 1975 // Check if the calling frame is an arguments adaptor frame. | 1974 // Check if the calling frame is an arguments adaptor frame. |
| 1976 | 1975 |
| 1977 Label runtime; | 1976 Label runtime; |
| 1978 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1977 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 1979 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); | 1978 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); |
| 1980 __ Branch(&runtime, ne, a3, | 1979 __ Branch(&runtime, ne, a3, |
| 1981 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1980 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1982 | 1981 |
| 1983 // Patch the arguments.length and the parameters pointer. | 1982 // Patch the arguments.length and the parameters pointer. |
| 1984 __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1983 __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1985 __ sw(a1, MemOperand(sp, 2 * kPointerSize)); | 1984 __ sw(a1, MemOperand(sp, 2 * kPointerSize)); |
| 1986 __ sll(at, a1, kPointerSizeLog2 - kSmiTagSize); | 1985 __ sll(at, a1, kPointerSizeLog2 - kSmiTagSize); |
| 1987 __ Addu(a3, a2, Operand(at)); | 1986 __ Addu(a3, a2, Operand(at)); |
| 1988 | 1987 |
| 1989 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); | 1988 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 1990 __ sw(a3, MemOperand(sp, 3 * kPointerSize)); | 1989 __ sw(a3, MemOperand(sp, 3 * kPointerSize)); |
| 1991 | 1990 |
| 1992 // Do the runtime call to allocate the arguments object. | 1991 // Do the runtime call to allocate the arguments object. |
| 1993 __ bind(&runtime); | 1992 __ bind(&runtime); |
| 1994 __ TailCallRuntime(Runtime::kNewRestParam, 4); | 1993 __ TailCallRuntime(Runtime::kNewRestParam); |
| 1995 } | 1994 } |
| 1996 | 1995 |
| 1997 | 1996 |
| 1998 void RegExpExecStub::Generate(MacroAssembler* masm) { | 1997 void RegExpExecStub::Generate(MacroAssembler* masm) { |
| 1999 // Just jump directly to runtime if native RegExp is not selected at compile | 1998 // Just jump directly to runtime if native RegExp is not selected at compile |
| 2000 // time or if regexp entry in generated code is turned off runtime switch or | 1999 // time or if regexp entry in generated code is turned off runtime switch or |
| 2001 // at compilation. | 2000 // at compilation. |
| 2002 #ifdef V8_INTERPRETED_REGEXP | 2001 #ifdef V8_INTERPRETED_REGEXP |
| 2003 __ TailCallRuntime(Runtime::kRegExpExec, 4); | 2002 __ TailCallRuntime(Runtime::kRegExpExec); |
| 2004 #else // V8_INTERPRETED_REGEXP | 2003 #else // V8_INTERPRETED_REGEXP |
| 2005 | 2004 |
| 2006 // Stack frame on entry. | 2005 // Stack frame on entry. |
| 2007 // sp[0]: last_match_info (expected JSArray) | 2006 // sp[0]: last_match_info (expected JSArray) |
| 2008 // sp[4]: previous index | 2007 // sp[4]: previous index |
| 2009 // sp[8]: subject string | 2008 // sp[8]: subject string |
| 2010 // sp[12]: JSRegExp object | 2009 // sp[12]: JSRegExp object |
| 2011 | 2010 |
| 2012 const int kLastMatchInfoOffset = 0 * kPointerSize; | 2011 const int kLastMatchInfoOffset = 0 * kPointerSize; |
| 2013 const int kPreviousIndexOffset = 1 * kPointerSize; | 2012 const int kPreviousIndexOffset = 1 * kPointerSize; |
| (...skipping 264 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2278 // stack overflow (on the backtrack stack) was detected in RegExp code but | 2277 // stack overflow (on the backtrack stack) was detected in RegExp code but |
| 2279 // haven't created the exception yet. Handle that in the runtime system. | 2278 // haven't created the exception yet. Handle that in the runtime system. |
| 2280 // TODO(592): Rerunning the RegExp to get the stack overflow exception. | 2279 // TODO(592): Rerunning the RegExp to get the stack overflow exception. |
| 2281 __ li(a1, Operand(isolate()->factory()->the_hole_value())); | 2280 __ li(a1, Operand(isolate()->factory()->the_hole_value())); |
| 2282 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, | 2281 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, |
| 2283 isolate()))); | 2282 isolate()))); |
| 2284 __ lw(v0, MemOperand(a2, 0)); | 2283 __ lw(v0, MemOperand(a2, 0)); |
| 2285 __ Branch(&runtime, eq, v0, Operand(a1)); | 2284 __ Branch(&runtime, eq, v0, Operand(a1)); |
| 2286 | 2285 |
| 2287 // For exception, throw the exception again. | 2286 // For exception, throw the exception again. |
| 2288 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4); | 2287 __ TailCallRuntime(Runtime::kRegExpExecReThrow); |
| 2289 | 2288 |
| 2290 __ bind(&failure); | 2289 __ bind(&failure); |
| 2291 // For failure and exception return null. | 2290 // For failure and exception return null. |
| 2292 __ li(v0, Operand(isolate()->factory()->null_value())); | 2291 __ li(v0, Operand(isolate()->factory()->null_value())); |
| 2293 __ DropAndRet(4); | 2292 __ DropAndRet(4); |
| 2294 | 2293 |
| 2295 // Process the result from the native regexp code. | 2294 // Process the result from the native regexp code. |
| 2296 __ bind(&success); | 2295 __ bind(&success); |
| 2297 __ lw(a1, | 2296 __ lw(a1, |
| 2298 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset)); | 2297 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset)); |
| (...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2374 __ addiu(a0, a0, kPointerSize); // In branch delay slot. | 2373 __ addiu(a0, a0, kPointerSize); // In branch delay slot. |
| 2375 | 2374 |
| 2376 __ bind(&done); | 2375 __ bind(&done); |
| 2377 | 2376 |
| 2378 // Return last match info. | 2377 // Return last match info. |
| 2379 __ lw(v0, MemOperand(sp, kLastMatchInfoOffset)); | 2378 __ lw(v0, MemOperand(sp, kLastMatchInfoOffset)); |
| 2380 __ DropAndRet(4); | 2379 __ DropAndRet(4); |
| 2381 | 2380 |
| 2382 // Do the runtime call to execute the regexp. | 2381 // Do the runtime call to execute the regexp. |
| 2383 __ bind(&runtime); | 2382 __ bind(&runtime); |
| 2384 __ TailCallRuntime(Runtime::kRegExpExec, 4); | 2383 __ TailCallRuntime(Runtime::kRegExpExec); |
| 2385 | 2384 |
| 2386 // Deferred code for string handling. | 2385 // Deferred code for string handling. |
| 2387 // (6) Not a long external string? If yes, go to (8). | 2386 // (6) Not a long external string? If yes, go to (8). |
| 2388 __ bind(¬_seq_nor_cons); | 2387 __ bind(¬_seq_nor_cons); |
| 2389 // Go to (8). | 2388 // Go to (8). |
| 2390 __ Branch(¬_long_external, gt, a1, Operand(kExternalStringTag)); | 2389 __ Branch(¬_long_external, gt, a1, Operand(kExternalStringTag)); |
| 2391 | 2390 |
| 2392 // (7) External string. Make it, offset-wise, look like a sequential string. | 2391 // (7) External string. Make it, offset-wise, look like a sequential string. |
| 2393 __ bind(&external_string); | 2392 __ bind(&external_string); |
| 2394 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); | 2393 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); |
| (...skipping 345 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2740 } | 2739 } |
| 2741 | 2740 |
| 2742 | 2741 |
| 2743 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2742 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
| 2744 FrameScope scope(masm, StackFrame::INTERNAL); | 2743 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2745 | 2744 |
| 2746 // Push the receiver and the function and feedback info. | 2745 // Push the receiver and the function and feedback info. |
| 2747 __ Push(a1, a2, a3); | 2746 __ Push(a1, a2, a3); |
| 2748 | 2747 |
| 2749 // Call the entry. | 2748 // Call the entry. |
| 2750 __ CallRuntime(Runtime::kCallIC_Miss, 3); | 2749 __ CallRuntime(Runtime::kCallIC_Miss); |
| 2751 | 2750 |
| 2752 // Move result to a1 and exit the internal frame. | 2751 // Move result to a1 and exit the internal frame. |
| 2753 __ mov(a1, v0); | 2752 __ mov(a1, v0); |
| 2754 } | 2753 } |
| 2755 | 2754 |
| 2756 | 2755 |
| 2757 // StringCharCodeAtGenerator. | 2756 // StringCharCodeAtGenerator. |
| 2758 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 2757 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
| 2759 DCHECK(!t0.is(index_)); | 2758 DCHECK(!t0.is(index_)); |
| 2760 DCHECK(!t0.is(result_)); | 2759 DCHECK(!t0.is(result_)); |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2808 DONT_DO_SMI_CHECK); | 2807 DONT_DO_SMI_CHECK); |
| 2809 call_helper.BeforeCall(masm); | 2808 call_helper.BeforeCall(masm); |
| 2810 // Consumed by runtime conversion function: | 2809 // Consumed by runtime conversion function: |
| 2811 if (embed_mode == PART_OF_IC_HANDLER) { | 2810 if (embed_mode == PART_OF_IC_HANDLER) { |
| 2812 __ Push(LoadWithVectorDescriptor::VectorRegister(), | 2811 __ Push(LoadWithVectorDescriptor::VectorRegister(), |
| 2813 LoadWithVectorDescriptor::SlotRegister(), object_, index_); | 2812 LoadWithVectorDescriptor::SlotRegister(), object_, index_); |
| 2814 } else { | 2813 } else { |
| 2815 __ Push(object_, index_); | 2814 __ Push(object_, index_); |
| 2816 } | 2815 } |
| 2817 if (index_flags_ == STRING_INDEX_IS_NUMBER) { | 2816 if (index_flags_ == STRING_INDEX_IS_NUMBER) { |
| 2818 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); | 2817 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero); |
| 2819 } else { | 2818 } else { |
| 2820 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); | 2819 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); |
| 2821 // NumberToSmi discards numbers that are not exact integers. | 2820 // NumberToSmi discards numbers that are not exact integers. |
| 2822 __ CallRuntime(Runtime::kNumberToSmi, 1); | 2821 __ CallRuntime(Runtime::kNumberToSmi); |
| 2823 } | 2822 } |
| 2824 | 2823 |
| 2825 // Save the conversion result before the pop instructions below | 2824 // Save the conversion result before the pop instructions below |
| 2826 // have a chance to overwrite it. | 2825 // have a chance to overwrite it. |
| 2827 __ Move(index_, v0); | 2826 __ Move(index_, v0); |
| 2828 if (embed_mode == PART_OF_IC_HANDLER) { | 2827 if (embed_mode == PART_OF_IC_HANDLER) { |
| 2829 __ Pop(LoadWithVectorDescriptor::VectorRegister(), | 2828 __ Pop(LoadWithVectorDescriptor::VectorRegister(), |
| 2830 LoadWithVectorDescriptor::SlotRegister(), object_); | 2829 LoadWithVectorDescriptor::SlotRegister(), object_); |
| 2831 } else { | 2830 } else { |
| 2832 __ pop(object_); | 2831 __ pop(object_); |
| 2833 } | 2832 } |
| 2834 // Reload the instance type. | 2833 // Reload the instance type. |
| 2835 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); | 2834 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); |
| 2836 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); | 2835 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); |
| 2837 call_helper.AfterCall(masm); | 2836 call_helper.AfterCall(masm); |
| 2838 // If index is still not a smi, it must be out of range. | 2837 // If index is still not a smi, it must be out of range. |
| 2839 __ JumpIfNotSmi(index_, index_out_of_range_); | 2838 __ JumpIfNotSmi(index_, index_out_of_range_); |
| 2840 // Otherwise, return to the fast path. | 2839 // Otherwise, return to the fast path. |
| 2841 __ Branch(&got_smi_index_); | 2840 __ Branch(&got_smi_index_); |
| 2842 | 2841 |
| 2843 // Call runtime. We get here when the receiver is a string and the | 2842 // Call runtime. We get here when the receiver is a string and the |
| 2844 // index is a number, but the code of getting the actual character | 2843 // index is a number, but the code of getting the actual character |
| 2845 // is too complex (e.g., when the string needs to be flattened). | 2844 // is too complex (e.g., when the string needs to be flattened). |
| 2846 __ bind(&call_runtime_); | 2845 __ bind(&call_runtime_); |
| 2847 call_helper.BeforeCall(masm); | 2846 call_helper.BeforeCall(masm); |
| 2848 __ sll(index_, index_, kSmiTagSize); | 2847 __ sll(index_, index_, kSmiTagSize); |
| 2849 __ Push(object_, index_); | 2848 __ Push(object_, index_); |
| 2850 __ CallRuntime(Runtime::kStringCharCodeAtRT, 2); | 2849 __ CallRuntime(Runtime::kStringCharCodeAtRT); |
| 2851 | 2850 |
| 2852 __ Move(result_, v0); | 2851 __ Move(result_, v0); |
| 2853 | 2852 |
| 2854 call_helper.AfterCall(masm); | 2853 call_helper.AfterCall(masm); |
| 2855 __ jmp(&exit_); | 2854 __ jmp(&exit_); |
| 2856 | 2855 |
| 2857 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); | 2856 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); |
| 2858 } | 2857 } |
| 2859 | 2858 |
| 2860 | 2859 |
| (...skipping 26 matching lines...) Expand all Loading... |
| 2887 | 2886 |
| 2888 | 2887 |
| 2889 void StringCharFromCodeGenerator::GenerateSlow( | 2888 void StringCharFromCodeGenerator::GenerateSlow( |
| 2890 MacroAssembler* masm, | 2889 MacroAssembler* masm, |
| 2891 const RuntimeCallHelper& call_helper) { | 2890 const RuntimeCallHelper& call_helper) { |
| 2892 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); | 2891 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); |
| 2893 | 2892 |
| 2894 __ bind(&slow_case_); | 2893 __ bind(&slow_case_); |
| 2895 call_helper.BeforeCall(masm); | 2894 call_helper.BeforeCall(masm); |
| 2896 __ push(code_); | 2895 __ push(code_); |
| 2897 __ CallRuntime(Runtime::kStringCharFromCode, 1); | 2896 __ CallRuntime(Runtime::kStringCharFromCode); |
| 2898 __ Move(result_, v0); | 2897 __ Move(result_, v0); |
| 2899 | 2898 |
| 2900 call_helper.AfterCall(masm); | 2899 call_helper.AfterCall(masm); |
| 2901 __ Branch(&exit_); | 2900 __ Branch(&exit_); |
| 2902 | 2901 |
| 2903 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); | 2902 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); |
| 2904 } | 2903 } |
| 2905 | 2904 |
| 2906 | 2905 |
| 2907 enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 }; | 2906 enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 }; |
| (...skipping 240 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3148 StringHelper::GenerateCopyCharacters( | 3147 StringHelper::GenerateCopyCharacters( |
| 3149 masm, a1, t1, a2, a3, String::TWO_BYTE_ENCODING); | 3148 masm, a1, t1, a2, a3, String::TWO_BYTE_ENCODING); |
| 3150 | 3149 |
| 3151 __ bind(&return_v0); | 3150 __ bind(&return_v0); |
| 3152 Counters* counters = isolate()->counters(); | 3151 Counters* counters = isolate()->counters(); |
| 3153 __ IncrementCounter(counters->sub_string_native(), 1, a3, t0); | 3152 __ IncrementCounter(counters->sub_string_native(), 1, a3, t0); |
| 3154 __ DropAndRet(3); | 3153 __ DropAndRet(3); |
| 3155 | 3154 |
| 3156 // Just jump to runtime to create the sub string. | 3155 // Just jump to runtime to create the sub string. |
| 3157 __ bind(&runtime); | 3156 __ bind(&runtime); |
| 3158 __ TailCallRuntime(Runtime::kSubString, 3); | 3157 __ TailCallRuntime(Runtime::kSubString); |
| 3159 | 3158 |
| 3160 __ bind(&single_char); | 3159 __ bind(&single_char); |
| 3161 // v0: original string | 3160 // v0: original string |
| 3162 // a1: instance type | 3161 // a1: instance type |
| 3163 // a2: length | 3162 // a2: length |
| 3164 // a3: from index (untagged) | 3163 // a3: from index (untagged) |
| 3165 __ SmiTag(a3, a3); | 3164 __ SmiTag(a3, a3); |
| 3166 StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime, | 3165 StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime, |
| 3167 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); | 3166 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); |
| 3168 generator.GenerateFast(masm); | 3167 generator.GenerateFast(masm); |
| (...skipping 24 matching lines...) Expand all Loading... |
| 3193 __ Branch(¬_string, hs, a1, Operand(FIRST_NONSTRING_TYPE)); | 3192 __ Branch(¬_string, hs, a1, Operand(FIRST_NONSTRING_TYPE)); |
| 3194 // Check if string has a cached array index. | 3193 // Check if string has a cached array index. |
| 3195 __ lw(a2, FieldMemOperand(a0, String::kHashFieldOffset)); | 3194 __ lw(a2, FieldMemOperand(a0, String::kHashFieldOffset)); |
| 3196 __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask)); | 3195 __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask)); |
| 3197 __ Branch(&slow_string, ne, at, Operand(zero_reg)); | 3196 __ Branch(&slow_string, ne, at, Operand(zero_reg)); |
| 3198 __ IndexFromHash(a2, a0); | 3197 __ IndexFromHash(a2, a0); |
| 3199 __ Ret(USE_DELAY_SLOT); | 3198 __ Ret(USE_DELAY_SLOT); |
| 3200 __ mov(v0, a0); | 3199 __ mov(v0, a0); |
| 3201 __ bind(&slow_string); | 3200 __ bind(&slow_string); |
| 3202 __ push(a0); // Push argument. | 3201 __ push(a0); // Push argument. |
| 3203 __ TailCallRuntime(Runtime::kStringToNumber, 1); | 3202 __ TailCallRuntime(Runtime::kStringToNumber); |
| 3204 __ bind(&not_string); | 3203 __ bind(&not_string); |
| 3205 | 3204 |
| 3206 Label not_oddball; | 3205 Label not_oddball; |
| 3207 __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE)); | 3206 __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE)); |
| 3208 __ Ret(USE_DELAY_SLOT); | 3207 __ Ret(USE_DELAY_SLOT); |
| 3209 __ lw(v0, FieldMemOperand(a0, Oddball::kToNumberOffset)); | 3208 __ lw(v0, FieldMemOperand(a0, Oddball::kToNumberOffset)); |
| 3210 __ bind(&not_oddball); | 3209 __ bind(&not_oddball); |
| 3211 | 3210 |
| 3212 __ push(a0); // Push argument. | 3211 __ push(a0); // Push argument. |
| 3213 __ TailCallRuntime(Runtime::kToNumber, 1); | 3212 __ TailCallRuntime(Runtime::kToNumber); |
| 3214 } | 3213 } |
| 3215 | 3214 |
| 3216 | 3215 |
| 3217 void ToLengthStub::Generate(MacroAssembler* masm) { | 3216 void ToLengthStub::Generate(MacroAssembler* masm) { |
| 3218 // The ToLength stub takes on argument in a0. | 3217 // The ToLength stub takes on argument in a0. |
| 3219 Label not_smi, positive_smi; | 3218 Label not_smi, positive_smi; |
| 3220 __ JumpIfNotSmi(a0, &not_smi); | 3219 __ JumpIfNotSmi(a0, &not_smi); |
| 3221 STATIC_ASSERT(kSmiTag == 0); | 3220 STATIC_ASSERT(kSmiTag == 0); |
| 3222 __ Branch(&positive_smi, ge, a0, Operand(zero_reg)); | 3221 __ Branch(&positive_smi, ge, a0, Operand(zero_reg)); |
| 3223 __ mov(a0, zero_reg); | 3222 __ mov(a0, zero_reg); |
| 3224 __ bind(&positive_smi); | 3223 __ bind(&positive_smi); |
| 3225 __ Ret(USE_DELAY_SLOT); | 3224 __ Ret(USE_DELAY_SLOT); |
| 3226 __ mov(v0, a0); | 3225 __ mov(v0, a0); |
| 3227 __ bind(&not_smi); | 3226 __ bind(&not_smi); |
| 3228 | 3227 |
| 3229 __ push(a0); // Push argument. | 3228 __ push(a0); // Push argument. |
| 3230 __ TailCallRuntime(Runtime::kToLength, 1); | 3229 __ TailCallRuntime(Runtime::kToLength); |
| 3231 } | 3230 } |
| 3232 | 3231 |
| 3233 | 3232 |
| 3234 void ToStringStub::Generate(MacroAssembler* masm) { | 3233 void ToStringStub::Generate(MacroAssembler* masm) { |
| 3235 // The ToString stub takes on argument in a0. | 3234 // The ToString stub takes on argument in a0. |
| 3236 Label is_number; | 3235 Label is_number; |
| 3237 __ JumpIfSmi(a0, &is_number); | 3236 __ JumpIfSmi(a0, &is_number); |
| 3238 | 3237 |
| 3239 Label not_string; | 3238 Label not_string; |
| 3240 __ GetObjectType(a0, a1, a1); | 3239 __ GetObjectType(a0, a1, a1); |
| (...skipping 11 matching lines...) Expand all Loading... |
| 3252 __ TailCallStub(&stub); | 3251 __ TailCallStub(&stub); |
| 3253 __ bind(&not_heap_number); | 3252 __ bind(&not_heap_number); |
| 3254 | 3253 |
| 3255 Label not_oddball; | 3254 Label not_oddball; |
| 3256 __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE)); | 3255 __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE)); |
| 3257 __ Ret(USE_DELAY_SLOT); | 3256 __ Ret(USE_DELAY_SLOT); |
| 3258 __ lw(v0, FieldMemOperand(a0, Oddball::kToStringOffset)); | 3257 __ lw(v0, FieldMemOperand(a0, Oddball::kToStringOffset)); |
| 3259 __ bind(&not_oddball); | 3258 __ bind(&not_oddball); |
| 3260 | 3259 |
| 3261 __ push(a0); // Push argument. | 3260 __ push(a0); // Push argument. |
| 3262 __ TailCallRuntime(Runtime::kToString, 1); | 3261 __ TailCallRuntime(Runtime::kToString); |
| 3263 } | 3262 } |
| 3264 | 3263 |
| 3265 | 3264 |
| 3266 void StringHelper::GenerateFlatOneByteStringEquals( | 3265 void StringHelper::GenerateFlatOneByteStringEquals( |
| 3267 MacroAssembler* masm, Register left, Register right, Register scratch1, | 3266 MacroAssembler* masm, Register left, Register right, Register scratch1, |
| 3268 Register scratch2, Register scratch3) { | 3267 Register scratch2, Register scratch3) { |
| 3269 Register length = scratch1; | 3268 Register length = scratch1; |
| 3270 | 3269 |
| 3271 // Compare lengths. | 3270 // Compare lengths. |
| 3272 Label strings_not_equal, check_zero_length; | 3271 Label strings_not_equal, check_zero_length; |
| (...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3390 Label runtime; | 3389 Label runtime; |
| 3391 __ JumpIfNotBothSequentialOneByteStrings(a1, a0, a2, a3, &runtime); | 3390 __ JumpIfNotBothSequentialOneByteStrings(a1, a0, a2, a3, &runtime); |
| 3392 | 3391 |
| 3393 // Compare flat ASCII strings natively. | 3392 // Compare flat ASCII strings natively. |
| 3394 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2, | 3393 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2, |
| 3395 a3); | 3394 a3); |
| 3396 StringHelper::GenerateCompareFlatOneByteStrings(masm, a1, a0, a2, a3, t0, t1); | 3395 StringHelper::GenerateCompareFlatOneByteStrings(masm, a1, a0, a2, a3, t0, t1); |
| 3397 | 3396 |
| 3398 __ bind(&runtime); | 3397 __ bind(&runtime); |
| 3399 __ Push(a1, a0); | 3398 __ Push(a1, a0); |
| 3400 __ TailCallRuntime(Runtime::kStringCompare, 2); | 3399 __ TailCallRuntime(Runtime::kStringCompare); |
| 3401 } | 3400 } |
| 3402 | 3401 |
| 3403 | 3402 |
| 3404 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { | 3403 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { |
| 3405 // ----------- S t a t e ------------- | 3404 // ----------- S t a t e ------------- |
| 3406 // -- a1 : left | 3405 // -- a1 : left |
| 3407 // -- a0 : right | 3406 // -- a0 : right |
| 3408 // -- ra : return address | 3407 // -- ra : return address |
| 3409 // ----------------------------------- | 3408 // ----------------------------------- |
| 3410 | 3409 |
| (...skipping 18 matching lines...) Expand all Loading... |
| 3429 } | 3428 } |
| 3430 | 3429 |
| 3431 | 3430 |
| 3432 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { | 3431 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { |
| 3433 DCHECK_EQ(CompareICState::BOOLEAN, state()); | 3432 DCHECK_EQ(CompareICState::BOOLEAN, state()); |
| 3434 Label miss; | 3433 Label miss; |
| 3435 | 3434 |
| 3436 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); | 3435 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); |
| 3437 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); | 3436 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); |
| 3438 if (op() != Token::EQ_STRICT && is_strong(strength())) { | 3437 if (op() != Token::EQ_STRICT && is_strong(strength())) { |
| 3439 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); | 3438 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion); |
| 3440 } else { | 3439 } else { |
| 3441 if (!Token::IsEqualityOp(op())) { | 3440 if (!Token::IsEqualityOp(op())) { |
| 3442 __ lw(a1, FieldMemOperand(a1, Oddball::kToNumberOffset)); | 3441 __ lw(a1, FieldMemOperand(a1, Oddball::kToNumberOffset)); |
| 3443 __ AssertSmi(a1); | 3442 __ AssertSmi(a1); |
| 3444 __ lw(a0, FieldMemOperand(a0, Oddball::kToNumberOffset)); | 3443 __ lw(a0, FieldMemOperand(a0, Oddball::kToNumberOffset)); |
| 3445 __ AssertSmi(a0); | 3444 __ AssertSmi(a0); |
| 3446 } | 3445 } |
| 3447 __ Ret(USE_DELAY_SLOT); | 3446 __ Ret(USE_DELAY_SLOT); |
| 3448 __ Subu(v0, a1, a0); | 3447 __ Subu(v0, a1, a0); |
| 3449 } | 3448 } |
| (...skipping 272 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3722 tmp3); | 3721 tmp3); |
| 3723 } else { | 3722 } else { |
| 3724 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1, | 3723 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1, |
| 3725 tmp2, tmp3, tmp4); | 3724 tmp2, tmp3, tmp4); |
| 3726 } | 3725 } |
| 3727 | 3726 |
| 3728 // Handle more complex cases in runtime. | 3727 // Handle more complex cases in runtime. |
| 3729 __ bind(&runtime); | 3728 __ bind(&runtime); |
| 3730 __ Push(left, right); | 3729 __ Push(left, right); |
| 3731 if (equality) { | 3730 if (equality) { |
| 3732 __ TailCallRuntime(Runtime::kStringEquals, 2); | 3731 __ TailCallRuntime(Runtime::kStringEquals); |
| 3733 } else { | 3732 } else { |
| 3734 __ TailCallRuntime(Runtime::kStringCompare, 2); | 3733 __ TailCallRuntime(Runtime::kStringCompare); |
| 3735 } | 3734 } |
| 3736 | 3735 |
| 3737 __ bind(&miss); | 3736 __ bind(&miss); |
| 3738 GenerateMiss(masm); | 3737 GenerateMiss(masm); |
| 3739 } | 3738 } |
| 3740 | 3739 |
| 3741 | 3740 |
| 3742 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { | 3741 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { |
| 3743 DCHECK_EQ(CompareICState::RECEIVER, state()); | 3742 DCHECK_EQ(CompareICState::RECEIVER, state()); |
| 3744 Label miss; | 3743 Label miss; |
| (...skipping 23 matching lines...) Expand all Loading... |
| 3768 __ GetWeakValue(t0, cell); | 3767 __ GetWeakValue(t0, cell); |
| 3769 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); | 3768 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); |
| 3770 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); | 3769 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); |
| 3771 __ Branch(&miss, ne, a2, Operand(t0)); | 3770 __ Branch(&miss, ne, a2, Operand(t0)); |
| 3772 __ Branch(&miss, ne, a3, Operand(t0)); | 3771 __ Branch(&miss, ne, a3, Operand(t0)); |
| 3773 | 3772 |
| 3774 if (Token::IsEqualityOp(op())) { | 3773 if (Token::IsEqualityOp(op())) { |
| 3775 __ Ret(USE_DELAY_SLOT); | 3774 __ Ret(USE_DELAY_SLOT); |
| 3776 __ subu(v0, a0, a1); | 3775 __ subu(v0, a0, a1); |
| 3777 } else if (is_strong(strength())) { | 3776 } else if (is_strong(strength())) { |
| 3778 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); | 3777 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion); |
| 3779 } else { | 3778 } else { |
| 3780 if (op() == Token::LT || op() == Token::LTE) { | 3779 if (op() == Token::LT || op() == Token::LTE) { |
| 3781 __ li(a2, Operand(Smi::FromInt(GREATER))); | 3780 __ li(a2, Operand(Smi::FromInt(GREATER))); |
| 3782 } else { | 3781 } else { |
| 3783 __ li(a2, Operand(Smi::FromInt(LESS))); | 3782 __ li(a2, Operand(Smi::FromInt(LESS))); |
| 3784 } | 3783 } |
| 3785 __ Push(a1, a0, a2); | 3784 __ Push(a1, a0, a2); |
| 3786 __ TailCallRuntime(Runtime::kCompare, 3); | 3785 __ TailCallRuntime(Runtime::kCompare); |
| 3787 } | 3786 } |
| 3788 | 3787 |
| 3789 __ bind(&miss); | 3788 __ bind(&miss); |
| 3790 GenerateMiss(masm); | 3789 GenerateMiss(masm); |
| 3791 } | 3790 } |
| 3792 | 3791 |
| 3793 | 3792 |
| 3794 void CompareICStub::GenerateMiss(MacroAssembler* masm) { | 3793 void CompareICStub::GenerateMiss(MacroAssembler* masm) { |
| 3795 { | 3794 { |
| 3796 // Call the runtime system in a fresh internal frame. | 3795 // Call the runtime system in a fresh internal frame. |
| (...skipping 1371 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5168 | 5167 |
| 5169 // Check that value is not the_hole. | 5168 // Check that value is not the_hole. |
| 5170 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 5169 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 5171 __ Branch(&slow_case, eq, result_reg, Operand(at)); | 5170 __ Branch(&slow_case, eq, result_reg, Operand(at)); |
| 5172 __ Ret(); | 5171 __ Ret(); |
| 5173 | 5172 |
| 5174 // Fallback to the runtime. | 5173 // Fallback to the runtime. |
| 5175 __ bind(&slow_case); | 5174 __ bind(&slow_case); |
| 5176 __ SmiTag(slot_reg); | 5175 __ SmiTag(slot_reg); |
| 5177 __ Push(slot_reg); | 5176 __ Push(slot_reg); |
| 5178 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1); | 5177 __ TailCallRuntime(Runtime::kLoadGlobalViaContext); |
| 5179 } | 5178 } |
| 5180 | 5179 |
| 5181 | 5180 |
| 5182 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5181 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { |
| 5183 Register context_reg = cp; | 5182 Register context_reg = cp; |
| 5184 Register slot_reg = a2; | 5183 Register slot_reg = a2; |
| 5185 Register value_reg = a0; | 5184 Register value_reg = a0; |
| 5186 Register cell_reg = t0; | 5185 Register cell_reg = t0; |
| 5187 Register cell_value_reg = t1; | 5186 Register cell_value_reg = t1; |
| 5188 Register cell_details_reg = t2; | 5187 Register cell_details_reg = t2; |
| (...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5282 FieldMemOperand(cell_value_reg, HeapObject::kMapOffset)); | 5281 FieldMemOperand(cell_value_reg, HeapObject::kMapOffset)); |
| 5283 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg, | 5282 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg, |
| 5284 FieldMemOperand(value_reg, HeapObject::kMapOffset)); | 5283 FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
| 5285 | 5284 |
| 5286 // Fallback to the runtime. | 5285 // Fallback to the runtime. |
| 5287 __ bind(&slow_case); | 5286 __ bind(&slow_case); |
| 5288 __ SmiTag(slot_reg); | 5287 __ SmiTag(slot_reg); |
| 5289 __ Push(slot_reg, value_reg); | 5288 __ Push(slot_reg, value_reg); |
| 5290 __ TailCallRuntime(is_strict(language_mode()) | 5289 __ TailCallRuntime(is_strict(language_mode()) |
| 5291 ? Runtime::kStoreGlobalViaContext_Strict | 5290 ? Runtime::kStoreGlobalViaContext_Strict |
| 5292 : Runtime::kStoreGlobalViaContext_Sloppy, | 5291 : Runtime::kStoreGlobalViaContext_Sloppy); |
| 5293 2); | |
| 5294 } | 5292 } |
| 5295 | 5293 |
| 5296 | 5294 |
| 5297 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 5295 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
| 5298 return ref0.address() - ref1.address(); | 5296 return ref0.address() - ref1.address(); |
| 5299 } | 5297 } |
| 5300 | 5298 |
| 5301 | 5299 |
| 5302 // Calls an API function. Allocates HandleScope, extracts returned value | 5300 // Calls an API function. Allocates HandleScope, extracts returned value |
| 5303 // from handle and propagates exceptions. Restores context. stack_space | 5301 // from handle and propagates exceptions. Restores context. stack_space |
| (...skipping 103 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5407 // Check if the function scheduled an exception. | 5405 // Check if the function scheduled an exception. |
| 5408 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | 5406 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
| 5409 __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate))); | 5407 __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate))); |
| 5410 __ lw(t1, MemOperand(at)); | 5408 __ lw(t1, MemOperand(at)); |
| 5411 __ Branch(&promote_scheduled_exception, ne, t0, Operand(t1)); | 5409 __ Branch(&promote_scheduled_exception, ne, t0, Operand(t1)); |
| 5412 | 5410 |
| 5413 __ Ret(); | 5411 __ Ret(); |
| 5414 | 5412 |
| 5415 // Re-throw by promoting a scheduled exception. | 5413 // Re-throw by promoting a scheduled exception. |
| 5416 __ bind(&promote_scheduled_exception); | 5414 __ bind(&promote_scheduled_exception); |
| 5417 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0); | 5415 __ TailCallRuntime(Runtime::kPromoteScheduledException); |
| 5418 | 5416 |
| 5419 // HandleScope limit has changed. Delete allocated extensions. | 5417 // HandleScope limit has changed. Delete allocated extensions. |
| 5420 __ bind(&delete_allocated_handles); | 5418 __ bind(&delete_allocated_handles); |
| 5421 __ sw(s1, MemOperand(s3, kLimitOffset)); | 5419 __ sw(s1, MemOperand(s3, kLimitOffset)); |
| 5422 __ mov(s0, v0); | 5420 __ mov(s0, v0); |
| 5423 __ mov(a0, v0); | 5421 __ mov(a0, v0); |
| 5424 __ PrepareCallCFunction(1, s1); | 5422 __ PrepareCallCFunction(1, s1); |
| 5425 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); | 5423 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); |
| 5426 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), | 5424 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), |
| 5427 1); | 5425 1); |
| (...skipping 168 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5596 MemOperand(fp, 6 * kPointerSize), NULL); | 5594 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5597 } | 5595 } |
| 5598 | 5596 |
| 5599 | 5597 |
| 5600 #undef __ | 5598 #undef __ |
| 5601 | 5599 |
| 5602 } // namespace internal | 5600 } // namespace internal |
| 5603 } // namespace v8 | 5601 } // namespace v8 |
| 5604 | 5602 |
| 5605 #endif // V8_TARGET_ARCH_MIPS | 5603 #endif // V8_TARGET_ARCH_MIPS |
| OLD | NEW |