OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 707 matching lines...)
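Note: every hunk in this CL is the same mechanical edit: the trailing result_size argument (always 1 in this file) is dropped from TailCallRuntime. A minimal sketch of the before/after interface, using hypothetical stand-in types since the real declarations live in the MacroAssembler headers:

    // Illustration only; Runtime::FunctionId and MacroAssembler are stand-ins.
    namespace Runtime { enum FunctionId { kEquals }; }

    struct MacroAssembler {
      // Before this CL: the result size was passed explicitly (always 1 here).
      void TailCallRuntime(Runtime::FunctionId fid, int num_arguments,
                           int result_size);
      // After: the result size is implied, so each call site drops one argument.
      void TailCallRuntime(Runtime::FunctionId fid, int num_arguments);
    };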
718 a5); | 718 a5); |
719 } | 719 } |
720 // Never falls through to here. | 720 // Never falls through to here. |
721 | 721 |
722 __ bind(&slow); | 722 __ bind(&slow); |
723 // Prepare for call to builtin. Push object pointers, a0 (lhs) first, | 723 // Prepare for call to builtin. Push object pointers, a0 (lhs) first, |
724 // a1 (rhs) second. | 724 // a1 (rhs) second. |
725 __ Push(lhs, rhs); | 725 __ Push(lhs, rhs); |
727 // Figure out which native to call and set up the arguments. | 726 // Figure out which native to call and set up the arguments. |
727 if (cc == eq) { | 727 if (cc == eq) { |
728 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2, | 728 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2); |
729 1); | |
730 } else { | 729 } else { |
731 int ncr; // NaN compare result. | 730 int ncr; // NaN compare result. |
732 if (cc == lt || cc == le) { | 731 if (cc == lt || cc == le) { |
733 ncr = GREATER; | 732 ncr = GREATER; |
734 } else { | 733 } else { |
735 DCHECK(cc == gt || cc == ge); // Remaining cases. | 734 DCHECK(cc == gt || cc == ge); // Remaining cases. |
736 ncr = LESS; | 735 ncr = LESS; |
737 } | 736 } |
738 __ li(a0, Operand(Smi::FromInt(ncr))); | 737 __ li(a0, Operand(Smi::FromInt(ncr))); |
739 __ push(a0); | 738 __ push(a0); |
740 | 739 |
741 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 740 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
742 // tagged as a small integer. | 741 // tagged as a small integer. |
743 __ TailCallRuntime( | 742 __ TailCallRuntime( |
744 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 3, | 743 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, |
745 1); | 744 3); |
746 } | 745 } |
747 | 746 |
748 __ bind(&miss); | 747 __ bind(&miss); |
749 GenerateMiss(masm); | 748 GenerateMiss(masm); |
750 } | 749 } |
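For context on the ncr ("NaN compare result") default above: every ordered comparison involving NaN must evaluate to false, so the stub pre-loads the result that makes the pending condition test fail. A runnable model of that convention (hypothetical Compare helper, not V8 code):

    #include <cassert>
    #include <cmath>

    enum CompareResult { LESS = -1, EQUAL = 0, GREATER = 1 };

    // If either operand is NaN, return the caller-chosen default (ncr);
    // choosing GREATER for lt/le and LESS for gt/ge makes every ordered
    // comparison against NaN come out false.
    CompareResult Compare(double lhs, double rhs, CompareResult ncr) {
      if (std::isnan(lhs) || std::isnan(rhs)) return ncr;
      if (lhs < rhs) return LESS;
      if (lhs > rhs) return GREATER;
      return EQUAL;
    }

    int main() {
      double nan = std::nan("");
      assert(Compare(nan, 1.0, GREATER) == GREATER);  // so "nan < 1.0" is false
      assert(Compare(nan, 1.0, LESS) == LESS);        // so "nan > 1.0" is false
      return 0;
    }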
751 | 750 |
752 | 751 |
753 void StoreRegistersStateStub::Generate(MacroAssembler* masm) { | 752 void StoreRegistersStateStub::Generate(MacroAssembler* masm) { |
754 __ mov(t9, ra); | 753 __ mov(t9, ra); |
755 __ pop(ra); | 754 __ pop(ra); |
(...skipping 214 matching lines...)
970 // double_exponent may not contain the exponent value if the input was a | 969 // double_exponent may not contain the exponent value if the input was a |
972 // smi. We set it with the exponent value before bailing out. | 971 // smi. We set it with the exponent value before bailing out. |
972 __ mtc1(exponent, single_scratch); | 971 __ mtc1(exponent, single_scratch); |
973 __ cvt_d_w(double_exponent, single_scratch); | 972 __ cvt_d_w(double_exponent, single_scratch); |
974 | 973 |
975 // Returning or bailing out. | 974 // Returning or bailing out. |
976 Counters* counters = isolate()->counters(); | 975 Counters* counters = isolate()->counters(); |
977 if (exponent_type() == ON_STACK) { | 976 if (exponent_type() == ON_STACK) { |
978 // The arguments are still on the stack. | 977 // The arguments are still on the stack. |
979 __ bind(&call_runtime); | 978 __ bind(&call_runtime); |
980 __ TailCallRuntime(Runtime::kMathPowRT, 2, 1); | 979 __ TailCallRuntime(Runtime::kMathPowRT, 2); |
981 | 980 |
982 // The stub is called from non-optimized code, which expects the result | 981 // The stub is called from non-optimized code, which expects the result |
983 // as heap number in exponent. | 982 // as heap number in exponent. |
984 __ bind(&done); | 983 __ bind(&done); |
985 __ AllocateHeapNumber( | 984 __ AllocateHeapNumber( |
986 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime); | 985 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime); |
987 __ sdc1(double_result, | 986 __ sdc1(double_result, |
988 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); | 987 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); |
989 DCHECK(heapnumber.is(v0)); | 988 DCHECK(heapnumber.is(v0)); |
990 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2); | 989 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2); |
(...skipping 537 matching lines...)
1528 __ Ret(USE_DELAY_SLOT); | 1527 __ Ret(USE_DELAY_SLOT); |
1529 __ StoreRoot(result, | 1528 __ StoreRoot(result, |
1530 Heap::kInstanceofCacheAnswerRootIndex); // In delay slot. | 1529 Heap::kInstanceofCacheAnswerRootIndex); // In delay slot. |
1531 | 1530 |
1532 // Found Proxy or access check needed: Call the runtime | 1531 // Found Proxy or access check needed: Call the runtime |
1533 __ bind(&fast_runtime_fallback); | 1532 __ bind(&fast_runtime_fallback); |
1534 __ Push(object, function_prototype); | 1533 __ Push(object, function_prototype); |
1535 // Invalidate the instanceof cache. | 1534 // Invalidate the instanceof cache. |
1536 DCHECK(Smi::FromInt(0) == 0); | 1535 DCHECK(Smi::FromInt(0) == 0); |
1537 __ StoreRoot(zero_reg, Heap::kInstanceofCacheFunctionRootIndex); | 1536 __ StoreRoot(zero_reg, Heap::kInstanceofCacheFunctionRootIndex); |
1538 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2, 1); | 1537 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2); |
1539 | 1538 |
1540 // Slow-case: Call the %InstanceOf runtime function. | 1539 // Slow-case: Call the %InstanceOf runtime function. |
1541 __ bind(&slow_case); | 1540 __ bind(&slow_case); |
1542 __ Push(object, function); | 1541 __ Push(object, function); |
1543 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1); | 1542 __ TailCallRuntime(Runtime::kInstanceOf, 2); |
1544 } | 1543 } |
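The cache invalidation above leans on the DCHECK'd invariant that Smi::FromInt(0) is the all-zero word, which is what lets zero_reg stand in for the tagged zero. A sketch of that tagging invariant, assuming MIPS64's layout (kSmiTag == 0, value in the upper 32 bits):

    #include <cassert>
    #include <cstdint>

    constexpr int kSmiTag = 0;     // assumed: tag bits are zero for smis
    constexpr int kSmiShift = 32;  // assumed: value lives in the upper word

    constexpr int64_t SmiFromInt(int32_t value) {
      return (static_cast<int64_t>(value) << kSmiShift) | kSmiTag;
    }

    int main() {
      // The invariant the DCHECK asserts: tagging zero yields zero, so
      // storing zero_reg writes a valid Smi 0 into the root slot.
      static_assert(SmiFromInt(0) == 0, "zero_reg doubles as Smi::FromInt(0)");
      assert(SmiFromInt(7) == (int64_t{7} << kSmiShift));
      return 0;
    }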
1545 | 1544 |
1546 | 1545 |
1547 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { | 1546 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { |
1548 Label miss; | 1547 Label miss; |
1549 Register receiver = LoadDescriptor::ReceiverRegister(); | 1548 Register receiver = LoadDescriptor::ReceiverRegister(); |
1550 // Ensure that the vector and slot registers won't be clobbered before | 1549 // Ensure that the vector and slot registers won't be clobbered before |
1551 // calling the miss handler. | 1550 // calling the miss handler. |
1552 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::VectorRegister(), | 1551 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::VectorRegister(), |
1553 LoadWithVectorDescriptor::SlotRegister())); | 1552 LoadWithVectorDescriptor::SlotRegister())); |
(...skipping 50 matching lines...)
1604 __ dsubu(a3, a0, a1); | 1603 __ dsubu(a3, a0, a1); |
1605 __ SmiScale(a7, a3, kPointerSizeLog2); | 1604 __ SmiScale(a7, a3, kPointerSizeLog2); |
1606 __ Daddu(a3, a2, Operand(a7)); | 1605 __ Daddu(a3, a2, Operand(a7)); |
1607 __ Ret(USE_DELAY_SLOT); | 1606 __ Ret(USE_DELAY_SLOT); |
1608 __ ld(v0, MemOperand(a3, kDisplacement)); | 1607 __ ld(v0, MemOperand(a3, kDisplacement)); |
1609 | 1608 |
1610 // Slow-case: Handle non-smi or out-of-bounds access to arguments | 1609 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
1611 // by calling the runtime system. | 1610 // by calling the runtime system. |
1612 __ bind(&slow); | 1611 __ bind(&slow); |
1613 __ push(a1); | 1612 __ push(a1); |
1614 __ TailCallRuntime(Runtime::kArguments, 1, 1); | 1613 __ TailCallRuntime(Runtime::kArguments, 1); |
1615 } | 1614 } |
1616 | 1615 |
1617 | 1616 |
1618 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { | 1617 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { |
1619 // a1 : function | 1618 // a1 : function |
1620 // a2 : number of parameters (tagged) | 1619 // a2 : number of parameters (tagged) |
1621 // a3 : parameters pointer | 1620 // a3 : parameters pointer |
1622 | 1621 |
1623 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); | 1622 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); |
1624 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); | 1623 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); |
1625 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); | 1624 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); |
1626 | 1625 |
1627 // Check if the calling frame is an arguments adaptor frame. | 1626 // Check if the calling frame is an arguments adaptor frame. |
1628 Label runtime; | 1627 Label runtime; |
1629 __ ld(a4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1628 __ ld(a4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
1630 __ ld(a0, MemOperand(a4, StandardFrameConstants::kContextOffset)); | 1629 __ ld(a0, MemOperand(a4, StandardFrameConstants::kContextOffset)); |
1631 __ Branch(&runtime, ne, a0, | 1630 __ Branch(&runtime, ne, a0, |
1632 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1631 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
1633 | 1632 |
1634 // Patch the arguments.length and the parameters pointer in the current frame. | 1633 // Patch the arguments.length and the parameters pointer in the current frame. |
1635 __ ld(a2, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1634 __ ld(a2, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
1636 __ SmiScale(a7, a2, kPointerSizeLog2); | 1635 __ SmiScale(a7, a2, kPointerSizeLog2); |
1637 __ Daddu(a4, a4, Operand(a7)); | 1636 __ Daddu(a4, a4, Operand(a7)); |
1638 __ daddiu(a3, a4, StandardFrameConstants::kCallerSPOffset); | 1637 __ daddiu(a3, a4, StandardFrameConstants::kCallerSPOffset); |
1639 | 1638 |
1640 __ bind(&runtime); | 1639 __ bind(&runtime); |
1641 __ Push(a1, a3, a2); | 1640 __ Push(a1, a3, a2); |
1642 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); | 1641 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); |
1643 } | 1642 } |
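The SmiScale/Daddu pair above turns a tagged argument count into a byte offset into the caller's frame with a single shift. A runnable model, assuming the MIPS64 smi layout (value in the upper 32 bits) and 8-byte pointers:

    #include <cassert>
    #include <cstdint>

    constexpr int kSmiShift = 32;        // assumed smi layout on MIPS64
    constexpr int kPointerSizeLog2 = 3;  // 8-byte pointers

    // SmiScale(dst, src, scale): untag and multiply by 2^scale in one
    // arithmetic right shift, valid because scale < kSmiShift.
    int64_t SmiScale(int64_t smi, int scale) {
      return smi >> (kSmiShift - scale);
    }

    int main() {
      int64_t count = int64_t{5} << kSmiShift;             // Smi::FromInt(5)
      assert(SmiScale(count, kPointerSizeLog2) == 5 * 8);  // byte offset
      return 0;
    }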
1644 | 1643 |
1645 | 1644 |
1646 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { | 1645 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
1647 // a1 : function | 1646 // a1 : function |
1648 // a2 : number of parameters (tagged) | 1647 // a2 : number of parameters (tagged) |
1649 // a3 : parameters pointer | 1648 // a3 : parameters pointer |
1650 // Registers used over whole function: | 1649 // Registers used over whole function: |
1651 // a5 : arguments count (tagged) | 1650 // a5 : arguments count (tagged) |
1652 // a6 : mapped parameter count (tagged) | 1651 // a6 : mapped parameter count (tagged) |
(...skipping 193 matching lines...)
1846 __ bind(&arguments_test); | 1845 __ bind(&arguments_test); |
1847 __ Branch(&arguments_loop, lt, a6, Operand(a5)); | 1846 __ Branch(&arguments_loop, lt, a6, Operand(a5)); |
1848 | 1847 |
1849 // Return. | 1848 // Return. |
1850 __ Ret(); | 1849 __ Ret(); |
1851 | 1850 |
1852 // Do the runtime call to allocate the arguments object. | 1851 // Do the runtime call to allocate the arguments object. |
1853 // a5 = argument count (tagged) | 1852 // a5 = argument count (tagged) |
1854 __ bind(&runtime); | 1853 __ bind(&runtime); |
1855 __ Push(a1, a3, a5); | 1854 __ Push(a1, a3, a5); |
1856 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); | 1855 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); |
1857 } | 1856 } |
1858 | 1857 |
1859 | 1858 |
1860 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { | 1859 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { |
1861 // Return address is in ra. | 1860 // Return address is in ra. |
1862 Label slow; | 1861 Label slow; |
1863 | 1862 |
1864 Register receiver = LoadDescriptor::ReceiverRegister(); | 1863 Register receiver = LoadDescriptor::ReceiverRegister(); |
1865 Register key = LoadDescriptor::NameRegister(); | 1864 Register key = LoadDescriptor::NameRegister(); |
1866 | 1865 |
1868 // Check that the key is an array index, that is, a Uint32. | 1867 // Check that the key is an array index, that is, a Uint32. |
1868 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); | 1867 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); |
1869 __ Branch(&slow, ne, t0, Operand(zero_reg)); | 1868 __ Branch(&slow, ne, t0, Operand(zero_reg)); |
1870 | 1869 |
1871 // Everything is fine, call runtime. | 1870 // Everything is fine, call runtime. |
1872 __ Push(receiver, key); // Receiver, key. | 1871 __ Push(receiver, key); // Receiver, key. |
1873 | 1872 |
1874 // Perform tail call to the entry. | 1873 // Perform tail call to the entry. |
1875 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1); | 1874 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2); |
1876 | 1875 |
1877 __ bind(&slow); | 1876 __ bind(&slow); |
1878 PropertyAccessCompiler::TailCallBuiltin( | 1877 PropertyAccessCompiler::TailCallBuiltin( |
1879 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1878 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
1880 } | 1879 } |
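The And/Branch pair above is the usual "key is an array index" fast check: a value qualifies only if it is a smi (tag bits clear) and non-negative (sign bit clear), so one AND against both masks decides it. A model under the same assumed 64-bit smi layout:

    #include <cassert>
    #include <cstdint>

    constexpr uint64_t kSmiTagMask = 1;            // assumed: one tag bit
    constexpr uint64_t kSmiSignMask = 1ULL << 63;  // sign bit of the word

    bool IsNonNegativeSmi(uint64_t value) {
      // Mirrors: And(t0, key, kSmiTagMask | kSmiSignMask); branch if t0 != 0.
      return (value & (kSmiTagMask | kSmiSignMask)) == 0;
    }

    int main() {
      uint64_t smi_3 = uint64_t{3} << 32;  // Smi::FromInt(3), assumed layout
      assert(IsNonNegativeSmi(smi_3));
      assert(!IsNonNegativeSmi(~uint64_t{0}));  // negative value fails
      return 0;
    }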
1881 | 1880 |
1882 | 1881 |
1883 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 1882 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
1884 // a1 : function | 1883 // a1 : function |
1885 // a2 : number of parameters (tagged) | 1884 // a2 : number of parameters (tagged) |
(...skipping 73 matching lines...)
1959 __ Dsubu(a2, a2, Operand(1)); | 1958 __ Dsubu(a2, a2, Operand(1)); |
1960 __ Branch(&loop, ne, a2, Operand(zero_reg)); | 1959 __ Branch(&loop, ne, a2, Operand(zero_reg)); |
1961 | 1960 |
1962 // Return. | 1961 // Return. |
1963 __ bind(&done); | 1962 __ bind(&done); |
1964 __ Ret(); | 1963 __ Ret(); |
1965 | 1964 |
1966 // Do the runtime call to allocate the arguments object. | 1965 // Do the runtime call to allocate the arguments object. |
1967 __ bind(&runtime); | 1966 __ bind(&runtime); |
1968 __ Push(a1, a3, a2); | 1967 __ Push(a1, a3, a2); |
1969 __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1); | 1968 __ TailCallRuntime(Runtime::kNewStrictArguments, 3); |
1970 } | 1969 } |
1971 | 1970 |
1972 | 1971 |
1973 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { | 1972 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { |
1974 // sp[0] : language mode | 1973 // sp[0] : language mode |
1975 // sp[8] : index of rest parameter | 1974 // sp[8] : index of rest parameter |
1976 // sp[16] : number of parameters | 1975 // sp[16] : number of parameters |
1977 // sp[24] : receiver displacement | 1976 // sp[24] : receiver displacement |
1978 // Check if the calling frame is an arguments adaptor frame. | 1977 // Check if the calling frame is an arguments adaptor frame. |
1979 | 1978 |
1980 Label runtime; | 1979 Label runtime; |
1981 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1980 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
1982 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); | 1981 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); |
1983 __ Branch(&runtime, ne, a3, | 1982 __ Branch(&runtime, ne, a3, |
1984 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1983 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
1985 | 1984 |
1986 // Patch the arguments.length and the parameters pointer. | 1985 // Patch the arguments.length and the parameters pointer. |
1987 __ ld(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1986 __ ld(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
1988 __ sd(a1, MemOperand(sp, 2 * kPointerSize)); | 1987 __ sd(a1, MemOperand(sp, 2 * kPointerSize)); |
1989 __ SmiScale(at, a1, kPointerSizeLog2); | 1988 __ SmiScale(at, a1, kPointerSizeLog2); |
1990 | 1989 |
1991 __ Daddu(a3, a2, Operand(at)); | 1990 __ Daddu(a3, a2, Operand(at)); |
1992 | 1991 |
1993 __ Daddu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); | 1992 __ Daddu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); |
1994 __ sd(a3, MemOperand(sp, 3 * kPointerSize)); | 1993 __ sd(a3, MemOperand(sp, 3 * kPointerSize)); |
1995 | 1994 |
1996 // Do the runtime call to allocate the arguments object. | 1995 // Do the runtime call to allocate the arguments object. |
1997 __ bind(&runtime); | 1996 __ bind(&runtime); |
1998 __ TailCallRuntime(Runtime::kNewRestParam, 4, 1); | 1997 __ TailCallRuntime(Runtime::kNewRestParam, 4); |
1999 } | 1998 } |
2000 | 1999 |
2001 | 2000 |
2002 void RegExpExecStub::Generate(MacroAssembler* masm) { | 2001 void RegExpExecStub::Generate(MacroAssembler* masm) { |
2003 // Just jump directly to runtime if native RegExp is not selected at compile | 2002 // Just jump directly to runtime if native RegExp is not selected at compile |
2004 // time, or if the regexp entry in generated code has been turned off by a | 2003 // time, or if the regexp entry in generated code has been turned off by a |
2005 // runtime switch. | 2004 // runtime switch. |
2006 #ifdef V8_INTERPRETED_REGEXP | 2005 #ifdef V8_INTERPRETED_REGEXP |
2007 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 2006 __ TailCallRuntime(Runtime::kRegExpExec, 4); |
2008 #else // V8_INTERPRETED_REGEXP | 2007 #else // V8_INTERPRETED_REGEXP |
2009 | 2008 |
2010 // Stack frame on entry. | 2009 // Stack frame on entry. |
2011 // sp[0]: last_match_info (expected JSArray) | 2010 // sp[0]: last_match_info (expected JSArray) |
2012 // sp[8]: previous index | 2011 // sp[8]: previous index |
2013 // sp[16]: subject string | 2012 // sp[16]: subject string |
2014 // sp[24]: JSRegExp object | 2013 // sp[24]: JSRegExp object |
2015 | 2014 |
2016 const int kLastMatchInfoOffset = 0 * kPointerSize; | 2015 const int kLastMatchInfoOffset = 0 * kPointerSize; |
2017 const int kPreviousIndexOffset = 1 * kPointerSize; | 2016 const int kPreviousIndexOffset = 1 * kPointerSize; |
(...skipping 296 matching lines...)
2314 // stack overflow (on the backtrack stack) was detected in RegExp code but | 2313 // stack overflow (on the backtrack stack) was detected in RegExp code but |
2315 // the exception has not been created yet. Handle that in the runtime system. | 2314 // the exception has not been created yet. Handle that in the runtime system. |
2316 // TODO(592): Rerunning the RegExp to get the stack overflow exception. | 2315 // TODO(592): Rerunning the RegExp to get the stack overflow exception. |
2317 __ li(a1, Operand(isolate()->factory()->the_hole_value())); | 2316 __ li(a1, Operand(isolate()->factory()->the_hole_value())); |
2318 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, | 2317 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, |
2319 isolate()))); | 2318 isolate()))); |
2320 __ ld(v0, MemOperand(a2, 0)); | 2319 __ ld(v0, MemOperand(a2, 0)); |
2321 __ Branch(&runtime, eq, v0, Operand(a1)); | 2320 __ Branch(&runtime, eq, v0, Operand(a1)); |
2322 | 2321 |
2323 // For exception, throw the exception again. | 2322 // For exception, throw the exception again. |
2324 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4, 1); | 2323 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4); |
2325 | 2324 |
2326 __ bind(&failure); | 2325 __ bind(&failure); |
2327 // For failure and exception return null. | 2326 // For failure and exception return null. |
2328 __ li(v0, Operand(isolate()->factory()->null_value())); | 2327 __ li(v0, Operand(isolate()->factory()->null_value())); |
2329 __ DropAndRet(4); | 2328 __ DropAndRet(4); |
2330 | 2329 |
2331 // Process the result from the native regexp code. | 2330 // Process the result from the native regexp code. |
2332 __ bind(&success); | 2331 __ bind(&success); |
2333 | 2332 |
2334 __ lw(a1, UntagSmiFieldMemOperand( | 2333 __ lw(a1, UntagSmiFieldMemOperand( |
(...skipping 75 matching lines...)
2410 __ daddiu(a0, a0, kPointerSize); // In branch delay slot. | 2409 __ daddiu(a0, a0, kPointerSize); // In branch delay slot. |
2411 | 2410 |
2412 __ bind(&done); | 2411 __ bind(&done); |
2413 | 2412 |
2414 // Return last match info. | 2413 // Return last match info. |
2415 __ ld(v0, MemOperand(sp, kLastMatchInfoOffset)); | 2414 __ ld(v0, MemOperand(sp, kLastMatchInfoOffset)); |
2416 __ DropAndRet(4); | 2415 __ DropAndRet(4); |
2417 | 2416 |
2418 // Do the runtime call to execute the regexp. | 2417 // Do the runtime call to execute the regexp. |
2419 __ bind(&runtime); | 2418 __ bind(&runtime); |
2420 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 2419 __ TailCallRuntime(Runtime::kRegExpExec, 4); |
2421 | 2420 |
2422 // Deferred code for string handling. | 2421 // Deferred code for string handling. |
2423 // (6) Not a long external string? If yes, go to (8). | 2422 // (6) Not a long external string? If yes, go to (8). |
2424 __ bind(¬_seq_nor_cons); | 2423 __ bind(¬_seq_nor_cons); |
2425 // Go to (8). | 2424 // Go to (8). |
2426 __ Branch(¬_long_external, gt, a1, Operand(kExternalStringTag)); | 2425 __ Branch(¬_long_external, gt, a1, Operand(kExternalStringTag)); |
2427 | 2426 |
2428 // (7) External string. Make it, offset-wise, look like a sequential string. | 2427 // (7) External string. Make it, offset-wise, look like a sequential string. |
2429 __ bind(&external_string); | 2428 __ bind(&external_string); |
2430 __ ld(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); | 2429 __ ld(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); |
(...skipping 750 matching lines...)
3181 StringHelper::GenerateCopyCharacters( | 3180 StringHelper::GenerateCopyCharacters( |
3182 masm, a1, a5, a2, a3, String::TWO_BYTE_ENCODING); | 3181 masm, a1, a5, a2, a3, String::TWO_BYTE_ENCODING); |
3183 | 3182 |
3184 __ bind(&return_v0); | 3183 __ bind(&return_v0); |
3185 Counters* counters = isolate()->counters(); | 3184 Counters* counters = isolate()->counters(); |
3186 __ IncrementCounter(counters->sub_string_native(), 1, a3, a4); | 3185 __ IncrementCounter(counters->sub_string_native(), 1, a3, a4); |
3187 __ DropAndRet(3); | 3186 __ DropAndRet(3); |
3188 | 3187 |
3189 // Just jump to runtime to create the sub string. | 3188 // Just jump to runtime to create the sub string. |
3190 __ bind(&runtime); | 3189 __ bind(&runtime); |
3191 __ TailCallRuntime(Runtime::kSubString, 3, 1); | 3190 __ TailCallRuntime(Runtime::kSubString, 3); |
3192 | 3191 |
3193 __ bind(&single_char); | 3192 __ bind(&single_char); |
3194 // v0: original string | 3193 // v0: original string |
3195 // a1: instance type | 3194 // a1: instance type |
3196 // a2: length | 3195 // a2: length |
3197 // a3: from index (untagged) | 3196 // a3: from index (untagged) |
3198 __ SmiTag(a3); | 3197 __ SmiTag(a3); |
3199 StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime, | 3198 StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime, |
3200 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); | 3199 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); |
3201 generator.GenerateFast(masm); | 3200 generator.GenerateFast(masm); |
(...skipping 24 matching lines...)
3226 __ Branch(¬_string, hs, a1, Operand(FIRST_NONSTRING_TYPE)); | 3225 __ Branch(¬_string, hs, a1, Operand(FIRST_NONSTRING_TYPE)); |
3227 // Check if string has a cached array index. | 3226 // Check if string has a cached array index. |
3228 __ lwu(a2, FieldMemOperand(a0, String::kHashFieldOffset)); | 3227 __ lwu(a2, FieldMemOperand(a0, String::kHashFieldOffset)); |
3229 __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask)); | 3228 __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask)); |
3230 __ Branch(&slow_string, ne, at, Operand(zero_reg)); | 3229 __ Branch(&slow_string, ne, at, Operand(zero_reg)); |
3231 __ IndexFromHash(a2, a0); | 3230 __ IndexFromHash(a2, a0); |
3232 __ Ret(USE_DELAY_SLOT); | 3231 __ Ret(USE_DELAY_SLOT); |
3233 __ mov(v0, a0); | 3232 __ mov(v0, a0); |
3234 __ bind(&slow_string); | 3233 __ bind(&slow_string); |
3235 __ push(a0); // Push argument. | 3234 __ push(a0); // Push argument. |
3236 __ TailCallRuntime(Runtime::kStringToNumber, 1, 1); | 3235 __ TailCallRuntime(Runtime::kStringToNumber, 1); |
3237 __ bind(¬_string); | 3236 __ bind(¬_string); |
3238 | 3237 |
3239 Label not_oddball; | 3238 Label not_oddball; |
3240 __ Branch(¬_oddball, ne, a1, Operand(ODDBALL_TYPE)); | 3239 __ Branch(¬_oddball, ne, a1, Operand(ODDBALL_TYPE)); |
3241 __ Ret(USE_DELAY_SLOT); | 3240 __ Ret(USE_DELAY_SLOT); |
3242 __ ld(v0, FieldMemOperand(a0, Oddball::kToNumberOffset)); | 3241 __ ld(v0, FieldMemOperand(a0, Oddball::kToNumberOffset)); |
3243 __ bind(¬_oddball); | 3242 __ bind(¬_oddball); |
3244 | 3243 |
3245 __ push(a0); // Push argument. | 3244 __ push(a0); // Push argument. |
3246 __ TailCallRuntime(Runtime::kToNumber, 1, 1); | 3245 __ TailCallRuntime(Runtime::kToNumber, 1); |
3247 } | 3246 } |
3248 | 3247 |
3249 | 3248 |
3250 void ToLengthStub::Generate(MacroAssembler* masm) { | 3249 void ToLengthStub::Generate(MacroAssembler* masm) { |
3251 // The ToLength stub takes one argument in a0. | 3250 // The ToLength stub takes one argument in a0. |
3252 Label not_smi, positive_smi; | 3251 Label not_smi, positive_smi; |
3253 __ JumpIfNotSmi(a0, ¬_smi); | 3252 __ JumpIfNotSmi(a0, ¬_smi); |
3254 STATIC_ASSERT(kSmiTag == 0); | 3253 STATIC_ASSERT(kSmiTag == 0); |
3255 __ Branch(&positive_smi, ge, a0, Operand(zero_reg)); | 3254 __ Branch(&positive_smi, ge, a0, Operand(zero_reg)); |
3256 __ mov(a0, zero_reg); | 3255 __ mov(a0, zero_reg); |
3257 __ bind(&positive_smi); | 3256 __ bind(&positive_smi); |
3258 __ Ret(USE_DELAY_SLOT); | 3257 __ Ret(USE_DELAY_SLOT); |
3259 __ mov(v0, a0); | 3258 __ mov(v0, a0); |
3260 __ bind(¬_smi); | 3259 __ bind(¬_smi); |
3261 | 3260 |
3262 __ push(a0); // Push argument. | 3261 __ push(a0); // Push argument. |
3263 __ TailCallRuntime(Runtime::kToLength, 1, 1); | 3262 __ TailCallRuntime(Runtime::kToLength, 1); |
3264 } | 3263 } |
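The smi fast path above is just a clamp of negative indices to zero before returning; a one-line model of that behavior (a sketch of the fast path only, not the full ToLength spec, which routes non-smis to the runtime):

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    // Fast path only: a smi input is clamped below at zero.
    int64_t ToLengthSmiFastPath(int64_t value) {
      return std::max<int64_t>(value, 0);
    }

    int main() {
      assert(ToLengthSmiFastPath(-5) == 0);
      assert(ToLengthSmiFastPath(42) == 42);
      return 0;
    }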
3265 | 3264 |
3266 | 3265 |
3267 void ToStringStub::Generate(MacroAssembler* masm) { | 3266 void ToStringStub::Generate(MacroAssembler* masm) { |
3268 // The ToString stub takes one argument in a0. | 3267 // The ToString stub takes one argument in a0. |
3269 Label is_number; | 3268 Label is_number; |
3270 __ JumpIfSmi(a0, &is_number); | 3269 __ JumpIfSmi(a0, &is_number); |
3271 | 3270 |
3272 Label not_string; | 3271 Label not_string; |
3273 __ GetObjectType(a0, a1, a1); | 3272 __ GetObjectType(a0, a1, a1); |
(...skipping 11 matching lines...)
3285 __ TailCallStub(&stub); | 3284 __ TailCallStub(&stub); |
3286 __ bind(¬_heap_number); | 3285 __ bind(¬_heap_number); |
3287 | 3286 |
3288 Label not_oddball; | 3287 Label not_oddball; |
3289 __ Branch(¬_oddball, ne, a1, Operand(ODDBALL_TYPE)); | 3288 __ Branch(¬_oddball, ne, a1, Operand(ODDBALL_TYPE)); |
3290 __ Ret(USE_DELAY_SLOT); | 3289 __ Ret(USE_DELAY_SLOT); |
3291 __ ld(v0, FieldMemOperand(a0, Oddball::kToStringOffset)); | 3290 __ ld(v0, FieldMemOperand(a0, Oddball::kToStringOffset)); |
3292 __ bind(¬_oddball); | 3291 __ bind(¬_oddball); |
3293 | 3292 |
3294 __ push(a0); // Push argument. | 3293 __ push(a0); // Push argument. |
3295 __ TailCallRuntime(Runtime::kToString, 1, 1); | 3294 __ TailCallRuntime(Runtime::kToString, 1); |
3296 } | 3295 } |
3297 | 3296 |
3298 | 3297 |
3299 void StringHelper::GenerateFlatOneByteStringEquals( | 3298 void StringHelper::GenerateFlatOneByteStringEquals( |
3300 MacroAssembler* masm, Register left, Register right, Register scratch1, | 3299 MacroAssembler* masm, Register left, Register right, Register scratch1, |
3301 Register scratch2, Register scratch3) { | 3300 Register scratch2, Register scratch3) { |
3302 Register length = scratch1; | 3301 Register length = scratch1; |
3303 | 3302 |
3304 // Compare lengths. | 3303 // Compare lengths. |
3305 Label strings_not_equal, check_zero_length; | 3304 Label strings_not_equal, check_zero_length; |
(...skipping 117 matching lines...)
3423 Label runtime; | 3422 Label runtime; |
3424 __ JumpIfNotBothSequentialOneByteStrings(a1, a0, a2, a3, &runtime); | 3423 __ JumpIfNotBothSequentialOneByteStrings(a1, a0, a2, a3, &runtime); |
3425 | 3424 |
3426 // Compare flat one-byte strings natively. | 3425 // Compare flat one-byte strings natively. |
3427 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2, | 3426 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2, |
3428 a3); | 3427 a3); |
3429 StringHelper::GenerateCompareFlatOneByteStrings(masm, a1, a0, a2, a3, t0, t1); | 3428 StringHelper::GenerateCompareFlatOneByteStrings(masm, a1, a0, a2, a3, t0, t1); |
3430 | 3429 |
3431 __ bind(&runtime); | 3430 __ bind(&runtime); |
3432 __ Push(a1, a0); | 3431 __ Push(a1, a0); |
3433 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 3432 __ TailCallRuntime(Runtime::kStringCompare, 2); |
3434 } | 3433 } |
3435 | 3434 |
3436 | 3435 |
3437 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { | 3436 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { |
3438 // ----------- S t a t e ------------- | 3437 // ----------- S t a t e ------------- |
3439 // -- a1 : left | 3438 // -- a1 : left |
3440 // -- a0 : right | 3439 // -- a0 : right |
3441 // -- ra : return address | 3440 // -- ra : return address |
3442 // ----------------------------------- | 3441 // ----------------------------------- |
3443 | 3442 |
(...skipping 18 matching lines...)
3462 } | 3461 } |
3463 | 3462 |
3464 | 3463 |
3465 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { | 3464 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { |
3466 DCHECK_EQ(CompareICState::BOOLEAN, state()); | 3465 DCHECK_EQ(CompareICState::BOOLEAN, state()); |
3467 Label miss; | 3466 Label miss; |
3468 | 3467 |
3469 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); | 3468 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); |
3470 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); | 3469 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); |
3471 if (op() != Token::EQ_STRICT && is_strong(strength())) { | 3470 if (op() != Token::EQ_STRICT && is_strong(strength())) { |
3472 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1); | 3471 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); |
3473 } else { | 3472 } else { |
3474 if (!Token::IsEqualityOp(op())) { | 3473 if (!Token::IsEqualityOp(op())) { |
3475 __ ld(a1, FieldMemOperand(a1, Oddball::kToNumberOffset)); | 3474 __ ld(a1, FieldMemOperand(a1, Oddball::kToNumberOffset)); |
3476 __ AssertSmi(a1); | 3475 __ AssertSmi(a1); |
3477 __ ld(a0, FieldMemOperand(a0, Oddball::kToNumberOffset)); | 3476 __ ld(a0, FieldMemOperand(a0, Oddball::kToNumberOffset)); |
3478 __ AssertSmi(a0); | 3477 __ AssertSmi(a0); |
3479 } | 3478 } |
3480 __ Ret(USE_DELAY_SLOT); | 3479 __ Ret(USE_DELAY_SLOT); |
3481 __ Dsubu(v0, a1, a0); | 3480 __ Dsubu(v0, a1, a0); |
3482 } | 3481 } |
(...skipping 272 matching lines...)
3755 tmp3); | 3754 tmp3); |
3756 } else { | 3755 } else { |
3757 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1, | 3756 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1, |
3758 tmp2, tmp3, tmp4); | 3757 tmp2, tmp3, tmp4); |
3759 } | 3758 } |
3760 | 3759 |
3761 // Handle more complex cases in runtime. | 3760 // Handle more complex cases in runtime. |
3762 __ bind(&runtime); | 3761 __ bind(&runtime); |
3763 __ Push(left, right); | 3762 __ Push(left, right); |
3764 if (equality) { | 3763 if (equality) { |
3765 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); | 3764 __ TailCallRuntime(Runtime::kStringEquals, 2); |
3766 } else { | 3765 } else { |
3767 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 3766 __ TailCallRuntime(Runtime::kStringCompare, 2); |
3768 } | 3767 } |
3769 | 3768 |
3770 __ bind(&miss); | 3769 __ bind(&miss); |
3771 GenerateMiss(masm); | 3770 GenerateMiss(masm); |
3772 } | 3771 } |
3773 | 3772 |
3774 | 3773 |
3775 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { | 3774 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { |
3776 DCHECK_EQ(CompareICState::RECEIVER, state()); | 3775 DCHECK_EQ(CompareICState::RECEIVER, state()); |
3777 Label miss; | 3776 Label miss; |
(...skipping 23 matching lines...)
3801 __ GetWeakValue(a4, cell); | 3800 __ GetWeakValue(a4, cell); |
3802 __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); | 3801 __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); |
3803 __ ld(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); | 3802 __ ld(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); |
3804 __ Branch(&miss, ne, a2, Operand(a4)); | 3803 __ Branch(&miss, ne, a2, Operand(a4)); |
3805 __ Branch(&miss, ne, a3, Operand(a4)); | 3804 __ Branch(&miss, ne, a3, Operand(a4)); |
3806 | 3805 |
3807 if (Token::IsEqualityOp(op())) { | 3806 if (Token::IsEqualityOp(op())) { |
3808 __ Ret(USE_DELAY_SLOT); | 3807 __ Ret(USE_DELAY_SLOT); |
3809 __ dsubu(v0, a0, a1); | 3808 __ dsubu(v0, a0, a1); |
3810 } else if (is_strong(strength())) { | 3809 } else if (is_strong(strength())) { |
3811 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1); | 3810 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); |
3812 } else { | 3811 } else { |
3813 if (op() == Token::LT || op() == Token::LTE) { | 3812 if (op() == Token::LT || op() == Token::LTE) { |
3814 __ li(a2, Operand(Smi::FromInt(GREATER))); | 3813 __ li(a2, Operand(Smi::FromInt(GREATER))); |
3815 } else { | 3814 } else { |
3816 __ li(a2, Operand(Smi::FromInt(LESS))); | 3815 __ li(a2, Operand(Smi::FromInt(LESS))); |
3817 } | 3816 } |
3818 __ Push(a1, a0, a2); | 3817 __ Push(a1, a0, a2); |
3819 __ TailCallRuntime(Runtime::kCompare, 3, 1); | 3818 __ TailCallRuntime(Runtime::kCompare, 3); |
3820 } | 3819 } |
3821 | 3820 |
3822 __ bind(&miss); | 3821 __ bind(&miss); |
3823 GenerateMiss(masm); | 3822 GenerateMiss(masm); |
3824 } | 3823 } |
3825 | 3824 |
3826 | 3825 |
3827 void CompareICStub::GenerateMiss(MacroAssembler* masm) { | 3826 void CompareICStub::GenerateMiss(MacroAssembler* masm) { |
3828 { | 3827 { |
3829 // Call the runtime system in a fresh internal frame. | 3828 // Call the runtime system in a fresh internal frame. |
(...skipping 1365 matching lines...)
5195 | 5194 |
5196 // Check that value is not the_hole. | 5195 // Check that value is not the_hole. |
5197 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 5196 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
5198 __ Branch(&slow_case, eq, result_reg, Operand(at)); | 5197 __ Branch(&slow_case, eq, result_reg, Operand(at)); |
5199 __ Ret(); | 5198 __ Ret(); |
5200 | 5199 |
5201 // Fallback to the runtime. | 5200 // Fallback to the runtime. |
5202 __ bind(&slow_case); | 5201 __ bind(&slow_case); |
5203 __ SmiTag(slot_reg); | 5202 __ SmiTag(slot_reg); |
5204 __ Push(slot_reg); | 5203 __ Push(slot_reg); |
5205 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1); | 5204 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1); |
5206 } | 5205 } |
5207 | 5206 |
5208 | 5207 |
5209 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5208 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { |
5210 Register context_reg = cp; | 5209 Register context_reg = cp; |
5211 Register slot_reg = a2; | 5210 Register slot_reg = a2; |
5212 Register value_reg = a0; | 5211 Register value_reg = a0; |
5213 Register cell_reg = a4; | 5212 Register cell_reg = a4; |
5214 Register cell_value_reg = a5; | 5213 Register cell_value_reg = a5; |
5215 Register cell_details_reg = a6; | 5214 Register cell_details_reg = a6; |
(...skipping 94 matching lines...)
5310 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg, | 5309 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg, |
5311 FieldMemOperand(value_reg, HeapObject::kMapOffset)); | 5310 FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
5312 | 5311 |
5313 // Fallback to the runtime. | 5312 // Fallback to the runtime. |
5314 __ bind(&slow_case); | 5313 __ bind(&slow_case); |
5315 __ SmiTag(slot_reg); | 5314 __ SmiTag(slot_reg); |
5316 __ Push(slot_reg, value_reg); | 5315 __ Push(slot_reg, value_reg); |
5317 __ TailCallRuntime(is_strict(language_mode()) | 5316 __ TailCallRuntime(is_strict(language_mode()) |
5318 ? Runtime::kStoreGlobalViaContext_Strict | 5317 ? Runtime::kStoreGlobalViaContext_Strict |
5319 : Runtime::kStoreGlobalViaContext_Sloppy, | 5318 : Runtime::kStoreGlobalViaContext_Sloppy, |
5320 2, 1); | 5319 2); |
5321 } | 5320 } |
5322 | 5321 |
5323 | 5322 |
5324 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 5323 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
5325 int64_t offset = (ref0.address() - ref1.address()); | 5324 int64_t offset = (ref0.address() - ref1.address()); |
5326 DCHECK(static_cast<int>(offset) == offset); | 5325 DCHECK(static_cast<int>(offset) == offset); |
5327 return static_cast<int>(offset); | 5326 return static_cast<int>(offset); |
5328 } | 5327 } |
5329 | 5328 |
5330 | 5329 |
(...skipping 104 matching lines...)
5435 // Check if the function scheduled an exception. | 5434 // Check if the function scheduled an exception. |
5436 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); | 5435 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); |
5437 __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate))); | 5436 __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate))); |
5438 __ ld(a5, MemOperand(at)); | 5437 __ ld(a5, MemOperand(at)); |
5439 __ Branch(&promote_scheduled_exception, ne, a4, Operand(a5)); | 5438 __ Branch(&promote_scheduled_exception, ne, a4, Operand(a5)); |
5440 | 5439 |
5441 __ Ret(); | 5440 __ Ret(); |
5442 | 5441 |
5443 // Re-throw by promoting a scheduled exception. | 5442 // Re-throw by promoting a scheduled exception. |
5444 __ bind(&promote_scheduled_exception); | 5443 __ bind(&promote_scheduled_exception); |
5445 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); | 5444 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0); |
5446 | 5445 |
5447 // HandleScope limit has changed. Delete allocated extensions. | 5446 // HandleScope limit has changed. Delete allocated extensions. |
5448 __ bind(&delete_allocated_handles); | 5447 __ bind(&delete_allocated_handles); |
5449 __ sd(s1, MemOperand(s3, kLimitOffset)); | 5448 __ sd(s1, MemOperand(s3, kLimitOffset)); |
5450 __ mov(s0, v0); | 5449 __ mov(s0, v0); |
5451 __ mov(a0, v0); | 5450 __ mov(a0, v0); |
5452 __ PrepareCallCFunction(1, s1); | 5451 __ PrepareCallCFunction(1, s1); |
5453 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); | 5452 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); |
5454 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), | 5453 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), |
5455 1); | 5454 1); |
(...skipping 172 matching lines...)
5628 MemOperand(fp, 6 * kPointerSize), NULL); | 5627 MemOperand(fp, 6 * kPointerSize), NULL); |
5629 } | 5628 } |
5630 | 5629 |
5631 | 5630 |
5632 #undef __ | 5631 #undef __ |
5633 | 5632 |
5634 } // namespace internal | 5633 } // namespace internal |
5635 } // namespace v8 | 5634 } // namespace v8 |
5636 | 5635 |
5637 #endif // V8_TARGET_ARCH_MIPS64 | 5636 #endif // V8_TARGET_ARCH_MIPS64 |