Chromium Code Reviews

Unified Diff: src/mips64/code-stubs-mips64.cc

Issue 1553703002: [runtime] TailCallRuntime and CallRuntime should use default argument counts (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@2015-12-29_TailCallRuntime_default_result_size_1_1550923002
Patch Set: Created 4 years, 11 months ago
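
The change itself is mechanical: every TailCallRuntime / CallRuntime call site drops its explicit argument count, and the MacroAssembler instead looks the count up in the runtime function table. A rough sketch of how such a count-free overload can work (Runtime::FunctionForId() and the nargs field are real V8 APIs, but the PrepareCEntryArgs() helper and the exact body below are illustrative assumptions, not code from this patch):

// Sketch only: derive the argument count from the runtime function table
// instead of trusting a hand-written constant at every call site.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  // A variadic runtime function (nargs < 0) would still need the caller
  // to set up the count itself; this overload assumes a fixed arity.
  DCHECK_LE(0, function->nargs);
  PrepareCEntryArgs(function->nargs);  // Assumed helper: loads the count into a0.
  JumpToExternalReference(ExternalReference(fid, isolate()));
}

A call such as __ TailCallRuntime(Runtime::kSubString, 3) then shrinks to __ TailCallRuntime(Runtime::kSubString), and the count can no longer drift out of sync with the runtime function's actual arity, which is the point of this CL.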
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_MIPS64

 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
 #include "src/ic/handler-compiler.h"
(...skipping 707 matching lines...)
                                                     a5);
   }
   // Never falls through to here.

   __ bind(&slow);
   // Prepare for call to builtin. Push object pointers, a0 (lhs) first,
   // a1 (rhs) second.
   __ Push(lhs, rhs);
   // Figure out which native to call and setup the arguments.
   if (cc == eq) {
-    __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2);
+    __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
   } else {
     int ncr;  // NaN compare result.
     if (cc == lt || cc == le) {
       ncr = GREATER;
     } else {
       DCHECK(cc == gt || cc == ge);  // Remaining cases.
       ncr = LESS;
     }
     __ li(a0, Operand(Smi::FromInt(ncr)));
     __ push(a0);

     // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
     // tagged as a small integer.
-    __ TailCallRuntime(
-        is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare,
-        3);
+    __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
+                                             : Runtime::kCompare);
   }

   __ bind(&miss);
   GenerateMiss(masm);
 }


 void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
   __ mov(t9, ra);
   __ pop(ra);
(...skipping 214 matching lines...)
   // double_exponent may not contain the exponent value if the input was a
   // smi. We set it with exponent value before bailing out.
   __ mtc1(exponent, single_scratch);
   __ cvt_d_w(double_exponent, single_scratch);

   // Returning or bailing out.
   Counters* counters = isolate()->counters();
   if (exponent_type() == ON_STACK) {
     // The arguments are still on the stack.
     __ bind(&call_runtime);
-    __ TailCallRuntime(Runtime::kMathPowRT, 2);
+    __ TailCallRuntime(Runtime::kMathPowRT);

     // The stub is called from non-optimized code, which expects the result
     // as heap number in exponent.
     __ bind(&done);
     __ AllocateHeapNumber(
         heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
     __ sdc1(double_result,
             FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
     DCHECK(heapnumber.is(v0));
     __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
(...skipping 537 matching lines...)
   __ Ret(USE_DELAY_SLOT);
   __ StoreRoot(result,
                Heap::kInstanceofCacheAnswerRootIndex);  // In delay slot.

   // Found Proxy or access check needed: Call the runtime
   __ bind(&fast_runtime_fallback);
   __ Push(object, function_prototype);
   // Invalidate the instanceof cache.
   DCHECK(Smi::FromInt(0) == 0);
   __ StoreRoot(zero_reg, Heap::kInstanceofCacheFunctionRootIndex);
-  __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2);
+  __ TailCallRuntime(Runtime::kHasInPrototypeChain);

   // Slow-case: Call the %InstanceOf runtime function.
   __ bind(&slow_case);
   __ Push(object, function);
-  __ TailCallRuntime(Runtime::kInstanceOf, 2);
+  __ TailCallRuntime(Runtime::kInstanceOf);
 }


 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
   Label miss;
   Register receiver = LoadDescriptor::ReceiverRegister();
   // Ensure that the vector and slot registers won't be clobbered before
   // calling the miss handler.
   DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::VectorRegister(),
                      LoadWithVectorDescriptor::SlotRegister()));
(...skipping 50 matching lines...)
   __ dsubu(a3, a0, a1);
   __ SmiScale(a7, a3, kPointerSizeLog2);
   __ Daddu(a3, a2, Operand(a7));
   __ Ret(USE_DELAY_SLOT);
   __ ld(v0, MemOperand(a3, kDisplacement));

   // Slow-case: Handle non-smi or out-of-bounds access to arguments
   // by calling the runtime system.
   __ bind(&slow);
   __ push(a1);
-  __ TailCallRuntime(Runtime::kArguments, 1);
+  __ TailCallRuntime(Runtime::kArguments);
 }


 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
   // a1 : function
   // a2 : number of parameters (tagged)
   // a3 : parameters pointer

   DCHECK(a1.is(ArgumentsAccessNewDescriptor::function()));
   DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count()));
   DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

   // Check if the calling frame is an arguments adaptor frame.
   Label runtime;
   __ ld(a4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   __ ld(a0, MemOperand(a4, StandardFrameConstants::kContextOffset));
   __ Branch(&runtime, ne, a0,
             Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

   // Patch the arguments.length and the parameters pointer in the current frame.
   __ ld(a2, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset));
   __ SmiScale(a7, a2, kPointerSizeLog2);
   __ Daddu(a4, a4, Operand(a7));
   __ daddiu(a3, a4, StandardFrameConstants::kCallerSPOffset);

   __ bind(&runtime);
   __ Push(a1, a3, a2);
-  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3);
+  __ TailCallRuntime(Runtime::kNewSloppyArguments);
 }


 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   // a1 : function
   // a2 : number of parameters (tagged)
   // a3 : parameters pointer
   // Registers used over whole function:
   //  a5 : arguments count (tagged)
   //  a6 : mapped parameter count (tagged)
(...skipping 193 matching lines...)
   __ bind(&arguments_test);
   __ Branch(&arguments_loop, lt, a6, Operand(a5));

   // Return.
   __ Ret();

   // Do the runtime call to allocate the arguments object.
   // a5 = argument count (tagged)
   __ bind(&runtime);
   __ Push(a1, a3, a5);
-  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3);
+  __ TailCallRuntime(Runtime::kNewSloppyArguments);
 }


 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
   // Return address is in ra.
   Label slow;

   Register receiver = LoadDescriptor::ReceiverRegister();
   Register key = LoadDescriptor::NameRegister();

   // Check that the key is an array index, that is Uint32.
   __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask));
   __ Branch(&slow, ne, t0, Operand(zero_reg));

   // Everything is fine, call runtime.
   __ Push(receiver, key);  // Receiver, key.

   // Perform tail call to the entry.
-  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2);
+  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);

   __ bind(&slow);
   PropertyAccessCompiler::TailCallBuiltin(
       masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
 }


 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   // a1 : function
   // a2 : number of parameters (tagged)
(...skipping 73 matching lines...)
   __ Dsubu(a2, a2, Operand(1));
   __ Branch(&loop, ne, a2, Operand(zero_reg));

   // Return.
   __ bind(&done);
   __ Ret();

   // Do the runtime call to allocate the arguments object.
   __ bind(&runtime);
   __ Push(a1, a3, a2);
-  __ TailCallRuntime(Runtime::kNewStrictArguments, 3);
+  __ TailCallRuntime(Runtime::kNewStrictArguments);
 }


 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
   // sp[0] : language mode
   // sp[4] : index of rest parameter
   // sp[8] : number of parameters
   // sp[12] : receiver displacement
   // Check if the calling frame is an arguments adaptor frame.

   Label runtime;
   __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
   __ Branch(&runtime, ne, a3,
             Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

   // Patch the arguments.length and the parameters pointer.
   __ ld(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
   __ sd(a1, MemOperand(sp, 2 * kPointerSize));
   __ SmiScale(at, a1, kPointerSizeLog2);

   __ Daddu(a3, a2, Operand(at));

   __ Daddu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
   __ sd(a3, MemOperand(sp, 3 * kPointerSize));

   // Do the runtime call to allocate the arguments object.
   __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kNewRestParam, 4);
+  __ TailCallRuntime(Runtime::kNewRestParam);
 }


 void RegExpExecStub::Generate(MacroAssembler* masm) {
   // Just jump directly to runtime if native RegExp is not selected at compile
   // time or if regexp entry in generated code is turned off runtime switch or
   // at compilation.
 #ifdef V8_INTERPRETED_REGEXP
-  __ TailCallRuntime(Runtime::kRegExpExec, 4);
+  __ TailCallRuntime(Runtime::kRegExpExec);
 #else  // V8_INTERPRETED_REGEXP

   // Stack frame on entry.
   //  sp[0]: last_match_info (expected JSArray)
   //  sp[4]: previous index
   //  sp[8]: subject string
   //  sp[12]: JSRegExp object

   const int kLastMatchInfoOffset = 0 * kPointerSize;
   const int kPreviousIndexOffset = 1 * kPointerSize;
(...skipping 296 matching lines...)
   // stack overflow (on the backtrack stack) was detected in RegExp code but
   // haven't created the exception yet. Handle that in the runtime system.
   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
   __ li(a1, Operand(isolate()->factory()->the_hole_value()));
   __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                       isolate())));
   __ ld(v0, MemOperand(a2, 0));
   __ Branch(&runtime, eq, v0, Operand(a1));

   // For exception, throw the exception again.
-  __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4);
+  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

   __ bind(&failure);
   // For failure and exception return null.
   __ li(v0, Operand(isolate()->factory()->null_value()));
   __ DropAndRet(4);

   // Process the result from the native regexp code.
   __ bind(&success);

   __ lw(a1, UntagSmiFieldMemOperand(
(...skipping 75 matching lines...)
   __ daddiu(a0, a0, kPointerSize);  // In branch delay slot.

   __ bind(&done);

   // Return last match info.
   __ ld(v0, MemOperand(sp, kLastMatchInfoOffset));
   __ DropAndRet(4);

   // Do the runtime call to execute the regexp.
   __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kRegExpExec, 4);
+  __ TailCallRuntime(Runtime::kRegExpExec);

   // Deferred code for string handling.
   // (6) Not a long external string? If yes, go to (8).
   __ bind(&not_seq_nor_cons);
   // Go to (8).
   __ Branch(&not_long_external, gt, a1, Operand(kExternalStringTag));

   // (7) External string. Make it, offset-wise, look like a sequential string.
   __ bind(&external_string);
   __ ld(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
(...skipping 387 matching lines...)
 }


 void CallICStub::GenerateMiss(MacroAssembler* masm) {
   FrameScope scope(masm, StackFrame::INTERNAL);

   // Push the receiver and the function and feedback info.
   __ Push(a1, a2, a3);

   // Call the entry.
-  __ CallRuntime(Runtime::kCallIC_Miss, 3);
+  __ CallRuntime(Runtime::kCallIC_Miss);

   // Move result to a1 and exit the internal frame.
   __ mov(a1, v0);
 }


 void StringCharCodeAtGenerator::GenerateSlow(
     MacroAssembler* masm, EmbedMode embed_mode,
     const RuntimeCallHelper& call_helper) {
   __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

   // Index is not a smi.
   __ bind(&index_not_smi_);
   // If index is a heap number, try converting it to an integer.
   __ CheckMap(index_,
               result_,
               Heap::kHeapNumberMapRootIndex,
               index_not_number_,
               DONT_DO_SMI_CHECK);
   call_helper.BeforeCall(masm);
   // Consumed by runtime conversion function:
   if (embed_mode == PART_OF_IC_HANDLER) {
     __ Push(LoadWithVectorDescriptor::VectorRegister(),
             LoadWithVectorDescriptor::SlotRegister(), object_, index_);
   } else {
     __ Push(object_, index_);
   }
   if (index_flags_ == STRING_INDEX_IS_NUMBER) {
-    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
+    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
   } else {
     DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
     // NumberToSmi discards numbers that are not exact integers.
-    __ CallRuntime(Runtime::kNumberToSmi, 1);
+    __ CallRuntime(Runtime::kNumberToSmi);
   }

   // Save the conversion result before the pop instructions below
   // have a chance to overwrite it.

   __ Move(index_, v0);
   if (embed_mode == PART_OF_IC_HANDLER) {
     __ Pop(LoadWithVectorDescriptor::VectorRegister(),
            LoadWithVectorDescriptor::SlotRegister(), object_);
   } else {
     __ pop(object_);
   }
   // Reload the instance type.
   __ ld(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
   __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
   call_helper.AfterCall(masm);
   // If index is still not a smi, it must be out of range.
   __ JumpIfNotSmi(index_, index_out_of_range_);
   // Otherwise, return to the fast path.
   __ Branch(&got_smi_index_);

   // Call runtime. We get here when the receiver is a string and the
   // index is a number, but the code of getting the actual character
   // is too complex (e.g., when the string needs to be flattened).
   __ bind(&call_runtime_);
   call_helper.BeforeCall(masm);
   __ SmiTag(index_);
   __ Push(object_, index_);
-  __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
+  __ CallRuntime(Runtime::kStringCharCodeAtRT);

   __ Move(result_, v0);

   call_helper.AfterCall(masm);
   __ jmp(&exit_);

   __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
 }


(...skipping 18 matching lines...)


 void StringCharFromCodeGenerator::GenerateSlow(
     MacroAssembler* masm,
     const RuntimeCallHelper& call_helper) {
   __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

   __ bind(&slow_case_);
   call_helper.BeforeCall(masm);
   __ push(code_);
-  __ CallRuntime(Runtime::kStringCharFromCode, 1);
+  __ CallRuntime(Runtime::kStringCharFromCode);
   __ Move(result_, v0);

   call_helper.AfterCall(masm);
   __ Branch(&exit_);

   __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
 }


 enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };
(...skipping 241 matching lines...)
   StringHelper::GenerateCopyCharacters(
       masm, a1, a5, a2, a3, String::TWO_BYTE_ENCODING);

   __ bind(&return_v0);
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->sub_string_native(), 1, a3, a4);
   __ DropAndRet(3);

   // Just jump to runtime to create the sub string.
   __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString, 3);
+  __ TailCallRuntime(Runtime::kSubString);

   __ bind(&single_char);
   // v0: original string
   // a1: instance type
   // a2: length
   // a3: from index (untagged)
   __ SmiTag(a3);
   StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime,
                                   STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
   generator.GenerateFast(masm);
(...skipping 24 matching lines...)
   __ Branch(&not_string, hs, a1, Operand(FIRST_NONSTRING_TYPE));
   // Check if string has a cached array index.
   __ lwu(a2, FieldMemOperand(a0, String::kHashFieldOffset));
   __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask));
   __ Branch(&slow_string, ne, at, Operand(zero_reg));
   __ IndexFromHash(a2, a0);
   __ Ret(USE_DELAY_SLOT);
   __ mov(v0, a0);
   __ bind(&slow_string);
   __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kStringToNumber, 1);
+  __ TailCallRuntime(Runtime::kStringToNumber);
   __ bind(&not_string);

   Label not_oddball;
   __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
   __ Ret(USE_DELAY_SLOT);
   __ ld(v0, FieldMemOperand(a0, Oddball::kToNumberOffset));
   __ bind(&not_oddball);

   __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToNumber, 1);
+  __ TailCallRuntime(Runtime::kToNumber);
 }


 void ToLengthStub::Generate(MacroAssembler* masm) {
   // The ToLength stub takes on argument in a0.
   Label not_smi, positive_smi;
   __ JumpIfNotSmi(a0, &not_smi);
   STATIC_ASSERT(kSmiTag == 0);
   __ Branch(&positive_smi, ge, a0, Operand(zero_reg));
   __ mov(a0, zero_reg);
   __ bind(&positive_smi);
   __ Ret(USE_DELAY_SLOT);
   __ mov(v0, a0);
   __ bind(&not_smi);

   __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToLength, 1);
+  __ TailCallRuntime(Runtime::kToLength);
 }


 void ToStringStub::Generate(MacroAssembler* masm) {
   // The ToString stub takes on argument in a0.
   Label is_number;
   __ JumpIfSmi(a0, &is_number);

   Label not_string;
   __ GetObjectType(a0, a1, a1);
(...skipping 11 matching lines...)
   __ TailCallStub(&stub);
   __ bind(&not_heap_number);

   Label not_oddball;
   __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
   __ Ret(USE_DELAY_SLOT);
   __ ld(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
   __ bind(&not_oddball);

   __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToString, 1);
+  __ TailCallRuntime(Runtime::kToString);
 }


 void StringHelper::GenerateFlatOneByteStringEquals(
     MacroAssembler* masm, Register left, Register right, Register scratch1,
     Register scratch2, Register scratch3) {
   Register length = scratch1;

   // Compare lengths.
   Label strings_not_equal, check_zero_length;
(...skipping 117 matching lines...)
   Label runtime;
   __ JumpIfNotBothSequentialOneByteStrings(a1, a0, a2, a3, &runtime);

   // Compare flat ASCII strings natively.
   __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2,
                       a3);
   StringHelper::GenerateCompareFlatOneByteStrings(masm, a1, a0, a2, a3, t0, t1);

   __ bind(&runtime);
   __ Push(a1, a0);
-  __ TailCallRuntime(Runtime::kStringCompare, 2);
+  __ TailCallRuntime(Runtime::kStringCompare);
 }


 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a1    : left
   //  -- a0    : right
   //  -- ra    : return address
   // -----------------------------------

(...skipping 18 matching lines...)
 }


 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
   DCHECK_EQ(CompareICState::BOOLEAN, state());
   Label miss;

   __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
   __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
   if (op() != Token::EQ_STRICT && is_strong(strength())) {
-    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0);
+    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
   } else {
     if (!Token::IsEqualityOp(op())) {
       __ ld(a1, FieldMemOperand(a1, Oddball::kToNumberOffset));
       __ AssertSmi(a1);
       __ ld(a0, FieldMemOperand(a0, Oddball::kToNumberOffset));
       __ AssertSmi(a0);
     }
     __ Ret(USE_DELAY_SLOT);
     __ Dsubu(v0, a1, a0);
   }
(...skipping 272 matching lines...)
                                                   tmp3);
   } else {
     StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
                                                     tmp2, tmp3, tmp4);
   }

   // Handle more complex cases in runtime.
   __ bind(&runtime);
   __ Push(left, right);
   if (equality) {
-    __ TailCallRuntime(Runtime::kStringEquals, 2);
+    __ TailCallRuntime(Runtime::kStringEquals);
   } else {
-    __ TailCallRuntime(Runtime::kStringCompare, 2);
+    __ TailCallRuntime(Runtime::kStringCompare);
   }

   __ bind(&miss);
   GenerateMiss(masm);
 }


 void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
   DCHECK_EQ(CompareICState::RECEIVER, state());
   Label miss;
(...skipping 23 matching lines...)
   __ GetWeakValue(a4, cell);
   __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
   __ ld(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
   __ Branch(&miss, ne, a2, Operand(a4));
   __ Branch(&miss, ne, a3, Operand(a4));

   if (Token::IsEqualityOp(op())) {
     __ Ret(USE_DELAY_SLOT);
     __ dsubu(v0, a0, a1);
   } else if (is_strong(strength())) {
-    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0);
+    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
   } else {
     if (op() == Token::LT || op() == Token::LTE) {
       __ li(a2, Operand(Smi::FromInt(GREATER)));
     } else {
       __ li(a2, Operand(Smi::FromInt(LESS)));
     }
     __ Push(a1, a0, a2);
-    __ TailCallRuntime(Runtime::kCompare, 3);
+    __ TailCallRuntime(Runtime::kCompare);
   }

   __ bind(&miss);
   GenerateMiss(masm);
 }


 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
   {
     // Call the runtime system in a fresh internal frame.
(...skipping 1365 matching lines...)

   // Check that value is not the_hole.
   __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
   __ Branch(&slow_case, eq, result_reg, Operand(at));
   __ Ret();

   // Fallback to the runtime.
   __ bind(&slow_case);
   __ SmiTag(slot_reg);
   __ Push(slot_reg);
-  __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1);
+  __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
 }


 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
   Register context_reg = cp;
   Register slot_reg = a2;
   Register value_reg = a0;
   Register cell_reg = a4;
   Register cell_value_reg = a5;
   Register cell_details_reg = a6;
(...skipping 93 matching lines...)
         FieldMemOperand(cell_value_reg, HeapObject::kMapOffset));
   __ Branch(&fast_heapobject_case, eq, cell_value_map_reg,
             FieldMemOperand(value_reg, HeapObject::kMapOffset));

   // Fallback to the runtime.
   __ bind(&slow_case);
   __ SmiTag(slot_reg);
   __ Push(slot_reg, value_reg);
-  __ TailCallRuntime(is_strict(language_mode())
-                         ? Runtime::kStoreGlobalViaContext_Strict
-                         : Runtime::kStoreGlobalViaContext_Sloppy,
-                     2);
+  __ TailCallRuntime(is_strict(language_mode())
+                         ? Runtime::kStoreGlobalViaContext_Strict
+                         : Runtime::kStoreGlobalViaContext_Sloppy);
 }


 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
   int64_t offset = (ref0.address() - ref1.address());
   DCHECK(static_cast<int>(offset) == offset);
   return static_cast<int>(offset);
 }


(...skipping 104 matching lines...)
   // Check if the function scheduled an exception.
   __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
   __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate)));
   __ ld(a5, MemOperand(at));
   __ Branch(&promote_scheduled_exception, ne, a4, Operand(a5));

   __ Ret();

   // Re-throw by promoting a scheduled exception.
   __ bind(&promote_scheduled_exception);
-  __ TailCallRuntime(Runtime::kPromoteScheduledException, 0);
+  __ TailCallRuntime(Runtime::kPromoteScheduledException);

   // HandleScope limit has changed. Delete allocated extensions.
   __ bind(&delete_allocated_handles);
   __ sd(s1, MemOperand(s3, kLimitOffset));
   __ mov(s0, v0);
   __ mov(a0, v0);
   __ PrepareCallCFunction(1, s1);
   __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
   __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
                    1);
(...skipping 172 matching lines...)
                            MemOperand(fp, 6 * kPointerSize), NULL);
 }


 #undef __

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_MIPS64
