Chromium Code Reviews

Unified diff: src/mips/code-stubs-mips.cc

Issue 1550923002: Remove uses of result size in TailCallRuntime and friends (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: change spaces (created 4 years, 11 months ago)
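
For context: TailCallRuntime and the related tail-call helpers previously took both the number of arguments and a result size, and every call site in this file passed a result size of 1, so the parameter carried no information. This CL drops it from the call sites. A minimal sketch of the presumed simplified helper (assumed mechanics, not code from this CL; the actual declaration lives in macro-assembler-mips.h and may differ):

    // Hypothetical two-argument overload: the result size is a fixed
    // property of the runtime function, so it can be looked up from the
    // runtime function table instead of being passed by every caller.
    void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                         int num_arguments) {
      const Runtime::Function* function = Runtime::FunctionForId(fid);
      TailCallExternalReference(ExternalReference(function, isolate()),
                                num_arguments, function->result_size);
    }

Call sites then shrink from, e.g., __ TailCallRuntime(Runtime::kSubString, 3, 1); to __ TailCallRuntime(Runtime::kSubString, 3);, which is the pattern repeated throughout the diff below.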
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #if V8_TARGET_ARCH_MIPS
 
 #include "src/base/bits.h"
 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
(...skipping 710 matching lines...)
                        t1);
   }
   // Never falls through to here.
 
   __ bind(&slow);
   // Prepare for call to builtin. Push object pointers, a0 (lhs) first,
   // a1 (rhs) second.
   __ Push(lhs, rhs);
   // Figure out which native to call and setup the arguments.
   if (cc == eq) {
-    __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2,
-                       1);
+    __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2);
   } else {
     int ncr;  // NaN compare result.
     if (cc == lt || cc == le) {
       ncr = GREATER;
     } else {
       DCHECK(cc == gt || cc == ge);  // Remaining cases.
       ncr = LESS;
     }
     __ li(a0, Operand(Smi::FromInt(ncr)));
     __ push(a0);
 
     // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
     // tagged as a small integer.
-    __ TailCallRuntime(
-        is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 3,
-        1);
+    __ TailCallRuntime(
+        is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare,
+        3);
   }
 
   __ bind(&miss);
   GenerateMiss(masm);
 }
 
 
 void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
   __ mov(t9, ra);
   __ pop(ra);
(...skipping 213 matching lines...)
   // double_exponent may not contain the exponent value if the input was a
   // smi. We set it with exponent value before bailing out.
   __ mtc1(exponent, single_scratch);
   __ cvt_d_w(double_exponent, single_scratch);
 
   // Returning or bailing out.
   Counters* counters = isolate()->counters();
   if (exponent_type() == ON_STACK) {
     // The arguments are still on the stack.
     __ bind(&call_runtime);
-    __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);
+    __ TailCallRuntime(Runtime::kMathPowRT, 2);
 
     // The stub is called from non-optimized code, which expects the result
     // as heap number in exponent.
     __ bind(&done);
     __ AllocateHeapNumber(
         heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
     __ sdc1(double_result,
             FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
     DCHECK(heapnumber.is(v0));
     __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
(...skipping 533 matching lines...)
   __ Ret(USE_DELAY_SLOT);
   __ StoreRoot(result,
                Heap::kInstanceofCacheAnswerRootIndex);  // In delay slot.
 
   // Found Proxy or access check needed: Call the runtime
   __ bind(&fast_runtime_fallback);
   __ Push(object, function_prototype);
   // Invalidate the instanceof cache.
   DCHECK(Smi::FromInt(0) == 0);
   __ StoreRoot(zero_reg, Heap::kInstanceofCacheFunctionRootIndex);
-  __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2, 1);
+  __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2);
 
   // Slow-case: Call the %InstanceOf runtime function.
   __ bind(&slow_case);
   __ Push(object, function);
-  __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
+  __ TailCallRuntime(Runtime::kInstanceOf, 2);
 }
 
 
 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
   Label miss;
   Register receiver = LoadDescriptor::ReceiverRegister();
   // Ensure that the vector and slot registers won't be clobbered before
   // calling the miss handler.
   DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::VectorRegister(),
                      LoadWithVectorDescriptor::SlotRegister()));
(...skipping 50 matching lines...)
   __ subu(a3, a0, a1);
   __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize);
   __ Addu(a3, a2, Operand(t3));
   __ Ret(USE_DELAY_SLOT);
   __ lw(v0, MemOperand(a3, kDisplacement));
 
   // Slow-case: Handle non-smi or out-of-bounds access to arguments
   // by calling the runtime system.
   __ bind(&slow);
   __ push(a1);
-  __ TailCallRuntime(Runtime::kArguments, 1, 1);
+  __ TailCallRuntime(Runtime::kArguments, 1);
 }
 
 
 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
   // a1 : function
   // a2 : number of parameters (tagged)
   // a3 : parameters pointer
 
   DCHECK(a1.is(ArgumentsAccessNewDescriptor::function()));
   DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count()));
   DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
 
   // Check if the calling frame is an arguments adaptor frame.
   Label runtime;
   __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset));
   __ Branch(&runtime, ne, a0,
             Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
 
   // Patch the arguments.length and the parameters pointer in the current frame.
   __ lw(a2, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset));
   __ sll(t3, a2, 1);
   __ Addu(t0, t0, Operand(t3));
   __ addiu(a3, t0, StandardFrameConstants::kCallerSPOffset);
 
   __ bind(&runtime);
   __ Push(a1, a3, a2);
-  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
+  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3);
 }
 
 
 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   // a1 : function
   // a2 : number of parameters (tagged)
   // a3 : parameters pointer
   // Registers used over whole function:
   //  t1 : arguments count (tagged)
   //  t2 : mapped parameter count (tagged)
(...skipping 193 matching lines...)
   __ bind(&arguments_test);
   __ Branch(&arguments_loop, lt, t2, Operand(t1));
 
   // Return.
   __ Ret();
 
   // Do the runtime call to allocate the arguments object.
   // t1 = argument count (tagged)
   __ bind(&runtime);
   __ Push(a1, a3, t1);
-  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
+  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3);
 }
 
 
 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
   // Return address is in ra.
   Label slow;
 
   Register receiver = LoadDescriptor::ReceiverRegister();
   Register key = LoadDescriptor::NameRegister();
 
   // Check that the key is an array index, that is Uint32.
   __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask));
   __ Branch(&slow, ne, t0, Operand(zero_reg));
 
   // Everything is fine, call runtime.
   __ Push(receiver, key);  // Receiver, key.
 
   // Perform tail call to the entry.
-  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1);
+  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2);
 
   __ bind(&slow);
   PropertyAccessCompiler::TailCallBuiltin(
       masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
 }
 
 
 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   // a1 : function
   // a2 : number of parameters (tagged)
(...skipping 73 matching lines...)
   __ Subu(a2, a2, Operand(1));
   __ Branch(&loop, ne, a2, Operand(zero_reg));
 
   // Return.
   __ bind(&done);
   __ Ret();
 
   // Do the runtime call to allocate the arguments object.
   __ bind(&runtime);
   __ Push(a1, a3, a2);
-  __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
+  __ TailCallRuntime(Runtime::kNewStrictArguments, 3);
 }
 
 
 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
   // sp[0] : language mode
   // sp[4] : index of rest parameter
   // sp[8] : number of parameters
   // sp[12] : receiver displacement
   // Check if the calling frame is an arguments adaptor frame.
 
   Label runtime;
   __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
   __ Branch(&runtime, ne, a3,
             Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
 
   // Patch the arguments.length and the parameters pointer.
   __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
   __ sw(a1, MemOperand(sp, 2 * kPointerSize));
   __ sll(at, a1, kPointerSizeLog2 - kSmiTagSize);
   __ Addu(a3, a2, Operand(at));
 
   __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
   __ sw(a3, MemOperand(sp, 3 * kPointerSize));
 
   // Do the runtime call to allocate the arguments object.
   __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kNewRestParam, 4, 1);
+  __ TailCallRuntime(Runtime::kNewRestParam, 4);
 }
 
 
 void RegExpExecStub::Generate(MacroAssembler* masm) {
   // Just jump directly to runtime if native RegExp is not selected at compile
   // time or if regexp entry in generated code is turned off by a runtime
   // switch.
 #ifdef V8_INTERPRETED_REGEXP
-  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
+  __ TailCallRuntime(Runtime::kRegExpExec, 4);
 #else  // V8_INTERPRETED_REGEXP
 
   // Stack frame on entry.
   //  sp[0]: last_match_info (expected JSArray)
   //  sp[4]: previous index
   //  sp[8]: subject string
   //  sp[12]: JSRegExp object
 
   const int kLastMatchInfoOffset = 0 * kPointerSize;
   const int kPreviousIndexOffset = 1 * kPointerSize;
(...skipping 264 matching lines...)
   // stack overflow (on the backtrack stack) was detected in RegExp code but
   // haven't created the exception yet. Handle that in the runtime system.
   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
   __ li(a1, Operand(isolate()->factory()->the_hole_value()));
   __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                       isolate())));
   __ lw(v0, MemOperand(a2, 0));
   __ Branch(&runtime, eq, v0, Operand(a1));
 
   // For exception, throw the exception again.
-  __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4, 1);
+  __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4);
 
   __ bind(&failure);
   // For failure and exception return null.
   __ li(v0, Operand(isolate()->factory()->null_value()));
   __ DropAndRet(4);
 
   // Process the result from the native regexp code.
   __ bind(&success);
   __ lw(a1,
         FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
(...skipping 75 matching lines...)
   __ addiu(a0, a0, kPointerSize);  // In branch delay slot.
 
   __ bind(&done);
 
   // Return last match info.
   __ lw(v0, MemOperand(sp, kLastMatchInfoOffset));
   __ DropAndRet(4);
 
   // Do the runtime call to execute the regexp.
   __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
+  __ TailCallRuntime(Runtime::kRegExpExec, 4);
 
   // Deferred code for string handling.
   // (6) Not a long external string?  If yes, go to (8).
   __ bind(&not_seq_nor_cons);
   // Go to (8).
   __ Branch(&not_long_external, gt, a1, Operand(kExternalStringTag));
 
   // (7) External string.  Make it, offset-wise, look like a sequential string.
   __ bind(&external_string);
   __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
(...skipping 753 matching lines...)
   StringHelper::GenerateCopyCharacters(
       masm, a1, t1, a2, a3, String::TWO_BYTE_ENCODING);
 
   __ bind(&return_v0);
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
   __ DropAndRet(3);
 
   // Just jump to runtime to create the sub string.
   __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString, 3, 1);
+  __ TailCallRuntime(Runtime::kSubString, 3);
 
   __ bind(&single_char);
   // v0: original string
   // a1: instance type
   // a2: length
   // a3: from index (untagged)
   __ SmiTag(a3, a3);
   StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime,
                                   STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
   generator.GenerateFast(masm);
(...skipping 24 matching lines...)
   __ Branch(&not_string, hs, a1, Operand(FIRST_NONSTRING_TYPE));
   // Check if string has a cached array index.
   __ lw(a2, FieldMemOperand(a0, String::kHashFieldOffset));
   __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask));
   __ Branch(&slow_string, ne, at, Operand(zero_reg));
   __ IndexFromHash(a2, a0);
   __ Ret(USE_DELAY_SLOT);
   __ mov(v0, a0);
   __ bind(&slow_string);
   __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kStringToNumber, 1, 1);
+  __ TailCallRuntime(Runtime::kStringToNumber, 1);
   __ bind(&not_string);
 
   Label not_oddball;
   __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
   __ Ret(USE_DELAY_SLOT);
   __ lw(v0, FieldMemOperand(a0, Oddball::kToNumberOffset));
   __ bind(&not_oddball);
 
   __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToNumber, 1, 1);
+  __ TailCallRuntime(Runtime::kToNumber, 1);
 }
 
 
 void ToLengthStub::Generate(MacroAssembler* masm) {
   // The ToLength stub takes one argument in a0.
   Label not_smi, positive_smi;
   __ JumpIfNotSmi(a0, &not_smi);
   STATIC_ASSERT(kSmiTag == 0);
   __ Branch(&positive_smi, ge, a0, Operand(zero_reg));
   __ mov(a0, zero_reg);
   __ bind(&positive_smi);
   __ Ret(USE_DELAY_SLOT);
   __ mov(v0, a0);
   __ bind(&not_smi);
 
   __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToLength, 1, 1);
+  __ TailCallRuntime(Runtime::kToLength, 1);
 }
 
 
 void ToStringStub::Generate(MacroAssembler* masm) {
   // The ToString stub takes one argument in a0.
   Label is_number;
   __ JumpIfSmi(a0, &is_number);
 
   Label not_string;
   __ GetObjectType(a0, a1, a1);
(...skipping 11 matching lines...)
   __ TailCallStub(&stub);
   __ bind(&not_heap_number);
 
   Label not_oddball;
   __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
   __ Ret(USE_DELAY_SLOT);
   __ lw(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
   __ bind(&not_oddball);
 
   __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToString, 1, 1);
+  __ TailCallRuntime(Runtime::kToString, 1);
 }
 
 
 void StringHelper::GenerateFlatOneByteStringEquals(
     MacroAssembler* masm, Register left, Register right, Register scratch1,
     Register scratch2, Register scratch3) {
   Register length = scratch1;
 
   // Compare lengths.
   Label strings_not_equal, check_zero_length;
(...skipping 117 matching lines...)
   Label runtime;
   __ JumpIfNotBothSequentialOneByteStrings(a1, a0, a2, a3, &runtime);
 
   // Compare flat ASCII strings natively.
   __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2,
                       a3);
   StringHelper::GenerateCompareFlatOneByteStrings(masm, a1, a0, a2, a3, t0, t1);
 
   __ bind(&runtime);
   __ Push(a1, a0);
-  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+  __ TailCallRuntime(Runtime::kStringCompare, 2);
 }
 
 
 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a1    : left
   //  -- a0    : right
   //  -- ra    : return address
   // -----------------------------------
 
(...skipping 18 matching lines...)
 }
 
 
 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
   DCHECK_EQ(CompareICState::BOOLEAN, state());
   Label miss;
 
   __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
   __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
   if (op() != Token::EQ_STRICT && is_strong(strength())) {
-    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0);
   } else {
     if (!Token::IsEqualityOp(op())) {
       __ lw(a1, FieldMemOperand(a1, Oddball::kToNumberOffset));
       __ AssertSmi(a1);
       __ lw(a0, FieldMemOperand(a0, Oddball::kToNumberOffset));
       __ AssertSmi(a0);
     }
     __ Ret(USE_DELAY_SLOT);
     __ Subu(v0, a1, a0);
   }
(...skipping 272 matching lines...)
                                                     tmp3);
   } else {
     StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
                                                     tmp2, tmp3, tmp4);
   }
 
   // Handle more complex cases in runtime.
   __ bind(&runtime);
   __ Push(left, right);
   if (equality) {
-    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+    __ TailCallRuntime(Runtime::kStringEquals, 2);
   } else {
-    __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+    __ TailCallRuntime(Runtime::kStringCompare, 2);
   }
 
   __ bind(&miss);
   GenerateMiss(masm);
 }
 
 
 void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
   DCHECK_EQ(CompareICState::RECEIVER, state());
   Label miss;
(...skipping 23 matching lines...)
   __ GetWeakValue(t0, cell);
   __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
   __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
   __ Branch(&miss, ne, a2, Operand(t0));
   __ Branch(&miss, ne, a3, Operand(t0));
 
   if (Token::IsEqualityOp(op())) {
     __ Ret(USE_DELAY_SLOT);
     __ subu(v0, a0, a1);
   } else if (is_strong(strength())) {
-    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0);
   } else {
     if (op() == Token::LT || op() == Token::LTE) {
       __ li(a2, Operand(Smi::FromInt(GREATER)));
     } else {
       __ li(a2, Operand(Smi::FromInt(LESS)));
     }
     __ Push(a1, a0, a2);
-    __ TailCallRuntime(Runtime::kCompare, 3, 1);
+    __ TailCallRuntime(Runtime::kCompare, 3);
   }
 
   __ bind(&miss);
   GenerateMiss(masm);
 }
 
 
 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
   {
     // Call the runtime system in a fresh internal frame.
(...skipping 1371 matching lines...)
 
   // Check that value is not the_hole.
   __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
   __ Branch(&slow_case, eq, result_reg, Operand(at));
   __ Ret();
 
   // Fallback to the runtime.
   __ bind(&slow_case);
   __ SmiTag(slot_reg);
   __ Push(slot_reg);
-  __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1);
+  __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1);
 }
 
 
 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
   Register context_reg = cp;
   Register slot_reg = a2;
   Register value_reg = a0;
   Register cell_reg = t0;
   Register cell_value_reg = t1;
   Register cell_details_reg = t2;
(...skipping 94 matching lines...)
   __ Branch(&fast_heapobject_case, eq, cell_value_map_reg,
             FieldMemOperand(value_reg, HeapObject::kMapOffset));
 
   // Fallback to the runtime.
   __ bind(&slow_case);
   __ SmiTag(slot_reg);
   __ Push(slot_reg, value_reg);
-  __ TailCallRuntime(is_strict(language_mode())
-                         ? Runtime::kStoreGlobalViaContext_Strict
-                         : Runtime::kStoreGlobalViaContext_Sloppy,
-                     2, 1);
+  __ TailCallRuntime(is_strict(language_mode())
+                         ? Runtime::kStoreGlobalViaContext_Strict
+                         : Runtime::kStoreGlobalViaContext_Sloppy,
+                     2);
 }
 
 
 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
   return ref0.address() - ref1.address();
 }
 
 
 // Calls an API function.  Allocates HandleScope, extracts returned value
 // from handle and propagates exceptions.  Restores context.  stack_space
(...skipping 103 matching lines...)
   // Check if the function scheduled an exception.
   __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
   __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate)));
   __ lw(t1, MemOperand(at));
   __ Branch(&promote_scheduled_exception, ne, t0, Operand(t1));
 
   __ Ret();
 
   // Re-throw by promoting a scheduled exception.
   __ bind(&promote_scheduled_exception);
-  __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+  __ TailCallRuntime(Runtime::kPromoteScheduledException, 0);
 
   // HandleScope limit has changed. Delete allocated extensions.
   __ bind(&delete_allocated_handles);
   __ sw(s1, MemOperand(s3, kLimitOffset));
   __ mov(s0, v0);
   __ mov(a0, v0);
   __ PrepareCallCFunction(1, s1);
   __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
   __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
                    1);
(...skipping 168 matching lines...)
                                 MemOperand(fp, 6 * kPointerSize), NULL);
 }
 
 
 #undef __
 
 }  // namespace internal
 }  // namespace v8
 
 #endif  // V8_TARGET_ARCH_MIPS
