Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(78)

Side by Side Diff: src/ppc/code-stubs-ppc.cc

Issue 1550923002: Remove uses of result size in TailCallRuntime and friends (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: change spaces Created 4 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/ppc/builtins-ppc.cc ('k') | src/ppc/macro-assembler-ppc.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_PPC 5 #if V8_TARGET_ARCH_PPC
6 6
7 #include "src/base/bits.h" 7 #include "src/base/bits.h"
8 #include "src/bootstrapper.h" 8 #include "src/bootstrapper.h"
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/codegen.h" 10 #include "src/codegen.h"
(...skipping 690 matching lines...) Expand 10 before | Expand all | Expand 10 after
701 } else { 701 } else {
702 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r5, r6, r7); 702 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r5, r6, r7);
703 } 703 }
704 // Never falls through to here. 704 // Never falls through to here.
705 705
706 __ bind(&slow); 706 __ bind(&slow);
707 707
708 __ Push(lhs, rhs); 708 __ Push(lhs, rhs);
709 // Figure out which native to call and set up the arguments. 709 // Figure out which native to call and set up the arguments.
710 if (cc == eq) { 710 if (cc == eq) {
711 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2, 711 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2);
712 1);
713 } else { 712 } else {
714 int ncr; // NaN compare result 713 int ncr; // NaN compare result
715 if (cc == lt || cc == le) { 714 if (cc == lt || cc == le) {
716 ncr = GREATER; 715 ncr = GREATER;
717 } else { 716 } else {
718 DCHECK(cc == gt || cc == ge); // remaining cases 717 DCHECK(cc == gt || cc == ge); // remaining cases
719 ncr = LESS; 718 ncr = LESS;
720 } 719 }
721 __ LoadSmiLiteral(r3, Smi::FromInt(ncr)); 720 __ LoadSmiLiteral(r3, Smi::FromInt(ncr));
722 __ push(r3); 721 __ push(r3);
723 722
724 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) 723 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
725 // tagged as a small integer. 724 // tagged as a small integer.
726 __ TailCallRuntime( 725 __ TailCallRuntime(
727 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 3, 726 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare,
728 1); 727 3);
729 } 728 }
730 729
731 __ bind(&miss); 730 __ bind(&miss);
732 GenerateMiss(masm); 731 GenerateMiss(masm);
733 } 732 }
734 733
735 734
736 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { 735 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
737 // We don't allow a GC during a store buffer overflow so there is no need to 736 // We don't allow a GC during a store buffer overflow so there is no need to
738 // store the registers in any particular way, but we do have to store and 737 // store the registers in any particular way, but we do have to store and
(...skipping 202 matching lines...) Expand 10 before | Expand all | Expand 10 after
941 __ bne(&done); 940 __ bne(&done);
942 // double_exponent may not contain the exponent value if the input was a 941 // double_exponent may not contain the exponent value if the input was a
943 // smi. We set it with exponent value before bailing out. 942 // smi. We set it with exponent value before bailing out.
944 __ ConvertIntToDouble(exponent, double_exponent); 943 __ ConvertIntToDouble(exponent, double_exponent);
945 944
946 // Returning or bailing out. 945 // Returning or bailing out.
947 Counters* counters = isolate()->counters(); 946 Counters* counters = isolate()->counters();
948 if (exponent_type() == ON_STACK) { 947 if (exponent_type() == ON_STACK) {
949 // The arguments are still on the stack. 948 // The arguments are still on the stack.
950 __ bind(&call_runtime); 949 __ bind(&call_runtime);
951 __ TailCallRuntime(Runtime::kMathPowRT, 2, 1); 950 __ TailCallRuntime(Runtime::kMathPowRT, 2);
952 951
953 // The stub is called from non-optimized code, which expects the result 952 // The stub is called from non-optimized code, which expects the result
954 // as heap number in exponent. 953 // as heap number in exponent.
955 __ bind(&done); 954 __ bind(&done);
956 __ AllocateHeapNumber(heapnumber, scratch, scratch2, heapnumbermap, 955 __ AllocateHeapNumber(heapnumber, scratch, scratch2, heapnumbermap,
957 &call_runtime); 956 &call_runtime);
958 __ stfd(double_result, 957 __ stfd(double_result,
959 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); 958 FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
960 DCHECK(heapnumber.is(r3)); 959 DCHECK(heapnumber.is(r3));
961 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2); 960 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
(...skipping 523 matching lines...) Expand 10 before | Expand all | Expand 10 after
1485 __ bind(&done); 1484 __ bind(&done);
1486 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex); 1485 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex);
1487 __ Ret(); 1486 __ Ret();
1488 1487
1489 // Found Proxy or access check needed: Call the runtime 1488 // Found Proxy or access check needed: Call the runtime
1490 __ bind(&fast_runtime_fallback); 1489 __ bind(&fast_runtime_fallback);
1491 __ Push(object, function_prototype); 1490 __ Push(object, function_prototype);
1492 // Invalidate the instanceof cache. 1491 // Invalidate the instanceof cache.
1493 __ LoadSmiLiteral(scratch, Smi::FromInt(0)); 1492 __ LoadSmiLiteral(scratch, Smi::FromInt(0));
1494 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex); 1493 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex);
1495 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2, 1); 1494 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2);
1496 1495
1497 // Slow-case: Call the %InstanceOf runtime function. 1496 // Slow-case: Call the %InstanceOf runtime function.
1498 __ bind(&slow_case); 1497 __ bind(&slow_case);
1499 __ Push(object, function); 1498 __ Push(object, function);
1500 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1); 1499 __ TailCallRuntime(Runtime::kInstanceOf, 2);
1501 } 1500 }
1502 1501
1503 1502
1504 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { 1503 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
1505 Label miss; 1504 Label miss;
1506 Register receiver = LoadDescriptor::ReceiverRegister(); 1505 Register receiver = LoadDescriptor::ReceiverRegister();
1507 // Ensure that the vector and slot registers won't be clobbered before 1506 // Ensure that the vector and slot registers won't be clobbered before
1508 // calling the miss handler. 1507 // calling the miss handler.
1509 DCHECK(!AreAliased(r7, r8, LoadWithVectorDescriptor::VectorRegister(), 1508 DCHECK(!AreAliased(r7, r8, LoadWithVectorDescriptor::VectorRegister(),
1510 LoadWithVectorDescriptor::SlotRegister())); 1509 LoadWithVectorDescriptor::SlotRegister()));
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
1595 __ sub(r6, r3, r4); 1594 __ sub(r6, r3, r4);
1596 __ SmiToPtrArrayOffset(r6, r6); 1595 __ SmiToPtrArrayOffset(r6, r6);
1597 __ add(r6, r5, r6); 1596 __ add(r6, r5, r6);
1598 __ LoadP(r3, MemOperand(r6, kDisplacement)); 1597 __ LoadP(r3, MemOperand(r6, kDisplacement));
1599 __ blr(); 1598 __ blr();
1600 1599
1601 // Slow-case: Handle non-smi or out-of-bounds access to arguments 1600 // Slow-case: Handle non-smi or out-of-bounds access to arguments
1602 // by calling the runtime system. 1601 // by calling the runtime system.
1603 __ bind(&slow); 1602 __ bind(&slow);
1604 __ push(r4); 1603 __ push(r4);
1605 __ TailCallRuntime(Runtime::kArguments, 1, 1); 1604 __ TailCallRuntime(Runtime::kArguments, 1);
1606 } 1605 }
1607 1606
1608 1607
1609 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { 1608 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
1610 // r4 : function 1609 // r4 : function
1611 // r5 : number of parameters (tagged) 1610 // r5 : number of parameters (tagged)
1612 // r6 : parameters pointer 1611 // r6 : parameters pointer
1613 1612
1614 DCHECK(r4.is(ArgumentsAccessNewDescriptor::function())); 1613 DCHECK(r4.is(ArgumentsAccessNewDescriptor::function()));
1615 DCHECK(r5.is(ArgumentsAccessNewDescriptor::parameter_count())); 1614 DCHECK(r5.is(ArgumentsAccessNewDescriptor::parameter_count()));
1616 DCHECK(r6.is(ArgumentsAccessNewDescriptor::parameter_pointer())); 1615 DCHECK(r6.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
1617 1616
1618 // Check if the calling frame is an arguments adaptor frame. 1617 // Check if the calling frame is an arguments adaptor frame.
1619 Label runtime; 1618 Label runtime;
1620 __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 1619 __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1621 __ LoadP(r3, MemOperand(r7, StandardFrameConstants::kContextOffset)); 1620 __ LoadP(r3, MemOperand(r7, StandardFrameConstants::kContextOffset));
1622 __ CmpSmiLiteral(r3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); 1621 __ CmpSmiLiteral(r3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
1623 __ bne(&runtime); 1622 __ bne(&runtime);
1624 1623
1625 // Patch the arguments.length and the parameters pointer in the current frame. 1624 // Patch the arguments.length and the parameters pointer in the current frame.
1626 __ LoadP(r5, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset)); 1625 __ LoadP(r5, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
1627 __ SmiToPtrArrayOffset(r6, r5); 1626 __ SmiToPtrArrayOffset(r6, r5);
1628 __ add(r6, r6, r7); 1627 __ add(r6, r6, r7);
1629 __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset)); 1628 __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));
1630 1629
1631 __ bind(&runtime); 1630 __ bind(&runtime);
1632 __ Push(r4, r6, r5); 1631 __ Push(r4, r6, r5);
1633 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); 1632 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3);
1634 } 1633 }
1635 1634
1636 1635
1637 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { 1636 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
1638 // r4 : function 1637 // r4 : function
1639 // r5 : number of parameters (tagged) 1638 // r5 : number of parameters (tagged)
1640 // r6 : parameters pointer 1639 // r6 : parameters pointer
1641 // Registers used over whole function: 1640 // Registers used over whole function:
1642 // r8 : arguments count (tagged) 1641 // r8 : arguments count (tagged)
1643 // r9 : mapped parameter count (tagged) 1642 // r9 : mapped parameter count (tagged)
(...skipping 221 matching lines...) Expand 10 before | Expand all | Expand 10 after
1865 __ StorePU(r7, MemOperand(r11, kPointerSize)); 1864 __ StorePU(r7, MemOperand(r11, kPointerSize));
1866 __ bdnz(&arguments_loop); 1865 __ bdnz(&arguments_loop);
1867 1866
1868 // Return. 1867 // Return.
1869 __ Ret(); 1868 __ Ret();
1870 1869
1871 // Do the runtime call to allocate the arguments object. 1870 // Do the runtime call to allocate the arguments object.
1872 // r8 = argument count (tagged) 1871 // r8 = argument count (tagged)
1873 __ bind(&runtime); 1872 __ bind(&runtime);
1874 __ Push(r4, r6, r8); 1873 __ Push(r4, r6, r8);
1875 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); 1874 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3);
1876 } 1875 }
1877 1876
1878 1877
1879 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { 1878 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
1880 // Return address is in lr. 1879 // Return address is in lr.
1881 Label slow; 1880 Label slow;
1882 1881
1883 Register receiver = LoadDescriptor::ReceiverRegister(); 1882 Register receiver = LoadDescriptor::ReceiverRegister();
1884 Register key = LoadDescriptor::NameRegister(); 1883 Register key = LoadDescriptor::NameRegister();
1885 1884
1886 // Check that the key is an array index, that is Uint32. 1885 // Check that the key is an array index, that is Uint32.
1887 __ TestIfPositiveSmi(key, r0); 1886 __ TestIfPositiveSmi(key, r0);
1888 __ bne(&slow, cr0); 1887 __ bne(&slow, cr0);
1889 1888
1890 // Everything is fine, call runtime. 1889 // Everything is fine, call runtime.
1891 __ Push(receiver, key); // Receiver, key. 1890 __ Push(receiver, key); // Receiver, key.
1892 1891
1893 // Perform tail call to the entry. 1892 // Perform tail call to the entry.
1894 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1); 1893 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2);
1895 1894
1896 __ bind(&slow); 1895 __ bind(&slow);
1897 PropertyAccessCompiler::TailCallBuiltin( 1896 PropertyAccessCompiler::TailCallBuiltin(
1898 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); 1897 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
1899 } 1898 }
1900 1899
1901 1900
1902 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { 1901 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
1903 // r4 : function 1902 // r4 : function
1904 // r5 : number of parameters (tagged) 1903 // r5 : number of parameters (tagged)
(...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after
1976 // Pre-increment r7 with kPointerSize on each iteration. 1975 // Pre-increment r7 with kPointerSize on each iteration.
1977 __ StorePU(r8, MemOperand(r7, kPointerSize)); 1976 __ StorePU(r8, MemOperand(r7, kPointerSize));
1978 __ bdnz(&loop); 1977 __ bdnz(&loop);
1979 1978
1980 // Return. 1979 // Return.
1981 __ Ret(); 1980 __ Ret();
1982 1981
1983 // Do the runtime call to allocate the arguments object. 1982 // Do the runtime call to allocate the arguments object.
1984 __ bind(&runtime); 1983 __ bind(&runtime);
1985 __ Push(r4, r6, r5); 1984 __ Push(r4, r6, r5);
1986 __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1); 1985 __ TailCallRuntime(Runtime::kNewStrictArguments, 3);
1987 } 1986 }
1988 1987
1989 1988
1990 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { 1989 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
1991 // Stack layout on entry. 1990 // Stack layout on entry.
1992 // sp[0] : language mode 1991 // sp[0] : language mode
1993 // sp[4] : index of rest parameter 1992 // sp[4] : index of rest parameter
1994 // sp[8] : number of parameters 1993 // sp[8] : number of parameters
1995 // sp[12] : receiver displacement 1994 // sp[12] : receiver displacement
1996 1995
1997 Label runtime; 1996 Label runtime;
1998 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 1997 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1999 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset)); 1998 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
2000 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); 1999 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
2001 __ bne(&runtime); 2000 __ bne(&runtime);
2002 2001
2003 // Patch the arguments.length and the parameters pointer. 2002 // Patch the arguments.length and the parameters pointer.
2004 __ LoadP(r4, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2003 __ LoadP(r4, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
2005 __ StoreP(r4, MemOperand(sp, 2 * kPointerSize)); 2004 __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
2006 __ SmiToPtrArrayOffset(r0, r4); 2005 __ SmiToPtrArrayOffset(r0, r4);
2007 __ add(r6, r5, r0); 2006 __ add(r6, r5, r0);
2008 __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset)); 2007 __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));
2009 __ StoreP(r6, MemOperand(sp, 3 * kPointerSize)); 2008 __ StoreP(r6, MemOperand(sp, 3 * kPointerSize));
2010 2009
2011 __ bind(&runtime); 2010 __ bind(&runtime);
2012 __ TailCallRuntime(Runtime::kNewRestParam, 4, 1); 2011 __ TailCallRuntime(Runtime::kNewRestParam, 4);
2013 } 2012 }
2014 2013
2015 2014
2016 void RegExpExecStub::Generate(MacroAssembler* masm) { 2015 void RegExpExecStub::Generate(MacroAssembler* masm) {
2017 // Just jump directly to runtime if native RegExp is not selected at compile 2016 // Just jump directly to runtime if native RegExp is not selected at compile
2018 // time or if regexp entry in generated code is turned off runtime switch or 2017 // time or if regexp entry in generated code is turned off runtime switch or
2019 // at compilation. 2018 // at compilation.
2020 #ifdef V8_INTERPRETED_REGEXP 2019 #ifdef V8_INTERPRETED_REGEXP
2021 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); 2020 __ TailCallRuntime(Runtime::kRegExpExec, 4);
2022 #else // V8_INTERPRETED_REGEXP 2021 #else // V8_INTERPRETED_REGEXP
2023 2022
2024 // Stack frame on entry. 2023 // Stack frame on entry.
2025 // sp[0]: last_match_info (expected JSArray) 2024 // sp[0]: last_match_info (expected JSArray)
2026 // sp[4]: previous index 2025 // sp[4]: previous index
2027 // sp[8]: subject string 2026 // sp[8]: subject string
2028 // sp[12]: JSRegExp object 2027 // sp[12]: JSRegExp object
2029 2028
2030 const int kLastMatchInfoOffset = 0 * kPointerSize; 2029 const int kLastMatchInfoOffset = 0 * kPointerSize;
2031 const int kPreviousIndexOffset = 1 * kPointerSize; 2030 const int kPreviousIndexOffset = 1 * kPointerSize;
(...skipping 274 matching lines...) Expand 10 before | Expand all | Expand 10 after
2306 // haven't created the exception yet. Handle that in the runtime system. 2305 // haven't created the exception yet. Handle that in the runtime system.
2307 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 2306 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
2308 __ mov(r4, Operand(isolate()->factory()->the_hole_value())); 2307 __ mov(r4, Operand(isolate()->factory()->the_hole_value()));
2309 __ mov(r5, Operand(ExternalReference(Isolate::kPendingExceptionAddress, 2308 __ mov(r5, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
2310 isolate()))); 2309 isolate())));
2311 __ LoadP(r3, MemOperand(r5, 0)); 2310 __ LoadP(r3, MemOperand(r5, 0));
2312 __ cmp(r3, r4); 2311 __ cmp(r3, r4);
2313 __ beq(&runtime); 2312 __ beq(&runtime);
2314 2313
2315 // For exception, throw the exception again. 2314 // For exception, throw the exception again.
2316 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4, 1); 2315 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4);
2317 2316
2318 __ bind(&failure); 2317 __ bind(&failure);
2319 // For failure and exception return null. 2318 // For failure and exception return null.
2320 __ mov(r3, Operand(isolate()->factory()->null_value())); 2319 __ mov(r3, Operand(isolate()->factory()->null_value()));
2321 __ addi(sp, sp, Operand(4 * kPointerSize)); 2320 __ addi(sp, sp, Operand(4 * kPointerSize));
2322 __ Ret(); 2321 __ Ret();
2323 2322
2324 // Process the result from the native regexp code. 2323 // Process the result from the native regexp code.
2325 __ bind(&success); 2324 __ bind(&success);
2326 __ LoadP(r4, 2325 __ LoadP(r4,
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
2395 __ StorePU(r6, MemOperand(r3, kPointerSize)); 2394 __ StorePU(r6, MemOperand(r3, kPointerSize));
2396 __ bdnz(&next_capture); 2395 __ bdnz(&next_capture);
2397 2396
2398 // Return last match info. 2397 // Return last match info.
2399 __ LoadP(r3, MemOperand(sp, kLastMatchInfoOffset)); 2398 __ LoadP(r3, MemOperand(sp, kLastMatchInfoOffset));
2400 __ addi(sp, sp, Operand(4 * kPointerSize)); 2399 __ addi(sp, sp, Operand(4 * kPointerSize));
2401 __ Ret(); 2400 __ Ret();
2402 2401
2403 // Do the runtime call to execute the regexp. 2402 // Do the runtime call to execute the regexp.
2404 __ bind(&runtime); 2403 __ bind(&runtime);
2405 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); 2404 __ TailCallRuntime(Runtime::kRegExpExec, 4);
2406 2405
2407 // Deferred code for string handling. 2406 // Deferred code for string handling.
2408 // (6) Not a long external string? If yes, go to (8). 2407 // (6) Not a long external string? If yes, go to (8).
2409 __ bind(&not_seq_nor_cons); 2408 __ bind(&not_seq_nor_cons);
2410 // Compare flags are still set. 2409 // Compare flags are still set.
2411 __ bgt(&not_long_external); // Go to (8). 2410 __ bgt(&not_long_external); // Go to (8).
2412 2411
2413 // (7) External string. Make it, offset-wise, look like a sequential string. 2412 // (7) External string. Make it, offset-wise, look like a sequential string.
2414 __ bind(&external_string); 2413 __ bind(&external_string);
2415 __ LoadP(r3, FieldMemOperand(subject, HeapObject::kMapOffset)); 2414 __ LoadP(r3, FieldMemOperand(subject, HeapObject::kMapOffset));
(...skipping 731 matching lines...) Expand 10 before | Expand all | Expand 10 after
3147 String::TWO_BYTE_ENCODING); 3146 String::TWO_BYTE_ENCODING);
3148 3147
3149 __ bind(&return_r3); 3148 __ bind(&return_r3);
3150 Counters* counters = isolate()->counters(); 3149 Counters* counters = isolate()->counters();
3151 __ IncrementCounter(counters->sub_string_native(), 1, r6, r7); 3150 __ IncrementCounter(counters->sub_string_native(), 1, r6, r7);
3152 __ Drop(3); 3151 __ Drop(3);
3153 __ Ret(); 3152 __ Ret();
3154 3153
3155 // Just jump to runtime to create the sub string. 3154 // Just jump to runtime to create the sub string.
3156 __ bind(&runtime); 3155 __ bind(&runtime);
3157 __ TailCallRuntime(Runtime::kSubString, 3, 1); 3156 __ TailCallRuntime(Runtime::kSubString, 3);
3158 3157
3159 __ bind(&single_char); 3158 __ bind(&single_char);
3160 // r3: original string 3159 // r3: original string
3161 // r4: instance type 3160 // r4: instance type
3162 // r5: length 3161 // r5: length
3163 // r6: from index (untagged) 3162 // r6: from index (untagged)
3164 __ SmiTag(r6, r6); 3163 __ SmiTag(r6, r6);
3165 StringCharAtGenerator generator(r3, r6, r5, r3, &runtime, &runtime, &runtime, 3164 StringCharAtGenerator generator(r3, r6, r5, r3, &runtime, &runtime, &runtime,
3166 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); 3165 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
3167 generator.GenerateFast(masm); 3166 generator.GenerateFast(masm);
(...skipping 19 matching lines...) Expand all
3187 __ cmpli(r4, Operand(FIRST_NONSTRING_TYPE)); 3186 __ cmpli(r4, Operand(FIRST_NONSTRING_TYPE));
3188 __ bge(&not_string); 3187 __ bge(&not_string);
3189 // Check if string has a cached array index. 3188 // Check if string has a cached array index.
3190 __ lwz(r5, FieldMemOperand(r3, String::kHashFieldOffset)); 3189 __ lwz(r5, FieldMemOperand(r3, String::kHashFieldOffset));
3191 __ And(r0, r5, Operand(String::kContainsCachedArrayIndexMask), SetRC); 3190 __ And(r0, r5, Operand(String::kContainsCachedArrayIndexMask), SetRC);
3192 __ bne(&slow_string, cr0); 3191 __ bne(&slow_string, cr0);
3193 __ IndexFromHash(r5, r3); 3192 __ IndexFromHash(r5, r3);
3194 __ blr(); 3193 __ blr();
3195 __ bind(&slow_string); 3194 __ bind(&slow_string);
3196 __ push(r3); // Push argument. 3195 __ push(r3); // Push argument.
3197 __ TailCallRuntime(Runtime::kStringToNumber, 1, 1); 3196 __ TailCallRuntime(Runtime::kStringToNumber, 1);
3198 __ bind(&not_string); 3197 __ bind(&not_string);
3199 3198
3200 Label not_oddball; 3199 Label not_oddball;
3201 __ cmpi(r4, Operand(ODDBALL_TYPE)); 3200 __ cmpi(r4, Operand(ODDBALL_TYPE));
3202 __ bne(&not_oddball); 3201 __ bne(&not_oddball);
3203 __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset)); 3202 __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset));
3204 __ blr(); 3203 __ blr();
3205 __ bind(&not_oddball); 3204 __ bind(&not_oddball);
3206 3205
3207 __ push(r3); // Push argument. 3206 __ push(r3); // Push argument.
3208 __ TailCallRuntime(Runtime::kToNumber, 1, 1); 3207 __ TailCallRuntime(Runtime::kToNumber, 1);
3209 } 3208 }
3210 3209
3211 3210
3212 void ToLengthStub::Generate(MacroAssembler* masm) { 3211 void ToLengthStub::Generate(MacroAssembler* masm) {
3213 // The ToLength stub takes one argument in r3. 3212 // The ToLength stub takes one argument in r3.
3214 Label not_smi; 3213 Label not_smi;
3215 __ JumpIfNotSmi(r3, &not_smi); 3214 __ JumpIfNotSmi(r3, &not_smi);
3216 STATIC_ASSERT(kSmiTag == 0); 3215 STATIC_ASSERT(kSmiTag == 0);
3217 __ cmpi(r3, Operand::Zero()); 3216 __ cmpi(r3, Operand::Zero());
3218 if (CpuFeatures::IsSupported(ISELECT)) { 3217 if (CpuFeatures::IsSupported(ISELECT)) {
3219 __ isel(lt, r3, r0, r3); 3218 __ isel(lt, r3, r0, r3);
3220 } else { 3219 } else {
3221 Label positive; 3220 Label positive;
3222 __ bgt(&positive); 3221 __ bgt(&positive);
3223 __ li(r3, Operand::Zero()); 3222 __ li(r3, Operand::Zero());
3224 __ bind(&positive); 3223 __ bind(&positive);
3225 } 3224 }
3226 __ Ret(); 3225 __ Ret();
3227 __ bind(&not_smi); 3226 __ bind(&not_smi);
3228 3227
3229 __ push(r3); // Push argument. 3228 __ push(r3); // Push argument.
3230 __ TailCallRuntime(Runtime::kToLength, 1, 1); 3229 __ TailCallRuntime(Runtime::kToLength, 1);
3231 } 3230 }
3232 3231
3233 3232
3234 void ToStringStub::Generate(MacroAssembler* masm) { 3233 void ToStringStub::Generate(MacroAssembler* masm) {
3235 // The ToString stub takes one argument in r3. 3234 // The ToString stub takes one argument in r3.
3236 Label is_number; 3235 Label is_number;
3237 __ JumpIfSmi(r3, &is_number); 3236 __ JumpIfSmi(r3, &is_number);
3238 3237
3239 __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE); 3238 __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE);
3240 // r3: receiver 3239 // r3: receiver
3241 // r4: receiver instance type 3240 // r4: receiver instance type
3242 __ Ret(lt); 3241 __ Ret(lt);
3243 3242
3244 Label not_heap_number; 3243 Label not_heap_number;
3245 __ cmpi(r4, Operand(HEAP_NUMBER_TYPE)); 3244 __ cmpi(r4, Operand(HEAP_NUMBER_TYPE));
3246 __ bne(&not_heap_number); 3245 __ bne(&not_heap_number);
3247 __ bind(&is_number); 3246 __ bind(&is_number);
3248 NumberToStringStub stub(isolate()); 3247 NumberToStringStub stub(isolate());
3249 __ TailCallStub(&stub); 3248 __ TailCallStub(&stub);
3250 __ bind(&not_heap_number); 3249 __ bind(&not_heap_number);
3251 3250
3252 Label not_oddball; 3251 Label not_oddball;
3253 __ cmpi(r4, Operand(ODDBALL_TYPE)); 3252 __ cmpi(r4, Operand(ODDBALL_TYPE));
3254 __ bne(&not_oddball); 3253 __ bne(&not_oddball);
3255 __ LoadP(r3, FieldMemOperand(r3, Oddball::kToStringOffset)); 3254 __ LoadP(r3, FieldMemOperand(r3, Oddball::kToStringOffset));
3256 __ Ret(); 3255 __ Ret();
3257 __ bind(&not_oddball); 3256 __ bind(&not_oddball);
3258 3257
3259 __ push(r3); // Push argument. 3258 __ push(r3); // Push argument.
3260 __ TailCallRuntime(Runtime::kToString, 1, 1); 3259 __ TailCallRuntime(Runtime::kToString, 1);
3261 } 3260 }
3262 3261
3263 3262
3264 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm, 3263 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
3265 Register left, 3264 Register left,
3266 Register right, 3265 Register right,
3267 Register scratch1, 3266 Register scratch1,
3268 Register scratch2) { 3267 Register scratch2) {
3269 Register length = scratch1; 3268 Register length = scratch1;
3270 3269
(...skipping 134 matching lines...) Expand 10 before | Expand all | Expand 10 after
3405 3404
3406 // Compare flat one-byte strings natively. 3405 // Compare flat one-byte strings natively.
3407 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r5, 3406 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r5,
3408 r6); 3407 r6);
3409 StringHelper::GenerateCompareFlatOneByteStrings(masm, r4, r3, r5, r6, r7); 3408 StringHelper::GenerateCompareFlatOneByteStrings(masm, r4, r3, r5, r6, r7);
3410 3409
3411 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 3410 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
3412 // tagged as a small integer. 3411 // tagged as a small integer.
3413 __ bind(&runtime); 3412 __ bind(&runtime);
3414 __ Push(r4, r3); 3413 __ Push(r4, r3);
3415 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 3414 __ TailCallRuntime(Runtime::kStringCompare, 2);
3416 } 3415 }
3417 3416
3418 3417
3419 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { 3418 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3420 // ----------- S t a t e ------------- 3419 // ----------- S t a t e -------------
3421 // -- r4 : left 3420 // -- r4 : left
3422 // -- r3 : right 3421 // -- r3 : right
3423 // -- lr : return address 3422 // -- lr : return address
3424 // ----------------------------------- 3423 // -----------------------------------
3425 3424
(...skipping 21 matching lines...) Expand all
3447 } 3446 }
3448 3447
3449 3448
3450 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { 3449 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
3451 DCHECK_EQ(CompareICState::BOOLEAN, state()); 3450 DCHECK_EQ(CompareICState::BOOLEAN, state());
3452 Label miss; 3451 Label miss;
3453 3452
3454 __ CheckMap(r4, r5, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); 3453 __ CheckMap(r4, r5, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
3455 __ CheckMap(r3, r6, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); 3454 __ CheckMap(r3, r6, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
3456 if (op() != Token::EQ_STRICT && is_strong(strength())) { 3455 if (op() != Token::EQ_STRICT && is_strong(strength())) {
3457 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1); 3456 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0);
3458 } else { 3457 } else {
3459 if (!Token::IsEqualityOp(op())) { 3458 if (!Token::IsEqualityOp(op())) {
3460 __ LoadP(r4, FieldMemOperand(r4, Oddball::kToNumberOffset)); 3459 __ LoadP(r4, FieldMemOperand(r4, Oddball::kToNumberOffset));
3461 __ AssertSmi(r4); 3460 __ AssertSmi(r4);
3462 __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset)); 3461 __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset));
3463 __ AssertSmi(r3); 3462 __ AssertSmi(r3);
3464 } 3463 }
3465 __ sub(r3, r4, r3); 3464 __ sub(r3, r4, r3);
3466 __ Ret(); 3465 __ Ret();
3467 } 3466 }
(...skipping 263 matching lines...) Expand 10 before | Expand all | Expand 10 after
3731 tmp2); 3730 tmp2);
3732 } else { 3731 } else {
3733 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1, 3732 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
3734 tmp2, tmp3); 3733 tmp2, tmp3);
3735 } 3734 }
3736 3735
3737 // Handle more complex cases in runtime. 3736 // Handle more complex cases in runtime.
3738 __ bind(&runtime); 3737 __ bind(&runtime);
3739 __ Push(left, right); 3738 __ Push(left, right);
3740 if (equality) { 3739 if (equality) {
3741 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); 3740 __ TailCallRuntime(Runtime::kStringEquals, 2);
3742 } else { 3741 } else {
3743 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 3742 __ TailCallRuntime(Runtime::kStringCompare, 2);
3744 } 3743 }
3745 3744
3746 __ bind(&miss); 3745 __ bind(&miss);
3747 GenerateMiss(masm); 3746 GenerateMiss(masm);
3748 } 3747 }
3749 3748
3750 3749
3751 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { 3750 void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
3752 DCHECK_EQ(CompareICState::RECEIVER, state()); 3751 DCHECK_EQ(CompareICState::RECEIVER, state());
3753 Label miss; 3752 Label miss;
(...skipping 25 matching lines...) Expand all
3779 __ LoadP(r6, FieldMemOperand(r4, HeapObject::kMapOffset)); 3778 __ LoadP(r6, FieldMemOperand(r4, HeapObject::kMapOffset));
3780 __ cmp(r5, r7); 3779 __ cmp(r5, r7);
3781 __ bne(&miss); 3780 __ bne(&miss);
3782 __ cmp(r6, r7); 3781 __ cmp(r6, r7);
3783 __ bne(&miss); 3782 __ bne(&miss);
3784 3783
3785 if (Token::IsEqualityOp(op())) { 3784 if (Token::IsEqualityOp(op())) {
3786 __ sub(r3, r3, r4); 3785 __ sub(r3, r3, r4);
3787 __ Ret(); 3786 __ Ret();
3788 } else if (is_strong(strength())) { 3787 } else if (is_strong(strength())) {
3789 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1); 3788 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0);
3790 } else { 3789 } else {
3791 if (op() == Token::LT || op() == Token::LTE) { 3790 if (op() == Token::LT || op() == Token::LTE) {
3792 __ LoadSmiLiteral(r5, Smi::FromInt(GREATER)); 3791 __ LoadSmiLiteral(r5, Smi::FromInt(GREATER));
3793 } else { 3792 } else {
3794 __ LoadSmiLiteral(r5, Smi::FromInt(LESS)); 3793 __ LoadSmiLiteral(r5, Smi::FromInt(LESS));
3795 } 3794 }
3796 __ Push(r4, r3, r5); 3795 __ Push(r4, r3, r5);
3797 __ TailCallRuntime(Runtime::kCompare, 3, 1); 3796 __ TailCallRuntime(Runtime::kCompare, 3);
3798 } 3797 }
3799 3798
3800 __ bind(&miss); 3799 __ bind(&miss);
3801 GenerateMiss(masm); 3800 GenerateMiss(masm);
3802 } 3801 }
3803 3802
3804 3803
3805 void CompareICStub::GenerateMiss(MacroAssembler* masm) { 3804 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
3806 { 3805 {
3807 // Call the runtime system in a fresh internal frame. 3806 // Call the runtime system in a fresh internal frame.
(...skipping 1368 matching lines...) Expand 10 before | Expand all | Expand 10 after
5176 __ LoadP(result, ContextMemOperand(result)); 5175 __ LoadP(result, ContextMemOperand(result));
5177 __ LoadP(result, FieldMemOperand(result, PropertyCell::kValueOffset)); 5176 __ LoadP(result, FieldMemOperand(result, PropertyCell::kValueOffset));
5178 5177
5179 // If the result is not the_hole, return. Otherwise, handle in the runtime. 5178 // If the result is not the_hole, return. Otherwise, handle in the runtime.
5180 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); 5179 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
5181 __ Ret(ne); 5180 __ Ret(ne);
5182 5181
5183 // Fallback to runtime. 5182 // Fallback to runtime.
5184 __ SmiTag(slot); 5183 __ SmiTag(slot);
5185 __ Push(slot); 5184 __ Push(slot);
5186 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1); 5185 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1);
5187 } 5186 }
5188 5187
5189 5188
5190 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { 5189 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
5191 Register value = r3; 5190 Register value = r3;
5192 Register slot = r5; 5191 Register slot = r5;
5193 5192
5194 Register cell = r4; 5193 Register cell = r4;
5195 Register cell_details = r6; 5194 Register cell_details = r6;
5196 Register cell_value = r7; 5195 Register cell_value = r7;
(...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after
5305 __ cmp(cell_value_map, scratch); 5304 __ cmp(cell_value_map, scratch);
5306 __ beq(&fast_heapobject_case); 5305 __ beq(&fast_heapobject_case);
5307 5306
5308 // Fallback to runtime. 5307 // Fallback to runtime.
5309 __ bind(&slow_case); 5308 __ bind(&slow_case);
5310 __ SmiTag(slot); 5309 __ SmiTag(slot);
5311 __ Push(slot, value); 5310 __ Push(slot, value);
5312 __ TailCallRuntime(is_strict(language_mode()) 5311 __ TailCallRuntime(is_strict(language_mode())
5313 ? Runtime::kStoreGlobalViaContext_Strict 5312 ? Runtime::kStoreGlobalViaContext_Strict
5314 : Runtime::kStoreGlobalViaContext_Sloppy, 5313 : Runtime::kStoreGlobalViaContext_Sloppy,
5315 2, 1); 5314 2);
5316 } 5315 }
5317 5316
5318 5317
5319 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { 5318 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
5320 return ref0.address() - ref1.address(); 5319 return ref0.address() - ref1.address();
5321 } 5320 }
5322 5321
5323 5322
5324 // Calls an API function. Allocates HandleScope, extracts returned value 5323 // Calls an API function. Allocates HandleScope, extracts returned value
5325 // from handle and propagates exceptions. Restores context. stack_space 5324 // from handle and propagates exceptions. Restores context. stack_space
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after
5441 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); 5440 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
5442 __ mov(r15, Operand(ExternalReference::scheduled_exception_address(isolate))); 5441 __ mov(r15, Operand(ExternalReference::scheduled_exception_address(isolate)));
5443 __ LoadP(r15, MemOperand(r15)); 5442 __ LoadP(r15, MemOperand(r15));
5444 __ cmp(r14, r15); 5443 __ cmp(r14, r15);
5445 __ bne(&promote_scheduled_exception); 5444 __ bne(&promote_scheduled_exception);
5446 5445
5447 __ blr(); 5446 __ blr();
5448 5447
5449 // Re-throw by promoting a scheduled exception. 5448 // Re-throw by promoting a scheduled exception.
5450 __ bind(&promote_scheduled_exception); 5449 __ bind(&promote_scheduled_exception);
5451 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); 5450 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0);
5452 5451
5453 // HandleScope limit has changed. Delete allocated extensions. 5452 // HandleScope limit has changed. Delete allocated extensions.
5454 __ bind(&delete_allocated_handles); 5453 __ bind(&delete_allocated_handles);
5455 __ StoreP(r15, MemOperand(r17, kLimitOffset)); 5454 __ StoreP(r15, MemOperand(r17, kLimitOffset));
5456 __ mr(r14, r3); 5455 __ mr(r14, r3);
5457 __ PrepareCallCFunction(1, r15); 5456 __ PrepareCallCFunction(1, r15);
5458 __ mov(r3, Operand(ExternalReference::isolate_address(isolate))); 5457 __ mov(r3, Operand(ExternalReference::isolate_address(isolate)));
5459 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), 5458 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
5460 1); 5459 1);
5461 __ mr(r3, r14); 5460 __ mr(r3, r14);
(...skipping 211 matching lines...) Expand 10 before | Expand all | Expand 10 after
5673 kStackUnwindSpace, NULL, 5672 kStackUnwindSpace, NULL,
5674 MemOperand(fp, 6 * kPointerSize), NULL); 5673 MemOperand(fp, 6 * kPointerSize), NULL);
5675 } 5674 }
5676 5675
5677 5676
5678 #undef __ 5677 #undef __
5679 } // namespace internal 5678 } // namespace internal
5680 } // namespace v8 5679 } // namespace v8
5681 5680
5682 #endif // V8_TARGET_ARCH_PPC 5681 #endif // V8_TARGET_ARCH_PPC
OLDNEW
« no previous file with comments | « src/ppc/builtins-ppc.cc ('k') | src/ppc/macro-assembler-ppc.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698