Chromium Code Reviews

Side by Side Diff: src/arm/code-stubs-arm.cc

Issue 1553703002: [runtime] TailCallRuntime and CallRuntime should use default argument counts (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@2015-12-29_TailCallRuntime_default_result_size_1_1550923002
Patch Set: Created 4 years, 11 months ago
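This patch set drops the explicit argument count (and result size) from TailCallRuntime/CallRuntime call sites; the count now defaults to the arity recorded for the runtime function. As a rough sketch of the idea (not necessarily the exact code in this CL), the single-argument ARM overload could look the count up itself, assuming the usual Runtime::FunctionForId descriptor with nargs/result_size fields and the existing JumpToExternalReference helper:

    // Sketch: derive the argument count from the runtime function descriptor
    // instead of requiring every stub to pass it explicitly.
    void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
      const Runtime::Function* function = Runtime::FunctionForId(fid);
      DCHECK_EQ(1, function->result_size);
      if (function->nargs >= 0) {
        // Variadic runtime entries (nargs < 0) leave r0 as set by the caller.
        mov(r0, Operand(function->nargs));
      }
      JumpToExternalReference(ExternalReference(fid, isolate()));
    }

Call sites then shrink from, e.g., __ TailCallRuntime(Runtime::kSubString, 3) to __ TailCallRuntime(Runtime::kSubString), as the diff below shows.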
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_ARM 5 #if V8_TARGET_ARCH_ARM
6 6
7 #include "src/base/bits.h" 7 #include "src/base/bits.h"
8 #include "src/bootstrapper.h" 8 #include "src/bootstrapper.h"
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/codegen.h" 10 #include "src/codegen.h"
(...skipping 664 matching lines...)
675 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r2, r3, r4, 675 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r2, r3, r4,
676 r5); 676 r5);
677 } 677 }
678 // Never falls through to here. 678 // Never falls through to here.
679 679
680 __ bind(&slow); 680 __ bind(&slow);
681 681
682 __ Push(lhs, rhs); 682 __ Push(lhs, rhs);
683 // Figure out which native to call and setup the arguments. 683 // Figure out which native to call and setup the arguments.
684 if (cc == eq) { 684 if (cc == eq) {
685 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2); 685 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
686 } else { 686 } else {
687 int ncr; // NaN compare result 687 int ncr; // NaN compare result
688 if (cc == lt || cc == le) { 688 if (cc == lt || cc == le) {
689 ncr = GREATER; 689 ncr = GREATER;
690 } else { 690 } else {
691 DCHECK(cc == gt || cc == ge); // remaining cases 691 DCHECK(cc == gt || cc == ge); // remaining cases
692 ncr = LESS; 692 ncr = LESS;
693 } 693 }
694 __ mov(r0, Operand(Smi::FromInt(ncr))); 694 __ mov(r0, Operand(Smi::FromInt(ncr)));
695 __ push(r0); 695 __ push(r0);
696 696
697 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) 697 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
698 // tagged as a small integer. 698 // tagged as a small integer.
699 __ TailCallRuntime( 699 __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
700 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 700 : Runtime::kCompare);
701 3);
702 } 701 }
703 702
704 __ bind(&miss); 703 __ bind(&miss);
705 GenerateMiss(masm); 704 GenerateMiss(masm);
706 } 705 }
707 706
708 707
709 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { 708 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
710 // We don't allow a GC during a store buffer overflow so there is no need to 709 // We don't allow a GC during a store buffer overflow so there is no need to
711 // store the registers in any particular way, but we do have to store and 710 // store the registers in any particular way, but we do have to store and
(...skipping 181 matching lines...)
893 // double_exponent may not containe the exponent value if the input was a 892 // double_exponent may not containe the exponent value if the input was a
894 // smi. We set it with exponent value before bailing out. 893 // smi. We set it with exponent value before bailing out.
895 __ vmov(single_scratch, exponent); 894 __ vmov(single_scratch, exponent);
896 __ vcvt_f64_s32(double_exponent, single_scratch); 895 __ vcvt_f64_s32(double_exponent, single_scratch);
897 896
898 // Returning or bailing out. 897 // Returning or bailing out.
899 Counters* counters = isolate()->counters(); 898 Counters* counters = isolate()->counters();
900 if (exponent_type() == ON_STACK) { 899 if (exponent_type() == ON_STACK) {
901 // The arguments are still on the stack. 900 // The arguments are still on the stack.
902 __ bind(&call_runtime); 901 __ bind(&call_runtime);
903 __ TailCallRuntime(Runtime::kMathPowRT, 2); 902 __ TailCallRuntime(Runtime::kMathPowRT);
904 903
905 // The stub is called from non-optimized code, which expects the result 904 // The stub is called from non-optimized code, which expects the result
906 // as heap number in exponent. 905 // as heap number in exponent.
907 __ bind(&done); 906 __ bind(&done);
908 __ AllocateHeapNumber( 907 __ AllocateHeapNumber(
909 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime); 908 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
910 __ vstr(double_result, 909 __ vstr(double_result,
911 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); 910 FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
912 DCHECK(heapnumber.is(r0)); 911 DCHECK(heapnumber.is(r0));
913 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2); 912 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
(...skipping 483 matching lines...)
1397 __ bind(&done); 1396 __ bind(&done);
1398 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex); 1397 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex);
1399 __ Ret(); 1398 __ Ret();
1400 1399
1401 // Found Proxy or access check needed: Call the runtime 1400 // Found Proxy or access check needed: Call the runtime
1402 __ bind(&fast_runtime_fallback); 1401 __ bind(&fast_runtime_fallback);
1403 __ Push(object, function_prototype); 1402 __ Push(object, function_prototype);
1404 // Invalidate the instanceof cache. 1403 // Invalidate the instanceof cache.
1405 __ Move(scratch, Smi::FromInt(0)); 1404 __ Move(scratch, Smi::FromInt(0));
1406 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex); 1405 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex);
1407 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2); 1406 __ TailCallRuntime(Runtime::kHasInPrototypeChain);
1408 1407
1409 // Slow-case: Call the %InstanceOf runtime function. 1408 // Slow-case: Call the %InstanceOf runtime function.
1410 __ bind(&slow_case); 1409 __ bind(&slow_case);
1411 __ Push(object, function); 1410 __ Push(object, function);
1412 __ TailCallRuntime(Runtime::kInstanceOf, 2); 1411 __ TailCallRuntime(Runtime::kInstanceOf);
1413 } 1412 }
1414 1413
1415 1414
1416 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { 1415 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
1417 Label miss; 1416 Label miss;
1418 Register receiver = LoadDescriptor::ReceiverRegister(); 1417 Register receiver = LoadDescriptor::ReceiverRegister();
1419 // Ensure that the vector and slot registers won't be clobbered before 1418 // Ensure that the vector and slot registers won't be clobbered before
1420 // calling the miss handler. 1419 // calling the miss handler.
1421 DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(), 1420 DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(),
1422 LoadWithVectorDescriptor::SlotRegister())); 1421 LoadWithVectorDescriptor::SlotRegister()));
(...skipping 81 matching lines...)
1504 // Read the argument from the adaptor frame and return it. 1503 // Read the argument from the adaptor frame and return it.
1505 __ sub(r3, r0, r1); 1504 __ sub(r3, r0, r1);
1506 __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r3)); 1505 __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r3));
1507 __ ldr(r0, MemOperand(r3, kDisplacement)); 1506 __ ldr(r0, MemOperand(r3, kDisplacement));
1508 __ Jump(lr); 1507 __ Jump(lr);
1509 1508
1510 // Slow-case: Handle non-smi or out-of-bounds access to arguments 1509 // Slow-case: Handle non-smi or out-of-bounds access to arguments
1511 // by calling the runtime system. 1510 // by calling the runtime system.
1512 __ bind(&slow); 1511 __ bind(&slow);
1513 __ push(r1); 1512 __ push(r1);
1514 __ TailCallRuntime(Runtime::kArguments, 1); 1513 __ TailCallRuntime(Runtime::kArguments);
1515 } 1514 }
1516 1515
1517 1516
1518 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { 1517 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
1519 // r1 : function 1518 // r1 : function
1520 // r2 : number of parameters (tagged) 1519 // r2 : number of parameters (tagged)
1521 // r3 : parameters pointer 1520 // r3 : parameters pointer
1522 1521
1523 DCHECK(r1.is(ArgumentsAccessNewDescriptor::function())); 1522 DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
1524 DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count())); 1523 DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count()));
1525 DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); 1524 DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
1526 1525
1527 // Check if the calling frame is an arguments adaptor frame. 1526 // Check if the calling frame is an arguments adaptor frame.
1528 Label runtime; 1527 Label runtime;
1529 __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 1528 __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1530 __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset)); 1529 __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
1531 __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 1530 __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1532 __ b(ne, &runtime); 1531 __ b(ne, &runtime);
1533 1532
1534 // Patch the arguments.length and the parameters pointer in the current frame. 1533 // Patch the arguments.length and the parameters pointer in the current frame.
1535 __ ldr(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset)); 1534 __ ldr(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
1536 __ add(r4, r4, Operand(r2, LSL, 1)); 1535 __ add(r4, r4, Operand(r2, LSL, 1));
1537 __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset)); 1536 __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
1538 1537
1539 __ bind(&runtime); 1538 __ bind(&runtime);
1540 __ Push(r1, r3, r2); 1539 __ Push(r1, r3, r2);
1541 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); 1540 __ TailCallRuntime(Runtime::kNewSloppyArguments);
1542 } 1541 }
1543 1542
1544 1543
1545 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { 1544 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
1546 // r1 : function 1545 // r1 : function
1547 // r2 : number of parameters (tagged) 1546 // r2 : number of parameters (tagged)
1548 // r3 : parameters pointer 1547 // r3 : parameters pointer
1549 // Registers used over whole function: 1548 // Registers used over whole function:
1550 // r5 : arguments count (tagged) 1549 // r5 : arguments count (tagged)
1551 // r6 : mapped parameter count (tagged) 1550 // r6 : mapped parameter count (tagged)
(...skipping 179 matching lines...)
1731 __ b(lt, &arguments_loop); 1730 __ b(lt, &arguments_loop);
1732 1731
1733 // Return. 1732 // Return.
1734 __ Ret(); 1733 __ Ret();
1735 1734
1736 // Do the runtime call to allocate the arguments object. 1735 // Do the runtime call to allocate the arguments object.
1737 // r0 = address of new object (tagged) 1736 // r0 = address of new object (tagged)
1738 // r5 = argument count (tagged) 1737 // r5 = argument count (tagged)
1739 __ bind(&runtime); 1738 __ bind(&runtime);
1740 __ Push(r1, r3, r5); 1739 __ Push(r1, r3, r5);
1741 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); 1740 __ TailCallRuntime(Runtime::kNewSloppyArguments);
1742 } 1741 }
1743 1742
1744 1743
1745 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { 1744 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
1746 // Return address is in lr. 1745 // Return address is in lr.
1747 Label slow; 1746 Label slow;
1748 1747
1749 Register receiver = LoadDescriptor::ReceiverRegister(); 1748 Register receiver = LoadDescriptor::ReceiverRegister();
1750 Register key = LoadDescriptor::NameRegister(); 1749 Register key = LoadDescriptor::NameRegister();
1751 1750
1752 // Check that the key is an array index, that is Uint32. 1751 // Check that the key is an array index, that is Uint32.
1753 __ NonNegativeSmiTst(key); 1752 __ NonNegativeSmiTst(key);
1754 __ b(ne, &slow); 1753 __ b(ne, &slow);
1755 1754
1756 // Everything is fine, call runtime. 1755 // Everything is fine, call runtime.
1757 __ Push(receiver, key); // Receiver, key. 1756 __ Push(receiver, key); // Receiver, key.
1758 1757
1759 // Perform tail call to the entry. 1758 // Perform tail call to the entry.
1760 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2); 1759 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);
1761 1760
1762 __ bind(&slow); 1761 __ bind(&slow);
1763 PropertyAccessCompiler::TailCallBuiltin( 1762 PropertyAccessCompiler::TailCallBuiltin(
1764 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); 1763 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
1765 } 1764 }
1766 1765
1767 1766
1768 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { 1767 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
1769 // r1 : function 1768 // r1 : function
1770 // r2 : number of parameters (tagged) 1769 // r2 : number of parameters (tagged)
(...skipping 72 matching lines...)
1843 __ cmp(r2, Operand::Zero()); 1842 __ cmp(r2, Operand::Zero());
1844 __ b(ne, &loop); 1843 __ b(ne, &loop);
1845 1844
1846 // Return. 1845 // Return.
1847 __ bind(&done); 1846 __ bind(&done);
1848 __ Ret(); 1847 __ Ret();
1849 1848
1850 // Do the runtime call to allocate the arguments object. 1849 // Do the runtime call to allocate the arguments object.
1851 __ bind(&runtime); 1850 __ bind(&runtime);
1852 __ Push(r1, r3, r2); 1851 __ Push(r1, r3, r2);
1853 __ TailCallRuntime(Runtime::kNewStrictArguments, 3); 1852 __ TailCallRuntime(Runtime::kNewStrictArguments);
1854 } 1853 }
1855 1854
1856 1855
1857 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { 1856 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
1858 // Stack layout on entry. 1857 // Stack layout on entry.
1859 // sp[0] : language mode 1858 // sp[0] : language mode
1860 // sp[4] : index of rest parameter 1859 // sp[4] : index of rest parameter
1861 // sp[8] : number of parameters 1860 // sp[8] : number of parameters
1862 // sp[12] : receiver displacement 1861 // sp[12] : receiver displacement
1863 1862
1864 Label runtime; 1863 Label runtime;
1865 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 1864 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1866 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); 1865 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
1867 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 1866 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1868 __ b(ne, &runtime); 1867 __ b(ne, &runtime);
1869 1868
1870 // Patch the arguments.length and the parameters pointer. 1869 // Patch the arguments.length and the parameters pointer.
1871 __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); 1870 __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
1872 __ str(r1, MemOperand(sp, 2 * kPointerSize)); 1871 __ str(r1, MemOperand(sp, 2 * kPointerSize));
1873 __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r1)); 1872 __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r1));
1874 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset)); 1873 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
1875 __ str(r3, MemOperand(sp, 3 * kPointerSize)); 1874 __ str(r3, MemOperand(sp, 3 * kPointerSize));
1876 1875
1877 __ bind(&runtime); 1876 __ bind(&runtime);
1878 __ TailCallRuntime(Runtime::kNewRestParam, 4); 1877 __ TailCallRuntime(Runtime::kNewRestParam);
1879 } 1878 }
1880 1879
1881 1880
1882 void RegExpExecStub::Generate(MacroAssembler* masm) { 1881 void RegExpExecStub::Generate(MacroAssembler* masm) {
1883 // Just jump directly to runtime if native RegExp is not selected at compile 1882 // Just jump directly to runtime if native RegExp is not selected at compile
1884 // time or if regexp entry in generated code is turned off runtime switch or 1883 // time or if regexp entry in generated code is turned off runtime switch or
1885 // at compilation. 1884 // at compilation.
1886 #ifdef V8_INTERPRETED_REGEXP 1885 #ifdef V8_INTERPRETED_REGEXP
1887 __ TailCallRuntime(Runtime::kRegExpExec, 4); 1886 __ TailCallRuntime(Runtime::kRegExpExec);
1888 #else // V8_INTERPRETED_REGEXP 1887 #else // V8_INTERPRETED_REGEXP
1889 1888
1890 // Stack frame on entry. 1889 // Stack frame on entry.
1891 // sp[0]: last_match_info (expected JSArray) 1890 // sp[0]: last_match_info (expected JSArray)
1892 // sp[4]: previous index 1891 // sp[4]: previous index
1893 // sp[8]: subject string 1892 // sp[8]: subject string
1894 // sp[12]: JSRegExp object 1893 // sp[12]: JSRegExp object
1895 1894
1896 const int kLastMatchInfoOffset = 0 * kPointerSize; 1895 const int kLastMatchInfoOffset = 0 * kPointerSize;
1897 const int kPreviousIndexOffset = 1 * kPointerSize; 1896 const int kPreviousIndexOffset = 1 * kPointerSize;
(...skipping 250 matching lines...)
2148 // haven't created the exception yet. Handle that in the runtime system. 2147 // haven't created the exception yet. Handle that in the runtime system.
2149 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 2148 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
2150 __ mov(r1, Operand(isolate()->factory()->the_hole_value())); 2149 __ mov(r1, Operand(isolate()->factory()->the_hole_value()));
2151 __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, 2150 __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
2152 isolate()))); 2151 isolate())));
2153 __ ldr(r0, MemOperand(r2, 0)); 2152 __ ldr(r0, MemOperand(r2, 0));
2154 __ cmp(r0, r1); 2153 __ cmp(r0, r1);
2155 __ b(eq, &runtime); 2154 __ b(eq, &runtime);
2156 2155
2157 // For exception, throw the exception again. 2156 // For exception, throw the exception again.
2158 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4); 2157 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
2159 2158
2160 __ bind(&failure); 2159 __ bind(&failure);
2161 // For failure and exception return null. 2160 // For failure and exception return null.
2162 __ mov(r0, Operand(isolate()->factory()->null_value())); 2161 __ mov(r0, Operand(isolate()->factory()->null_value()));
2163 __ add(sp, sp, Operand(4 * kPointerSize)); 2162 __ add(sp, sp, Operand(4 * kPointerSize));
2164 __ Ret(); 2163 __ Ret();
2165 2164
2166 // Process the result from the native regexp code. 2165 // Process the result from the native regexp code.
2167 __ bind(&success); 2166 __ bind(&success);
2168 __ ldr(r1, 2167 __ ldr(r1,
(...skipping 74 matching lines...)
2243 __ jmp(&next_capture); 2242 __ jmp(&next_capture);
2244 __ bind(&done); 2243 __ bind(&done);
2245 2244
2246 // Return last match info. 2245 // Return last match info.
2247 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset)); 2246 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
2248 __ add(sp, sp, Operand(4 * kPointerSize)); 2247 __ add(sp, sp, Operand(4 * kPointerSize));
2249 __ Ret(); 2248 __ Ret();
2250 2249
2251 // Do the runtime call to execute the regexp. 2250 // Do the runtime call to execute the regexp.
2252 __ bind(&runtime); 2251 __ bind(&runtime);
2253 __ TailCallRuntime(Runtime::kRegExpExec, 4); 2252 __ TailCallRuntime(Runtime::kRegExpExec);
2254 2253
2255 // Deferred code for string handling. 2254 // Deferred code for string handling.
2256 // (6) Not a long external string? If yes, go to (8). 2255 // (6) Not a long external string? If yes, go to (8).
2257 __ bind(&not_seq_nor_cons); 2256 __ bind(&not_seq_nor_cons);
2258 // Compare flags are still set. 2257 // Compare flags are still set.
2259 __ b(gt, &not_long_external); // Go to (8). 2258 __ b(gt, &not_long_external); // Go to (8).
2260 2259
2261 // (7) External string. Make it, offset-wise, look like a sequential string. 2260 // (7) External string. Make it, offset-wise, look like a sequential string.
2262 __ bind(&external_string); 2261 __ bind(&external_string);
2263 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); 2262 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
(...skipping 338 matching lines...)
2602 } 2601 }
2603 2602
2604 2603
2605 void CallICStub::GenerateMiss(MacroAssembler* masm) { 2604 void CallICStub::GenerateMiss(MacroAssembler* masm) {
2606 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 2605 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2607 2606
2608 // Push the receiver and the function and feedback info. 2607 // Push the receiver and the function and feedback info.
2609 __ Push(r1, r2, r3); 2608 __ Push(r1, r2, r3);
2610 2609
2611 // Call the entry. 2610 // Call the entry.
2612 __ CallRuntime(Runtime::kCallIC_Miss, 3); 2611 __ CallRuntime(Runtime::kCallIC_Miss);
2613 2612
2614 // Move result to edi and exit the internal frame. 2613 // Move result to edi and exit the internal frame.
2615 __ mov(r1, r0); 2614 __ mov(r1, r0);
2616 } 2615 }
2617 2616
2618 2617
2619 // StringCharCodeAtGenerator 2618 // StringCharCodeAtGenerator
2620 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 2619 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2621 // If the receiver is a smi trigger the non-string case. 2620 // If the receiver is a smi trigger the non-string case.
2622 if (check_mode_ == RECEIVER_IS_UNKNOWN) { 2621 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
(...skipping 44 matching lines...)
2667 DONT_DO_SMI_CHECK); 2666 DONT_DO_SMI_CHECK);
2668 call_helper.BeforeCall(masm); 2667 call_helper.BeforeCall(masm);
2669 if (embed_mode == PART_OF_IC_HANDLER) { 2668 if (embed_mode == PART_OF_IC_HANDLER) {
2670 __ Push(LoadWithVectorDescriptor::VectorRegister(), 2669 __ Push(LoadWithVectorDescriptor::VectorRegister(),
2671 LoadWithVectorDescriptor::SlotRegister(), object_, index_); 2670 LoadWithVectorDescriptor::SlotRegister(), object_, index_);
2672 } else { 2671 } else {
2673 // index_ is consumed by runtime conversion function. 2672 // index_ is consumed by runtime conversion function.
2674 __ Push(object_, index_); 2673 __ Push(object_, index_);
2675 } 2674 }
2676 if (index_flags_ == STRING_INDEX_IS_NUMBER) { 2675 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
2677 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); 2676 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
2678 } else { 2677 } else {
2679 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); 2678 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
2680 // NumberToSmi discards numbers that are not exact integers. 2679 // NumberToSmi discards numbers that are not exact integers.
2681 __ CallRuntime(Runtime::kNumberToSmi, 1); 2680 __ CallRuntime(Runtime::kNumberToSmi);
2682 } 2681 }
2683 // Save the conversion result before the pop instructions below 2682 // Save the conversion result before the pop instructions below
2684 // have a chance to overwrite it. 2683 // have a chance to overwrite it.
2685 __ Move(index_, r0); 2684 __ Move(index_, r0);
2686 if (embed_mode == PART_OF_IC_HANDLER) { 2685 if (embed_mode == PART_OF_IC_HANDLER) {
2687 __ Pop(LoadWithVectorDescriptor::VectorRegister(), 2686 __ Pop(LoadWithVectorDescriptor::VectorRegister(),
2688 LoadWithVectorDescriptor::SlotRegister(), object_); 2687 LoadWithVectorDescriptor::SlotRegister(), object_);
2689 } else { 2688 } else {
2690 __ pop(object_); 2689 __ pop(object_);
2691 } 2690 }
2692 // Reload the instance type. 2691 // Reload the instance type.
2693 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); 2692 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2694 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); 2693 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2695 call_helper.AfterCall(masm); 2694 call_helper.AfterCall(masm);
2696 // If index is still not a smi, it must be out of range. 2695 // If index is still not a smi, it must be out of range.
2697 __ JumpIfNotSmi(index_, index_out_of_range_); 2696 __ JumpIfNotSmi(index_, index_out_of_range_);
2698 // Otherwise, return to the fast path. 2697 // Otherwise, return to the fast path.
2699 __ jmp(&got_smi_index_); 2698 __ jmp(&got_smi_index_);
2700 2699
2701 // Call runtime. We get here when the receiver is a string and the 2700 // Call runtime. We get here when the receiver is a string and the
2702 // index is a number, but the code of getting the actual character 2701 // index is a number, but the code of getting the actual character
2703 // is too complex (e.g., when the string needs to be flattened). 2702 // is too complex (e.g., when the string needs to be flattened).
2704 __ bind(&call_runtime_); 2703 __ bind(&call_runtime_);
2705 call_helper.BeforeCall(masm); 2704 call_helper.BeforeCall(masm);
2706 __ SmiTag(index_); 2705 __ SmiTag(index_);
2707 __ Push(object_, index_); 2706 __ Push(object_, index_);
2708 __ CallRuntime(Runtime::kStringCharCodeAtRT, 2); 2707 __ CallRuntime(Runtime::kStringCharCodeAtRT);
2709 __ Move(result_, r0); 2708 __ Move(result_, r0);
2710 call_helper.AfterCall(masm); 2709 call_helper.AfterCall(masm);
2711 __ jmp(&exit_); 2710 __ jmp(&exit_);
2712 2711
2713 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); 2712 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
2714 } 2713 }
2715 2714
2716 2715
2717 // ------------------------------------------------------------------------- 2716 // -------------------------------------------------------------------------
2718 // StringCharFromCodeGenerator 2717 // StringCharFromCodeGenerator
(...skipping 18 matching lines...)
2737 2736
2738 2737
2739 void StringCharFromCodeGenerator::GenerateSlow( 2738 void StringCharFromCodeGenerator::GenerateSlow(
2740 MacroAssembler* masm, 2739 MacroAssembler* masm,
2741 const RuntimeCallHelper& call_helper) { 2740 const RuntimeCallHelper& call_helper) {
2742 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); 2741 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
2743 2742
2744 __ bind(&slow_case_); 2743 __ bind(&slow_case_);
2745 call_helper.BeforeCall(masm); 2744 call_helper.BeforeCall(masm);
2746 __ push(code_); 2745 __ push(code_);
2747 __ CallRuntime(Runtime::kStringCharFromCode, 1); 2746 __ CallRuntime(Runtime::kStringCharFromCode);
2748 __ Move(result_, r0); 2747 __ Move(result_, r0);
2749 call_helper.AfterCall(masm); 2748 call_helper.AfterCall(masm);
2750 __ jmp(&exit_); 2749 __ jmp(&exit_);
2751 2750
2752 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); 2751 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
2753 } 2752 }
2754 2753
2755 2754
2756 enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 }; 2755 enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };
2757 2756
(...skipping 235 matching lines...)
2993 masm, r1, r5, r2, r3, String::TWO_BYTE_ENCODING); 2992 masm, r1, r5, r2, r3, String::TWO_BYTE_ENCODING);
2994 2993
2995 __ bind(&return_r0); 2994 __ bind(&return_r0);
2996 Counters* counters = isolate()->counters(); 2995 Counters* counters = isolate()->counters();
2997 __ IncrementCounter(counters->sub_string_native(), 1, r3, r4); 2996 __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
2998 __ Drop(3); 2997 __ Drop(3);
2999 __ Ret(); 2998 __ Ret();
3000 2999
3001 // Just jump to runtime to create the sub string. 3000 // Just jump to runtime to create the sub string.
3002 __ bind(&runtime); 3001 __ bind(&runtime);
3003 __ TailCallRuntime(Runtime::kSubString, 3); 3002 __ TailCallRuntime(Runtime::kSubString);
3004 3003
3005 __ bind(&single_char); 3004 __ bind(&single_char);
3006 // r0: original string 3005 // r0: original string
3007 // r1: instance type 3006 // r1: instance type
3008 // r2: length 3007 // r2: length
3009 // r3: from index (untagged) 3008 // r3: from index (untagged)
3010 __ SmiTag(r3, r3); 3009 __ SmiTag(r3, r3);
3011 StringCharAtGenerator generator(r0, r3, r2, r0, &runtime, &runtime, &runtime, 3010 StringCharAtGenerator generator(r0, r3, r2, r0, &runtime, &runtime, &runtime,
3012 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); 3011 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
3013 generator.GenerateFast(masm); 3012 generator.GenerateFast(masm);
(...skipping 19 matching lines...)
3033 __ cmp(r1, Operand(FIRST_NONSTRING_TYPE)); 3032 __ cmp(r1, Operand(FIRST_NONSTRING_TYPE));
3034 __ b(hs, &not_string); 3033 __ b(hs, &not_string);
3035 // Check if string has a cached array index. 3034 // Check if string has a cached array index.
3036 __ ldr(r2, FieldMemOperand(r0, String::kHashFieldOffset)); 3035 __ ldr(r2, FieldMemOperand(r0, String::kHashFieldOffset));
3037 __ tst(r2, Operand(String::kContainsCachedArrayIndexMask)); 3036 __ tst(r2, Operand(String::kContainsCachedArrayIndexMask));
3038 __ b(ne, &slow_string); 3037 __ b(ne, &slow_string);
3039 __ IndexFromHash(r2, r0); 3038 __ IndexFromHash(r2, r0);
3040 __ Ret(); 3039 __ Ret();
3041 __ bind(&slow_string); 3040 __ bind(&slow_string);
3042 __ push(r0); // Push argument. 3041 __ push(r0); // Push argument.
3043 __ TailCallRuntime(Runtime::kStringToNumber, 1); 3042 __ TailCallRuntime(Runtime::kStringToNumber);
3044 __ bind(&not_string); 3043 __ bind(&not_string);
3045 3044
3046 Label not_oddball; 3045 Label not_oddball;
3047 __ cmp(r1, Operand(ODDBALL_TYPE)); 3046 __ cmp(r1, Operand(ODDBALL_TYPE));
3048 __ b(ne, &not_oddball); 3047 __ b(ne, &not_oddball);
3049 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset)); 3048 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
3050 __ Ret(); 3049 __ Ret();
3051 __ bind(&not_oddball); 3050 __ bind(&not_oddball);
3052 3051
3053 __ push(r0); // Push argument. 3052 __ push(r0); // Push argument.
3054 __ TailCallRuntime(Runtime::kToNumber, 1); 3053 __ TailCallRuntime(Runtime::kToNumber);
3055 } 3054 }
3056 3055
3057 3056
3058 void ToLengthStub::Generate(MacroAssembler* masm) { 3057 void ToLengthStub::Generate(MacroAssembler* masm) {
3059 // The ToLength stub takes one argument in r0. 3058 // The ToLength stub takes one argument in r0.
3060 Label not_smi; 3059 Label not_smi;
3061 __ JumpIfNotSmi(r0, &not_smi); 3060 __ JumpIfNotSmi(r0, &not_smi);
3062 STATIC_ASSERT(kSmiTag == 0); 3061 STATIC_ASSERT(kSmiTag == 0);
3063 __ tst(r0, r0); 3062 __ tst(r0, r0);
3064 __ mov(r0, Operand(0), LeaveCC, lt); 3063 __ mov(r0, Operand(0), LeaveCC, lt);
3065 __ Ret(); 3064 __ Ret();
3066 __ bind(&not_smi); 3065 __ bind(&not_smi);
3067 3066
3068 __ push(r0); // Push argument. 3067 __ push(r0); // Push argument.
3069 __ TailCallRuntime(Runtime::kToLength, 1); 3068 __ TailCallRuntime(Runtime::kToLength);
3070 } 3069 }
3071 3070
3072 3071
3073 void ToStringStub::Generate(MacroAssembler* masm) { 3072 void ToStringStub::Generate(MacroAssembler* masm) {
3074 // The ToString stub takes one argument in r0. 3073 // The ToString stub takes one argument in r0.
3075 Label is_number; 3074 Label is_number;
3076 __ JumpIfSmi(r0, &is_number); 3075 __ JumpIfSmi(r0, &is_number);
3077 3076
3078 __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE); 3077 __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
3079 // r0: receiver 3078 // r0: receiver
3080 // r1: receiver instance type 3079 // r1: receiver instance type
3081 __ Ret(lo); 3080 __ Ret(lo);
3082 3081
3083 Label not_heap_number; 3082 Label not_heap_number;
3084 __ cmp(r1, Operand(HEAP_NUMBER_TYPE)); 3083 __ cmp(r1, Operand(HEAP_NUMBER_TYPE));
3085 __ b(ne, &not_heap_number); 3084 __ b(ne, &not_heap_number);
3086 __ bind(&is_number); 3085 __ bind(&is_number);
3087 NumberToStringStub stub(isolate()); 3086 NumberToStringStub stub(isolate());
3088 __ TailCallStub(&stub); 3087 __ TailCallStub(&stub);
3089 __ bind(&not_heap_number); 3088 __ bind(&not_heap_number);
3090 3089
3091 Label not_oddball; 3090 Label not_oddball;
3092 __ cmp(r1, Operand(ODDBALL_TYPE)); 3091 __ cmp(r1, Operand(ODDBALL_TYPE));
3093 __ b(ne, &not_oddball); 3092 __ b(ne, &not_oddball);
3094 __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset)); 3093 __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
3095 __ Ret(); 3094 __ Ret();
3096 __ bind(&not_oddball); 3095 __ bind(&not_oddball);
3097 3096
3098 __ push(r0); // Push argument. 3097 __ push(r0); // Push argument.
3099 __ TailCallRuntime(Runtime::kToString, 1); 3098 __ TailCallRuntime(Runtime::kToString);
3100 } 3099 }
3101 3100
3102 3101
3103 void StringHelper::GenerateFlatOneByteStringEquals( 3102 void StringHelper::GenerateFlatOneByteStringEquals(
3104 MacroAssembler* masm, Register left, Register right, Register scratch1, 3103 MacroAssembler* masm, Register left, Register right, Register scratch1,
3105 Register scratch2, Register scratch3) { 3104 Register scratch2, Register scratch3) {
3106 Register length = scratch1; 3105 Register length = scratch1;
3107 3106
3108 // Compare lengths. 3107 // Compare lengths.
3109 Label strings_not_equal, check_zero_length; 3108 Label strings_not_equal, check_zero_length;
(...skipping 109 matching lines...)
3219 3218
3220 // Compare flat one-byte strings natively. 3219 // Compare flat one-byte strings natively.
3221 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2, 3220 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2,
3222 r3); 3221 r3);
3223 StringHelper::GenerateCompareFlatOneByteStrings(masm, r1, r0, r2, r3, r4, r5); 3222 StringHelper::GenerateCompareFlatOneByteStrings(masm, r1, r0, r2, r3, r4, r5);
3224 3223
3225 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 3224 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
3226 // tagged as a small integer. 3225 // tagged as a small integer.
3227 __ bind(&runtime); 3226 __ bind(&runtime);
3228 __ Push(r1, r0); 3227 __ Push(r1, r0);
3229 __ TailCallRuntime(Runtime::kStringCompare, 2); 3228 __ TailCallRuntime(Runtime::kStringCompare);
3230 } 3229 }
3231 3230
3232 3231
3233 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { 3232 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3234 // ----------- S t a t e ------------- 3233 // ----------- S t a t e -------------
3235 // -- r1 : left 3234 // -- r1 : left
3236 // -- r0 : right 3235 // -- r0 : right
3237 // -- lr : return address 3236 // -- lr : return address
3238 // ----------------------------------- 3237 // -----------------------------------
3239 3238
(...skipping 21 matching lines...)
3261 } 3260 }
3262 3261
3263 3262
3264 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { 3263 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
3265 DCHECK_EQ(CompareICState::BOOLEAN, state()); 3264 DCHECK_EQ(CompareICState::BOOLEAN, state());
3266 Label miss; 3265 Label miss;
3267 3266
3268 __ CheckMap(r1, r2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); 3267 __ CheckMap(r1, r2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
3269 __ CheckMap(r0, r3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); 3268 __ CheckMap(r0, r3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
3270 if (op() != Token::EQ_STRICT && is_strong(strength())) { 3269 if (op() != Token::EQ_STRICT && is_strong(strength())) {
3271 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); 3270 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
3272 } else { 3271 } else {
3273 if (!Token::IsEqualityOp(op())) { 3272 if (!Token::IsEqualityOp(op())) {
3274 __ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset)); 3273 __ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset));
3275 __ AssertSmi(r1); 3274 __ AssertSmi(r1);
3276 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset)); 3275 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
3277 __ AssertSmi(r0); 3276 __ AssertSmi(r0);
3278 } 3277 }
3279 __ sub(r0, r1, r0); 3278 __ sub(r0, r1, r0);
3280 __ Ret(); 3279 __ Ret();
3281 } 3280 }
(...skipping 240 matching lines...)
3522 tmp3); 3521 tmp3);
3523 } else { 3522 } else {
3524 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1, 3523 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
3525 tmp2, tmp3, tmp4); 3524 tmp2, tmp3, tmp4);
3526 } 3525 }
3527 3526
3528 // Handle more complex cases in runtime. 3527 // Handle more complex cases in runtime.
3529 __ bind(&runtime); 3528 __ bind(&runtime);
3530 __ Push(left, right); 3529 __ Push(left, right);
3531 if (equality) { 3530 if (equality) {
3532 __ TailCallRuntime(Runtime::kStringEquals, 2); 3531 __ TailCallRuntime(Runtime::kStringEquals);
3533 } else { 3532 } else {
3534 __ TailCallRuntime(Runtime::kStringCompare, 2); 3533 __ TailCallRuntime(Runtime::kStringCompare);
3535 } 3534 }
3536 3535
3537 __ bind(&miss); 3536 __ bind(&miss);
3538 GenerateMiss(masm); 3537 GenerateMiss(masm);
3539 } 3538 }
3540 3539
3541 3540
3542 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { 3541 void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
3543 DCHECK_EQ(CompareICState::RECEIVER, state()); 3542 DCHECK_EQ(CompareICState::RECEIVER, state());
3544 Label miss; 3543 Label miss;
(...skipping 25 matching lines...)
3570 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset)); 3569 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
3571 __ cmp(r2, r4); 3570 __ cmp(r2, r4);
3572 __ b(ne, &miss); 3571 __ b(ne, &miss);
3573 __ cmp(r3, r4); 3572 __ cmp(r3, r4);
3574 __ b(ne, &miss); 3573 __ b(ne, &miss);
3575 3574
3576 if (Token::IsEqualityOp(op())) { 3575 if (Token::IsEqualityOp(op())) {
3577 __ sub(r0, r0, Operand(r1)); 3576 __ sub(r0, r0, Operand(r1));
3578 __ Ret(); 3577 __ Ret();
3579 } else if (is_strong(strength())) { 3578 } else if (is_strong(strength())) {
3580 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); 3579 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
3581 } else { 3580 } else {
3582 if (op() == Token::LT || op() == Token::LTE) { 3581 if (op() == Token::LT || op() == Token::LTE) {
3583 __ mov(r2, Operand(Smi::FromInt(GREATER))); 3582 __ mov(r2, Operand(Smi::FromInt(GREATER)));
3584 } else { 3583 } else {
3585 __ mov(r2, Operand(Smi::FromInt(LESS))); 3584 __ mov(r2, Operand(Smi::FromInt(LESS)));
3586 } 3585 }
3587 __ Push(r1, r0, r2); 3586 __ Push(r1, r0, r2);
3588 __ TailCallRuntime(Runtime::kCompare, 3); 3587 __ TailCallRuntime(Runtime::kCompare);
3589 } 3588 }
3590 3589
3591 __ bind(&miss); 3590 __ bind(&miss);
3592 GenerateMiss(masm); 3591 GenerateMiss(masm);
3593 } 3592 }
3594 3593
3595 3594
3596 void CompareICStub::GenerateMiss(MacroAssembler* masm) { 3595 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
3597 { 3596 {
3598 // Call the runtime system in a fresh internal frame. 3597 // Call the runtime system in a fresh internal frame.
3599 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 3598 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
3600 __ Push(r1, r0); 3599 __ Push(r1, r0);
3601 __ Push(lr, r1, r0); 3600 __ Push(lr, r1, r0);
3602 __ mov(ip, Operand(Smi::FromInt(op()))); 3601 __ mov(ip, Operand(Smi::FromInt(op())));
3603 __ push(ip); 3602 __ push(ip);
3604 __ CallRuntime(Runtime::kCompareIC_Miss, 3); 3603 __ CallRuntime(Runtime::kCompareIC_Miss);
3605 // Compute the entry point of the rewritten stub. 3604 // Compute the entry point of the rewritten stub.
3606 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); 3605 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
3607 // Restore registers. 3606 // Restore registers.
3608 __ pop(lr); 3607 __ pop(lr);
3609 __ Pop(r1, r0); 3608 __ Pop(r1, r0);
3610 } 3609 }
3611 3610
3612 __ Jump(r2); 3611 __ Jump(r2);
3613 } 3612 }
3614 3613
(...skipping 1323 matching lines...)
4938 __ ldr(result, ContextMemOperand(result)); 4937 __ ldr(result, ContextMemOperand(result));
4939 __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); 4938 __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
4940 4939
4941 // If the result is not the_hole, return. Otherwise, handle in the runtime. 4940 // If the result is not the_hole, return. Otherwise, handle in the runtime.
4942 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); 4941 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
4943 __ Ret(ne); 4942 __ Ret(ne);
4944 4943
4945 // Fallback to runtime. 4944 // Fallback to runtime.
4946 __ SmiTag(slot); 4945 __ SmiTag(slot);
4947 __ push(slot); 4946 __ push(slot);
4948 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1); 4947 __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
4949 } 4948 }
4950 4949
4951 4950
4952 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { 4951 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
4953 Register value = r0; 4952 Register value = r0;
4954 Register slot = r2; 4953 Register slot = r2;
4955 4954
4956 Register cell = r1; 4955 Register cell = r1;
4957 Register cell_details = r4; 4956 Register cell_details = r4;
4958 Register cell_value = r5; 4957 Register cell_value = r5;
(...skipping 103 matching lines...)
5062 __ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset)); 5061 __ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
5063 __ cmp(cell_value_map, scratch); 5062 __ cmp(cell_value_map, scratch);
5064 __ b(eq, &fast_heapobject_case); 5063 __ b(eq, &fast_heapobject_case);
5065 5064
5066 // Fallback to runtime. 5065 // Fallback to runtime.
5067 __ bind(&slow_case); 5066 __ bind(&slow_case);
5068 __ SmiTag(slot); 5067 __ SmiTag(slot);
5069 __ Push(slot, value); 5068 __ Push(slot, value);
5070 __ TailCallRuntime(is_strict(language_mode()) 5069 __ TailCallRuntime(is_strict(language_mode())
5071 ? Runtime::kStoreGlobalViaContext_Strict 5070 ? Runtime::kStoreGlobalViaContext_Strict
5072 : Runtime::kStoreGlobalViaContext_Sloppy, 5071 : Runtime::kStoreGlobalViaContext_Sloppy);
5073 2);
5074 } 5072 }
5075 5073
5076 5074
5077 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { 5075 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
5078 return ref0.address() - ref1.address(); 5076 return ref0.address() - ref1.address();
5079 } 5077 }
5080 5078
5081 5079
5082 // Calls an API function. Allocates HandleScope, extracts returned value 5080 // Calls an API function. Allocates HandleScope, extracts returned value
5083 // from handle and propagates exceptions. Restores context. stack_space 5081 // from handle and propagates exceptions. Restores context. stack_space
(...skipping 106 matching lines...)
5190 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex); 5188 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
5191 __ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate))); 5189 __ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate)));
5192 __ ldr(r5, MemOperand(ip)); 5190 __ ldr(r5, MemOperand(ip));
5193 __ cmp(r4, r5); 5191 __ cmp(r4, r5);
5194 __ b(ne, &promote_scheduled_exception); 5192 __ b(ne, &promote_scheduled_exception);
5195 5193
5196 __ mov(pc, lr); 5194 __ mov(pc, lr);
5197 5195
5198 // Re-throw by promoting a scheduled exception. 5196 // Re-throw by promoting a scheduled exception.
5199 __ bind(&promote_scheduled_exception); 5197 __ bind(&promote_scheduled_exception);
5200 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0); 5198 __ TailCallRuntime(Runtime::kPromoteScheduledException);
5201 5199
5202 // HandleScope limit has changed. Delete allocated extensions. 5200 // HandleScope limit has changed. Delete allocated extensions.
5203 __ bind(&delete_allocated_handles); 5201 __ bind(&delete_allocated_handles);
5204 __ str(r5, MemOperand(r9, kLimitOffset)); 5202 __ str(r5, MemOperand(r9, kLimitOffset));
5205 __ mov(r4, r0); 5203 __ mov(r4, r0);
5206 __ PrepareCallCFunction(1, r5); 5204 __ PrepareCallCFunction(1, r5);
5207 __ mov(r0, Operand(ExternalReference::isolate_address(isolate))); 5205 __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
5208 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), 5206 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
5209 1); 5207 1);
5210 __ mov(r0, r4); 5208 __ mov(r0, r4);
(...skipping 178 matching lines...)
5389 MemOperand(fp, 6 * kPointerSize), NULL); 5387 MemOperand(fp, 6 * kPointerSize), NULL);
5390 } 5388 }
5391 5389
5392 5390
5393 #undef __ 5391 #undef __
5394 5392
5395 } // namespace internal 5393 } // namespace internal
5396 } // namespace v8 5394 } // namespace v8
5397 5395
5398 #endif // V8_TARGET_ARCH_ARM 5396 #endif // V8_TARGET_ARCH_ARM