| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 664 matching lines...) |
| 675 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r2, r3, r4, | 675 StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r2, r3, r4, |
| 676 r5); | 676 r5); |
| 677 } | 677 } |
| 678 // Never falls through to here. | 678 // Never falls through to here. |
| 679 | 679 |
| 680 __ bind(&slow); | 680 __ bind(&slow); |
| 681 | 681 |
| 682 __ Push(lhs, rhs); | 682 __ Push(lhs, rhs); |
| 683 // Figure out which native to call and setup the arguments. | 683 // Figure out which native to call and setup the arguments. |
| 684 if (cc == eq) { | 684 if (cc == eq) { |
| 685 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2, | 685 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2); |
| 686 1); | |
| 687 } else { | 686 } else { |
| 688 int ncr; // NaN compare result | 687 int ncr; // NaN compare result |
| 689 if (cc == lt || cc == le) { | 688 if (cc == lt || cc == le) { |
| 690 ncr = GREATER; | 689 ncr = GREATER; |
| 691 } else { | 690 } else { |
| 692 DCHECK(cc == gt || cc == ge); // remaining cases | 691 DCHECK(cc == gt || cc == ge); // remaining cases |
| 693 ncr = LESS; | 692 ncr = LESS; |
| 694 } | 693 } |
| 695 __ mov(r0, Operand(Smi::FromInt(ncr))); | 694 __ mov(r0, Operand(Smi::FromInt(ncr))); |
| 696 __ push(r0); | 695 __ push(r0); |
| 697 | 696 |
| 698 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 697 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 699 // tagged as a small integer. | 698 // tagged as a small integer. |
| 700 __ TailCallRuntime( | 699 __ TailCallRuntime( |
| 701 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 3, | 700 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, |
| 702 1); | 701 3); |
| 703 } | 702 } |
| 704 | 703 |
| 705 __ bind(&miss); | 704 __ bind(&miss); |
| 706 GenerateMiss(masm); | 705 GenerateMiss(masm); |
| 707 } | 706 } |
| 708 | 707 |
| 709 | 708 |
| 710 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 709 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 711 // We don't allow a GC during a store buffer overflow so there is no need to | 710 // We don't allow a GC during a store buffer overflow so there is no need to |
| 712 // store the registers in any particular way, but we do have to store and | 711 // store the registers in any particular way, but we do have to store and |
| (...skipping 181 matching lines...) |
| 894 // double_exponent may not contain the exponent value if the input was a | 893 // double_exponent may not contain the exponent value if the input was a |
| 895 // smi. We set it with exponent value before bailing out. | 894 // smi. We set it with exponent value before bailing out. |
| 896 __ vmov(single_scratch, exponent); | 895 __ vmov(single_scratch, exponent); |
| 897 __ vcvt_f64_s32(double_exponent, single_scratch); | 896 __ vcvt_f64_s32(double_exponent, single_scratch); |
| 898 | 897 |
| 899 // Returning or bailing out. | 898 // Returning or bailing out. |
| 900 Counters* counters = isolate()->counters(); | 899 Counters* counters = isolate()->counters(); |
| 901 if (exponent_type() == ON_STACK) { | 900 if (exponent_type() == ON_STACK) { |
| 902 // The arguments are still on the stack. | 901 // The arguments are still on the stack. |
| 903 __ bind(&call_runtime); | 902 __ bind(&call_runtime); |
| 904 __ TailCallRuntime(Runtime::kMathPowRT, 2, 1); | 903 __ TailCallRuntime(Runtime::kMathPowRT, 2); |
| 905 | 904 |
| 906 // The stub is called from non-optimized code, which expects the result | 905 // The stub is called from non-optimized code, which expects the result |
| 907 // as heap number in exponent. | 906 // as heap number in exponent. |
| 908 __ bind(&done); | 907 __ bind(&done); |
| 909 __ AllocateHeapNumber( | 908 __ AllocateHeapNumber( |
| 910 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime); | 909 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime); |
| 911 __ vstr(double_result, | 910 __ vstr(double_result, |
| 912 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); | 911 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); |
| 913 DCHECK(heapnumber.is(r0)); | 912 DCHECK(heapnumber.is(r0)); |
| 914 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2); | 913 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2); |
| (...skipping 483 matching lines...) |
| 1398 __ bind(&done); | 1397 __ bind(&done); |
| 1399 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex); | 1398 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex); |
| 1400 __ Ret(); | 1399 __ Ret(); |
| 1401 | 1400 |
| 1402 // Found Proxy or access check needed: Call the runtime | 1401 // Found Proxy or access check needed: Call the runtime |
| 1403 __ bind(&fast_runtime_fallback); | 1402 __ bind(&fast_runtime_fallback); |
| 1404 __ Push(object, function_prototype); | 1403 __ Push(object, function_prototype); |
| 1405 // Invalidate the instanceof cache. | 1404 // Invalidate the instanceof cache. |
| 1406 __ Move(scratch, Smi::FromInt(0)); | 1405 __ Move(scratch, Smi::FromInt(0)); |
| 1407 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex); | 1406 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex); |
| 1408 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2, 1); | 1407 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2); |
| 1409 | 1408 |
| 1410 // Slow-case: Call the %InstanceOf runtime function. | 1409 // Slow-case: Call the %InstanceOf runtime function. |
| 1411 __ bind(&slow_case); | 1410 __ bind(&slow_case); |
| 1412 __ Push(object, function); | 1411 __ Push(object, function); |
| 1413 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1); | 1412 __ TailCallRuntime(Runtime::kInstanceOf, 2); |
| 1414 } | 1413 } |
| 1415 | 1414 |
| 1416 | 1415 |
| 1417 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { | 1416 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { |
| 1418 Label miss; | 1417 Label miss; |
| 1419 Register receiver = LoadDescriptor::ReceiverRegister(); | 1418 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 1420 // Ensure that the vector and slot registers won't be clobbered before | 1419 // Ensure that the vector and slot registers won't be clobbered before |
| 1421 // calling the miss handler. | 1420 // calling the miss handler. |
| 1422 DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(), | 1421 DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(), |
| 1423 LoadWithVectorDescriptor::SlotRegister())); | 1422 LoadWithVectorDescriptor::SlotRegister())); |
| (...skipping 81 matching lines...) |
| 1505 // Read the argument from the adaptor frame and return it. | 1504 // Read the argument from the adaptor frame and return it. |
| 1506 __ sub(r3, r0, r1); | 1505 __ sub(r3, r0, r1); |
| 1507 __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r3)); | 1506 __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r3)); |
| 1508 __ ldr(r0, MemOperand(r3, kDisplacement)); | 1507 __ ldr(r0, MemOperand(r3, kDisplacement)); |
| 1509 __ Jump(lr); | 1508 __ Jump(lr); |
| 1510 | 1509 |
| 1511 // Slow-case: Handle non-smi or out-of-bounds access to arguments | 1510 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
| 1512 // by calling the runtime system. | 1511 // by calling the runtime system. |
| 1513 __ bind(&slow); | 1512 __ bind(&slow); |
| 1514 __ push(r1); | 1513 __ push(r1); |
| 1515 __ TailCallRuntime(Runtime::kArguments, 1, 1); | 1514 __ TailCallRuntime(Runtime::kArguments, 1); |
| 1516 } | 1515 } |
| 1517 | 1516 |
| 1518 | 1517 |
| 1519 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { | 1518 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { |
| 1520 // r1 : function | 1519 // r1 : function |
| 1521 // r2 : number of parameters (tagged) | 1520 // r2 : number of parameters (tagged) |
| 1522 // r3 : parameters pointer | 1521 // r3 : parameters pointer |
| 1523 | 1522 |
| 1524 DCHECK(r1.is(ArgumentsAccessNewDescriptor::function())); | 1523 DCHECK(r1.is(ArgumentsAccessNewDescriptor::function())); |
| 1525 DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count())); | 1524 DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count())); |
| 1526 DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); | 1525 DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); |
| 1527 | 1526 |
| 1528 // Check if the calling frame is an arguments adaptor frame. | 1527 // Check if the calling frame is an arguments adaptor frame. |
| 1529 Label runtime; | 1528 Label runtime; |
| 1530 __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1529 __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 1531 __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset)); | 1530 __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset)); |
| 1532 __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1531 __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1533 __ b(ne, &runtime); | 1532 __ b(ne, &runtime); |
| 1534 | 1533 |
| 1535 // Patch the arguments.length and the parameters pointer in the current frame. | 1534 // Patch the arguments.length and the parameters pointer in the current frame. |
| 1536 __ ldr(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1535 __ ldr(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1537 __ add(r4, r4, Operand(r2, LSL, 1)); | 1536 __ add(r4, r4, Operand(r2, LSL, 1)); |
| 1538 __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset)); | 1537 __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 1539 | 1538 |
| 1540 __ bind(&runtime); | 1539 __ bind(&runtime); |
| 1541 __ Push(r1, r3, r2); | 1540 __ Push(r1, r3, r2); |
| 1542 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); | 1541 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); |
| 1543 } | 1542 } |
| 1544 | 1543 |
| 1545 | 1544 |
| 1546 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { | 1545 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
| 1547 // r1 : function | 1546 // r1 : function |
| 1548 // r2 : number of parameters (tagged) | 1547 // r2 : number of parameters (tagged) |
| 1549 // r3 : parameters pointer | 1548 // r3 : parameters pointer |
| 1550 // Registers used over whole function: | 1549 // Registers used over whole function: |
| 1551 // r5 : arguments count (tagged) | 1550 // r5 : arguments count (tagged) |
| 1552 // r6 : mapped parameter count (tagged) | 1551 // r6 : mapped parameter count (tagged) |
| (...skipping 179 matching lines...) |
| 1732 __ b(lt, &arguments_loop); | 1731 __ b(lt, &arguments_loop); |
| 1733 | 1732 |
| 1734 // Return. | 1733 // Return. |
| 1735 __ Ret(); | 1734 __ Ret(); |
| 1736 | 1735 |
| 1737 // Do the runtime call to allocate the arguments object. | 1736 // Do the runtime call to allocate the arguments object. |
| 1738 // r0 = address of new object (tagged) | 1737 // r0 = address of new object (tagged) |
| 1739 // r5 = argument count (tagged) | 1738 // r5 = argument count (tagged) |
| 1740 __ bind(&runtime); | 1739 __ bind(&runtime); |
| 1741 __ Push(r1, r3, r5); | 1740 __ Push(r1, r3, r5); |
| 1742 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); | 1741 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); |
| 1743 } | 1742 } |
| 1744 | 1743 |
| 1745 | 1744 |
| 1746 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { | 1745 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { |
| 1747 // Return address is in lr. | 1746 // Return address is in lr. |
| 1748 Label slow; | 1747 Label slow; |
| 1749 | 1748 |
| 1750 Register receiver = LoadDescriptor::ReceiverRegister(); | 1749 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 1751 Register key = LoadDescriptor::NameRegister(); | 1750 Register key = LoadDescriptor::NameRegister(); |
| 1752 | 1751 |
| 1753 // Check that the key is an array index, that is Uint32. | 1752 // Check that the key is an array index, that is Uint32. |
| 1754 __ NonNegativeSmiTst(key); | 1753 __ NonNegativeSmiTst(key); |
| 1755 __ b(ne, &slow); | 1754 __ b(ne, &slow); |
| 1756 | 1755 |
| 1757 // Everything is fine, call runtime. | 1756 // Everything is fine, call runtime. |
| 1758 __ Push(receiver, key); // Receiver, key. | 1757 __ Push(receiver, key); // Receiver, key. |
| 1759 | 1758 |
| 1760 // Perform tail call to the entry. | 1759 // Perform tail call to the entry. |
| 1761 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1); | 1760 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2); |
| 1762 | 1761 |
| 1763 __ bind(&slow); | 1762 __ bind(&slow); |
| 1764 PropertyAccessCompiler::TailCallBuiltin( | 1763 PropertyAccessCompiler::TailCallBuiltin( |
| 1765 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1764 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
| 1766 } | 1765 } |
| 1767 | 1766 |
| 1768 | 1767 |
| 1769 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 1768 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
| 1770 // r1 : function | 1769 // r1 : function |
| 1771 // r2 : number of parameters (tagged) | 1770 // r2 : number of parameters (tagged) |
| (...skipping 72 matching lines...) |
| 1844 __ cmp(r2, Operand::Zero()); | 1843 __ cmp(r2, Operand::Zero()); |
| 1845 __ b(ne, &loop); | 1844 __ b(ne, &loop); |
| 1846 | 1845 |
| 1847 // Return. | 1846 // Return. |
| 1848 __ bind(&done); | 1847 __ bind(&done); |
| 1849 __ Ret(); | 1848 __ Ret(); |
| 1850 | 1849 |
| 1851 // Do the runtime call to allocate the arguments object. | 1850 // Do the runtime call to allocate the arguments object. |
| 1852 __ bind(&runtime); | 1851 __ bind(&runtime); |
| 1853 __ Push(r1, r3, r2); | 1852 __ Push(r1, r3, r2); |
| 1854 __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1); | 1853 __ TailCallRuntime(Runtime::kNewStrictArguments, 3); |
| 1855 } | 1854 } |
| 1856 | 1855 |
| 1857 | 1856 |
| 1858 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { | 1857 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { |
| 1859 // Stack layout on entry. | 1858 // Stack layout on entry. |
| 1860 // sp[0] : language mode | 1859 // sp[0] : language mode |
| 1861 // sp[4] : index of rest parameter | 1860 // sp[4] : index of rest parameter |
| 1862 // sp[8] : number of parameters | 1861 // sp[8] : number of parameters |
| 1863 // sp[12] : receiver displacement | 1862 // sp[12] : receiver displacement |
| 1864 | 1863 |
| 1865 Label runtime; | 1864 Label runtime; |
| 1866 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1865 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 1867 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); | 1866 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); |
| 1868 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1867 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1869 __ b(ne, &runtime); | 1868 __ b(ne, &runtime); |
| 1870 | 1869 |
| 1871 // Patch the arguments.length and the parameters pointer. | 1870 // Patch the arguments.length and the parameters pointer. |
| 1872 __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1871 __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1873 __ str(r1, MemOperand(sp, 2 * kPointerSize)); | 1872 __ str(r1, MemOperand(sp, 2 * kPointerSize)); |
| 1874 __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r1)); | 1873 __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r1)); |
| 1875 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset)); | 1874 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 1876 __ str(r3, MemOperand(sp, 3 * kPointerSize)); | 1875 __ str(r3, MemOperand(sp, 3 * kPointerSize)); |
| 1877 | 1876 |
| 1878 __ bind(&runtime); | 1877 __ bind(&runtime); |
| 1879 __ TailCallRuntime(Runtime::kNewRestParam, 4, 1); | 1878 __ TailCallRuntime(Runtime::kNewRestParam, 4); |
| 1880 } | 1879 } |
| 1881 | 1880 |
| 1882 | 1881 |
| 1883 void RegExpExecStub::Generate(MacroAssembler* masm) { | 1882 void RegExpExecStub::Generate(MacroAssembler* masm) { |
| 1884 // Just jump directly to runtime if native RegExp is not selected at compile | 1883 // Just jump directly to runtime if native RegExp is not selected at compile |
| 1885 // time or if regexp entry in generated code is turned off by a runtime switch or | 1884 // time or if regexp entry in generated code is turned off by a runtime switch or |
| 1886 // at compilation. | 1885 // at compilation. |
| 1887 #ifdef V8_INTERPRETED_REGEXP | 1886 #ifdef V8_INTERPRETED_REGEXP |
| 1888 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 1887 __ TailCallRuntime(Runtime::kRegExpExec, 4); |
| 1889 #else // V8_INTERPRETED_REGEXP | 1888 #else // V8_INTERPRETED_REGEXP |
| 1890 | 1889 |
| 1891 // Stack frame on entry. | 1890 // Stack frame on entry. |
| 1892 // sp[0]: last_match_info (expected JSArray) | 1891 // sp[0]: last_match_info (expected JSArray) |
| 1893 // sp[4]: previous index | 1892 // sp[4]: previous index |
| 1894 // sp[8]: subject string | 1893 // sp[8]: subject string |
| 1895 // sp[12]: JSRegExp object | 1894 // sp[12]: JSRegExp object |
| 1896 | 1895 |
| 1897 const int kLastMatchInfoOffset = 0 * kPointerSize; | 1896 const int kLastMatchInfoOffset = 0 * kPointerSize; |
| 1898 const int kPreviousIndexOffset = 1 * kPointerSize; | 1897 const int kPreviousIndexOffset = 1 * kPointerSize; |
| (...skipping 250 matching lines...) |
| 2149 // haven't created the exception yet. Handle that in the runtime system. | 2148 // haven't created the exception yet. Handle that in the runtime system. |
| 2150 // TODO(592): Rerunning the RegExp to get the stack overflow exception. | 2149 // TODO(592): Rerunning the RegExp to get the stack overflow exception. |
| 2151 __ mov(r1, Operand(isolate()->factory()->the_hole_value())); | 2150 __ mov(r1, Operand(isolate()->factory()->the_hole_value())); |
| 2152 __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, | 2151 __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, |
| 2153 isolate()))); | 2152 isolate()))); |
| 2154 __ ldr(r0, MemOperand(r2, 0)); | 2153 __ ldr(r0, MemOperand(r2, 0)); |
| 2155 __ cmp(r0, r1); | 2154 __ cmp(r0, r1); |
| 2156 __ b(eq, &runtime); | 2155 __ b(eq, &runtime); |
| 2157 | 2156 |
| 2158 // For exception, throw the exception again. | 2157 // For exception, throw the exception again. |
| 2159 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4, 1); | 2158 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4); |
| 2160 | 2159 |
| 2161 __ bind(&failure); | 2160 __ bind(&failure); |
| 2162 // For failure and exception return null. | 2161 // For failure and exception return null. |
| 2163 __ mov(r0, Operand(isolate()->factory()->null_value())); | 2162 __ mov(r0, Operand(isolate()->factory()->null_value())); |
| 2164 __ add(sp, sp, Operand(4 * kPointerSize)); | 2163 __ add(sp, sp, Operand(4 * kPointerSize)); |
| 2165 __ Ret(); | 2164 __ Ret(); |
| 2166 | 2165 |
| 2167 // Process the result from the native regexp code. | 2166 // Process the result from the native regexp code. |
| 2168 __ bind(&success); | 2167 __ bind(&success); |
| 2169 __ ldr(r1, | 2168 __ ldr(r1, |
| (...skipping 74 matching lines...) |
| 2244 __ jmp(&next_capture); | 2243 __ jmp(&next_capture); |
| 2245 __ bind(&done); | 2244 __ bind(&done); |
| 2246 | 2245 |
| 2247 // Return last match info. | 2246 // Return last match info. |
| 2248 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset)); | 2247 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset)); |
| 2249 __ add(sp, sp, Operand(4 * kPointerSize)); | 2248 __ add(sp, sp, Operand(4 * kPointerSize)); |
| 2250 __ Ret(); | 2249 __ Ret(); |
| 2251 | 2250 |
| 2252 // Do the runtime call to execute the regexp. | 2251 // Do the runtime call to execute the regexp. |
| 2253 __ bind(&runtime); | 2252 __ bind(&runtime); |
| 2254 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 2253 __ TailCallRuntime(Runtime::kRegExpExec, 4); |
| 2255 | 2254 |
| 2256 // Deferred code for string handling. | 2255 // Deferred code for string handling. |
| 2257 // (6) Not a long external string? If yes, go to (8). | 2256 // (6) Not a long external string? If yes, go to (8). |
| 2258 __ bind(&not_seq_nor_cons); | 2257 __ bind(&not_seq_nor_cons); |
| 2259 // Compare flags are still set. | 2258 // Compare flags are still set. |
| 2260 __ b(gt, &not_long_external); // Go to (8). | 2259 __ b(gt, &not_long_external); // Go to (8). |
| 2261 | 2260 |
| 2262 // (7) External string. Make it, offset-wise, look like a sequential string. | 2261 // (7) External string. Make it, offset-wise, look like a sequential string. |
| 2263 __ bind(&external_string); | 2262 __ bind(&external_string); |
| 2264 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); | 2263 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); |
| (...skipping 729 matching lines...) |
| 2994 masm, r1, r5, r2, r3, String::TWO_BYTE_ENCODING); | 2993 masm, r1, r5, r2, r3, String::TWO_BYTE_ENCODING); |
| 2995 | 2994 |
| 2996 __ bind(&return_r0); | 2995 __ bind(&return_r0); |
| 2997 Counters* counters = isolate()->counters(); | 2996 Counters* counters = isolate()->counters(); |
| 2998 __ IncrementCounter(counters->sub_string_native(), 1, r3, r4); | 2997 __ IncrementCounter(counters->sub_string_native(), 1, r3, r4); |
| 2999 __ Drop(3); | 2998 __ Drop(3); |
| 3000 __ Ret(); | 2999 __ Ret(); |
| 3001 | 3000 |
| 3002 // Just jump to runtime to create the sub string. | 3001 // Just jump to runtime to create the sub string. |
| 3003 __ bind(&runtime); | 3002 __ bind(&runtime); |
| 3004 __ TailCallRuntime(Runtime::kSubString, 3, 1); | 3003 __ TailCallRuntime(Runtime::kSubString, 3); |
| 3005 | 3004 |
| 3006 __ bind(&single_char); | 3005 __ bind(&single_char); |
| 3007 // r0: original string | 3006 // r0: original string |
| 3008 // r1: instance type | 3007 // r1: instance type |
| 3009 // r2: length | 3008 // r2: length |
| 3010 // r3: from index (untagged) | 3009 // r3: from index (untagged) |
| 3011 __ SmiTag(r3, r3); | 3010 __ SmiTag(r3, r3); |
| 3012 StringCharAtGenerator generator(r0, r3, r2, r0, &runtime, &runtime, &runtime, | 3011 StringCharAtGenerator generator(r0, r3, r2, r0, &runtime, &runtime, &runtime, |
| 3013 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); | 3012 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING); |
| 3014 generator.GenerateFast(masm); | 3013 generator.GenerateFast(masm); |
| (...skipping 19 matching lines...) |
| 3034 __ cmp(r1, Operand(FIRST_NONSTRING_TYPE)); | 3033 __ cmp(r1, Operand(FIRST_NONSTRING_TYPE)); |
| 3035 __ b(hs, &not_string); | 3034 __ b(hs, &not_string); |
| 3036 // Check if string has a cached array index. | 3035 // Check if string has a cached array index. |
| 3037 __ ldr(r2, FieldMemOperand(r0, String::kHashFieldOffset)); | 3036 __ ldr(r2, FieldMemOperand(r0, String::kHashFieldOffset)); |
| 3038 __ tst(r2, Operand(String::kContainsCachedArrayIndexMask)); | 3037 __ tst(r2, Operand(String::kContainsCachedArrayIndexMask)); |
| 3039 __ b(ne, &slow_string); | 3038 __ b(ne, &slow_string); |
| 3040 __ IndexFromHash(r2, r0); | 3039 __ IndexFromHash(r2, r0); |
| 3041 __ Ret(); | 3040 __ Ret(); |
| 3042 __ bind(&slow_string); | 3041 __ bind(&slow_string); |
| 3043 __ push(r0); // Push argument. | 3042 __ push(r0); // Push argument. |
| 3044 __ TailCallRuntime(Runtime::kStringToNumber, 1, 1); | 3043 __ TailCallRuntime(Runtime::kStringToNumber, 1); |
| 3045 __ bind(&not_string); | 3044 __ bind(&not_string); |
| 3046 | 3045 |
| 3047 Label not_oddball; | 3046 Label not_oddball; |
| 3048 __ cmp(r1, Operand(ODDBALL_TYPE)); | 3047 __ cmp(r1, Operand(ODDBALL_TYPE)); |
| 3049 __ b(ne, &not_oddball); | 3048 __ b(ne, &not_oddball); |
| 3050 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset)); | 3049 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset)); |
| 3051 __ Ret(); | 3050 __ Ret(); |
| 3052 __ bind(&not_oddball); | 3051 __ bind(&not_oddball); |
| 3053 | 3052 |
| 3054 __ push(r0); // Push argument. | 3053 __ push(r0); // Push argument. |
| 3055 __ TailCallRuntime(Runtime::kToNumber, 1, 1); | 3054 __ TailCallRuntime(Runtime::kToNumber, 1); |
| 3056 } | 3055 } |
| 3057 | 3056 |
| 3058 | 3057 |
| 3059 void ToLengthStub::Generate(MacroAssembler* masm) { | 3058 void ToLengthStub::Generate(MacroAssembler* masm) { |
| 3060 // The ToLength stub takes one argument in r0. | 3059 // The ToLength stub takes one argument in r0. |
| 3061 Label not_smi; | 3060 Label not_smi; |
| 3062 __ JumpIfNotSmi(r0, &not_smi); | 3061 __ JumpIfNotSmi(r0, &not_smi); |
| 3063 STATIC_ASSERT(kSmiTag == 0); | 3062 STATIC_ASSERT(kSmiTag == 0); |
| 3064 __ tst(r0, r0); | 3063 __ tst(r0, r0); |
| 3065 __ mov(r0, Operand(0), LeaveCC, lt); | 3064 __ mov(r0, Operand(0), LeaveCC, lt); |
| 3066 __ Ret(); | 3065 __ Ret(); |
| 3067 __ bind(&not_smi); | 3066 __ bind(&not_smi); |
| 3068 | 3067 |
| 3069 __ push(r0); // Push argument. | 3068 __ push(r0); // Push argument. |
| 3070 __ TailCallRuntime(Runtime::kToLength, 1, 1); | 3069 __ TailCallRuntime(Runtime::kToLength, 1); |
| 3071 } | 3070 } |
| 3072 | 3071 |
| 3073 | 3072 |
| 3074 void ToStringStub::Generate(MacroAssembler* masm) { | 3073 void ToStringStub::Generate(MacroAssembler* masm) { |
| 3075 // The ToString stub takes one argument in r0. | 3074 // The ToString stub takes one argument in r0. |
| 3076 Label is_number; | 3075 Label is_number; |
| 3077 __ JumpIfSmi(r0, &is_number); | 3076 __ JumpIfSmi(r0, &is_number); |
| 3078 | 3077 |
| 3079 __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE); | 3078 __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE); |
| 3080 // r0: receiver | 3079 // r0: receiver |
| 3081 // r1: receiver instance type | 3080 // r1: receiver instance type |
| 3082 __ Ret(lo); | 3081 __ Ret(lo); |
| 3083 | 3082 |
| 3084 Label not_heap_number; | 3083 Label not_heap_number; |
| 3085 __ cmp(r1, Operand(HEAP_NUMBER_TYPE)); | 3084 __ cmp(r1, Operand(HEAP_NUMBER_TYPE)); |
| 3086 __ b(ne, &not_heap_number); | 3085 __ b(ne, &not_heap_number); |
| 3087 __ bind(&is_number); | 3086 __ bind(&is_number); |
| 3088 NumberToStringStub stub(isolate()); | 3087 NumberToStringStub stub(isolate()); |
| 3089 __ TailCallStub(&stub); | 3088 __ TailCallStub(&stub); |
| 3090 __ bind(&not_heap_number); | 3089 __ bind(&not_heap_number); |
| 3091 | 3090 |
| 3092 Label not_oddball; | 3091 Label not_oddball; |
| 3093 __ cmp(r1, Operand(ODDBALL_TYPE)); | 3092 __ cmp(r1, Operand(ODDBALL_TYPE)); |
| 3094 __ b(ne, &not_oddball); | 3093 __ b(ne, &not_oddball); |
| 3095 __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset)); | 3094 __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset)); |
| 3096 __ Ret(); | 3095 __ Ret(); |
| 3097 __ bind(&not_oddball); | 3096 __ bind(&not_oddball); |
| 3098 | 3097 |
| 3099 __ push(r0); // Push argument. | 3098 __ push(r0); // Push argument. |
| 3100 __ TailCallRuntime(Runtime::kToString, 1, 1); | 3099 __ TailCallRuntime(Runtime::kToString, 1); |
| 3101 } | 3100 } |
| 3102 | 3101 |
| 3103 | 3102 |
| 3104 void StringHelper::GenerateFlatOneByteStringEquals( | 3103 void StringHelper::GenerateFlatOneByteStringEquals( |
| 3105 MacroAssembler* masm, Register left, Register right, Register scratch1, | 3104 MacroAssembler* masm, Register left, Register right, Register scratch1, |
| 3106 Register scratch2, Register scratch3) { | 3105 Register scratch2, Register scratch3) { |
| 3107 Register length = scratch1; | 3106 Register length = scratch1; |
| 3108 | 3107 |
| 3109 // Compare lengths. | 3108 // Compare lengths. |
| 3110 Label strings_not_equal, check_zero_length; | 3109 Label strings_not_equal, check_zero_length; |
| (...skipping 109 matching lines...) |
| 3220 | 3219 |
| 3221 // Compare flat one-byte strings natively. | 3220 // Compare flat one-byte strings natively. |
| 3222 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2, | 3221 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2, |
| 3223 r3); | 3222 r3); |
| 3224 StringHelper::GenerateCompareFlatOneByteStrings(masm, r1, r0, r2, r3, r4, r5); | 3223 StringHelper::GenerateCompareFlatOneByteStrings(masm, r1, r0, r2, r3, r4, r5); |
| 3225 | 3224 |
| 3226 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 3225 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
| 3227 // tagged as a small integer. | 3226 // tagged as a small integer. |
| 3228 __ bind(&runtime); | 3227 __ bind(&runtime); |
| 3229 __ Push(r1, r0); | 3228 __ Push(r1, r0); |
| 3230 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 3229 __ TailCallRuntime(Runtime::kStringCompare, 2); |
| 3231 } | 3230 } |
| 3232 | 3231 |
| 3233 | 3232 |
| 3234 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { | 3233 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { |
| 3235 // ----------- S t a t e ------------- | 3234 // ----------- S t a t e ------------- |
| 3236 // -- r1 : left | 3235 // -- r1 : left |
| 3237 // -- r0 : right | 3236 // -- r0 : right |
| 3238 // -- lr : return address | 3237 // -- lr : return address |
| 3239 // ----------------------------------- | 3238 // ----------------------------------- |
| 3240 | 3239 |
| (...skipping 21 matching lines...) |
| 3262 } | 3261 } |
| 3263 | 3262 |
| 3264 | 3263 |
| 3265 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { | 3264 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { |
| 3266 DCHECK_EQ(CompareICState::BOOLEAN, state()); | 3265 DCHECK_EQ(CompareICState::BOOLEAN, state()); |
| 3267 Label miss; | 3266 Label miss; |
| 3268 | 3267 |
| 3269 __ CheckMap(r1, r2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); | 3268 __ CheckMap(r1, r2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); |
| 3270 __ CheckMap(r0, r3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); | 3269 __ CheckMap(r0, r3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); |
| 3271 if (op() != Token::EQ_STRICT && is_strong(strength())) { | 3270 if (op() != Token::EQ_STRICT && is_strong(strength())) { |
| 3272 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1); | 3271 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); |
| 3273 } else { | 3272 } else { |
| 3274 if (!Token::IsEqualityOp(op())) { | 3273 if (!Token::IsEqualityOp(op())) { |
| 3275 __ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset)); | 3274 __ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset)); |
| 3276 __ AssertSmi(r1); | 3275 __ AssertSmi(r1); |
| 3277 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset)); | 3276 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset)); |
| 3278 __ AssertSmi(r0); | 3277 __ AssertSmi(r0); |
| 3279 } | 3278 } |
| 3280 __ sub(r0, r1, r0); | 3279 __ sub(r0, r1, r0); |
| 3281 __ Ret(); | 3280 __ Ret(); |
| 3282 } | 3281 } |
| (...skipping 240 matching lines...) |
| 3523 tmp3); | 3522 tmp3); |
| 3524 } else { | 3523 } else { |
| 3525 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1, | 3524 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1, |
| 3526 tmp2, tmp3, tmp4); | 3525 tmp2, tmp3, tmp4); |
| 3527 } | 3526 } |
| 3528 | 3527 |
| 3529 // Handle more complex cases in runtime. | 3528 // Handle more complex cases in runtime. |
| 3530 __ bind(&runtime); | 3529 __ bind(&runtime); |
| 3531 __ Push(left, right); | 3530 __ Push(left, right); |
| 3532 if (equality) { | 3531 if (equality) { |
| 3533 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); | 3532 __ TailCallRuntime(Runtime::kStringEquals, 2); |
| 3534 } else { | 3533 } else { |
| 3535 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 3534 __ TailCallRuntime(Runtime::kStringCompare, 2); |
| 3536 } | 3535 } |
| 3537 | 3536 |
| 3538 __ bind(&miss); | 3537 __ bind(&miss); |
| 3539 GenerateMiss(masm); | 3538 GenerateMiss(masm); |
| 3540 } | 3539 } |
| 3541 | 3540 |
| 3542 | 3541 |
| 3543 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { | 3542 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { |
| 3544 DCHECK_EQ(CompareICState::RECEIVER, state()); | 3543 DCHECK_EQ(CompareICState::RECEIVER, state()); |
| 3545 Label miss; | 3544 Label miss; |
| (...skipping 25 matching lines...) |
| 3571 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset)); | 3570 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 3572 __ cmp(r2, r4); | 3571 __ cmp(r2, r4); |
| 3573 __ b(ne, &miss); | 3572 __ b(ne, &miss); |
| 3574 __ cmp(r3, r4); | 3573 __ cmp(r3, r4); |
| 3575 __ b(ne, &miss); | 3574 __ b(ne, &miss); |
| 3576 | 3575 |
| 3577 if (Token::IsEqualityOp(op())) { | 3576 if (Token::IsEqualityOp(op())) { |
| 3578 __ sub(r0, r0, Operand(r1)); | 3577 __ sub(r0, r0, Operand(r1)); |
| 3579 __ Ret(); | 3578 __ Ret(); |
| 3580 } else if (is_strong(strength())) { | 3579 } else if (is_strong(strength())) { |
| 3581 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1); | 3580 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); |
| 3582 } else { | 3581 } else { |
| 3583 if (op() == Token::LT || op() == Token::LTE) { | 3582 if (op() == Token::LT || op() == Token::LTE) { |
| 3584 __ mov(r2, Operand(Smi::FromInt(GREATER))); | 3583 __ mov(r2, Operand(Smi::FromInt(GREATER))); |
| 3585 } else { | 3584 } else { |
| 3586 __ mov(r2, Operand(Smi::FromInt(LESS))); | 3585 __ mov(r2, Operand(Smi::FromInt(LESS))); |
| 3587 } | 3586 } |
| 3588 __ Push(r1, r0, r2); | 3587 __ Push(r1, r0, r2); |
| 3589 __ TailCallRuntime(Runtime::kCompare, 3, 1); | 3588 __ TailCallRuntime(Runtime::kCompare, 3); |
| 3590 } | 3589 } |
| 3591 | 3590 |
| 3592 __ bind(&miss); | 3591 __ bind(&miss); |
| 3593 GenerateMiss(masm); | 3592 GenerateMiss(masm); |
| 3594 } | 3593 } |
| 3595 | 3594 |
| 3596 | 3595 |
| 3597 void CompareICStub::GenerateMiss(MacroAssembler* masm) { | 3596 void CompareICStub::GenerateMiss(MacroAssembler* masm) { |
| 3598 { | 3597 { |
| 3599 // Call the runtime system in a fresh internal frame. | 3598 // Call the runtime system in a fresh internal frame. |
| (...skipping 1339 matching lines...) |
| 4939 __ ldr(result, ContextMemOperand(result)); | 4938 __ ldr(result, ContextMemOperand(result)); |
| 4940 __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); | 4939 __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); |
| 4941 | 4940 |
| 4942 // If the result is not the_hole, return. Otherwise, handle in the runtime. | 4941 // If the result is not the_hole, return. Otherwise, handle in the runtime. |
| 4943 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 4942 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
| 4944 __ Ret(ne); | 4943 __ Ret(ne); |
| 4945 | 4944 |
| 4946 // Fallback to runtime. | 4945 // Fallback to runtime. |
| 4947 __ SmiTag(slot); | 4946 __ SmiTag(slot); |
| 4948 __ push(slot); | 4947 __ push(slot); |
| 4949 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1); | 4948 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1); |
| 4950 } | 4949 } |
| 4951 | 4950 |
| 4952 | 4951 |
| 4953 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { | 4952 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { |
| 4954 Register value = r0; | 4953 Register value = r0; |
| 4955 Register slot = r2; | 4954 Register slot = r2; |
| 4956 | 4955 |
| 4957 Register cell = r1; | 4956 Register cell = r1; |
| 4958 Register cell_details = r4; | 4957 Register cell_details = r4; |
| 4959 Register cell_value = r5; | 4958 Register cell_value = r5; |
| (...skipping 104 matching lines...) |
| 5064 __ cmp(cell_value_map, scratch); | 5063 __ cmp(cell_value_map, scratch); |
| 5065 __ b(eq, &fast_heapobject_case); | 5064 __ b(eq, &fast_heapobject_case); |
| 5066 | 5065 |
| 5067 // Fallback to runtime. | 5066 // Fallback to runtime. |
| 5068 __ bind(&slow_case); | 5067 __ bind(&slow_case); |
| 5069 __ SmiTag(slot); | 5068 __ SmiTag(slot); |
| 5070 __ Push(slot, value); | 5069 __ Push(slot, value); |
| 5071 __ TailCallRuntime(is_strict(language_mode()) | 5070 __ TailCallRuntime(is_strict(language_mode()) |
| 5072 ? Runtime::kStoreGlobalViaContext_Strict | 5071 ? Runtime::kStoreGlobalViaContext_Strict |
| 5073 : Runtime::kStoreGlobalViaContext_Sloppy, | 5072 : Runtime::kStoreGlobalViaContext_Sloppy, |
| 5074 2, 1); | 5073 2); |
| 5075 } | 5074 } |
| 5076 | 5075 |
| 5077 | 5076 |
| 5078 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 5077 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
| 5079 return ref0.address() - ref1.address(); | 5078 return ref0.address() - ref1.address(); |
| 5080 } | 5079 } |
| 5081 | 5080 |
| 5082 | 5081 |
| 5083 // Calls an API function. Allocates HandleScope, extracts returned value | 5082 // Calls an API function. Allocates HandleScope, extracts returned value |
| 5084 // from handle and propagates exceptions. Restores context. stack_space | 5083 // from handle and propagates exceptions. Restores context. stack_space |
| (...skipping 106 matching lines...) |
| 5191 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex); | 5190 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex); |
| 5192 __ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate))); | 5191 __ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate))); |
| 5193 __ ldr(r5, MemOperand(ip)); | 5192 __ ldr(r5, MemOperand(ip)); |
| 5194 __ cmp(r4, r5); | 5193 __ cmp(r4, r5); |
| 5195 __ b(ne, &promote_scheduled_exception); | 5194 __ b(ne, &promote_scheduled_exception); |
| 5196 | 5195 |
| 5197 __ mov(pc, lr); | 5196 __ mov(pc, lr); |
| 5198 | 5197 |
| 5199 // Re-throw by promoting a scheduled exception. | 5198 // Re-throw by promoting a scheduled exception. |
| 5200 __ bind(&promote_scheduled_exception); | 5199 __ bind(&promote_scheduled_exception); |
| 5201 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); | 5200 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0); |
| 5202 | 5201 |
| 5203 // HandleScope limit has changed. Delete allocated extensions. | 5202 // HandleScope limit has changed. Delete allocated extensions. |
| 5204 __ bind(&delete_allocated_handles); | 5203 __ bind(&delete_allocated_handles); |
| 5205 __ str(r5, MemOperand(r9, kLimitOffset)); | 5204 __ str(r5, MemOperand(r9, kLimitOffset)); |
| 5206 __ mov(r4, r0); | 5205 __ mov(r4, r0); |
| 5207 __ PrepareCallCFunction(1, r5); | 5206 __ PrepareCallCFunction(1, r5); |
| 5208 __ mov(r0, Operand(ExternalReference::isolate_address(isolate))); | 5207 __ mov(r0, Operand(ExternalReference::isolate_address(isolate))); |
| 5209 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), | 5208 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), |
| 5210 1); | 5209 1); |
| 5211 __ mov(r0, r4); | 5210 __ mov(r0, r4); |
| (...skipping 178 matching lines...) |
| 5390 MemOperand(fp, 6 * kPointerSize), NULL); | 5389 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5391 } | 5390 } |
| 5392 | 5391 |
| 5393 | 5392 |
| 5394 #undef __ | 5393 #undef __ |
| 5395 | 5394 |
| 5396 } // namespace internal | 5395 } // namespace internal |
| 5397 } // namespace v8 | 5396 } // namespace v8 |
| 5398 | 5397 |
| 5399 #endif // V8_TARGET_ARCH_ARM | 5398 #endif // V8_TARGET_ARCH_ARM |
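Every hunk in this CL makes the same mechanical change on ARM: the trailing result-size argument, which was always 1, is dropped from the __ TailCallRuntime(...) call sites, leaving only the runtime function id and the argument count. The sketch below shows what the two-argument helper could plausibly look like; it is an assumption inferred from the call sites above (and it assumes the pre-existing TailCallExternalReference and Runtime::FunctionForId helpers keep their current shape), not code taken from this CL.

    // Sketch (assumption): with the result size fixed at one word, the
    // two-argument overload forwards to the external-reference tail call
    // and supplies the constant 1 itself, so call sites no longer pass it.
    void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                         int num_arguments) {
      TailCallExternalReference(
          ExternalReference(Runtime::FunctionForId(fid), isolate()),
          num_arguments,
          1);  // Every runtime function reached this way returns one value.
    }

Folding the constant into one place leaves each call site an argument shorter (several of the reflowed hunks above now fit on a single line) without changing the code the stubs generate.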