| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 319 matching lines...) |
| 330 Register scratch, | 330 Register scratch, |
| 331 Label* miss_label) { | 331 Label* miss_label) { |
| 332 // Check that the receiver isn't a smi. | 332 // Check that the receiver isn't a smi. |
| 333 __ JumpIfSmi(receiver, miss_label); | 333 __ JumpIfSmi(receiver, miss_label); |
| 334 | 334 |
| 335 // Check that the object is a JS array. | 335 // Check that the object is a JS array. |
| 336 __ GetObjectType(receiver, scratch, scratch); | 336 __ GetObjectType(receiver, scratch, scratch); |
| 337 __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE)); | 337 __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE)); |
| 338 | 338 |
| 339 // Load length directly from the JS array. | 339 // Load length directly from the JS array. |
| | 340 __ Ret(USE_DELAY_SLOT); |
| 340 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 341 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 341 __ Ret(); | |
| 342 } | 342 } |
| 343 | 343 |
| 344 | 344 |
| 345 // Generate code to check if an object is a string. If the object is a | 345 // Generate code to check if an object is a string. If the object is a |
| 346 // heap object, its map's instance type is left in the scratch1 register. | 346 // heap object, its map's instance type is left in the scratch1 register. |
| 347 // If this is not needed, scratch1 and scratch2 may be the same register. | 347 // If this is not needed, scratch1 and scratch2 may be the same register. |
| 348 static void GenerateStringCheck(MacroAssembler* masm, | 348 static void GenerateStringCheck(MacroAssembler* masm, |
| 349 Register receiver, | 349 Register receiver, |
| 350 Register scratch1, | 350 Register scratch1, |
| 351 Register scratch2, | 351 Register scratch2, |
| (...skipping 25 matching lines...) |
| 377 Label* miss, | 377 Label* miss, |
| 378 bool support_wrappers) { | 378 bool support_wrappers) { |
| 379 Label check_wrapper; | 379 Label check_wrapper; |
| 380 | 380 |
| 381 // Check if the object is a string leaving the instance type in the | 381 // Check if the object is a string leaving the instance type in the |
| 382 // scratch1 register. | 382 // scratch1 register. |
| 383 GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, | 383 GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, |
| 384 support_wrappers ? &check_wrapper : miss); | 384 support_wrappers ? &check_wrapper : miss); |
| 385 | 385 |
| 386 // Load length directly from the string. | 386 // Load length directly from the string. |
| | 387 __ Ret(USE_DELAY_SLOT); |
| 387 __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset)); | 388 __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset)); |
| 388 __ Ret(); | |
| 389 | 389 |
| 390 if (support_wrappers) { | 390 if (support_wrappers) { |
| 391 // Check if the object is a JSValue wrapper. | 391 // Check if the object is a JSValue wrapper. |
| 392 __ bind(&check_wrapper); | 392 __ bind(&check_wrapper); |
| 393 __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE)); | 393 __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE)); |
| 394 | 394 |
| 395 // Unwrap the value and check if the wrapped value is a string. | 395 // Unwrap the value and check if the wrapped value is a string. |
| 396 __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset)); | 396 __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset)); |
| 397 GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss); | 397 GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss); |
| | 398 __ Ret(USE_DELAY_SLOT); |
| 398 __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset)); | 399 __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset)); |
| 399 __ Ret(); | |
| 400 } | 400 } |
| 401 } | 401 } |
| 402 | 402 |
| 403 | 403 |
| 404 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, | 404 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, |
| 405 Register receiver, | 405 Register receiver, |
| 406 Register scratch1, | 406 Register scratch1, |
| 407 Register scratch2, | 407 Register scratch2, |
| 408 Label* miss_label) { | 408 Label* miss_label) { |
| 409 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); | 409 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); |
| | 410 __ Ret(USE_DELAY_SLOT); |
| 410 __ mov(v0, scratch1); | 411 __ mov(v0, scratch1); |
| 411 __ Ret(); | |
| 412 } | 412 } |
| 413 | 413 |
| 414 | 414 |
| 415 // Generate code to check that a global property cell is empty. Create | 415 // Generate code to check that a global property cell is empty. Create |
| 416 // the property cell at compilation time if no cell exists for the | 416 // the property cell at compilation time if no cell exists for the |
| 417 // property. | 417 // property. |
| 418 static void GenerateCheckPropertyCell(MacroAssembler* masm, | 418 static void GenerateCheckPropertyCell(MacroAssembler* masm, |
| 419 Handle<GlobalObject> global, | 419 Handle<GlobalObject> global, |
| 420 Handle<Name> name, | 420 Handle<Name> name, |
| 421 Register scratch, | 421 Register scratch, |
| (...skipping 210 matching lines...) |
| 632 kRAHasNotBeenSaved, | 632 kRAHasNotBeenSaved, |
| 633 kDontSaveFPRegs, | 633 kDontSaveFPRegs, |
| 634 EMIT_REMEMBERED_SET, | 634 EMIT_REMEMBERED_SET, |
| 635 smi_check); | 635 smi_check); |
| 636 } | 636 } |
| 637 } | 637 } |
| 638 | 638 |
| 639 // Return the value (register v0). | 639 // Return the value (register v0). |
| 640 ASSERT(value_reg.is(a0)); | 640 ASSERT(value_reg.is(a0)); |
| 641 __ bind(&exit); | 641 __ bind(&exit); |
| | 642 __ Ret(USE_DELAY_SLOT); |
| 642 __ mov(v0, a0); | 643 __ mov(v0, a0); |
| 643 __ Ret(); | |
| 644 } | 644 } |
| 645 | 645 |
| 646 | 646 |
| 647 // Generate StoreField code, value is passed in a0 register. | 647 // Generate StoreField code, value is passed in a0 register. |
| 648 // When leaving generated code after success, the receiver_reg and name_reg | 648 // When leaving generated code after success, the receiver_reg and name_reg |
| 649 // may be clobbered. Upon branch to miss_label, the receiver and name | 649 // may be clobbered. Upon branch to miss_label, the receiver and name |
| 650 // registers have their original values. | 650 // registers have their original values. |
| 651 void StubCompiler::GenerateStoreField(MacroAssembler* masm, | 651 void StubCompiler::GenerateStoreField(MacroAssembler* masm, |
| 652 Handle<JSObject> object, | 652 Handle<JSObject> object, |
| 653 LookupResult* lookup, | 653 LookupResult* lookup, |
| (...skipping 54 matching lines...) |
| 708 | 708 |
| 709 __ bind(&heap_number); | 709 __ bind(&heap_number); |
| 710 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex, | 710 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex, |
| 711 miss_label, DONT_DO_SMI_CHECK); | 711 miss_label, DONT_DO_SMI_CHECK); |
| 712 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); | 712 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); |
| 713 | 713 |
| 714 __ bind(&do_store); | 714 __ bind(&do_store); |
| 715 __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset)); | 715 __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset)); |
| 716 // Return the value (register v0). | 716 // Return the value (register v0). |
| 717 ASSERT(value_reg.is(a0)); | 717 ASSERT(value_reg.is(a0)); |
| | 718 __ Ret(USE_DELAY_SLOT); |
| 718 __ mov(v0, a0); | 719 __ mov(v0, a0); |
| 719 __ Ret(); | |
| 720 return; | 720 return; |
| 721 } | 721 } |
| 722 | 722 |
| 723 // TODO(verwaest): Share this code as a code stub. | 723 // TODO(verwaest): Share this code as a code stub. |
| 724 SmiCheck smi_check = representation.IsTagged() | 724 SmiCheck smi_check = representation.IsTagged() |
| 725 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | 725 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; |
| 726 if (index < 0) { | 726 if (index < 0) { |
| 727 // Set the property straight into the object. | 727 // Set the property straight into the object. |
| 728 int offset = object->map()->instance_size() + (index * kPointerSize); | 728 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 729 __ sw(value_reg, FieldMemOperand(receiver_reg, offset)); | 729 __ sw(value_reg, FieldMemOperand(receiver_reg, offset)); |
| (...skipping 36 matching lines...) |
| 766 kRAHasNotBeenSaved, | 766 kRAHasNotBeenSaved, |
| 767 kDontSaveFPRegs, | 767 kDontSaveFPRegs, |
| 768 EMIT_REMEMBERED_SET, | 768 EMIT_REMEMBERED_SET, |
| 769 smi_check); | 769 smi_check); |
| 770 } | 770 } |
| 771 } | 771 } |
| 772 | 772 |
| 773 // Return the value (register v0). | 773 // Return the value (register v0). |
| 774 ASSERT(value_reg.is(a0)); | 774 ASSERT(value_reg.is(a0)); |
| 775 __ bind(&exit); | 775 __ bind(&exit); |
| | 776 __ Ret(USE_DELAY_SLOT); |
| 776 __ mov(v0, a0); | 777 __ mov(v0, a0); |
| 777 __ Ret(); | |
| 778 } | 778 } |
| 779 | 779 |
| 780 | 780 |
| 781 void BaseStoreStubCompiler::GenerateRestoreName(MacroAssembler* masm, | 781 void BaseStoreStubCompiler::GenerateRestoreName(MacroAssembler* masm, |
| 782 Label* label, | 782 Label* label, |
| 783 Handle<Name> name) { | 783 Handle<Name> name) { |
| 784 if (!label->is_unused()) { | 784 if (!label->is_unused()) { |
| 785 __ bind(label); | 785 __ bind(label); |
| 786 __ li(this->name(), Operand(name)); | 786 __ li(this->name(), Operand(name)); |
| 787 } | 787 } |
| (...skipping 911 matching lines...) |
| 1699 // Check that the receiver isn't a smi. | 1699 // Check that the receiver isn't a smi. |
| 1700 __ JumpIfSmi(receiver, &miss); | 1700 __ JumpIfSmi(receiver, &miss); |
| 1701 | 1701 |
| 1702 // Check that the maps haven't changed. | 1702 // Check that the maps haven't changed. |
| 1703 CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, a3, v0, t0, | 1703 CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, a3, v0, t0, |
| 1704 name, &miss); | 1704 name, &miss); |
| 1705 | 1705 |
| 1706 if (argc == 0) { | 1706 if (argc == 0) { |
| 1707 // Nothing to do, just return the length. | 1707 // Nothing to do, just return the length. |
| 1708 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 1708 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 1709 __ Drop(argc + 1); | 1709 __ DropAndRet(argc + 1); |
| 1710 __ Ret(); | |
| 1711 } else { | 1710 } else { |
| 1712 Label call_builtin; | 1711 Label call_builtin; |
| 1713 if (argc == 1) { // Otherwise fall through to call the builtin. | 1712 if (argc == 1) { // Otherwise fall through to call the builtin. |
| 1714 Label attempt_to_grow_elements, with_write_barrier, check_double; | 1713 Label attempt_to_grow_elements, with_write_barrier, check_double; |
| 1715 | 1714 |
| 1716 Register elements = t2; | 1715 Register elements = t2; |
| 1717 Register end_elements = t1; | 1716 Register end_elements = t1; |
| 1718 // Get the elements array of the object. | 1717 // Get the elements array of the object. |
| 1719 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); | 1718 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); |
| 1720 | 1719 |
| (...skipping 27 matching lines...) |
| 1748 // We may need a register containing the address end_elements below, | 1747 // We may need a register containing the address end_elements below, |
| 1749 // so write back the value in end_elements. | 1748 // so write back the value in end_elements. |
| 1750 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize); | 1749 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize); |
| 1751 __ Addu(end_elements, elements, end_elements); | 1750 __ Addu(end_elements, elements, end_elements); |
| 1752 const int kEndElementsOffset = | 1751 const int kEndElementsOffset = |
| 1753 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; | 1752 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; |
| 1754 __ Addu(end_elements, end_elements, kEndElementsOffset); | 1753 __ Addu(end_elements, end_elements, kEndElementsOffset); |
| 1755 __ sw(t0, MemOperand(end_elements)); | 1754 __ sw(t0, MemOperand(end_elements)); |
| 1756 | 1755 |
| 1757 // Check for a smi. | 1756 // Check for a smi. |
| 1758 __ Drop(argc + 1); | 1757 __ DropAndRet(argc + 1); |
| 1759 __ Ret(); | |
| 1760 | 1758 |
| 1761 __ bind(&check_double); | 1759 __ bind(&check_double); |
| 1762 | 1760 |
| 1763 // Check that the elements are in fast mode and writable. | 1761 // Check that the elements are in fast mode and writable. |
| 1764 __ CheckMap(elements, | 1762 __ CheckMap(elements, |
| 1765 a0, | 1763 a0, |
| 1766 Heap::kFixedDoubleArrayMapRootIndex, | 1764 Heap::kFixedDoubleArrayMapRootIndex, |
| 1767 &call_builtin, | 1765 &call_builtin, |
| 1768 DONT_DO_SMI_CHECK); | 1766 DONT_DO_SMI_CHECK); |
| 1769 | 1767 |
| (...skipping 11 matching lines...) |
| 1781 | 1779 |
| 1782 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize)); | 1780 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize)); |
| 1783 __ StoreNumberToDoubleElements( | 1781 __ StoreNumberToDoubleElements( |
| 1784 t0, a0, elements, a3, t1, a2, t5, | 1782 t0, a0, elements, a3, t1, a2, t5, |
| 1785 &call_builtin, argc * kDoubleSize); | 1783 &call_builtin, argc * kDoubleSize); |
| 1786 | 1784 |
| 1787 // Save new length. | 1785 // Save new length. |
| 1788 __ sw(a0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 1786 __ sw(a0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 1789 | 1787 |
| 1790 // Check for a smi. | 1788 // Check for a smi. |
| 1791 __ Drop(argc + 1); | 1789 __ DropAndRet(argc + 1); |
| 1792 __ Ret(); | |
| 1793 | 1790 |
| 1794 __ bind(&with_write_barrier); | 1791 __ bind(&with_write_barrier); |
| 1795 | 1792 |
| 1796 __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 1793 __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 1797 | 1794 |
| 1798 if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) { | 1795 if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) { |
| 1799 Label fast_object, not_fast_object; | 1796 Label fast_object, not_fast_object; |
| 1800 __ CheckFastObjectElements(a3, t3, ¬_fast_object); | 1797 __ CheckFastObjectElements(a3, t3, ¬_fast_object); |
| 1801 __ jmp(&fast_object); | 1798 __ jmp(&fast_object); |
| 1802 // In case of fast smi-only, convert to fast object, otherwise bail out. | 1799 // In case of fast smi-only, convert to fast object, otherwise bail out. |
| (...skipping 45 matching lines...) |
| 1848 __ Addu(end_elements, end_elements, kEndElementsOffset); | 1845 __ Addu(end_elements, end_elements, kEndElementsOffset); |
| 1849 __ sw(t0, MemOperand(end_elements)); | 1846 __ sw(t0, MemOperand(end_elements)); |
| 1850 | 1847 |
| 1851 __ RecordWrite(elements, | 1848 __ RecordWrite(elements, |
| 1852 end_elements, | 1849 end_elements, |
| 1853 t0, | 1850 t0, |
| 1854 kRAHasNotBeenSaved, | 1851 kRAHasNotBeenSaved, |
| 1855 kDontSaveFPRegs, | 1852 kDontSaveFPRegs, |
| 1856 EMIT_REMEMBERED_SET, | 1853 EMIT_REMEMBERED_SET, |
| 1857 OMIT_SMI_CHECK); | 1854 OMIT_SMI_CHECK); |
| 1858 __ Drop(argc + 1); | 1855 __ DropAndRet(argc + 1); |
| 1859 __ Ret(); | |
| 1860 | 1856 |
| 1861 __ bind(&attempt_to_grow_elements); | 1857 __ bind(&attempt_to_grow_elements); |
| 1862 // v0: array's length + 1. | 1858 // v0: array's length + 1. |
| 1863 // t0: elements' length. | 1859 // t0: elements' length. |
| 1864 | 1860 |
| 1865 if (!FLAG_inline_new) { | 1861 if (!FLAG_inline_new) { |
| 1866 __ Branch(&call_builtin); | 1862 __ Branch(&call_builtin); |
| 1867 } | 1863 } |
| 1868 | 1864 |
| 1869 __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize)); | 1865 __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize)); |
| (...skipping 34 matching lines...) |
| 1904 for (int i = 1; i < kAllocationDelta; i++) { | 1900 for (int i = 1; i < kAllocationDelta; i++) { |
| 1905 __ sw(a3, MemOperand(end_elements, i * kPointerSize)); | 1901 __ sw(a3, MemOperand(end_elements, i * kPointerSize)); |
| 1906 } | 1902 } |
| 1907 | 1903 |
| 1908 // Update elements' and array's sizes. | 1904 // Update elements' and array's sizes. |
| 1909 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 1905 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 1910 __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta))); | 1906 __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta))); |
| 1911 __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); | 1907 __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
| 1912 | 1908 |
| 1913 // Elements are in new space, so write barrier is not required. | 1909 // Elements are in new space, so write barrier is not required. |
| 1914 __ Drop(argc + 1); | 1910 __ DropAndRet(argc + 1); |
| 1915 __ Ret(); | |
| 1916 } | 1911 } |
| 1917 __ bind(&call_builtin); | 1912 __ bind(&call_builtin); |
| 1918 __ TailCallExternalReference( | 1913 __ TailCallExternalReference( |
| 1919 ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1); | 1914 ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1); |
| 1920 } | 1915 } |
| 1921 | 1916 |
| 1922 // Handle call cache miss. | 1917 // Handle call cache miss. |
| 1923 __ bind(&miss); | 1918 __ bind(&miss); |
| 1924 GenerateMissBranch(); | 1919 GenerateMissBranch(); |
| 1925 | 1920 |
| (...skipping 58 matching lines...) |
| 1984 __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize); | 1979 __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize); |
| 1985 __ Addu(elements, elements, t1); | 1980 __ Addu(elements, elements, t1); |
| 1986 __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize)); | 1981 __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize)); |
| 1987 __ Branch(&call_builtin, eq, v0, Operand(t2)); | 1982 __ Branch(&call_builtin, eq, v0, Operand(t2)); |
| 1988 | 1983 |
| 1989 // Set the array's length. | 1984 // Set the array's length. |
| 1990 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 1985 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 1991 | 1986 |
| 1992 // Fill with the hole. | 1987 // Fill with the hole. |
| 1993 __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize)); | 1988 __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize)); |
| 1994 __ Drop(argc + 1); | 1989 __ DropAndRet(argc + 1); |
| 1995 __ Ret(); | |
| 1996 | 1990 |
| 1997 __ bind(&return_undefined); | 1991 __ bind(&return_undefined); |
| 1998 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); | 1992 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
| 1999 __ Drop(argc + 1); | 1993 __ DropAndRet(argc + 1); |
| 2000 __ Ret(); | |
| 2001 | 1994 |
| 2002 __ bind(&call_builtin); | 1995 __ bind(&call_builtin); |
| 2003 __ TailCallExternalReference( | 1996 __ TailCallExternalReference( |
| 2004 ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1); | 1997 ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1); |
| 2005 | 1998 |
| 2006 // Handle call cache miss. | 1999 // Handle call cache miss. |
| 2007 __ bind(&miss); | 2000 __ bind(&miss); |
| 2008 GenerateMissBranch(); | 2001 GenerateMissBranch(); |
| 2009 | 2002 |
| 2010 // Return the generated code. | 2003 // Return the generated code. |
| (...skipping 54 matching lines...) |
| 2065 } | 2058 } |
| 2066 | 2059 |
| 2067 StringCharCodeAtGenerator generator(receiver, | 2060 StringCharCodeAtGenerator generator(receiver, |
| 2068 index, | 2061 index, |
| 2069 result, | 2062 result, |
| 2070 &miss, // When not a string. | 2063 &miss, // When not a string. |
| 2071 &miss, // When not a number. | 2064 &miss, // When not a number. |
| 2072 index_out_of_range_label, | 2065 index_out_of_range_label, |
| 2073 STRING_INDEX_IS_NUMBER); | 2066 STRING_INDEX_IS_NUMBER); |
| 2074 generator.GenerateFast(masm()); | 2067 generator.GenerateFast(masm()); |
| 2075 __ Drop(argc + 1); | 2068 __ DropAndRet(argc + 1); |
| 2076 __ Ret(); | |
| 2077 | 2069 |
| 2078 StubRuntimeCallHelper call_helper; | 2070 StubRuntimeCallHelper call_helper; |
| 2079 generator.GenerateSlow(masm(), call_helper); | 2071 generator.GenerateSlow(masm(), call_helper); |
| 2080 | 2072 |
| 2081 if (index_out_of_range.is_linked()) { | 2073 if (index_out_of_range.is_linked()) { |
| 2082 __ bind(&index_out_of_range); | 2074 __ bind(&index_out_of_range); |
| 2083 __ LoadRoot(v0, Heap::kNanValueRootIndex); | 2075 __ LoadRoot(v0, Heap::kNanValueRootIndex); |
| 2084 __ Drop(argc + 1); | 2076 __ DropAndRet(argc + 1); |
| 2085 __ Ret(); | |
| 2086 } | 2077 } |
| 2087 | 2078 |
| 2088 __ bind(&miss); | 2079 __ bind(&miss); |
| 2089 // Restore function name in a2. | 2080 // Restore function name in a2. |
| 2090 __ li(a2, name); | 2081 __ li(a2, name); |
| 2091 __ bind(&name_miss); | 2082 __ bind(&name_miss); |
| 2092 GenerateMissBranch(); | 2083 GenerateMissBranch(); |
| 2093 | 2084 |
| 2094 // Return the generated code. | 2085 // Return the generated code. |
| 2095 return GetCode(function); | 2086 return GetCode(function); |
| (...skipping 52 matching lines...) |
| 2148 | 2139 |
| 2149 StringCharAtGenerator generator(receiver, | 2140 StringCharAtGenerator generator(receiver, |
| 2150 index, | 2141 index, |
| 2151 scratch, | 2142 scratch, |
| 2152 result, | 2143 result, |
| 2153 &miss, // When not a string. | 2144 &miss, // When not a string. |
| 2154 &miss, // When not a number. | 2145 &miss, // When not a number. |
| 2155 index_out_of_range_label, | 2146 index_out_of_range_label, |
| 2156 STRING_INDEX_IS_NUMBER); | 2147 STRING_INDEX_IS_NUMBER); |
| 2157 generator.GenerateFast(masm()); | 2148 generator.GenerateFast(masm()); |
| 2158 __ Drop(argc + 1); | 2149 __ DropAndRet(argc + 1); |
| 2159 __ Ret(); | |
| 2160 | 2150 |
| 2161 StubRuntimeCallHelper call_helper; | 2151 StubRuntimeCallHelper call_helper; |
| 2162 generator.GenerateSlow(masm(), call_helper); | 2152 generator.GenerateSlow(masm(), call_helper); |
| 2163 | 2153 |
| 2164 if (index_out_of_range.is_linked()) { | 2154 if (index_out_of_range.is_linked()) { |
| 2165 __ bind(&index_out_of_range); | 2155 __ bind(&index_out_of_range); |
| 2166 __ LoadRoot(v0, Heap::kempty_stringRootIndex); | 2156 __ LoadRoot(v0, Heap::kempty_stringRootIndex); |
| 2167 __ Drop(argc + 1); | 2157 __ DropAndRet(argc + 1); |
| 2168 __ Ret(); | |
| 2169 } | 2158 } |
| 2170 | 2159 |
| 2171 __ bind(&miss); | 2160 __ bind(&miss); |
| 2172 // Restore function name in a2. | 2161 // Restore function name in a2. |
| 2173 __ li(a2, name); | 2162 __ li(a2, name); |
| 2174 __ bind(&name_miss); | 2163 __ bind(&name_miss); |
| 2175 GenerateMissBranch(); | 2164 GenerateMissBranch(); |
| 2176 | 2165 |
| 2177 // Return the generated code. | 2166 // Return the generated code. |
| 2178 return GetCode(function); | 2167 return GetCode(function); |
| (...skipping 45 matching lines...) |
| 2224 // Check the code is a smi. | 2213 // Check the code is a smi. |
| 2225 Label slow; | 2214 Label slow; |
| 2226 STATIC_ASSERT(kSmiTag == 0); | 2215 STATIC_ASSERT(kSmiTag == 0); |
| 2227 __ JumpIfNotSmi(code, &slow); | 2216 __ JumpIfNotSmi(code, &slow); |
| 2228 | 2217 |
| 2229 // Convert the smi code to uint16. | 2218 // Convert the smi code to uint16. |
| 2230 __ And(code, code, Operand(Smi::FromInt(0xffff))); | 2219 __ And(code, code, Operand(Smi::FromInt(0xffff))); |
| 2231 | 2220 |
| 2232 StringCharFromCodeGenerator generator(code, v0); | 2221 StringCharFromCodeGenerator generator(code, v0); |
| 2233 generator.GenerateFast(masm()); | 2222 generator.GenerateFast(masm()); |
| 2234 __ Drop(argc + 1); | 2223 __ DropAndRet(argc + 1); |
| 2235 __ Ret(); | |
| 2236 | 2224 |
| 2237 StubRuntimeCallHelper call_helper; | 2225 StubRuntimeCallHelper call_helper; |
| 2238 generator.GenerateSlow(masm(), call_helper); | 2226 generator.GenerateSlow(masm(), call_helper); |
| 2239 | 2227 |
| 2240 // Tail call the full function. We do not have to patch the receiver | 2228 // Tail call the full function. We do not have to patch the receiver |
| 2241 // because the function makes no use of it. | 2229 // because the function makes no use of it. |
| 2242 __ bind(&slow); | 2230 __ bind(&slow); |
| 2243 ParameterCount expected(function); | 2231 ParameterCount expected(function); |
| 2244 __ InvokeFunction(function, expected, arguments(), | 2232 __ InvokeFunction(function, expected, arguments(), |
| 2245 JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); | 2233 JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); |
| (...skipping 42 matching lines...) |
| 2288 &miss); | 2276 &miss); |
| 2289 GenerateLoadFunctionFromCell(cell, function, &miss); | 2277 GenerateLoadFunctionFromCell(cell, function, &miss); |
| 2290 } | 2278 } |
| 2291 | 2279 |
| 2292 // Load the (only) argument into v0. | 2280 // Load the (only) argument into v0. |
| 2293 __ lw(v0, MemOperand(sp, 0 * kPointerSize)); | 2281 __ lw(v0, MemOperand(sp, 0 * kPointerSize)); |
| 2294 | 2282 |
| 2295 // If the argument is a smi, just return. | 2283 // If the argument is a smi, just return. |
| 2296 STATIC_ASSERT(kSmiTag == 0); | 2284 STATIC_ASSERT(kSmiTag == 0); |
| 2297 __ And(t0, v0, Operand(kSmiTagMask)); | 2285 __ And(t0, v0, Operand(kSmiTagMask)); |
| 2298 __ Drop(argc + 1, eq, t0, Operand(zero_reg)); | 2286 __ DropAndRet(argc + 1, eq, t0, Operand(zero_reg)); |
| 2299 __ Ret(eq, t0, Operand(zero_reg)); | |
| 2300 | 2287 |
| 2301 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK); | 2288 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK); |
| 2302 | 2289 |
| 2303 Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return; | 2290 Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return; |
| 2304 | 2291 |
| 2305 // If fpu is enabled, we use the floor instruction. | 2292 // If fpu is enabled, we use the floor instruction. |
| 2306 | 2293 |
| 2307 // Load the HeapNumber value. | 2294 // Load the HeapNumber value. |
| 2308 __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset)); | 2295 __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset)); |
| 2309 | 2296 |
| (...skipping 44 matching lines...) |
| 2354 __ And(t0, t1, Operand(HeapNumber::kSignMask)); | 2341 __ And(t0, t1, Operand(HeapNumber::kSignMask)); |
| 2355 // If our HeapNumber is negative it was -0, so load its address and return. | 2342 // If our HeapNumber is negative it was -0, so load its address and return. |
| 2356 // Else v0 is loaded with 0, so we can also just return. | 2343 // Else v0 is loaded with 0, so we can also just return. |
| 2357 __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg)); | 2344 __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg)); |
| 2358 __ lw(v0, MemOperand(sp, 0 * kPointerSize)); | 2345 __ lw(v0, MemOperand(sp, 0 * kPointerSize)); |
| 2359 | 2346 |
| 2360 __ bind(&restore_fcsr_and_return); | 2347 __ bind(&restore_fcsr_and_return); |
| 2361 // Restore FCSR and return. | 2348 // Restore FCSR and return. |
| 2362 __ ctc1(a3, FCSR); | 2349 __ ctc1(a3, FCSR); |
| 2363 | 2350 |
| 2364 __ Drop(argc + 1); | 2351 __ DropAndRet(argc + 1); |
| 2365 __ Ret(); | |
| 2366 | 2352 |
| 2367 __ bind(&wont_fit_smi); | 2353 __ bind(&wont_fit_smi); |
| 2368 // Restore FCSR and fall to slow case. | 2354 // Restore FCSR and fall to slow case. |
| 2369 __ ctc1(a3, FCSR); | 2355 __ ctc1(a3, FCSR); |
| 2370 | 2356 |
| 2371 __ bind(&slow); | 2357 __ bind(&slow); |
| 2372 // Tail call the full function. We do not have to patch the receiver | 2358 // Tail call the full function. We do not have to patch the receiver |
| 2373 // because the function makes no use of it. | 2359 // because the function makes no use of it. |
| 2374 ParameterCount expected(function); | 2360 ParameterCount expected(function); |
| 2375 __ InvokeFunction(function, expected, arguments(), | 2361 __ InvokeFunction(function, expected, arguments(), |
| (...skipping 58 matching lines...) |
| 2434 | 2420 |
| 2435 // Add 1 or do nothing depending on the sign of the argument. | 2421 // Add 1 or do nothing depending on the sign of the argument. |
| 2436 __ Subu(v0, a1, t0); | 2422 __ Subu(v0, a1, t0); |
| 2437 | 2423 |
| 2438 // If the result is still negative, go to the slow case. | 2424 // If the result is still negative, go to the slow case. |
| 2439 // This only happens for the most negative smi. | 2425 // This only happens for the most negative smi. |
| 2440 Label slow; | 2426 Label slow; |
| 2441 __ Branch(&slow, lt, v0, Operand(zero_reg)); | 2427 __ Branch(&slow, lt, v0, Operand(zero_reg)); |
| 2442 | 2428 |
| 2443 // Smi case done. | 2429 // Smi case done. |
| 2444 __ Drop(argc + 1); | 2430 __ DropAndRet(argc + 1); |
| 2445 __ Ret(); | |
| 2446 | 2431 |
| 2447 // Check if the argument is a heap number and load its exponent and | 2432 // Check if the argument is a heap number and load its exponent and |
| 2448 // sign. | 2433 // sign. |
| 2449 __ bind(¬_smi); | 2434 __ bind(¬_smi); |
| 2450 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK); | 2435 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK); |
| 2451 __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset)); | 2436 __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset)); |
| 2452 | 2437 |
| 2453 // Check the sign of the argument. If the argument is positive, | 2438 // Check the sign of the argument. If the argument is positive, |
| 2454 // just return it. | 2439 // just return it. |
| 2455 Label negative_sign; | 2440 Label negative_sign; |
| 2456 __ And(t0, a1, Operand(HeapNumber::kSignMask)); | 2441 __ And(t0, a1, Operand(HeapNumber::kSignMask)); |
| 2457 __ Branch(&negative_sign, ne, t0, Operand(zero_reg)); | 2442 __ Branch(&negative_sign, ne, t0, Operand(zero_reg)); |
| 2458 __ Drop(argc + 1); | 2443 __ DropAndRet(argc + 1); |
| 2459 __ Ret(); | |
| 2460 | 2444 |
| 2461 // If the argument is negative, clear the sign, and return a new | 2445 // If the argument is negative, clear the sign, and return a new |
| 2462 // number. | 2446 // number. |
| 2463 __ bind(&negative_sign); | 2447 __ bind(&negative_sign); |
| 2464 __ Xor(a1, a1, Operand(HeapNumber::kSignMask)); | 2448 __ Xor(a1, a1, Operand(HeapNumber::kSignMask)); |
| 2465 __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); | 2449 __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); |
| 2466 __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex); | 2450 __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex); |
| 2467 __ AllocateHeapNumber(v0, t0, t1, t2, &slow); | 2451 __ AllocateHeapNumber(v0, t0, t1, t2, &slow); |
| 2468 __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset)); | 2452 __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset)); |
| 2469 __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); | 2453 __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); |
| 2470 __ Drop(argc + 1); | 2454 __ DropAndRet(argc + 1); |
| 2471 __ Ret(); | |
| 2472 | 2455 |
| 2473 // Tail call the full function. We do not have to patch the receiver | 2456 // Tail call the full function. We do not have to patch the receiver |
| 2474 // because the function makes no use of it. | 2457 // because the function makes no use of it. |
| 2475 __ bind(&slow); | 2458 __ bind(&slow); |
| 2476 ParameterCount expected(function); | 2459 ParameterCount expected(function); |
| 2477 __ InvokeFunction(function, expected, arguments(), | 2460 __ InvokeFunction(function, expected, arguments(), |
| 2478 JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); | 2461 JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); |
| 2479 | 2462 |
| 2480 __ bind(&miss); | 2463 __ bind(&miss); |
| 2481 // a2: function name. | 2464 // a2: function name. |
| (...skipping 577 matching lines...) |
| 3059 if (!is_dont_delete) { | 3042 if (!is_dont_delete) { |
| 3060 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 3043 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 3061 __ Branch(&miss, eq, t0, Operand(at)); | 3044 __ Branch(&miss, eq, t0, Operand(at)); |
| 3062 } | 3045 } |
| 3063 | 3046 |
| 3064 HandlerFrontendFooter(&success, &miss); | 3047 HandlerFrontendFooter(&success, &miss); |
| 3065 __ bind(&success); | 3048 __ bind(&success); |
| 3066 | 3049 |
| 3067 Counters* counters = isolate()->counters(); | 3050 Counters* counters = isolate()->counters(); |
| 3068 __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3); | 3051 __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3); |
| | 3052 __ Ret(USE_DELAY_SLOT); |
| 3069 __ mov(v0, t0); | 3053 __ mov(v0, t0); |
| 3070 __ Ret(); | |
| 3071 | 3054 |
| 3072 // Return the generated code. | 3055 // Return the generated code. |
| 3073 return GetICCode(kind(), Code::NORMAL, name); | 3056 return GetICCode(kind(), Code::NORMAL, name); |
| 3074 } | 3057 } |
| 3075 | 3058 |
| 3076 | 3059 |
| 3077 Handle<Code> BaseLoadStubCompiler::CompilePolymorphicIC( | 3060 Handle<Code> BaseLoadStubCompiler::CompilePolymorphicIC( |
| 3078 MapHandleList* receiver_maps, | 3061 MapHandleList* receiver_maps, |
| 3079 CodeHandleList* handlers, | 3062 CodeHandleList* handlers, |
| 3080 Handle<Name> name, | 3063 Handle<Name> name, |
| (...skipping 250 matching lines...) |
| 3331 case FAST_HOLEY_ELEMENTS: | 3314 case FAST_HOLEY_ELEMENTS: |
| 3332 case FAST_HOLEY_SMI_ELEMENTS: | 3315 case FAST_HOLEY_SMI_ELEMENTS: |
| 3333 case FAST_HOLEY_DOUBLE_ELEMENTS: | 3316 case FAST_HOLEY_DOUBLE_ELEMENTS: |
| 3334 case DICTIONARY_ELEMENTS: | 3317 case DICTIONARY_ELEMENTS: |
| 3335 case NON_STRICT_ARGUMENTS_ELEMENTS: | 3318 case NON_STRICT_ARGUMENTS_ELEMENTS: |
| 3336 UNREACHABLE(); | 3319 UNREACHABLE(); |
| 3337 break; | 3320 break; |
| 3338 } | 3321 } |
| 3339 | 3322 |
| 3340 // Entry registers are intact, a0 holds the value which is the return value. | 3323 // Entry registers are intact, a0 holds the value which is the return value. |
| | 3324 __ Ret(USE_DELAY_SLOT); |
| 3341 __ mov(v0, a0); | 3325 __ mov(v0, a0); |
| 3342 __ Ret(); | |
| 3343 | 3326 |
| 3344 if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) { | 3327 if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) { |
| 3345 // a3: external array. | 3328 // a3: external array. |
| 3346 __ bind(&check_heap_number); | 3329 __ bind(&check_heap_number); |
| 3347 __ GetObjectType(value, t1, t2); | 3330 __ GetObjectType(value, t1, t2); |
| 3348 __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE)); | 3331 __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE)); |
| 3349 | 3332 |
| 3350 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset)); | 3333 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset)); |
| 3351 | 3334 |
| 3352 // a3: base pointer of external storage. | 3335 // a3: base pointer of external storage. |
| (...skipping 46 matching lines...) |
| 3399 case FAST_HOLEY_DOUBLE_ELEMENTS: | 3382 case FAST_HOLEY_DOUBLE_ELEMENTS: |
| 3400 case DICTIONARY_ELEMENTS: | 3383 case DICTIONARY_ELEMENTS: |
| 3401 case NON_STRICT_ARGUMENTS_ELEMENTS: | 3384 case NON_STRICT_ARGUMENTS_ELEMENTS: |
| 3402 UNREACHABLE(); | 3385 UNREACHABLE(); |
| 3403 break; | 3386 break; |
| 3404 } | 3387 } |
| 3405 } | 3388 } |
| 3406 | 3389 |
| 3407 // Entry registers are intact, a0 holds the value | 3390 // Entry registers are intact, a0 holds the value |
| 3408 // which is the return value. | 3391 // which is the return value. |
| | 3392 __ Ret(USE_DELAY_SLOT); |
| 3409 __ mov(v0, a0); | 3393 __ mov(v0, a0); |
| 3410 __ Ret(); | |
| 3411 } | 3394 } |
| 3412 | 3395 |
| 3413 // Slow case, key and receiver still in a0 and a1. | 3396 // Slow case, key and receiver still in a0 and a1. |
| 3414 __ bind(&slow); | 3397 __ bind(&slow); |
| 3415 __ IncrementCounter( | 3398 __ IncrementCounter( |
| 3416 masm->isolate()->counters()->keyed_load_external_array_slow(), | 3399 masm->isolate()->counters()->keyed_load_external_array_slow(), |
| 3417 1, a2, a3); | 3400 1, a2, a3); |
| 3418 // Entry registers are intact. | 3401 // Entry registers are intact. |
| 3419 // ---------- S t a t e -------------- | 3402 // ---------- S t a t e -------------- |
| 3420 // -- ra : return address | 3403 // -- ra : return address |
| (...skipping 140 matching lines...) |
| 3561 | 3544 |
| 3562 // Install the new backing store in the JSArray. | 3545 // Install the new backing store in the JSArray. |
| 3563 __ sw(elements_reg, | 3546 __ sw(elements_reg, |
| 3564 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); | 3547 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
| 3565 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg, | 3548 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg, |
| 3566 scratch, kRAHasNotBeenSaved, kDontSaveFPRegs, | 3549 scratch, kRAHasNotBeenSaved, kDontSaveFPRegs, |
| 3567 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 3550 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 3568 | 3551 |
| 3569 // Increment the length of the array. | 3552 // Increment the length of the array. |
| 3570 __ li(length_reg, Operand(Smi::FromInt(1))); | 3553 __ li(length_reg, Operand(Smi::FromInt(1))); |
| | 3554 __ Ret(USE_DELAY_SLOT); |
| 3571 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); | 3555 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); |
| 3572 __ Ret(); | |
| 3573 | 3556 |
| 3574 __ bind(&check_capacity); | 3557 __ bind(&check_capacity); |
| 3575 // Check for cow elements, in general they are not handled by this stub | 3558 // Check for cow elements, in general they are not handled by this stub |
| 3576 __ CheckMap(elements_reg, | 3559 __ CheckMap(elements_reg, |
| 3577 scratch, | 3560 scratch, |
| 3578 Heap::kFixedCOWArrayMapRootIndex, | 3561 Heap::kFixedCOWArrayMapRootIndex, |
| 3579 &miss_force_generic, | 3562 &miss_force_generic, |
| 3580 DONT_DO_SMI_CHECK); | 3563 DONT_DO_SMI_CHECK); |
| 3581 | 3564 |
| 3582 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); | 3565 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); |
| (...skipping 143 matching lines...) |
| 3726 // Install the new backing store in the JSArray. | 3709 // Install the new backing store in the JSArray. |
| 3727 __ sw(elements_reg, | 3710 __ sw(elements_reg, |
| 3728 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); | 3711 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
| 3729 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg, | 3712 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg, |
| 3730 scratch1, kRAHasNotBeenSaved, kDontSaveFPRegs, | 3713 scratch1, kRAHasNotBeenSaved, kDontSaveFPRegs, |
| 3731 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 3714 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 3732 | 3715 |
| 3733 // Increment the length of the array. | 3716 // Increment the length of the array. |
| 3734 __ li(length_reg, Operand(Smi::FromInt(1))); | 3717 __ li(length_reg, Operand(Smi::FromInt(1))); |
| 3735 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); | 3718 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); |
| | 3719 __ Ret(USE_DELAY_SLOT); |
| 3736 __ lw(elements_reg, | 3720 __ lw(elements_reg, |
| 3737 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); | 3721 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
| 3738 __ Ret(); | |
| 3739 | 3722 |
| 3740 __ bind(&check_capacity); | 3723 __ bind(&check_capacity); |
| 3741 // Make sure that the backing store can hold additional elements. | 3724 // Make sure that the backing store can hold additional elements. |
| 3742 __ lw(scratch1, | 3725 __ lw(scratch1, |
| 3743 FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset)); | 3726 FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset)); |
| 3744 __ Branch(&slow, hs, length_reg, Operand(scratch1)); | 3727 __ Branch(&slow, hs, length_reg, Operand(scratch1)); |
| 3745 | 3728 |
| 3746 // Grow the array and finish the store. | 3729 // Grow the array and finish the store. |
| 3747 __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1))); | 3730 __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1))); |
| 3748 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); | 3731 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); |
| 3749 __ jmp(&finish_store); | 3732 __ jmp(&finish_store); |
| 3750 | 3733 |
| 3751 __ bind(&slow); | 3734 __ bind(&slow); |
| 3752 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); | 3735 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); |
| 3753 } | 3736 } |
| 3754 } | 3737 } |
| 3755 | 3738 |
| 3756 | 3739 |
| 3757 #undef __ | 3740 #undef __ |
| 3758 | 3741 |
| 3759 } } // namespace v8::internal | 3742 } } // namespace v8::internal |
| 3760 | 3743 |
| 3761 #endif // V8_TARGET_ARCH_MIPS | 3744 #endif // V8_TARGET_ARCH_MIPS |
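
The hunks above all apply the same two mechanical rewrites to the MIPS stub compiler. Where a single instruction used to precede `__ Ret()`, the return is now emitted first as `__ Ret(USE_DELAY_SLOT)`, so the following instruction fills the branch delay slot of the `jr ra` instead of a padding `nop`. Where the sequence was `__ Drop(argc + 1); __ Ret();`, it collapses into `__ DropAndRet(argc + 1)`, which folds the stack adjustment into that delay slot (a conditional variant is used for one smi fast path). The sketch below merely restates the pattern with one of the loads from the diff; it is illustrative only and not standalone compilable, since `__` expands to `masm->` inside the stub compiler.

```cpp
// Before: the value is loaded, then Ret() emits "jr ra" plus a nop to
// protect its branch delay slot.
__ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ Ret();

// After: Ret(USE_DELAY_SLOT) emits "jr ra" without the protecting nop, so
// the next emitted instruction -- the load of the return value -- executes
// in the delay slot, saving one instruction.
__ Ret(USE_DELAY_SLOT);
__ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));

// Likewise for the argument-dropping returns: Drop(n) followed by Ret()
// becomes DropAndRet(n), which performs the stack-pointer adjustment in the
// return's delay slot; the conditional form seen in the diff,
// DropAndRet(n, cond, rs, rt), replaces a guarded Drop/Ret pair.
__ DropAndRet(argc + 1);
```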