OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 851 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
862 | 862 |
863 // Pass the additional arguments. | 863 // Pass the additional arguments. |
864 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); | 864 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); |
865 Handle<Object> call_data(api_call_info->data(), masm->isolate()); | 865 Handle<Object> call_data(api_call_info->data(), masm->isolate()); |
866 if (masm->isolate()->heap()->InNewSpace(*call_data)) { | 866 if (masm->isolate()->heap()->InNewSpace(*call_data)) { |
867 __ Move(r0, api_call_info); | 867 __ Move(r0, api_call_info); |
868 __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset)); | 868 __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset)); |
869 } else { | 869 } else { |
870 __ Move(r6, call_data); | 870 __ Move(r6, call_data); |
871 } | 871 } |
872 __ mov(r7, Operand(ExternalReference::isolate_address(masm->isolate()))); | 872 __ mov(ip, Operand(ExternalReference::isolate_address(masm->isolate()))); |
873 // Store JS function, call data, isolate, ReturnValue default and ReturnValue. | 873 // Store JS function, call data, isolate, ReturnValue default and ReturnValue. |
874 __ stm(ib, sp, r5.bit() | r6.bit() | r7.bit()); | 874 __ stm(ib, sp, r5.bit() | r6.bit() | ip.bit()); |
875 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | 875 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
876 __ str(r5, MemOperand(sp, 4 * kPointerSize)); | 876 __ str(r5, MemOperand(sp, 4 * kPointerSize)); |
877 __ str(r5, MemOperand(sp, 5 * kPointerSize)); | 877 __ str(r5, MemOperand(sp, 5 * kPointerSize)); |
878 | 878 |
879 // Prepare arguments. | 879 // Prepare arguments. |
880 __ add(r2, sp, Operand(5 * kPointerSize)); | 880 __ add(r2, sp, Operand(5 * kPointerSize)); |
881 | 881 |
882 // Allocate the v8::Arguments structure in the arguments' space since | 882 // Allocate the v8::Arguments structure in the arguments' space since |
883 // it's not controlled by GC. | 883 // it's not controlled by GC. |
884 const int kApiStackSpace = 4; | 884 const int kApiStackSpace = 4; |
(...skipping 937 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1822 // Check for a smi. | 1822 // Check for a smi. |
1823 __ Drop(argc + 1); | 1823 __ Drop(argc + 1); |
1824 __ Ret(); | 1824 __ Ret(); |
1825 | 1825 |
1826 __ bind(&with_write_barrier); | 1826 __ bind(&with_write_barrier); |
1827 | 1827 |
1828 __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 1828 __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
1829 | 1829 |
1830 if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) { | 1830 if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) { |
1831 Label fast_object, not_fast_object; | 1831 Label fast_object, not_fast_object; |
1832 __ CheckFastObjectElements(r3, r7, ¬_fast_object); | 1832 __ CheckFastObjectElements(r3, r9, ¬_fast_object); |
1833 __ jmp(&fast_object); | 1833 __ jmp(&fast_object); |
1834 // In case of fast smi-only, convert to fast object, otherwise bail out. | 1834 // In case of fast smi-only, convert to fast object, otherwise bail out. |
1835 __ bind(¬_fast_object); | 1835 __ bind(¬_fast_object); |
1836 __ CheckFastSmiElements(r3, r7, &call_builtin); | 1836 __ CheckFastSmiElements(r3, r9, &call_builtin); |
1837 | 1837 |
1838 __ ldr(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); | 1838 __ ldr(r9, FieldMemOperand(r4, HeapObject::kMapOffset)); |
1839 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 1839 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); |
1840 __ cmp(r7, ip); | 1840 __ cmp(r9, ip); |
1841 __ b(eq, &call_builtin); | 1841 __ b(eq, &call_builtin); |
1842 // receiver: receiver object (note: "edx" was a stale x86 comment) | 1842 // receiver: receiver object (note: "edx" was a stale x86 comment) |
1843 // r3: map | 1843 // r3: map |
1844 Label try_holey_map; | 1844 Label try_holey_map; |
1845 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, | 1845 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
1846 FAST_ELEMENTS, | 1846 FAST_ELEMENTS, |
1847 r3, | 1847 r3, |
1848 r7, | 1848 r9, |
1849 &try_holey_map); | 1849 &try_holey_map); |
1850 __ mov(r2, receiver); | 1850 __ mov(r2, receiver); |
1851 ElementsTransitionGenerator:: | 1851 ElementsTransitionGenerator:: |
1852 GenerateMapChangeElementsTransition(masm(), | 1852 GenerateMapChangeElementsTransition(masm(), |
1853 DONT_TRACK_ALLOCATION_SITE, | 1853 DONT_TRACK_ALLOCATION_SITE, |
1854 NULL); | 1854 NULL); |
1855 __ jmp(&fast_object); | 1855 __ jmp(&fast_object); |
1856 | 1856 |
1857 __ bind(&try_holey_map); | 1857 __ bind(&try_holey_map); |
1858 __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS, | 1858 __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS, |
1859 FAST_HOLEY_ELEMENTS, | 1859 FAST_HOLEY_ELEMENTS, |
1860 r3, | 1860 r3, |
1861 r7, | 1861 r9, |
1862 &call_builtin); | 1862 &call_builtin); |
1863 __ mov(r2, receiver); | 1863 __ mov(r2, receiver); |
1864 ElementsTransitionGenerator:: | 1864 ElementsTransitionGenerator:: |
1865 GenerateMapChangeElementsTransition(masm(), | 1865 GenerateMapChangeElementsTransition(masm(), |
1866 DONT_TRACK_ALLOCATION_SITE, | 1866 DONT_TRACK_ALLOCATION_SITE, |
1867 NULL); | 1867 NULL); |
1868 __ bind(&fast_object); | 1868 __ bind(&fast_object); |
1869 } else { | 1869 } else { |
1870 __ CheckFastObjectElements(r3, r3, &call_builtin); | 1870 __ CheckFastObjectElements(r3, r3, &call_builtin); |
1871 } | 1871 } |
(...skipping 12 matching lines...) Expand all Loading... |
1884 r4, | 1884 r4, |
1885 kLRHasNotBeenSaved, | 1885 kLRHasNotBeenSaved, |
1886 kDontSaveFPRegs, | 1886 kDontSaveFPRegs, |
1887 EMIT_REMEMBERED_SET, | 1887 EMIT_REMEMBERED_SET, |
1888 OMIT_SMI_CHECK); | 1888 OMIT_SMI_CHECK); |
1889 __ Drop(argc + 1); | 1889 __ Drop(argc + 1); |
1890 __ Ret(); | 1890 __ Ret(); |
1891 | 1891 |
1892 __ bind(&attempt_to_grow_elements); | 1892 __ bind(&attempt_to_grow_elements); |
1893 // r0: array's length + 1. | 1893 // r0: array's length + 1. |
1894 // r4: elements' length. | |
1895 | 1894 |
1896 if (!FLAG_inline_new) { | 1895 if (!FLAG_inline_new) { |
1897 __ b(&call_builtin); | 1896 __ b(&call_builtin); |
1898 } | 1897 } |
1899 | 1898 |
1900 __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize)); | 1899 __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize)); |
1901 // Growing elements that are SMI-only requires special handling in case | 1900 // Growing elements that are SMI-only requires special handling in case |
1902 // the new element is non-Smi. For now, delegate to the builtin. | 1901 // the new element is non-Smi. For now, delegate to the builtin. |
1903 Label no_fast_elements_check; | 1902 Label no_fast_elements_check; |
1904 __ JumpIfSmi(r2, &no_fast_elements_check); | 1903 __ JumpIfSmi(r2, &no_fast_elements_check); |
1905 __ ldr(r7, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 1904 __ ldr(r9, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
1906 __ CheckFastObjectElements(r7, r7, &call_builtin); | 1905 __ CheckFastObjectElements(r9, r9, &call_builtin); |
1907 __ bind(&no_fast_elements_check); | 1906 __ bind(&no_fast_elements_check); |
1908 | 1907 |
1909 ExternalReference new_space_allocation_top = | 1908 ExternalReference new_space_allocation_top = |
1910 ExternalReference::new_space_allocation_top_address(isolate()); | 1909 ExternalReference::new_space_allocation_top_address(isolate()); |
1911 ExternalReference new_space_allocation_limit = | 1910 ExternalReference new_space_allocation_limit = |
1912 ExternalReference::new_space_allocation_limit_address(isolate()); | 1911 ExternalReference::new_space_allocation_limit_address(isolate()); |
1913 | 1912 |
1914 const int kAllocationDelta = 4; | 1913 const int kAllocationDelta = 4; |
1915 // Load top and check if it is the end of elements. | 1914 // Load top and check if it is the end of elements. |
1916 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0)); | 1915 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0)); |
1917 __ add(end_elements, end_elements, Operand(kEndElementsOffset)); | 1916 __ add(end_elements, end_elements, Operand(kEndElementsOffset)); |
1918 __ mov(r7, Operand(new_space_allocation_top)); | 1917 __ mov(r4, Operand(new_space_allocation_top)); |
1919 __ ldr(r3, MemOperand(r7)); | 1918 __ ldr(r3, MemOperand(r4)); |
1920 __ cmp(end_elements, r3); | 1919 __ cmp(end_elements, r3); |
1921 __ b(ne, &call_builtin); | 1920 __ b(ne, &call_builtin); |
1922 | 1921 |
1923 __ mov(r9, Operand(new_space_allocation_limit)); | 1922 __ mov(r9, Operand(new_space_allocation_limit)); |
1924 __ ldr(r9, MemOperand(r9)); | 1923 __ ldr(r9, MemOperand(r9)); |
1925 __ add(r3, r3, Operand(kAllocationDelta * kPointerSize)); | 1924 __ add(r3, r3, Operand(kAllocationDelta * kPointerSize)); |
1926 __ cmp(r3, r9); | 1925 __ cmp(r3, r9); |
1927 __ b(hi, &call_builtin); | 1926 __ b(hi, &call_builtin); |
1928 | 1927 |
1929 // We fit and could grow elements. | 1928 // We fit and could grow elements. |
1930 // Update new_space_allocation_top. | 1929 // Update new_space_allocation_top. |
1931 __ str(r3, MemOperand(r7)); | 1930 __ str(r3, MemOperand(r4)); |
1932 // Push the argument. | 1931 // Push the argument. |
1933 __ str(r2, MemOperand(end_elements)); | 1932 __ str(r2, MemOperand(end_elements)); |
1934 // Fill the rest with holes. | 1933 // Fill the rest with holes. |
1935 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); | 1934 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); |
1936 for (int i = 1; i < kAllocationDelta; i++) { | 1935 for (int i = 1; i < kAllocationDelta; i++) { |
1937 __ str(r3, MemOperand(end_elements, i * kPointerSize)); | 1936 __ str(r3, MemOperand(end_elements, i * kPointerSize)); |
1938 } | 1937 } |
1939 | 1938 |
1940 // Update elements' and array's sizes. | 1939 // Update elements' and array's sizes. |
1941 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 1940 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 1941 __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
1942 __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta))); | 1942 __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta))); |
1943 __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); | 1943 __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
1944 | 1944 |
1945 // Elements are in new space, so write barrier is not required. | 1945 // Elements are in new space, so write barrier is not required. |
1946 __ Drop(argc + 1); | 1946 __ Drop(argc + 1); |
1947 __ Ret(); | 1947 __ Ret(); |
1948 } | 1948 } |
1949 __ bind(&call_builtin); | 1949 __ bind(&call_builtin); |
1950 __ TailCallExternalReference( | 1950 __ TailCallExternalReference( |
1951 ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1); | 1951 ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1); |
(...skipping 1321 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3273 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: | 3273 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: |
3274 __ strh(r5, MemOperand(r3, key, LSL, 0)); | 3274 __ strh(r5, MemOperand(r3, key, LSL, 0)); |
3275 break; | 3275 break; |
3276 case EXTERNAL_INT_ELEMENTS: | 3276 case EXTERNAL_INT_ELEMENTS: |
3277 case EXTERNAL_UNSIGNED_INT_ELEMENTS: | 3277 case EXTERNAL_UNSIGNED_INT_ELEMENTS: |
3278 __ str(r5, MemOperand(r3, key, LSL, 1)); | 3278 __ str(r5, MemOperand(r3, key, LSL, 1)); |
3279 break; | 3279 break; |
3280 case EXTERNAL_FLOAT_ELEMENTS: | 3280 case EXTERNAL_FLOAT_ELEMENTS: |
3281 // Perform int-to-float conversion and store to memory. | 3281 // Perform int-to-float conversion and store to memory. |
3282 __ SmiUntag(r4, key); | 3282 __ SmiUntag(r4, key); |
3283 StoreIntAsFloat(masm, r3, r4, r5, r7); | 3283 StoreIntAsFloat(masm, r3, r4, r5, r6); |
3284 break; | 3284 break; |
3285 case EXTERNAL_DOUBLE_ELEMENTS: | 3285 case EXTERNAL_DOUBLE_ELEMENTS: |
3286 __ vmov(s2, r5); | 3286 __ vmov(s2, r5); |
3287 __ vcvt_f64_s32(d0, s2); | 3287 __ vcvt_f64_s32(d0, s2); |
3288 __ add(r3, r3, Operand(key, LSL, 2)); | 3288 __ add(r3, r3, Operand(key, LSL, 2)); |
3289 // r3: effective address of the double element | 3289 // r3: effective address of the double element |
3290 __ vstr(d0, r3, 0); | 3290 __ vstr(d0, r3, 0); |
3291 break; | 3291 break; |
3292 case FAST_ELEMENTS: | 3292 case FAST_ELEMENTS: |
3293 case FAST_SMI_ELEMENTS: | 3293 case FAST_SMI_ELEMENTS: |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3329 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { | 3329 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { |
3330 __ sub(r5, r0, Operand(kHeapObjectTag)); | 3330 __ sub(r5, r0, Operand(kHeapObjectTag)); |
3331 __ vldr(d0, r5, HeapNumber::kValueOffset); | 3331 __ vldr(d0, r5, HeapNumber::kValueOffset); |
3332 __ add(r5, r3, Operand(key, LSL, 2)); | 3332 __ add(r5, r3, Operand(key, LSL, 2)); |
3333 __ vstr(d0, r5, 0); | 3333 __ vstr(d0, r5, 0); |
3334 } else { | 3334 } else { |
3335 // Hoisted load. vldr requires offset to be a multiple of 4 so we can | 3335 // Hoisted load. vldr requires offset to be a multiple of 4 so we can |
3336 // not include -kHeapObjectTag into it. | 3336 // not include -kHeapObjectTag into it. |
3337 __ sub(r5, value, Operand(kHeapObjectTag)); | 3337 __ sub(r5, value, Operand(kHeapObjectTag)); |
3338 __ vldr(d0, r5, HeapNumber::kValueOffset); | 3338 __ vldr(d0, r5, HeapNumber::kValueOffset); |
3339 __ ECMAToInt32(r5, d0, r6, r7, r9, d1); | 3339 __ ECMAToInt32(r5, d0, r4, r6, r9, d1); |
3340 | 3340 |
3341 switch (elements_kind) { | 3341 switch (elements_kind) { |
3342 case EXTERNAL_BYTE_ELEMENTS: | 3342 case EXTERNAL_BYTE_ELEMENTS: |
3343 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: | 3343 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: |
3344 __ strb(r5, MemOperand(r3, key, LSR, 1)); | 3344 __ strb(r5, MemOperand(r3, key, LSR, 1)); |
3345 break; | 3345 break; |
3346 case EXTERNAL_SHORT_ELEMENTS: | 3346 case EXTERNAL_SHORT_ELEMENTS: |
3347 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: | 3347 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: |
3348 __ strh(r5, MemOperand(r3, key, LSL, 0)); | 3348 __ strh(r5, MemOperand(r3, key, LSL, 0)); |
3349 break; | 3349 break; |
(...skipping 216 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3566 // -- r5 : scratch | 3566 // -- r5 : scratch |
3567 // ----------------------------------- | 3567 // ----------------------------------- |
3568 Label miss_force_generic, transition_elements_kind, grow, slow; | 3568 Label miss_force_generic, transition_elements_kind, grow, slow; |
3569 Label finish_store, check_capacity; | 3569 Label finish_store, check_capacity; |
3570 | 3570 |
3571 Register value_reg = r0; | 3571 Register value_reg = r0; |
3572 Register key_reg = r1; | 3572 Register key_reg = r1; |
3573 Register receiver_reg = r2; | 3573 Register receiver_reg = r2; |
3574 Register elements_reg = r3; | 3574 Register elements_reg = r3; |
3575 Register scratch1 = r4; | 3575 Register scratch1 = r4; |
3576 Register scratch2 = r5; | 3576 Register scratch2 = no_reg; // Will be r5. |
3577 Register length_reg = r7; | 3577 Register length_reg = r5; |
3578 | 3578 |
3579 // This stub is meant to be tail-jumped to, the receiver must already | 3579 // This stub is meant to be tail-jumped to, the receiver must already |
3580 // have been verified by the caller to not be a smi. | 3580 // have been verified by the caller to not be a smi. |
3581 | 3581 |
3582 // Check that the key is a smi or a heap number convertible to a smi. | 3582 // Check that the key is a smi or a heap number convertible to a smi. |
3583 GenerateSmiKeyCheck(masm, key_reg, r4, d1, d2, &miss_force_generic); | 3583 GenerateSmiKeyCheck(masm, key_reg, r4, d1, d2, &miss_force_generic); |
3584 | 3584 |
3585 __ ldr(elements_reg, | 3585 __ ldr(elements_reg, |
3586 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); | 3586 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
3587 | 3587 |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3631 | 3631 |
3632 // Check for the empty array, and preallocate a small backing store if | 3632 // Check for the empty array, and preallocate a small backing store if |
3633 // possible. | 3633 // possible. |
3634 __ ldr(length_reg, | 3634 __ ldr(length_reg, |
3635 FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); | 3635 FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); |
3636 __ ldr(elements_reg, | 3636 __ ldr(elements_reg, |
3637 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); | 3637 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
3638 __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex); | 3638 __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex); |
3639 __ b(ne, &check_capacity); | 3639 __ b(ne, &check_capacity); |
3640 | 3640 |
| 3641 scratch2 = length_reg; // Use length_reg as scratch2 here. |
| 3642 |
3641 int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements); | 3643 int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements); |
3642 __ Allocate(size, elements_reg, scratch1, scratch2, &slow, TAG_OBJECT); | 3644 __ Allocate(size, elements_reg, scratch1, scratch2, &slow, TAG_OBJECT); |
3643 | 3645 |
3644 // Initialize the new FixedDoubleArray. | 3646 // Initialize the new FixedDoubleArray. |
3645 __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex); | 3647 __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex); |
3646 __ str(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset)); | 3648 __ str(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset)); |
3647 __ mov(scratch1, | 3649 __ mov(scratch1, |
3648 Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements))); | 3650 Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements))); |
3649 __ str(scratch1, | 3651 __ str(scratch1, |
3650 FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset)); | 3652 FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset)); |
3651 | 3653 |
3652 __ mov(scratch1, elements_reg); | 3654 __ mov(scratch1, elements_reg); |
3653 __ StoreNumberToDoubleElements(value_reg, key_reg, scratch1, | 3655 __ StoreNumberToDoubleElements(value_reg, key_reg, scratch1, |
3654 scratch2, d0, &transition_elements_kind); | 3656 scratch2, d0, &transition_elements_kind); |
3655 | 3657 |
3656 __ mov(scratch1, Operand(kHoleNanLower32)); | 3658 __ mov(scratch1, Operand(kHoleNanLower32)); |
3657 __ mov(scratch2, Operand(kHoleNanUpper32)); | 3659 __ mov(scratch2, Operand(kHoleNanUpper32)); |
3658 for (int i = 1; i < JSArray::kPreallocatedArrayElements; i++) { | 3660 for (int i = 1; i < JSArray::kPreallocatedArrayElements; i++) { |
3659 int offset = FixedDoubleArray::OffsetOfElementAt(i); | 3661 int offset = FixedDoubleArray::OffsetOfElementAt(i); |
3660 __ str(scratch1, FieldMemOperand(elements_reg, offset)); | 3662 __ str(scratch1, FieldMemOperand(elements_reg, offset)); |
3661 __ str(scratch2, FieldMemOperand(elements_reg, offset + kPointerSize)); | 3663 __ str(scratch2, FieldMemOperand(elements_reg, offset + kPointerSize)); |
3662 } | 3664 } |
3663 | 3665 |
| 3666 scratch2 = no_reg; // End of scratch2's live range. |
| 3667 |
3664 // Install the new backing store in the JSArray. | 3668 // Install the new backing store in the JSArray. |
3665 __ str(elements_reg, | 3669 __ str(elements_reg, |
3666 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); | 3670 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
3667 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg, | 3671 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg, |
3668 scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs, | 3672 scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs, |
3669 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 3673 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
3670 | 3674 |
3671 // Increment the length of the array. | 3675 // Increment the length of the array. |
3672 __ mov(length_reg, Operand(Smi::FromInt(1))); | 3676 __ mov(length_reg, Operand(Smi::FromInt(1))); |
3673 __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); | 3677 __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); |
(...skipping 17 matching lines...) Expand all Loading... |
3691 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); | 3695 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); |
3692 } | 3696 } |
3693 } | 3697 } |
3694 | 3698 |
3695 | 3699 |
3696 #undef __ | 3700 #undef __ |
3697 | 3701 |
3698 } } // namespace v8::internal | 3702 } } // namespace v8::internal |
3699 | 3703 |
3700 #endif // V8_TARGET_ARCH_ARM | 3704 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |