| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3824 matching lines...) |
| 3835 __ add(sp, sp, Operand(2 * kPointerSize)); | 3835 __ add(sp, sp, Operand(2 * kPointerSize)); |
| 3836 GenerateCompareFlatAsciiStrings(masm, r1, r0, r2, r3, r4, r5); | 3836 GenerateCompareFlatAsciiStrings(masm, r1, r0, r2, r3, r4, r5); |
| 3837 | 3837 |
| 3838 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 3838 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
| 3839 // tagged as a small integer. | 3839 // tagged as a small integer. |
| 3840 __ bind(&runtime); | 3840 __ bind(&runtime); |
| 3841 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1); | 3841 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1); |
| 3842 } | 3842 } |
| 3843 | 3843 |
| 3844 | 3844 |
| 3845 void ArrayPushStub::Generate(MacroAssembler* masm) { | |
| 3846 Register receiver = r0; | |
| 3847 Register scratch = r1; | |
| 3848 | |
| 3849 int argc = arguments_count(); | |
| 3850 | |
| 3851 if (argc == 0) { | |
| 3852 // Nothing to do, just return the length. | |
| 3853 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | |
| 3854 __ Drop(argc + 1); | |
| 3855 __ Ret(); | |
| 3856 return; | |
| 3857 } | |
| 3858 | |
| 3859 Isolate* isolate = masm->isolate(); | |
| 3860 | |
| 3861 if (argc != 1) { | |
| 3862 __ TailCallExternalReference( | |
| 3863 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); | |
| 3864 return; | |
| 3865 } | |
| 3866 | |
| 3867 Label call_builtin, attempt_to_grow_elements, with_write_barrier; | |
| 3868 | |
| 3869 Register elements = r6; | |
| 3870 Register end_elements = r5; | |
| 3871 // Get the elements array of the object. | |
| 3872 __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); | |
| 3873 | |
| 3874 if (IsFastSmiOrObjectElementsKind(elements_kind())) { | |
| 3875 // Check that the elements are in fast mode and writable. | |
| 3876 __ CheckMap(elements, | |
| 3877 scratch, | |
| 3878 Heap::kFixedArrayMapRootIndex, | |
| 3879 &call_builtin, | |
| 3880 DONT_DO_SMI_CHECK); | |
| 3881 } | |
| 3882 | |
| 3883 // Get the array's length into scratch and calculate new length. | |
| 3884 __ ldr(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); | |
| 3885 __ add(scratch, scratch, Operand(Smi::FromInt(argc))); | |
| 3886 | |
| 3887 // Get the elements' length. | |
| 3888 __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); | |
| 3889 | |
| 3890 // Check if we could survive without allocation. | |
| 3891 __ cmp(scratch, r4); | |
| 3892 | |
| 3893 const int kEndElementsOffset = | |
| 3894 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; | |
| 3895 | |
| 3896 if (IsFastSmiOrObjectElementsKind(elements_kind())) { | |
| 3897 __ b(gt, &attempt_to_grow_elements); | |
| 3898 | |
| 3899 // Check if value is a smi. | |
| 3900 __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize)); | |
| 3901 __ JumpIfNotSmi(r4, &with_write_barrier); | |
| 3902 | |
| 3903 // Store the value. | |
| 3904 // We may need a register containing the address end_elements below, so | |
| 3905 // write back the value in end_elements. | |
| 3906 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch)); | |
| 3907 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex)); | |
| 3908 } else { | |
| 3909 // Check if we could survive without allocation. | |
| 3910 __ cmp(scratch, r4); | |
| 3911 __ b(gt, &call_builtin); | |
| 3912 | |
| 3913 __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize)); | |
| 3914 __ StoreNumberToDoubleElements(r4, scratch, elements, r5, d0, | |
| 3915 &call_builtin, argc * kDoubleSize); | |
| 3916 } | |
| 3917 | |
| 3918 // Save new length. | |
| 3919 __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); | |
| 3920 __ Drop(argc + 1); | |
| 3921 __ mov(r0, scratch); | |
| 3922 __ Ret(); | |
| 3923 | |
| 3924 if (IsFastDoubleElementsKind(elements_kind())) { | |
| 3925 __ bind(&call_builtin); | |
| 3926 __ TailCallExternalReference( | |
| 3927 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); | |
| 3928 return; | |
| 3929 } | |
| 3930 | |
| 3931 __ bind(&with_write_barrier); | |
| 3932 | |
| 3933 if (IsFastSmiElementsKind(elements_kind())) { | |
| 3934 if (FLAG_trace_elements_transitions) __ jmp(&call_builtin); | |
| 3935 | |
| 3936 __ ldr(r9, FieldMemOperand(r4, HeapObject::kMapOffset)); | |
| 3937 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | |
| 3938 __ cmp(r9, ip); | |
| 3939 __ b(eq, &call_builtin); | |
| 3940 | |
| 3941 ElementsKind target_kind = IsHoleyElementsKind(elements_kind()) | |
| 3942 ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; | |
| 3943 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | |
| 3944 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); | |
| 3945 __ ldr(r3, ContextOperand(r3, Context::JS_ARRAY_MAPS_INDEX)); | |
| 3946 const int header_size = FixedArrayBase::kHeaderSize; | |
| 3947 // Verify that the object can be transitioned in place. | |
| 3948 const int origin_offset = header_size + elements_kind() * kPointerSize; | |
| 3949 __ ldr(r2, FieldMemOperand(receiver, origin_offset)); | |
| 3950 __ ldr(ip, FieldMemOperand(r3, HeapObject::kMapOffset)); | |
| 3951 __ cmp(r2, ip); | |
| 3952 __ b(ne, &call_builtin); | |
| 3953 | |
| 3954 const int target_offset = header_size + target_kind * kPointerSize; | |
| 3955 __ ldr(r3, FieldMemOperand(r3, target_offset)); | |
| 3956 __ mov(r2, receiver); | |
| 3957 ElementsTransitionGenerator::GenerateMapChangeElementsTransition( | |
| 3958 masm, DONT_TRACK_ALLOCATION_SITE, NULL); | |
| 3959 } | |
| 3960 | |
| 3961 // Save new length. | |
| 3962 __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); | |
| 3963 | |
| 3964 // Store the value. | |
| 3965 // We may need a register containing the address end_elements below, so write | |
| 3966 // back the value in end_elements. | |
| 3967 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch)); | |
| 3968 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex)); | |
| 3969 | |
| 3970 __ RecordWrite(elements, | |
| 3971 end_elements, | |
| 3972 r4, | |
| 3973 kLRHasNotBeenSaved, | |
| 3974 kDontSaveFPRegs, | |
| 3975 EMIT_REMEMBERED_SET, | |
| 3976 OMIT_SMI_CHECK); | |
| 3977 __ Drop(argc + 1); | |
| 3978 __ mov(r0, scratch); | |
| 3979 __ Ret(); | |
| 3980 | |
| 3981 __ bind(&attempt_to_grow_elements); | |
| 3982 // scratch: array's length + 1. | |
| 3983 | |
| 3984 if (!FLAG_inline_new) { | |
| 3985 __ bind(&call_builtin); | |
| 3986 __ TailCallExternalReference( | |
| 3987 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); | |
| 3988 return; | |
| 3989 } | |
| 3990 | |
| 3991 __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize)); | |
| 3992 // Growing elements that are SMI-only requires special handling in case the | |
| 3993 // new element is non-Smi. For now, delegate to the builtin. | |
| 3994 if (IsFastSmiElementsKind(elements_kind())) { | |
| 3995 __ JumpIfNotSmi(r2, &call_builtin); | |
| 3996 } | |
| 3997 | |
| 3998 // We could be lucky and the elements array could be at the top of new-space. | |
| 3999 // In this case we can just grow it in place by moving the allocation pointer | |
| 4000 // up. | |
| 4001 ExternalReference new_space_allocation_top = | |
| 4002 ExternalReference::new_space_allocation_top_address(isolate); | |
| 4003 ExternalReference new_space_allocation_limit = | |
| 4004 ExternalReference::new_space_allocation_limit_address(isolate); | |
| 4005 | |
| 4006 const int kAllocationDelta = 4; | |
| 4007 ASSERT(kAllocationDelta >= argc); | |
| 4008 // Load top and check if it is the end of elements. | |
| 4009 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch)); | |
| 4010 __ add(end_elements, end_elements, Operand(kEndElementsOffset)); | |
| 4011 __ mov(r4, Operand(new_space_allocation_top)); | |
| 4012 __ ldr(r3, MemOperand(r4)); | |
| 4013 __ cmp(end_elements, r3); | |
| 4014 __ b(ne, &call_builtin); | |
| 4015 | |
| 4016 __ mov(r9, Operand(new_space_allocation_limit)); | |
| 4017 __ ldr(r9, MemOperand(r9)); | |
| 4018 __ add(r3, r3, Operand(kAllocationDelta * kPointerSize)); | |
| 4019 __ cmp(r3, r9); | |
| 4020 __ b(hi, &call_builtin); | |
| 4021 | |
| 4022 // We fit and could grow elements. | |
| 4023 // Update new_space_allocation_top. | |
| 4024 __ str(r3, MemOperand(r4)); | |
| 4025 // Push the argument. | |
| 4026 __ str(r2, MemOperand(end_elements)); | |
| 4027 // Fill the rest with holes. | |
| 4028 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); | |
| 4029 for (int i = 1; i < kAllocationDelta; i++) { | |
| 4030 __ str(r3, MemOperand(end_elements, i * kPointerSize)); | |
| 4031 } | |
| 4032 | |
| 4033 // Update elements' and array's sizes. | |
| 4034 __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset)); | |
| 4035 __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); | |
| 4036 __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta))); | |
| 4037 __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); | |
| 4038 | |
| 4039 // Elements are in new space, so write barrier is not required. | |
| 4040 __ Drop(argc + 1); | |
| 4041 __ mov(r0, scratch); | |
| 4042 __ Ret(); | |
| 4043 | |
| 4044 __ bind(&call_builtin); | |
| 4045 __ TailCallExternalReference( | |
| 4046 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1); | |
| 4047 } | |
| 4048 | |
| 4049 | |
| 4050 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { | 3845 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { |
| 4051 // ----------- S t a t e ------------- | 3846 // ----------- S t a t e ------------- |
| 4052 // -- r1 : left | 3847 // -- r1 : left |
| 4053 // -- r0 : right | 3848 // -- r0 : right |
| 4054 // -- lr : return address | 3849 // -- lr : return address |
| 4055 // ----------------------------------- | 3850 // ----------------------------------- |
| 4056 Isolate* isolate = masm->isolate(); | 3851 Isolate* isolate = masm->isolate(); |
| 4057 | 3852 |
| 4058 // Load r2 with the allocation site. We stick an undefined dummy value here | 3853 // Load r2 with the allocation site. We stick an undefined dummy value here |
| 4059 // and replace it with the real allocation site later when we instantiate this | 3854 // and replace it with the real allocation site later when we instantiate this |
| (...skipping 1400 matching lines...) |
| 5460 MemOperand(fp, 6 * kPointerSize), | 5255 MemOperand(fp, 6 * kPointerSize), |
| 5461 NULL); | 5256 NULL); |
| 5462 } | 5257 } |
| 5463 | 5258 |
| 5464 | 5259 |
| 5465 #undef __ | 5260 #undef __ |
| 5466 | 5261 |
| 5467 } } // namespace v8::internal | 5262 } } // namespace v8::internal |
| 5468 | 5263 |
| 5469 #endif // V8_TARGET_ARCH_ARM | 5264 #endif // V8_TARGET_ARCH_ARM |