OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X87 | 5 #if V8_TARGET_ARCH_X87 |
6 | 6 |
7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 2807 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2818 } | 2818 } |
2819 | 2819 |
2820 // Called Construct on an Object that doesn't have a [[Construct]] internal | 2820 // Called Construct on an Object that doesn't have a [[Construct]] internal |
2821 // method. | 2821 // method. |
2822 __ bind(&non_constructor); | 2822 __ bind(&non_constructor); |
2823 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), | 2823 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), |
2824 RelocInfo::CODE_TARGET); | 2824 RelocInfo::CODE_TARGET); |
2825 } | 2825 } |
2826 | 2826 |
2827 // static | 2827 // static |
// Generates the ConstructWithSpread builtin: expands the spread operand of a
// `new C(...spread)` call into individual stack arguments, then tail-calls
// the generic Construct builtin with the adjusted argument count.
void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- edi : the constructor to call (can be any Object)
  // -----------------------------------

  // Free up some registers.
  // The x87 port has no XMM scratch registers, so edx/edi are parked on the
  // x87 FPU register stack instead: push each to memory, reload its raw bit
  // pattern with fld_s, then drop the memory slots again.
  // Save edx/edi to stX0/stX1.
  // NOTE(review): fld_s of a bit pattern that encodes a signaling NaN may be
  // quietized by the FPU, corrupting the saved value — confirm the parked
  // values cannot alias SNaN encodings, or that this matches the convention
  // used elsewhere in the x87 port.
  __ push(edx);
  __ push(edi);
  __ fld_s(MemOperand(esp, 0));
  __ fld_s(MemOperand(esp, 4));
  __ lea(esp, Operand(esp, 2 * kFloatSize));

  Register argc = eax;

  Register scratch = ecx;
  Register scratch2 = edi;

  Register spread = ebx;
  Register spread_map = edx;

  // The spread object is the topmost argument, just above the return address.
  __ mov(spread, Operand(esp, kPointerSize));
  __ mov(spread_map, FieldOperand(spread, HeapObject::kMapOffset));

  Label runtime_call, push_args;
  // Check that the spread is an array.
  __ CmpInstanceType(spread_map, JS_ARRAY_TYPE);
  __ j(not_equal, &runtime_call);

  // Check that we have the original ArrayPrototype.
  __ mov(scratch, FieldOperand(spread_map, Map::kPrototypeOffset));
  __ mov(scratch2, NativeContextOperand());
  __ cmp(scratch,
         ContextOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  __ j(not_equal, &runtime_call);

  // Check that the ArrayPrototype hasn't been modified in a way that would
  // affect iteration.
  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
  __ cmp(FieldOperand(scratch, Cell::kValueOffset),
         Immediate(Smi::FromInt(Isolate::kProtectorValid)));
  __ j(not_equal, &runtime_call);

  // Check that the map of the initial array iterator hasn't changed.
  __ mov(scratch2, NativeContextOperand());
  __ mov(scratch,
         ContextOperand(scratch2,
                        Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
  __ cmp(scratch,
         ContextOperand(scratch2,
                        Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
  __ j(not_equal, &runtime_call);

  // For FastPacked kinds, iteration will have the same effect as simply
  // accessing each property in order.
  Label no_protector_check;
  __ mov(scratch, FieldOperand(spread_map, Map::kBitField2Offset));
  __ DecodeField<Map::ElementsKindBits>(scratch);
  // Anything beyond the fast elements kinds goes through the runtime.
  __ cmp(scratch, Immediate(LAST_FAST_ELEMENTS_KIND));
  __ j(above, &runtime_call);
  // For non-FastHoley kinds, we can skip the protector check.
  __ cmp(scratch, Immediate(FAST_SMI_ELEMENTS));
  __ j(equal, &no_protector_check);
  __ cmp(scratch, Immediate(FAST_ELEMENTS));
  __ j(equal, &no_protector_check);
  __ cmp(scratch, Immediate(FAST_DOUBLE_ELEMENTS));
  __ j(equal, &no_protector_check);
  // Check the ArrayProtector cell.
  // Only the holey kinds reach here; their holes are observable via the
  // prototype chain, so the fast path is only valid while the array
  // protector is still intact.
  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
  __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
         Immediate(Smi::FromInt(Isolate::kProtectorValid)));
  __ j(not_equal, &runtime_call);

  __ bind(&no_protector_check);
  // Load the FixedArray backing store.
  __ mov(spread, FieldOperand(spread, JSArray::kElementsOffset));
  // Free up some registers.
  __ jmp(&push_args);

  __ bind(&runtime_call);
  {
    // Call the builtin for the result of the spread.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Need to save these on the stack.
    // Restore edx/edi from stX0/stX1 first — presumably the runtime call may
    // clobber the x87 stack, so the values are moved back into GP registers
    // and preserved via the Push/Pop pairs below. TODO(review): confirm.
    __ lea(esp, Operand(esp, -2 * kFloatSize));
    __ fstp_s(MemOperand(esp, 0));
    __ fstp_s(MemOperand(esp, 4));
    __ pop(edx);
    __ pop(edi);

    __ Push(edi);
    __ Push(edx);
    __ SmiTag(argc);
    __ Push(argc);
    __ Push(spread);
    __ CallRuntime(Runtime::kSpreadIterableFixed);
    // The runtime returns the spread contents as a FixedArray in eax.
    __ mov(spread, eax);
    __ Pop(argc);
    __ SmiUntag(argc);
    __ Pop(edx);
    __ Pop(edi);
    // Free up some registers.
    // Save edx/edi to stX0/stX1 again so both paths reach push_args with the
    // same register layout.
    __ push(edx);
    __ push(edi);
    __ fld_s(MemOperand(esp, 0));
    __ fld_s(MemOperand(esp, 4));
    __ lea(esp, Operand(esp, 2 * kFloatSize));
  }

  Register spread_len = edx;
  Register return_address = edi;
  __ bind(&push_args);
  {
    // Pop the return address and spread argument.
    __ PopReturnAddressTo(return_address);
    __ Pop(scratch);

    // Calculate the new nargs including the result of the spread.
    __ mov(spread_len, FieldOperand(spread, FixedArray::kLengthOffset));
    __ SmiUntag(spread_len);
    // argc += spread_len - 1. Subtract 1 for the spread itself.
    __ lea(argc, Operand(argc, spread_len, times_1, -1));
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
    // Make scratch the space we have left. The stack might already be
    // overflowed here which will cause scratch to become negative.
    __ neg(scratch);
    __ add(scratch, esp);
    __ sar(scratch, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmp(scratch, spread_len);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // Put the evaluated spread onto the stack as additional arguments.
  {
    Register scratch2 = esi;
    // __ movd(xmm2, esi);  // SSE equivalent in the ia32 port; x87 parks esi
    //                      // on the FPU stack instead (below).
    // Save esi to stX0, edx/edi in stX1/stX2 now.
    __ push(esi);
    __ fld_s(MemOperand(esp, 0));
    __ lea(esp, Operand(esp, 1 * kFloatSize));

    // Push each element of the backing store in order.
    __ mov(scratch, Immediate(0));
    Label done, loop;
    __ bind(&loop);
    __ cmp(scratch, spread_len);
    __ j(equal, &done, Label::kNear);
    __ mov(scratch2, FieldOperand(spread, scratch, times_pointer_size,
                                  FixedArray::kHeaderSize));
    __ Push(scratch2);
    __ inc(scratch);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(return_address);

    // Now Restore esi from stX0, edx/edi from stX1/stX2.
    // (fstp pops in stack order, so the slots come back out esi, edx, edi —
    // matching the pop sequence below.)
    __ lea(esp, Operand(esp, -3 * kFloatSize));
    __ fstp_s(MemOperand(esp, 0));
    __ fstp_s(MemOperand(esp, 4));
    __ fstp_s(MemOperand(esp, 8));
    __ pop(esi);
    __ pop(edx);
    __ pop(edi);
  }

  // Dispatch.
  // Tail-call the generic Construct builtin with eax/edx/edi restored and the
  // expanded arguments on the stack.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
| 3011 |
| 3012 // static |
2828 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { | 3013 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { |
2829 // ----------- S t a t e ------------- | 3014 // ----------- S t a t e ------------- |
2830 // -- edx : requested object size (untagged) | 3015 // -- edx : requested object size (untagged) |
2831 // -- esp[0] : return address | 3016 // -- esp[0] : return address |
2832 // ----------------------------------- | 3017 // ----------------------------------- |
2833 __ SmiTag(edx); | 3018 __ SmiTag(edx); |
2834 __ PopReturnAddressTo(ecx); | 3019 __ PopReturnAddressTo(ecx); |
2835 __ Push(edx); | 3020 __ Push(edx); |
2836 __ PushReturnAddressFrom(ecx); | 3021 __ PushReturnAddressFrom(ecx); |
2837 __ Move(esi, Smi::kZero); | 3022 __ Move(esi, Smi::kZero); |
(...skipping 301 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3139 | 3324 |
// On-stack replacement entry point for interpreted frames: delegates to the
// shared OSR helper. The `true` argument presumably flags the presence of the
// interpreter handler frame — confirm against Generate_OnStackReplacementHelper
// (defined elsewhere in this file).
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}
3143 | 3328 |
3144 #undef __ | 3329 #undef __ |
3145 } // namespace internal | 3330 } // namespace internal |
3146 } // namespace v8 | 3331 } // namespace v8 |
3147 | 3332 |
3148 #endif // V8_TARGET_ARCH_X87 | 3333 #endif // V8_TARGET_ARCH_X87 |
OLD | NEW |