OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_PPC | 7 #if V8_TARGET_ARCH_PPC |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 2866 matching lines...)
2877 // r5 - vector | 2877 // r5 - vector |
2878 Label miss; | 2878 Label miss; |
2879 int argc = arg_count(); | 2879 int argc = arg_count(); |
2880 ParameterCount actual(argc); | 2880 ParameterCount actual(argc); |
2881 | 2881 |
2882 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r7); | 2882 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r7); |
2883 __ cmp(r4, r7); | 2883 __ cmp(r4, r7); |
2884 __ bne(&miss); | 2884 __ bne(&miss); |
2885 | 2885 |
2886 __ mov(r3, Operand(arg_count())); | 2886 __ mov(r3, Operand(arg_count())); |
2887 __ SmiToPtrArrayOffset(r7, r6); | 2887 __ SmiToPtrArrayOffset(r9, r6); |
2888 __ add(r7, r5, r7); | 2888 __ add(r9, r5, r9); |
2889 __ LoadP(r7, FieldMemOperand(r7, FixedArray::kHeaderSize)); | 2889 __ LoadP(r7, FieldMemOperand(r9, FixedArray::kHeaderSize)); |
2890 | 2890 |
2891 // Verify that r7 contains an AllocationSite | 2891 // Verify that r7 contains an AllocationSite |
2892 __ LoadP(r8, FieldMemOperand(r7, HeapObject::kMapOffset)); | 2892 __ LoadP(r8, FieldMemOperand(r7, HeapObject::kMapOffset)); |
2893 __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex); | 2893 __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex); |
2894 __ bne(&miss); | 2894 __ bne(&miss); |
2895 | 2895 |
| 2896 // Increment the call count for monomorphic function calls. |
| 2897 const int count_offset = FixedArray::kHeaderSize + kPointerSize; |
| 2898 __ LoadP(r6, FieldMemOperand(r9, count_offset)); |
| 2899 __ AddSmiLiteral(r6, r6, Smi::FromInt(CallICNexus::kCallCountIncrement), r0); |
| 2900 __ StoreP(r6, FieldMemOperand(r9, count_offset)); |
| 2901 |
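Side note on the count update just added (new lines 2896-2900): the call count is kept as a Smi in the vector element after the feedback slot, so AddSmiLiteral can bump it without untagging first. A minimal standalone sketch of why that works, assuming the 32-bit layout where a Smi is the value shifted left by one with a zero tag bit (PPC64 shifts by 32, but the argument is the same); SmiTag/SmiUntag/kCallCountIncrement below are local stand-ins, not the V8 declarations:

    #include <cassert>
    #include <cstdint>

    // Hypothetical stand-ins for Smi tagging: value << 1, low tag bit 0.
    static intptr_t SmiTag(intptr_t value) { return value << 1; }
    static intptr_t SmiUntag(intptr_t smi) { return smi >> 1; }

    int main() {
      const intptr_t kCallCountIncrement = 1;  // stand-in constant
      intptr_t slot = SmiTag(41);              // tagged count loaded from the vector
      // The tag bit is zero, so adding two tagged Smis equals tagging the sum,
      // which is what lets the stub bump the counter with a single add.
      slot += SmiTag(kCallCountIncrement);
      assert(SmiUntag(slot) == 42);
      return 0;
    }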
2896 __ mr(r5, r7); | 2902 __ mr(r5, r7); |
2897 __ mr(r6, r4); | 2903 __ mr(r6, r4); |
2898 ArrayConstructorStub stub(masm->isolate(), arg_count()); | 2904 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
2899 __ TailCallStub(&stub); | 2905 __ TailCallStub(&stub); |
2900 | 2906 |
2901 __ bind(&miss); | 2907 __ bind(&miss); |
2902 GenerateMiss(masm); | 2908 GenerateMiss(masm); |
2903 | 2909 |
2904 // The slow case, we need this no matter what to complete a call after a miss. | 2910 // The slow case, we need this no matter what to complete a call after a miss. |
2905 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); | 2911 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); |
(...skipping 11 matching lines...)
2917 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2923 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
2918 const int generic_offset = | 2924 const int generic_offset = |
2919 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 2925 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
2920 Label extra_checks_or_miss, slow_start; | 2926 Label extra_checks_or_miss, slow_start; |
2921 Label slow, non_function, wrap, cont; | 2927 Label slow, non_function, wrap, cont; |
2922 Label have_js_function; | 2928 Label have_js_function; |
2923 int argc = arg_count(); | 2929 int argc = arg_count(); |
2924 ParameterCount actual(argc); | 2930 ParameterCount actual(argc); |
2925 | 2931 |
2926 // The checks. First, does r4 match the recorded monomorphic target? | 2932 // The checks. First, does r4 match the recorded monomorphic target? |
2927 __ SmiToPtrArrayOffset(r7, r6); | 2933 __ SmiToPtrArrayOffset(r9, r6); |
2928 __ add(r7, r5, r7); | 2934 __ add(r9, r5, r9); |
2929 __ LoadP(r7, FieldMemOperand(r7, FixedArray::kHeaderSize)); | 2935 __ LoadP(r7, FieldMemOperand(r9, FixedArray::kHeaderSize)); |
2930 | 2936 |
2931 // We don't know that we have a weak cell. We might have a private symbol | 2937 // We don't know that we have a weak cell. We might have a private symbol |
2932 // or an AllocationSite, but the memory is safe to examine. | 2938 // or an AllocationSite, but the memory is safe to examine. |
2933 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | 2939 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |
2934 // FixedArray. | 2940 // FixedArray. |
2935 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | 2941 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) |
2936 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not | 2942 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not |
2937 // computed, meaning that it can't appear to be a pointer. If the low bit is | 2943 // computed, meaning that it can't appear to be a pointer. If the low bit is |
2938 // 0, then hash is computed, but the 0 bit prevents the field from appearing | 2944 // 0, then hash is computed, but the 0 bit prevents the field from appearing |
2939 // to be a pointer. | 2945 // to be a pointer. |
2940 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); | 2946 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); |
2941 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == | 2947 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == |
2942 WeakCell::kValueOffset && | 2948 WeakCell::kValueOffset && |
2943 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | 2949 WeakCell::kValueOffset == Symbol::kHashFieldSlot); |
2944 | 2950 |
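The STATIC_ASSERTs above are what make the single LoadP that follows safe: whichever of the three feedback kinds occupies the slot, the same offset holds either a real pointer field or a word whose low bits keep it from looking like the target function. A rough compile-time illustration of that shared-offset pattern, with invented layouts (only the coincidence-of-offsets idea comes from the source):

    #include <cstddef>
    #include <cstdint>

    // Hypothetical object layouts; only the shared-offset idea mirrors the stub.
    struct WeakCellLike       { void* map; void* value; };          // JSFunction or Smi(0)
    struct AllocationSiteLike { void* map; void* transition_info; };
    struct SymbolLike         { void* map; uintptr_t hash_field; }; // low bits rule out a pointer match

    static_assert(offsetof(WeakCellLike, value) == offsetof(AllocationSiteLike, transition_info),
                  "one load covers both kinds");
    static_assert(offsetof(WeakCellLike, value) == offsetof(SymbolLike, hash_field),
                  "and the symbol hash field as well");

    int main() { return 0; }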
2945 __ LoadP(r8, FieldMemOperand(r7, WeakCell::kValueOffset)); | 2951 __ LoadP(r8, FieldMemOperand(r7, WeakCell::kValueOffset)); |
2946 __ cmp(r4, r8); | 2952 __ cmp(r4, r8); |
2947 __ bne(&extra_checks_or_miss); | 2953 __ bne(&extra_checks_or_miss); |
2948 | 2954 |
2949 // The compare above could have been a SMI/SMI comparison. Guard against this | 2955 // The compare above could have been a SMI/SMI comparison. Guard against this |
2950 // convincing us that we have a monomorphic JSFunction. | 2956 // convincing us that we have a monomorphic JSFunction. |
2951 __ JumpIfSmi(r4, &extra_checks_or_miss); | 2957 __ JumpIfSmi(r4, &extra_checks_or_miss); |
2952 | 2958 |
| 2959 // Increment the call count for monomorphic function calls. |
| 2960 const int count_offset = FixedArray::kHeaderSize + kPointerSize; |
| 2961 __ LoadP(r6, FieldMemOperand(r9, count_offset)); |
| 2962 __ AddSmiLiteral(r6, r6, Smi::FromInt(CallICNexus::kCallCountIncrement), r0); |
| 2963 __ StoreP(r6, FieldMemOperand(r9, count_offset)); |
| 2964 |
2953 __ bind(&have_js_function); | 2965 __ bind(&have_js_function); |
2954 if (CallAsMethod()) { | 2966 if (CallAsMethod()) { |
2955 EmitContinueIfStrictOrNative(masm, &cont); | 2967 EmitContinueIfStrictOrNative(masm, &cont); |
2956 // Compute the receiver in sloppy mode. | 2968 // Compute the receiver in sloppy mode. |
2957 __ LoadP(r6, MemOperand(sp, argc * kPointerSize), r0); | 2969 __ LoadP(r6, MemOperand(sp, argc * kPointerSize), r0); |
2958 | 2970 |
2959 __ JumpIfSmi(r6, &wrap); | 2971 __ JumpIfSmi(r6, &wrap); |
2960 __ CompareObjectType(r6, r7, r7, FIRST_SPEC_OBJECT_TYPE); | 2972 __ CompareObjectType(r6, r7, r7, FIRST_SPEC_OBJECT_TYPE); |
2961 __ blt(&wrap); | 2973 __ blt(&wrap); |
2962 | 2974 |
(...skipping 23 matching lines...)
2986 } | 2998 } |
2987 | 2999 |
2988 __ CompareRoot(r7, Heap::kuninitialized_symbolRootIndex); | 3000 __ CompareRoot(r7, Heap::kuninitialized_symbolRootIndex); |
2989 __ beq(&uninitialized); | 3001 __ beq(&uninitialized); |
2990 | 3002 |
2991 // We are going megamorphic. If the feedback is a JSFunction, it is fine | 3003 // We are going megamorphic. If the feedback is a JSFunction, it is fine |
2992 // to handle it here. More complex cases are dealt with in the runtime. | 3004 // to handle it here. More complex cases are dealt with in the runtime. |
2993 __ AssertNotSmi(r7); | 3005 __ AssertNotSmi(r7); |
2994 __ CompareObjectType(r7, r8, r8, JS_FUNCTION_TYPE); | 3006 __ CompareObjectType(r7, r8, r8, JS_FUNCTION_TYPE); |
2995 __ bne(&miss); | 3007 __ bne(&miss); |
2996 __ SmiToPtrArrayOffset(r7, r6); | |
2997 __ add(r7, r5, r7); | |
2998 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); | 3008 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); |
2999 __ StoreP(ip, FieldMemOperand(r7, FixedArray::kHeaderSize), r0); | 3009 __ StoreP(ip, FieldMemOperand(r9, FixedArray::kHeaderSize), r0); |
3000 // We have to update statistics for runtime profiling. | 3010 // We have to update statistics for runtime profiling. |
3001 __ LoadP(r7, FieldMemOperand(r5, with_types_offset)); | 3011 __ LoadP(r7, FieldMemOperand(r5, with_types_offset)); |
3002 __ SubSmiLiteral(r7, r7, Smi::FromInt(1), r0); | 3012 __ SubSmiLiteral(r7, r7, Smi::FromInt(1), r0); |
3003 __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0); | 3013 __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0); |
3004 __ LoadP(r7, FieldMemOperand(r5, generic_offset)); | 3014 __ LoadP(r7, FieldMemOperand(r5, generic_offset)); |
3005 __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); | 3015 __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); |
3006 __ StoreP(r7, FieldMemOperand(r5, generic_offset), r0); | 3016 __ StoreP(r7, FieldMemOperand(r5, generic_offset), r0); |
3007 __ b(&slow_start); | 3017 __ b(&slow_start); |
3008 | 3018 |
3009 __ bind(&uninitialized); | 3019 __ bind(&uninitialized); |
3010 | 3020 |
3011 // We are going monomorphic, provided we actually have a JSFunction. | 3021 // We are going monomorphic, provided we actually have a JSFunction. |
3012 __ JumpIfSmi(r4, &miss); | 3022 __ JumpIfSmi(r4, &miss); |
3013 | 3023 |
3014 // Goto miss case if we do not have a function. | 3024 // Goto miss case if we do not have a function. |
3015 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); | 3025 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); |
3016 __ bne(&miss); | 3026 __ bne(&miss); |
3017 | 3027 |
3018 // Make sure the function is not the Array() function, which requires special | 3028 // Make sure the function is not the Array() function, which requires special |
3019 // behavior on MISS. | 3029 // behavior on MISS. |
3020 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r7); | 3030 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r7); |
3021 __ cmp(r4, r7); | 3031 __ cmp(r4, r7); |
3022 __ beq(&miss); | 3032 __ beq(&miss); |
3023 | 3033 |
3024 // Update stats. | 3034 // Update stats. |
3025 __ LoadP(r7, FieldMemOperand(r5, with_types_offset)); | 3035 __ LoadP(r7, FieldMemOperand(r5, with_types_offset)); |
3026 __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); | 3036 __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); |
3027 __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0); | 3037 __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0); |
3028 | 3038 |
| 3039 // Initialize the call counter. |
| 3040 __ LoadSmiLiteral(r0, Smi::FromInt(CallICNexus::kCallCountIncrement)); |
| 3041 __ StoreP(r0, FieldMemOperand(r9, count_offset)); |
| 3042 |
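Both the monomorphic increment earlier and this initialization address the counter at FixedArray::kHeaderSize + kPointerSize from r9, i.e. the vector element immediately after the slot's feedback entry. A small sketch of that pairing, with a plain array standing in for the feedback vector (MiniFeedbackVector and its accessors are illustrative, not V8 API):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Hypothetical flattened feedback vector: each call IC slot uses two
    // consecutive elements, [feedback, call_count], mirroring
    // count_offset = FixedArray::kHeaderSize + kPointerSize in the stub.
    struct MiniFeedbackVector {
      std::vector<intptr_t> elements;
      intptr_t& Feedback(size_t slot)  { return elements[2 * slot]; }
      intptr_t& CallCount(size_t slot) { return elements[2 * slot + 1]; }
    };

    int main() {
      MiniFeedbackVector vec{std::vector<intptr_t>(8, 0)};
      const intptr_t kCallCountIncrement = 1;  // stand-in constant
      vec.CallCount(1) = kCallCountIncrement;  // "Initialize the call counter."
      vec.CallCount(1) += kCallCountIncrement; // later monomorphic hits bump it
      assert(vec.CallCount(1) == 2);
      return 0;
    }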
3029 // Store the function. Use a stub since we need a frame for allocation. | 3043 // Store the function. Use a stub since we need a frame for allocation. |
3030 // r5 - vector | 3044 // r5 - vector |
3031 // r6 - slot | 3045 // r6 - slot |
3032 // r4 - function | 3046 // r4 - function |
3033 { | 3047 { |
3034 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 3048 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
3035 CreateWeakCellStub create_stub(masm->isolate()); | 3049 CreateWeakCellStub create_stub(masm->isolate()); |
3036 __ Push(r4); | 3050 __ Push(r4); |
3037 __ CallStub(&create_stub); | 3051 __ CallStub(&create_stub); |
3038 __ Pop(r4); | 3052 __ Pop(r4); |
(...skipping 2613 matching lines...)
5652 kStackUnwindSpace, NULL, | 5666 kStackUnwindSpace, NULL, |
5653 MemOperand(fp, 6 * kPointerSize), NULL); | 5667 MemOperand(fp, 6 * kPointerSize), NULL); |
5654 } | 5668 } |
5655 | 5669 |
5656 | 5670 |
5657 #undef __ | 5671 #undef __ |
5658 } // namespace internal | 5672 } // namespace internal |
5659 } // namespace v8 | 5673 } // namespace v8 |
5660 | 5674 |
5661 #endif // V8_TARGET_ARCH_PPC | 5675 #endif // V8_TARGET_ARCH_PPC |