OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 979 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
990 return true; | 990 return true; |
991 } | 991 } |
992 | 992 |
993 | 993 |
994 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 994 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
995 CEntryStub::GenerateAheadOfTime(isolate); | 995 CEntryStub::GenerateAheadOfTime(isolate); |
996 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 996 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
997 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 997 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
998 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 998 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
999 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 999 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 1000 CreateWeakCellStub::GenerateAheadOfTime(isolate); |
1000 BinaryOpICStub::GenerateAheadOfTime(isolate); | 1001 BinaryOpICStub::GenerateAheadOfTime(isolate); |
1001 StoreRegistersStateStub::GenerateAheadOfTime(isolate); | 1002 StoreRegistersStateStub::GenerateAheadOfTime(isolate); |
1002 RestoreRegistersStateStub::GenerateAheadOfTime(isolate); | 1003 RestoreRegistersStateStub::GenerateAheadOfTime(isolate); |
1003 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 1004 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
1004 } | 1005 } |
1005 | 1006 |
1006 | 1007 |
1007 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { | 1008 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { |
1008 StoreRegistersStateStub stub(isolate); | 1009 StoreRegistersStateStub stub(isolate); |
1009 stub.GetCode(); | 1010 stub.GetCode(); |
(...skipping 1855 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2865 Label extra_checks_or_miss, slow_start; | 2866 Label extra_checks_or_miss, slow_start; |
2866 Label slow, non_function, wrap, cont; | 2867 Label slow, non_function, wrap, cont; |
2867 Label have_js_function; | 2868 Label have_js_function; |
2868 int argc = arg_count(); | 2869 int argc = arg_count(); |
2869 ParameterCount actual(argc); | 2870 ParameterCount actual(argc); |
2870 | 2871 |
2871 // The checks. First, does a1 match the recorded monomorphic target? | 2872 // The checks. First, does a1 match the recorded monomorphic target? |
2872 __ dsrl(a4, a3, 32 - kPointerSizeLog2); | 2873 __ dsrl(a4, a3, 32 - kPointerSizeLog2); |
2873 __ Daddu(a4, a2, Operand(a4)); | 2874 __ Daddu(a4, a2, Operand(a4)); |
2874 __ ld(a4, FieldMemOperand(a4, FixedArray::kHeaderSize)); | 2875 __ ld(a4, FieldMemOperand(a4, FixedArray::kHeaderSize)); |
2875 __ Branch(&extra_checks_or_miss, ne, a1, Operand(a4)); | 2876 |
| 2877 // We don't know that we have a weak cell. We might have a private symbol |
| 2878 // or an AllocationSite, but the memory is safe to examine. |
| 2879 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |
| 2880 // FixedArray. |
| 2881 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) |
| 2882 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not |
| 2883 // computed, meaning that it can't appear to be a pointer. If the low bit is |
| 2884 // 0, then hash is computed, but the 0 bit prevents the field from appearing |
| 2885 // to be a pointer. |
| 2886 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); |
| 2887 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == |
| 2888 WeakCell::kValueOffset && |
| 2889 WeakCell::kValueOffset == Symbol::kHashFieldSlot); |
| 2890 |
| 2891 __ ld(a5, FieldMemOperand(a4, WeakCell::kValueOffset)); |
| 2892 __ Branch(&extra_checks_or_miss, ne, a1, Operand(a5)); |
| 2893 |
| 2894 // The compare above could have been a SMI/SMI comparison. Guard against this |
| 2895 // convincing us that we have a monomorphic JSFunction. |
| 2896 __ JumpIfSmi(a1, &extra_checks_or_miss); |
2876 | 2897 |
2877 __ bind(&have_js_function); | 2898 __ bind(&have_js_function); |
2878 if (CallAsMethod()) { | 2899 if (CallAsMethod()) { |
2879 EmitContinueIfStrictOrNative(masm, &cont); | 2900 EmitContinueIfStrictOrNative(masm, &cont); |
2880 // Compute the receiver in sloppy mode. | 2901 // Compute the receiver in sloppy mode. |
2881 __ ld(a3, MemOperand(sp, argc * kPointerSize)); | 2902 __ ld(a3, MemOperand(sp, argc * kPointerSize)); |
2882 | 2903 |
2883 __ JumpIfSmi(a3, &wrap); | 2904 __ JumpIfSmi(a3, &wrap); |
2884 __ GetObjectType(a3, a4, a4); | 2905 __ GetObjectType(a3, a4, a4); |
2885 __ Branch(&wrap, lt, a4, Operand(FIRST_SPEC_OBJECT_TYPE)); | 2906 __ Branch(&wrap, lt, a4, Operand(FIRST_SPEC_OBJECT_TYPE)); |
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2942 // Make sure the function is not the Array() function, which requires special | 2963 // Make sure the function is not the Array() function, which requires special |
2943 // behavior on MISS. | 2964 // behavior on MISS. |
2944 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a4); | 2965 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a4); |
2945 __ Branch(&miss, eq, a1, Operand(a4)); | 2966 __ Branch(&miss, eq, a1, Operand(a4)); |
2946 | 2967 |
2947 // Update stats. | 2968 // Update stats. |
2948 __ ld(a4, FieldMemOperand(a2, with_types_offset)); | 2969 __ ld(a4, FieldMemOperand(a2, with_types_offset)); |
2949 __ Daddu(a4, a4, Operand(Smi::FromInt(1))); | 2970 __ Daddu(a4, a4, Operand(Smi::FromInt(1))); |
2950 __ sd(a4, FieldMemOperand(a2, with_types_offset)); | 2971 __ sd(a4, FieldMemOperand(a2, with_types_offset)); |
2951 | 2972 |
2952 // Store the function. | 2973 // Store the function. Use a stub since we need a frame for allocation. |
2953 __ dsrl(a4, a3, 32 - kPointerSizeLog2); | 2974 // a2 - vector |
2954 __ Daddu(a4, a2, Operand(a4)); | 2975 // a3 - slot |
2955 __ Daddu(a4, a4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 2976 // a1 - function |
2956 __ sd(a1, MemOperand(a4, 0)); | 2977 { |
| 2978 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2979 CreateWeakCellStub create_stub(masm->isolate()); |
| 2980 __ Push(a1); |
| 2981 __ CallStub(&create_stub); |
| 2982 __ Pop(a1); |
| 2983 } |
2957 | 2984 |
2958 // Update the write barrier. | |
2959 __ mov(a5, a1); | |
2960 __ RecordWrite(a2, a4, a5, kRAHasNotBeenSaved, kDontSaveFPRegs, | |
2961 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
2962 __ Branch(&have_js_function); | 2985 __ Branch(&have_js_function); |
2963 | 2986 |
2964 // We are here because tracing is on or we encountered a MISS case we can't | 2987 // We are here because tracing is on or we encountered a MISS case we can't |
2965 // handle here. | 2988 // handle here. |
2966 __ bind(&miss); | 2989 __ bind(&miss); |
2967 GenerateMiss(masm); | 2990 GenerateMiss(masm); |
2968 | 2991 |
2969 // the slow case | 2992 // the slow case |
2970 __ bind(&slow_start); | 2993 __ bind(&slow_start); |
2971 // Check that the function is really a JavaScript function. | 2994 // Check that the function is really a JavaScript function. |
(...skipping 2220 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5192 kStackUnwindSpace, kInvalidStackOffset, | 5215 kStackUnwindSpace, kInvalidStackOffset, |
5193 MemOperand(fp, 6 * kPointerSize), NULL); | 5216 MemOperand(fp, 6 * kPointerSize), NULL); |
5194 } | 5217 } |
5195 | 5218 |
5196 | 5219 |
5197 #undef __ | 5220 #undef __ |
5198 | 5221 |
5199 } } // namespace v8::internal | 5222 } } // namespace v8::internal |
5200 | 5223 |
5201 #endif // V8_TARGET_ARCH_MIPS64 | 5224 #endif // V8_TARGET_ARCH_MIPS64 |
OLD | NEW |