OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 955 matching lines...) |
966 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 966 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
967 // It is important that the following stubs are generated in this order | 967 // It is important that the following stubs are generated in this order |
968 // because pregenerated stubs can only call other pregenerated stubs. | 968 // because pregenerated stubs can only call other pregenerated stubs. |
969 // RecordWriteStub uses StoreBufferOverflowStub, which in turn uses | 969 // RecordWriteStub uses StoreBufferOverflowStub, which in turn uses |
970 // CEntryStub. | 970 // CEntryStub. |
971 CEntryStub::GenerateAheadOfTime(isolate); | 971 CEntryStub::GenerateAheadOfTime(isolate); |
972 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 972 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
973 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 973 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
974 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 974 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
975 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 975 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
976 CreateWeakCellStub::GenerateAheadOfTime(isolate); | |
977 BinaryOpICStub::GenerateAheadOfTime(isolate); | 976 BinaryOpICStub::GenerateAheadOfTime(isolate); |
978 StoreRegistersStateStub::GenerateAheadOfTime(isolate); | 977 StoreRegistersStateStub::GenerateAheadOfTime(isolate); |
979 RestoreRegistersStateStub::GenerateAheadOfTime(isolate); | 978 RestoreRegistersStateStub::GenerateAheadOfTime(isolate); |
980 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 979 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
981 } | 980 } |
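
Note: the ordering constraint documented above (a pregenerated stub may only call stubs generated before it) is in effect a topological ordering of the stub call graph: CEntryStub first, then StoreBufferOverflowStub, and so on. A minimal stand-alone sketch of that invariant, using hypothetical names rather than the V8 API:

    // Hypothetical sketch, not V8 code: every stub must be generated only
    // after the stubs it calls, i.e. in topological order of the call graph.
    #include <cassert>
    #include <initializer_list>
    #include <string>
    #include <unordered_set>

    static std::unordered_set<std::string> generated;

    void GenerateAheadOfTime(const std::string& stub,
                             std::initializer_list<std::string> callees) {
      for (const auto& callee : callees) assert(generated.count(callee));
      generated.insert(stub);  // now safe for later stubs to call
    }

    int main() {
      GenerateAheadOfTime("CEntryStub", {});
      GenerateAheadOfTime("StoreBufferOverflowStub", {"CEntryStub"});
      GenerateAheadOfTime("RecordWriteStub", {"StoreBufferOverflowStub"});
    }
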
982 | 981 |
983 | 982 |
984 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { | 983 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { |
985 StoreRegistersStateStub stub(isolate); | 984 StoreRegistersStateStub stub(isolate); |
986 stub.GetCode(); | 985 stub.GetCode(); |
(...skipping 2056 matching lines...) |
3043 Register function = x1; | 3042 Register function = x1; |
3044 Register feedback_vector = x2; | 3043 Register feedback_vector = x2; |
3045 Register index = x3; | 3044 Register index = x3; |
3046 Register type = x4; | 3045 Register type = x4; |
3047 | 3046 |
3048 // The checks. First, does x1 match the recorded monomorphic target? | 3047 // The checks. First, does x1 match the recorded monomorphic target? |
3049 __ Add(x4, feedback_vector, | 3048 __ Add(x4, feedback_vector, |
3050 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 3049 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
3051 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); | 3050 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
3052 | 3051 |
3053 // We don't know that we have a weak cell. We might have a private symbol | 3052 __ Cmp(x4, function); |
3054 // or an AllocationSite, but the memory is safe to examine. | |
3055 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | |
3056 // FixedArray. | |
3057 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | |
3058 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not | |
3059 // computed, meaning that it can't appear to be a pointer. If the low bit is | |
3060 // 0, then hash is computed, but the 0 bit prevents the field from appearing | |
3061 // to be a pointer. | |
3062 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); | |
3063 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == | |
3064 WeakCell::kValueOffset && | |
3065 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | |
3066 | |
3067 __ Ldr(x5, FieldMemOperand(x4, WeakCell::kValueOffset)); | |
3068 __ Cmp(x5, function); | |
3069 __ B(ne, &extra_checks_or_miss); | 3053 __ B(ne, &extra_checks_or_miss); |
3070 | 3054 |
3071 // The compare above could have been a SMI/SMI comparison. Guard against this | |
3072 // convincing us that we have a monomorphic JSFunction. | |
3073 __ JumpIfSmi(function, &extra_checks_or_miss); | |
3074 | |
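
Note: both sides compute the feedback slot address the same way. Operand::UntagSmiAndScale(index, kPointerSizeLog2) turns the Smi-tagged slot index into a byte offset in a single address computation, and FieldMemOperand then folds in FixedArray::kHeaderSize minus the heap-object tag. A stand-alone model of that arithmetic, assuming arm64 Smi tagging (the value lives in the upper 32 bits) and an assumed FixedArray header size:

    // Hypothetical model of the slot-address arithmetic, not V8 code.
    #include <cstdint>
    #include <cstdio>

    constexpr int kSmiShift = 32;        // arm64: Smi value in bits 63..32
    constexpr int kPointerSizeLog2 = 3;  // 8-byte slots
    constexpr int kHeapObjectTag = 1;    // heap pointers carry tag bit 1
    constexpr int kHeaderSize = 16;      // assumed FixedArray header (map + length)

    // Mirrors __ Add(x4, feedback_vector,
    //               Operand::UntagSmiAndScale(index, kPointerSizeLog2))
    // followed by the FieldMemOperand(x4, FixedArray::kHeaderSize) access.
    uint64_t SlotAddress(uint64_t feedback_vector, int64_t smi_index) {
      uint64_t untagged = static_cast<uint64_t>(smi_index >> kSmiShift);
      return feedback_vector + (untagged << kPointerSizeLog2) +
             kHeaderSize - kHeapObjectTag;
    }

    int main() {
      // Slot 2 of a vector whose tagged base address is 0x10001.
      std::printf("%#llx\n", static_cast<unsigned long long>(
                                 SlotAddress(0x10001, int64_t{2} << kSmiShift)));
    }

The NEW side can compare the loaded slot against the function directly because the slot now holds the JSFunction itself; the OLD side had to load WeakCell::kValueOffset out of the weak cell first and then guard against the compare having been a Smi/Smi comparison.
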
3075 __ bind(&have_js_function); | 3055 __ bind(&have_js_function); |
3076 if (CallAsMethod()) { | 3056 if (CallAsMethod()) { |
3077 EmitContinueIfStrictOrNative(masm, &cont); | 3057 EmitContinueIfStrictOrNative(masm, &cont); |
3078 | 3058 |
3079 // Compute the receiver in sloppy mode. | 3059 // Compute the receiver in sloppy mode. |
3080 __ Peek(x3, argc * kPointerSize); | 3060 __ Peek(x3, argc * kPointerSize); |
3081 | 3061 |
3082 __ JumpIfSmi(x3, &wrap); | 3062 __ JumpIfSmi(x3, &wrap); |
3083 __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt); | 3063 __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt); |
3084 | 3064 |
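
Note: the sloppy-mode receiver handling above takes the wrap path for any receiver that is a Smi or a heap object below FIRST_SPEC_OBJECT_TYPE, i.e. anything that is not already a real object. A compact model of the two branch conditions, with assumed tagging:

    // Hypothetical model of the two receiver checks, not V8 code.
    #include <cstdint>

    // Assumed V8 tagging: Smis have tag bit 0 == 0, heap pointers == 1.
    bool IsSmi(uint64_t tagged) { return (tagged & 1) == 0; }

    // Mirrors __ JumpIfSmi(x3, &wrap) and
    // __ JumpIfObjectType(..., FIRST_SPEC_OBJECT_TYPE, &wrap, lt).
    bool ReceiverNeedsWrapping(uint64_t tagged, int instance_type,
                               int first_spec_object_type) {
      return IsSmi(tagged) || instance_type < first_spec_object_type;
    }

    int main() {
      // A Smi receiver (tag bit 0 clear) takes the wrap path.
      return ReceiverNeedsWrapping(42 << 1, 0, 0x80) ? 0 : 1;
    }
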
(...skipping 55 matching lines...) |
3140 // behavior on MISS. | 3120 // behavior on MISS. |
3141 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x5); | 3121 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x5); |
3142 __ Cmp(function, x5); | 3122 __ Cmp(function, x5); |
3143 __ B(eq, &miss); | 3123 __ B(eq, &miss); |
3144 | 3124 |
3145 // Update stats. | 3125 // Update stats. |
3146 __ Ldr(x4, FieldMemOperand(feedback_vector, with_types_offset)); | 3126 __ Ldr(x4, FieldMemOperand(feedback_vector, with_types_offset)); |
3147 __ Adds(x4, x4, Operand(Smi::FromInt(1))); | 3127 __ Adds(x4, x4, Operand(Smi::FromInt(1))); |
3148 __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset)); | 3128 __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset)); |
3149 | 3129 |
3150 // Store the function. Use a stub since we need a frame for allocation. | 3130 // Store the function. |
3151 // x2 - vector | 3131 __ Add(x4, feedback_vector, |
3152 // x3 - slot | 3132 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
3153 // x1 - function | 3133 __ Str(function, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
3154 { | |
3155 FrameScope scope(masm, StackFrame::INTERNAL); | |
3156 CreateWeakCellStub create_stub(masm->isolate()); | |
3157 __ Push(function); | |
3158 __ CallStub(&create_stub); | |
3159 __ Pop(function); | |
3160 } | |
3161 | 3134 |
| 3135 __ Add(x4, feedback_vector, |
| 3136 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
| 3137 __ Add(x4, x4, FixedArray::kHeaderSize - kHeapObjectTag); |
| 3138 __ Str(function, MemOperand(x4, 0)); |
| 3139 |
| 3140 // Update the write barrier. |
| 3141 __ Mov(x5, function); |
| 3142 __ RecordWrite(feedback_vector, x4, x5, kLRHasNotBeenSaved, kDontSaveFPRegs, |
| 3143 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
3162 __ B(&have_js_function); | 3144 __ B(&have_js_function); |
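
Note: the restored NEW-side sequence stores function into the feedback slot and then records the write. The two stores (3131-3133 and 3135-3138) hit the same effective address, since FieldMemOperand(x4, FixedArray::kHeaderSize) is exactly offset kHeaderSize - kHeapObjectTag, so the first Str appears redundant; the second form is kept because RecordWrite needs the untagged slot address left in x4. The barrier itself exists because storing a heap pointer into the vector may create an old-to-new reference the garbage collector must find without scanning old space. A minimal stand-alone model of the store-plus-barrier pattern, not V8's implementation:

    // Hypothetical model of a generational write barrier, not V8 code.
    #include <cstdint>
    #include <cstdio>
    #include <unordered_set>

    struct Heap {
      std::unordered_set<uintptr_t> remembered_set;  // slots with old->new refs

      // Placeholder predicate; a real heap tests the page the value lives on.
      static bool InNewSpace(uintptr_t value) { return (value & 1) != 0; }

      // Mirrors the __ Str + __ RecordWrite pair above: store the pointer,
      // then record the slot so a scavenge can update it later.
      void WriteField(uintptr_t* slot, uintptr_t value) {
        *slot = value;  // __ Str(function, MemOperand(x4, 0))
        if (InNewSpace(value)) {  // __ RecordWrite(feedback_vector, x4, ...)
          remembered_set.insert(reinterpret_cast<uintptr_t>(slot));
        }
      }
    };

    int main() {
      Heap heap;
      uintptr_t slot = 0;
      heap.WriteField(&slot, 0x1001);  // tagged pointer, "new space" here
      std::printf("remembered slots: %zu\n", heap.remembered_set.size());
    }
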
3163 | 3145 |
3164 // We are here because tracing is on or we encountered a MISS case we can't | 3146 // We are here because tracing is on or we encountered a MISS case we can't |
3165 // handle here. | 3147 // handle here. |
3166 __ bind(&miss); | 3148 __ bind(&miss); |
3167 GenerateMiss(masm); | 3149 GenerateMiss(masm); |
3168 | 3150 |
3169 // the slow case | 3151 // the slow case |
3170 __ bind(&slow_start); | 3152 __ bind(&slow_start); |
3171 | 3153 |
(...skipping 2249 matching lines...) |
5421 kStackUnwindSpace, NULL, spill_offset, | 5403 kStackUnwindSpace, NULL, spill_offset, |
5422 MemOperand(fp, 6 * kPointerSize), NULL); | 5404 MemOperand(fp, 6 * kPointerSize), NULL); |
5423 } | 5405 } |
5424 | 5406 |
5425 | 5407 |
5426 #undef __ | 5408 #undef __ |
5427 | 5409 |
5428 } } // namespace v8::internal | 5410 } } // namespace v8::internal |
5429 | 5411 |
5430 #endif // V8_TARGET_ARCH_ARM64 | 5412 #endif // V8_TARGET_ARCH_ARM64 |