| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
| 8 | 8 |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 955 matching lines...) |
| 966 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 966 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 967 // It is important that the following stubs are generated in this order | 967 // It is important that the following stubs are generated in this order |
| 968 // because pregenerated stubs can only call other pregenerated stubs. | 968 // because pregenerated stubs can only call other pregenerated stubs. |
| 969 // RecordWriteStub uses StoreBufferOverflowStub, which in turn uses | 969 // RecordWriteStub uses StoreBufferOverflowStub, which in turn uses |
| 970 // CEntryStub. | 970 // CEntryStub. |
| 971 CEntryStub::GenerateAheadOfTime(isolate); | 971 CEntryStub::GenerateAheadOfTime(isolate); |
| 972 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 972 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 973 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 973 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
| 974 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 974 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
| 975 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 975 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 976 CreateWeakCellStub::GenerateAheadOfTime(isolate); |
| 976 BinaryOpICStub::GenerateAheadOfTime(isolate); | 977 BinaryOpICStub::GenerateAheadOfTime(isolate); |
| 977 StoreRegistersStateStub::GenerateAheadOfTime(isolate); | 978 StoreRegistersStateStub::GenerateAheadOfTime(isolate); |
| 978 RestoreRegistersStateStub::GenerateAheadOfTime(isolate); | 979 RestoreRegistersStateStub::GenerateAheadOfTime(isolate); |
| 979 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 980 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 980 } | 981 } |
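
The ordering comment above is load-bearing: these stubs are compiled ahead of time, and code generated for one of them can only call stubs that already exist, so callees must be generated before their callers. The patch adds CreateWeakCellStub to the pregenerated set, presumably because the CallIC code further down calls it via CallStub from stub code. Its GenerateAheadOfTime is not part of this hunk; a minimal sketch, assuming it follows the same instantiate-then-GetCode pattern as StoreRegistersStateStub just below:

    // Hedged sketch, mirroring the pattern visible below; not this hunk's code.
    void CreateWeakCellStub::GenerateAheadOfTime(Isolate* isolate) {
      CreateWeakCellStub stub(isolate);  // default-keyed stub, no parameters
      stub.GetCode();                    // force compilation into the stub cache
    }
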
| 981 | 982 |
| 982 | 983 |
| 983 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { | 984 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { |
| 984 StoreRegistersStateStub stub(isolate); | 985 StoreRegistersStateStub stub(isolate); |
| 985 stub.GetCode(); | 986 stub.GetCode(); |
| (...skipping 2056 matching lines...) |
| 3042 Register function = x1; | 3043 Register function = x1; |
| 3043 Register feedback_vector = x2; | 3044 Register feedback_vector = x2; |
| 3044 Register index = x3; | 3045 Register index = x3; |
| 3045 Register type = x4; | 3046 Register type = x4; |
| 3046 | 3047 |
| 3047 // The checks. First, does x1 match the recorded monomorphic target? | 3048 // The checks. First, does x1 match the recorded monomorphic target? |
| 3048 __ Add(x4, feedback_vector, | 3049 __ Add(x4, feedback_vector, |
| 3049 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 3050 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
| 3050 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); | 3051 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
| 3051 | 3052 |
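
The two instructions above compute the address of the feedback slot: the slot index arrives as a tagged Smi, and Operand::UntagSmiAndScale folds the untagging and the pointer-size scaling into a single shifted operand. A standalone sketch of the arithmetic, assuming arm64's tag-0, shift-by-32 Smi encoding and 8-byte pointers (kPointerSizeLog2 == 3); the helper name is illustrative, not V8's:

    #include <cstdint>

    // Compute the base-plus-scaled-index part of the slot address.
    uint64_t FeedbackSlotAddress(uint64_t vector_base, int64_t tagged_index) {
      int64_t untagged = tagged_index >> 32;  // arithmetic shift drops the tag
      return vector_base + (static_cast<uint64_t>(untagged) << 3);
      // The FixedArray header offset is added separately, by the
      // FieldMemOperand in the Ldr that follows.
    }
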
| 3052 __ Cmp(x4, function); | 3053 // We don't know that we have a weak cell. We might have a private symbol |
| 3054 // or an AllocationSite, but the memory is safe to examine. |
| 3055 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |
| 3056 // FixedArray. |
| | 3057 // WeakCell::kValueOffset - contains a JSFunction or Smi(0). |
| 3058 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not |
| 3059 // computed, meaning that it can't appear to be a pointer. If the low bit is |
| | 3060 // 0, then the hash is computed, but the 0 bit prevents the field from appearing |
| 3061 // to be a pointer. |
| 3062 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); |
| 3063 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == |
| 3064 WeakCell::kValueOffset && |
| 3065 WeakCell::kValueOffset == Symbol::kHashFieldSlot); |
| 3066 |
| 3067 __ Ldr(x5, FieldMemOperand(x4, WeakCell::kValueOffset)); |
| 3068 __ Cmp(x5, function); |
| 3053 __ B(ne, &extra_checks_or_miss); | 3069 __ B(ne, &extra_checks_or_miss); |
| 3054 | 3070 |
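
The STATIC_ASSERTs above are what make the unconditional load from WeakCell::kValueOffset safe: whichever of the three shapes actually sits in the slot, the word at that offset is a real field and, per the comment, safe to examine. A standalone illustration of the idea with simplified stand-in structs (these are not V8's real layouts; names and fields are illustrative only):

    #include <cstddef>
    #include <cstdint>

    // Three unrelated stand-in types that deliberately keep an examinable
    // word at the same offset, so one load can probe a slot of unknown type.
    struct WeakCellish       { void* map; uint64_t value; };            // JSFunction* or Smi(0)
    struct AllocationSiteish { void* map; uint64_t transition_info; };  // Smi or FixedArray*
    struct Symbolish         { void* map; uint64_t hash_field; };       // raw hash bits

    static_assert(offsetof(WeakCellish, value) ==
                      offsetof(AllocationSiteish, transition_info),
                  "probe offset must line up");
    static_assert(offsetof(WeakCellish, value) == offsetof(Symbolish, hash_field),
                  "probe offset must line up");
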
| | 3071 // The compare above could have been a Smi/Smi comparison. Guard against this |
| 3072 // convincing us that we have a monomorphic JSFunction. |
| 3073 __ JumpIfSmi(function, &extra_checks_or_miss); |
| 3074 |
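
The new JumpIfSmi guard closes the hole the raw word compare opens: a cleared weak cell holds Smi(0) in its value slot, and a bogus call target such as a plain number is also passed as a Smi, so two equal Smi words satisfy the Cmp with no JSFunction involved. A minimal sketch of that false positive, again assuming the tag-0, shift-by-32 Smi encoding:

    #include <cstdint>

    // Tag a 32-bit integer as an arm64-style Smi: payload in the upper word.
    uint64_t TagSmi(int32_t value) {
      return static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32;
    }

    bool LooksMonomorphic(uint64_t slot_word, uint64_t callee_word) {
      // TagSmi(0) == TagSmi(0) holds, yet neither word is a JSFunction;
      // hence the explicit Smi check on the callee before trusting the match.
      return slot_word == callee_word;
    }
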
| 3055 __ bind(&have_js_function); | 3075 __ bind(&have_js_function); |
| 3056 if (CallAsMethod()) { | 3076 if (CallAsMethod()) { |
| 3057 EmitContinueIfStrictOrNative(masm, &cont); | 3077 EmitContinueIfStrictOrNative(masm, &cont); |
| 3058 | 3078 |
| 3059 // Compute the receiver in sloppy mode. | 3079 // Compute the receiver in sloppy mode. |
| 3060 __ Peek(x3, argc * kPointerSize); | 3080 __ Peek(x3, argc * kPointerSize); |
| 3061 | 3081 |
| 3062 __ JumpIfSmi(x3, &wrap); | 3082 __ JumpIfSmi(x3, &wrap); |
| 3063 __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt); | 3083 __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt); |
| 3064 | 3084 |
| (...skipping 55 matching lines...) |
| 3120 // behavior on MISS. | 3140 // behavior on MISS. |
| 3121 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x5); | 3141 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x5); |
| 3122 __ Cmp(function, x5); | 3142 __ Cmp(function, x5); |
| 3123 __ B(eq, &miss); | 3143 __ B(eq, &miss); |
| 3124 | 3144 |
| 3125 // Update stats. | 3145 // Update stats. |
| 3126 __ Ldr(x4, FieldMemOperand(feedback_vector, with_types_offset)); | 3146 __ Ldr(x4, FieldMemOperand(feedback_vector, with_types_offset)); |
| 3127 __ Adds(x4, x4, Operand(Smi::FromInt(1))); | 3147 __ Adds(x4, x4, Operand(Smi::FromInt(1))); |
| 3128 __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset)); | 3148 __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset)); |
| 3129 | 3149 |
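
A note on the Adds above: because the Smi tag bits are zero, tagging is a pure left shift and addition distributes over it, so a tagged counter can be bumped by a tagged constant with one integer add and no untag/retag round trip. A sketch under the same encoding assumption (overflow handling omitted):

    #include <cstdint>

    // Tag(a) + Tag(b) == Tag(a + b) when all tag bits are zero.
    uint64_t SmiIncrement(uint64_t tagged_counter, int32_t delta) {
      uint64_t tagged_delta =
          static_cast<uint64_t>(static_cast<uint32_t>(delta)) << 32;
      return tagged_counter + tagged_delta;  // still a validly tagged Smi
    }
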
| 3130 // Store the function. | 3150 // Store the function. Use a stub since we need a frame for allocation. |
| 3131 __ Add(x4, feedback_vector, | 3151 // x2 - vector |
| 3132 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 3152 // x3 - slot |
| 3133 __ Str(function, FieldMemOperand(x4, FixedArray::kHeaderSize)); | 3153 // x1 - function |
| 3154 { |
| 3155 FrameScope scope(masm, StackFrame::INTERNAL); |
| 3156 CreateWeakCellStub create_stub(masm->isolate()); |
| 3157 __ Push(function); |
| 3158 __ CallStub(&create_stub); |
| 3159 __ Pop(function); |
| 3160 } |
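
This block replaces the deleted direct store plus RecordWrite on the left-hand side: the feedback slot now holds a WeakCell rather than a strong reference, so a cached function can still be collected once nothing else keeps it alive, and the write barrier that RecordWrite used to emit is evidently handled inside the stub, since no store remains here. Creating the cell allocates, which is why a frame is needed. The same sequence restated with the reasoning spelled out (a sketch using the diff's own names, with why-comments added):

    {
      // Allocation may trigger GC, and the GC needs a frame to walk the stack.
      FrameScope scope(masm, StackFrame::INTERNAL);
      CreateWeakCellStub create_stub(masm->isolate());
      __ Push(function);          // GC-visible spill; also the stub's input
      __ CallStub(&create_stub);  // pregenerated in GenerateStubsAheadOfTime
      __ Pop(function);           // reload: GC may have moved the function
    }
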
| 3134 | 3161 |
| 3135 __ Add(x4, feedback_vector, | |
| 3136 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | |
| 3137 __ Add(x4, x4, FixedArray::kHeaderSize - kHeapObjectTag); | |
| 3138 __ Str(function, MemOperand(x4, 0)); | |
| 3139 | |
| 3140 // Update the write barrier. | |
| 3141 __ Mov(x5, function); | |
| 3142 __ RecordWrite(feedback_vector, x4, x5, kLRHasNotBeenSaved, kDontSaveFPRegs, | |
| 3143 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
| 3144 __ B(&have_js_function); | 3162 __ B(&have_js_function); |
| 3145 | 3163 |
| 3146 // We are here because tracing is on or we encountered a MISS case we can't | 3164 // We are here because tracing is on or we encountered a MISS case we can't |
| 3147 // handle here. | 3165 // handle here. |
| 3148 __ bind(&miss); | 3166 __ bind(&miss); |
| 3149 GenerateMiss(masm); | 3167 GenerateMiss(masm); |
| 3150 | 3168 |
| 3151 // the slow case | 3169 // the slow case |
| 3152 __ bind(&slow_start); | 3170 __ bind(&slow_start); |
| 3153 | 3171 |
| (...skipping 2249 matching lines...) |
| 5403 kStackUnwindSpace, NULL, spill_offset, | 5421 kStackUnwindSpace, NULL, spill_offset, |
| 5404 MemOperand(fp, 6 * kPointerSize), NULL); | 5422 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5405 } | 5423 } |
| 5406 | 5424 |
| 5407 | 5425 |
| 5408 #undef __ | 5426 #undef __ |
| 5409 | 5427 |
| 5410 } } // namespace v8::internal | 5428 } } // namespace v8::internal |
| 5411 | 5429 |
| 5412 #endif // V8_TARGET_ARCH_ARM64 | 5430 #endif // V8_TARGET_ARCH_ARM64 |