OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" |
8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
9 #include "src/debug/debug.h" | 9 #include "src/debug/debug.h" |
10 #include "src/deoptimizer.h" | 10 #include "src/deoptimizer.h" |
(...skipping 1001 matching lines...)
1012 FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset)); | 1012 FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset)); |
1013 __ Bind(&bytecode_array_loaded); | 1013 __ Bind(&bytecode_array_loaded); |
1014 | 1014 |
1015 // Check whether we should continue to use the interpreter. | 1015 // Check whether we should continue to use the interpreter. |
1016 Label switch_to_different_code_kind; | 1016 Label switch_to_different_code_kind; |
1017 __ Ldr(x0, FieldMemOperand(x0, SharedFunctionInfo::kCodeOffset)); | 1017 __ Ldr(x0, FieldMemOperand(x0, SharedFunctionInfo::kCodeOffset)); |
1018 __ Cmp(x0, Operand(masm->CodeObject())); // Self-reference to this code. | 1018 __ Cmp(x0, Operand(masm->CodeObject())); // Self-reference to this code. |
1019 __ B(ne, &switch_to_different_code_kind); | 1019 __ B(ne, &switch_to_different_code_kind); |
1020 | 1020 |
1021 // Increment invocation count for the function. | 1021 // Increment invocation count for the function. |
1022 __ Ldr(x11, FieldMemOperand(x1, JSFunction::kLiteralsOffset)); | 1022 __ Ldr(x11, FieldMemOperand(x1, JSFunction::kFeedbackVectorOffset)); |
1023 __ Ldr(x11, FieldMemOperand(x11, LiteralsArray::kFeedbackVectorOffset)); | |
1024 __ Ldr(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex * | 1023 __ Ldr(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex * |
1025 kPointerSize + | 1024 kPointerSize + |
1026 TypeFeedbackVector::kHeaderSize)); | 1025 TypeFeedbackVector::kHeaderSize)); |
1027 __ Add(x10, x10, Operand(Smi::FromInt(1))); | 1026 __ Add(x10, x10, Operand(Smi::FromInt(1))); |
1028 __ Str(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex * | 1027 __ Str(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex * |
1029 kPointerSize + | 1028 kPointerSize + |
1030 TypeFeedbackVector::kHeaderSize)); | 1029 TypeFeedbackVector::kHeaderSize)); |
1031 | 1030 |
1032 // Check function data field is actually a BytecodeArray object. | 1031 // Check function data field is actually a BytecodeArray object. |
1033 if (FLAG_debug_code) { | 1032 if (FLAG_debug_code) { |
(...skipping 337 matching lines...)
1371 Register temp = x5; | 1370 Register temp = x5; |
1372 Register array_pointer = x6; | 1371 Register array_pointer = x6; |
1373 | 1372 |
1374 // Does the native context match? | 1373 // Does the native context match? |
1375 __ Add(array_pointer, map, Operand(index, LSL, kPointerSizeLog2)); | 1374 __ Add(array_pointer, map, Operand(index, LSL, kPointerSizeLog2)); |
1376 __ Ldr(temp, FieldMemOperand(array_pointer, | 1375 __ Ldr(temp, FieldMemOperand(array_pointer, |
1377 SharedFunctionInfo::kOffsetToPreviousContext)); | 1376 SharedFunctionInfo::kOffsetToPreviousContext)); |
1378 __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); | 1377 __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); |
1379 __ Cmp(temp, native_context); | 1378 __ Cmp(temp, native_context); |
1380 __ B(ne, &loop_bottom); | 1379 __ B(ne, &loop_bottom); |
1381 // Literals available? | 1380 // Feedback vector available? |
1382 __ Ldr(temp, FieldMemOperand(array_pointer, | 1381 __ Ldr(temp, FieldMemOperand(array_pointer, |
1383 SharedFunctionInfo::kOffsetToPreviousLiterals)); | 1382 SharedFunctionInfo::kOffsetToPreviousLiterals)); |
1384 __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); | 1383 __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); |
1385 __ JumpIfSmi(temp, &gotta_call_runtime); | 1384 __ JumpIfSmi(temp, &gotta_call_runtime); |
1386 | 1385 |
1387 // Save the literals in the closure. | 1386 // Save the feedback vector in the closure. |
1388 __ Str(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset)); | 1387 __ Str(temp, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset)); |
1389 __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, x7, | 1388 __ RecordWriteField(closure, JSFunction::kFeedbackVectorOffset, temp, x7, |
1390 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | 1389 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET, |
1391 OMIT_SMI_CHECK); | 1390 OMIT_SMI_CHECK); |
1392 | 1391 |
1393 // Code available? | 1392 // Code available? |
1394 Register entry = x7; | 1393 Register entry = x7; |
1395 __ Ldr(entry, | 1394 __ Ldr(entry, |
1396 FieldMemOperand(array_pointer, | 1395 FieldMemOperand(array_pointer, |
1397 SharedFunctionInfo::kOffsetToPreviousCachedCode)); | 1396 SharedFunctionInfo::kOffsetToPreviousCachedCode)); |
1398 __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); | 1397 __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); |
1399 __ JumpIfSmi(entry, &try_shared); | 1398 __ JumpIfSmi(entry, &try_shared); |
(...skipping 1827 matching lines...)
3227 __ Unreachable(); | 3226 __ Unreachable(); |
3228 } | 3227 } |
3229 } | 3228 } |
3230 | 3229 |
3231 #undef __ | 3230 #undef __ |
3232 | 3231 |
3233 } // namespace internal | 3232 } // namespace internal |
3234 } // namespace v8 | 3233 } // namespace v8 |
3235 | 3234 |
3236 #endif // V8_TARGET_ARCH_ARM64 | 3235 #endif // V8_TARGET_ARCH_ARM64 |
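
Note on the feedback-vector hunks above: the new code drops one level of indirection, reaching the invocation count and the cached feedback vector directly through JSFunction::kFeedbackVectorOffset instead of going through the literals array first. Below is a minimal C++ sketch of the before/after pointer chain, assuming simplified struct and field names (FeedbackVector, LiteralsArray, invocation_count are illustrative stand-ins, not the real V8 object layouts):

// Sketch only: models the conceptual pointer chains behind the ARM64 hunks.
// The actual builtin operates on raw tagged fields via FieldMemOperand and
// stores the count as a Smi; plain ints are used here for brevity.
struct FeedbackVector { int invocation_count; };
struct LiteralsArray  { FeedbackVector* feedback_vector; };  // old layout
struct JSFunctionOld  { LiteralsArray* literals; };          // closure before this CL
struct JSFunctionNew  { FeedbackVector* feedback_vector; };  // closure after this CL

// Old path: two dependent loads (JSFunction::kLiteralsOffset, then
// LiteralsArray::kFeedbackVectorOffset) before the counter update.
void BumpInvocationCountOld(JSFunctionOld* closure) {
  closure->literals->feedback_vector->invocation_count += 1;
}

// New path: a single load of JSFunction::kFeedbackVectorOffset, then the
// counter update, matching the x11/x10 load/add/store sequence in the first
// hunk above.
void BumpInvocationCountNew(JSFunctionNew* closure) {
  closure->feedback_vector->invocation_count += 1;
}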