OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 2915 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2926 | 2926 |
2927 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { | 2927 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { |
2928 __ Ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 2928 __ Ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
2929 __ Ldr(vector, FieldMemOperand(vector, | 2929 __ Ldr(vector, FieldMemOperand(vector, |
2930 JSFunction::kSharedFunctionInfoOffset)); | 2930 JSFunction::kSharedFunctionInfoOffset)); |
2931 __ Ldr(vector, FieldMemOperand(vector, | 2931 __ Ldr(vector, FieldMemOperand(vector, |
2932 SharedFunctionInfo::kFeedbackVectorOffset)); | 2932 SharedFunctionInfo::kFeedbackVectorOffset)); |
2933 } | 2933 } |
2934 | 2934 |
2935 | 2935 |
2936 void CallIC_ArrayStub::Generate(MacroAssembler* masm) { | 2936 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
2937 // x1 - function | 2937 // x1 - function |
2938 // x3 - slot id | 2938 // x3 - slot id |
2939 // x2 - vector | 2939 // x2 - vector |
2940 Label miss; | 2940 // x4 - allocation site (loaded from vector[slot]) |
2941 Register function = x1; | 2941 Register function = x1; |
2942 Register feedback_vector = x2; | 2942 Register feedback_vector = x2; |
2943 Register index = x3; | 2943 Register index = x3; |
2944 Register scratch = x4; | 2944 Register allocation_site = x4; |
| 2945 Register scratch = x5; |
2945 | 2946 |
2946 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch); | 2947 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch); |
2947 __ Cmp(function, scratch); | 2948 __ Cmp(function, scratch); |
2948 __ B(ne, &miss); | 2949 __ B(ne, miss); |
2949 | 2950 |
2950 __ Mov(x0, Operand(arg_count())); | 2951 __ Mov(x0, Operand(arg_count())); |
2951 | 2952 |
2952 __ Add(scratch, feedback_vector, | |
2953 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | |
2954 __ Ldr(scratch, FieldMemOperand(scratch, FixedArray::kHeaderSize)); | |
2955 | |
2956 // Verify that scratch contains an AllocationSite | |
2957 Register map = x5; | |
2958 __ Ldr(map, FieldMemOperand(scratch, HeapObject::kMapOffset)); | |
2959 __ JumpIfNotRoot(map, Heap::kAllocationSiteMapRootIndex, &miss); | |
2960 | |
2961 // Increment the call count for monomorphic function calls. | 2953 // Increment the call count for monomorphic function calls. |
2962 __ Add(feedback_vector, feedback_vector, | 2954 __ Add(feedback_vector, feedback_vector, |
2963 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 2955 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
2964 __ Add(feedback_vector, feedback_vector, | 2956 __ Add(feedback_vector, feedback_vector, |
2965 Operand(FixedArray::kHeaderSize + kPointerSize)); | 2957 Operand(FixedArray::kHeaderSize + kPointerSize)); |
2966 __ Ldr(index, FieldMemOperand(feedback_vector, 0)); | 2958 __ Ldr(index, FieldMemOperand(feedback_vector, 0)); |
2967 __ Add(index, index, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2959 __ Add(index, index, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
2968 __ Str(index, FieldMemOperand(feedback_vector, 0)); | 2960 __ Str(index, FieldMemOperand(feedback_vector, 0)); |
2969 | 2961 |
2970 Register allocation_site = feedback_vector; | 2962 // Set up arguments for the array constructor stub. |
2971 Register original_constructor = index; | 2963 Register allocation_site_arg = feedback_vector; |
2972 __ Mov(allocation_site, scratch); | 2964 Register original_constructor_arg = index; |
2973 __ Mov(original_constructor, function); | 2965 __ Mov(allocation_site_arg, allocation_site); |
| 2966 __ Mov(original_constructor_arg, function); |
2974 ArrayConstructorStub stub(masm->isolate(), arg_count()); | 2967 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
2975 __ TailCallStub(&stub); | 2968 __ TailCallStub(&stub); |
2976 | |
2977 __ bind(&miss); | |
2978 GenerateMiss(masm); | |
2979 | |
2980 // The slow case, we need this no matter what to complete a call after a miss. | |
2981 __ Mov(x0, arg_count()); | |
2982 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
2983 } | 2969 } |
2984 | 2970 |
2985 | 2971 |
2986 void CallICStub::Generate(MacroAssembler* masm) { | 2972 void CallICStub::Generate(MacroAssembler* masm) { |
2987 ASM_LOCATION("CallICStub"); | 2973 ASM_LOCATION("CallICStub"); |
2988 | 2974 |
2989 // x1 - function | 2975 // x1 - function |
2990 // x3 - slot id (Smi) | 2976 // x3 - slot id (Smi) |
2991 // x2 - vector | 2977 // x2 - vector |
2992 const int with_types_offset = | 2978 const int with_types_offset = |
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3060 | 3046 |
3061 __ bind(&slow); | 3047 __ bind(&slow); |
3062 EmitSlowCase(masm, argc); | 3048 EmitSlowCase(masm, argc); |
3063 | 3049 |
3064 if (CallAsMethod()) { | 3050 if (CallAsMethod()) { |
3065 __ bind(&wrap); | 3051 __ bind(&wrap); |
3066 EmitWrapCase(masm, argc, &cont); | 3052 EmitWrapCase(masm, argc, &cont); |
3067 } | 3053 } |
3068 | 3054 |
3069 __ bind(&extra_checks_or_miss); | 3055 __ bind(&extra_checks_or_miss); |
3070 Label uninitialized, miss; | 3056 Label uninitialized, miss, not_allocation_site; |
3071 | 3057 |
3072 __ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &slow_start); | 3058 __ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &slow_start); |
3073 | 3059 |
| 3060 __ Ldr(x5, FieldMemOperand(x4, HeapObject::kMapOffset)); |
| 3061 __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, ¬_allocation_site); |
| 3062 |
| 3063 HandleArrayCase(masm, &miss); |
| 3064 |
| 3065 __ bind(¬_allocation_site); |
| 3066 |
3074 // The following cases attempt to handle MISS cases without going to the | 3067 // The following cases attempt to handle MISS cases without going to the |
3075 // runtime. | 3068 // runtime. |
3076 if (FLAG_trace_ic) { | 3069 if (FLAG_trace_ic) { |
3077 __ jmp(&miss); | 3070 __ jmp(&miss); |
3078 } | 3071 } |
3079 | 3072 |
3080 __ JumpIfRoot(x4, Heap::kuninitialized_symbolRootIndex, &miss); | 3073 __ JumpIfRoot(x4, Heap::kuninitialized_symbolRootIndex, &miss); |
3081 | 3074 |
3082 // We are going megamorphic. If the feedback is a JSFunction, it is fine | 3075 // We are going megamorphic. If the feedback is a JSFunction, it is fine |
3083 // to handle it here. More complex cases are dealt with in the runtime. | 3076 // to handle it here. More complex cases are dealt with in the runtime. |
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3154 | 3147 |
3155 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 3148 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
3156 ASM_LOCATION("CallICStub[Miss]"); | 3149 ASM_LOCATION("CallICStub[Miss]"); |
3157 | 3150 |
3158 FrameScope scope(masm, StackFrame::INTERNAL); | 3151 FrameScope scope(masm, StackFrame::INTERNAL); |
3159 | 3152 |
3160 // Push the receiver and the function and feedback info. | 3153 // Push the receiver and the function and feedback info. |
3161 __ Push(x1, x2, x3); | 3154 __ Push(x1, x2, x3); |
3162 | 3155 |
3163 // Call the entry. | 3156 // Call the entry. |
3164 Runtime::FunctionId id = GetICState() == DEFAULT | 3157 __ CallRuntime(Runtime::kCallIC_Miss, 3); |
3165 ? Runtime::kCallIC_Miss | |
3166 : Runtime::kCallIC_Customization_Miss; | |
3167 __ CallRuntime(id, 3); | |
3168 | 3158 |
3170 // Move result to x1 and exit the internal frame. | 3159 // Move result to x1 and exit the internal frame. |
3170 __ Mov(x1, x0); | 3160 __ Mov(x1, x0); |
3171 } | 3161 } |
3172 | 3162 |
3173 | 3163 |
3174 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 3164 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
3175 // If the receiver is a smi trigger the non-string case. | 3165 // If the receiver is a smi trigger the non-string case. |
3176 if (check_mode_ == RECEIVER_IS_UNKNOWN) { | 3166 if (check_mode_ == RECEIVER_IS_UNKNOWN) { |
3177 __ JumpIfSmi(object_, receiver_not_string_); | 3167 __ JumpIfSmi(object_, receiver_not_string_); |
(...skipping 1221 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4399 } | 4389 } |
4400 | 4390 |
4401 | 4391 |
4402 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 4392 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
4403 EmitLoadTypeFeedbackVector(masm, x2); | 4393 EmitLoadTypeFeedbackVector(masm, x2); |
4404 CallICStub stub(isolate(), state()); | 4394 CallICStub stub(isolate(), state()); |
4405 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4395 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
4406 } | 4396 } |
4407 | 4397 |
4408 | 4398 |
4409 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { | |
4410 EmitLoadTypeFeedbackVector(masm, x2); | |
4411 CallIC_ArrayStub stub(isolate(), state()); | |
4412 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | |
4413 } | |
4414 | |
4415 | |
4416 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } | 4399 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } |
4417 | 4400 |
4418 | 4401 |
4419 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) { | 4402 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4420 GenerateImpl(masm, true); | 4403 GenerateImpl(masm, true); |
4421 } | 4404 } |
4422 | 4405 |
4423 | 4406 |
4424 static void HandleArrayCases(MacroAssembler* masm, Register feedback, | 4407 static void HandleArrayCases(MacroAssembler* masm, Register feedback, |
4425 Register receiver_map, Register scratch1, | 4408 Register receiver_map, Register scratch1, |
(...skipping 1586 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6012 MemOperand(fp, 6 * kPointerSize), NULL); | 5995 MemOperand(fp, 6 * kPointerSize), NULL); |
6013 } | 5996 } |
6014 | 5997 |
6015 | 5998 |
6016 #undef __ | 5999 #undef __ |
6017 | 6000 |
6018 } // namespace internal | 6001 } // namespace internal |
6019 } // namespace v8 | 6002 } // namespace v8 |
6020 | 6003 |
6021 #endif // V8_TARGET_ARCH_ARM64 | 6004 #endif // V8_TARGET_ARCH_ARM64 |
OLD | NEW |