OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 1918 matching lines...)
1929 } | 1929 } |
1930 | 1930 |
1931 | 1931 |
1932 void CallICStub::Generate(MacroAssembler* masm) { | 1932 void CallICStub::Generate(MacroAssembler* masm) { |
1933 // ----------- S t a t e ------------- | 1933 // ----------- S t a t e ------------- |
1934 // -- rdi - function | 1934 // -- rdi - function |
1935 // -- rdx - slot id | 1935 // -- rdx - slot id |
1936 // -- rbx - vector | 1936 // -- rbx - vector |
1937 // ----------------------------------- | 1937 // ----------------------------------- |
1938 Isolate* isolate = masm->isolate(); | 1938 Isolate* isolate = masm->isolate(); |
1939 const int with_types_offset = | |
1940 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | |
1941 const int generic_offset = | |
1942 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | |
1943 Label extra_checks_or_miss, call, call_function; | 1939 Label extra_checks_or_miss, call, call_function; |
1944 int argc = arg_count(); | 1940 int argc = arg_count(); |
1945 StackArgumentsAccessor args(rsp, argc); | 1941 StackArgumentsAccessor args(rsp, argc); |
1946 ParameterCount actual(argc); | 1942 ParameterCount actual(argc); |
1947 | 1943 |
1948 // The checks. First, does rdi match the recorded monomorphic target? | 1944 // The checks. First, does rdi match the recorded monomorphic target? |
1949 __ SmiToInteger32(rdx, rdx); | 1945 __ SmiToInteger32(rdx, rdx); |
1950 __ movp(rcx, | 1946 __ movp(rcx, |
1951 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); | 1947 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); |
1952 | 1948 |
(...skipping 53 matching lines...)
2006 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate)); | 2002 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate)); |
2007 __ j(equal, &uninitialized); | 2003 __ j(equal, &uninitialized); |
2008 | 2004 |
2009 // We are going megamorphic. If the feedback is a JSFunction, it is fine | 2005 // We are going megamorphic. If the feedback is a JSFunction, it is fine |
2010 // to handle it here. More complex cases are dealt with in the runtime. | 2006 // to handle it here. More complex cases are dealt with in the runtime. |
2011 __ AssertNotSmi(rcx); | 2007 __ AssertNotSmi(rcx); |
2012 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx); | 2008 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx); |
2013 __ j(not_equal, &miss); | 2009 __ j(not_equal, &miss); |
2014 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), | 2010 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), |
2015 TypeFeedbackVector::MegamorphicSentinel(isolate)); | 2011 TypeFeedbackVector::MegamorphicSentinel(isolate)); |
2016 // We have to update statistics for runtime profiling. | |
2017 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(-1)); | |
2018 __ SmiAddConstant(FieldOperand(rbx, generic_offset), Smi::FromInt(1)); | |
2019 | 2012 |
2020 __ bind(&call); | 2013 __ bind(&call); |
2021 __ Set(rax, argc); | 2014 __ Set(rax, argc); |
2022 __ Jump(masm->isolate()->builtins()->Call(convert_mode()), | 2015 __ Jump(masm->isolate()->builtins()->Call(convert_mode()), |
2023 RelocInfo::CODE_TARGET); | 2016 RelocInfo::CODE_TARGET); |
2024 | 2017 |
2025 __ bind(&uninitialized); | 2018 __ bind(&uninitialized); |
2026 | 2019 |
2027 // We are going monomorphic, provided we actually have a JSFunction. | 2020 // We are going monomorphic, provided we actually have a JSFunction. |
2028 __ JumpIfSmi(rdi, &miss); | 2021 __ JumpIfSmi(rdi, &miss); |
2029 | 2022 |
2030 // Goto miss case if we do not have a function. | 2023 // Goto miss case if we do not have a function. |
2031 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 2024 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
2032 __ j(not_equal, &miss); | 2025 __ j(not_equal, &miss); |
2033 | 2026 |
2034 // Make sure the function is not the Array() function, which requires special | 2027 // Make sure the function is not the Array() function, which requires special |
2035 // behavior on MISS. | 2028 // behavior on MISS. |
2036 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx); | 2029 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx); |
2037 __ cmpp(rdi, rcx); | 2030 __ cmpp(rdi, rcx); |
2038 __ j(equal, &miss); | 2031 __ j(equal, &miss); |
2039 | 2032 |
2040 // Make sure the function belongs to the same native context. | 2033 // Make sure the function belongs to the same native context. |
2041 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); | 2034 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); |
2042 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX)); | 2035 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX)); |
2043 __ cmpp(rcx, NativeContextOperand()); | 2036 __ cmpp(rcx, NativeContextOperand()); |
2044 __ j(not_equal, &miss); | 2037 __ j(not_equal, &miss); |
2045 | 2038 |
2046 // Update stats. | |
2047 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1)); | |
2048 | |
2049 // Initialize the call counter. | 2039 // Initialize the call counter. |
2050 __ Move(FieldOperand(rbx, rdx, times_pointer_size, | 2040 __ Move(FieldOperand(rbx, rdx, times_pointer_size, |
2051 FixedArray::kHeaderSize + kPointerSize), | 2041 FixedArray::kHeaderSize + kPointerSize), |
2052 Smi::FromInt(CallICNexus::kCallCountIncrement)); | 2042 Smi::FromInt(CallICNexus::kCallCountIncrement)); |
2053 | 2043 |
2054 // Store the function. Use a stub since we need a frame for allocation. | 2044 // Store the function. Use a stub since we need a frame for allocation. |
2055 // rbx - vector | 2045 // rbx - vector |
2056 // rdx - slot (needs to be in smi form) | 2046 // rdx - slot (needs to be in smi form) |
2057 // rdi - function | 2047 // rdi - function |
2058 { | 2048 { |
(...skipping 3344 matching lines...)
5403 kStackSpace, nullptr, return_value_operand, NULL); | 5393 kStackSpace, nullptr, return_value_operand, NULL); |
5404 } | 5394 } |
5405 | 5395 |
5406 | 5396 |
5407 #undef __ | 5397 #undef __ |
5408 | 5398 |
5409 } // namespace internal | 5399 } // namespace internal |
5410 } // namespace v8 | 5400 } // namespace v8 |
5411 | 5401 |
5412 #endif // V8_TARGET_ARCH_X64 | 5402 #endif // V8_TARGET_ARCH_X64 |
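
For readers following the state transitions in the CallIC stub above, here is a minimal, self-contained C++ sketch of the feedback-slot layout the stub manipulates: element `slot` of the feedback vector holds the recorded target or a sentinel, and element `slot + 1` holds the call count, initialized to CallICNexus::kCallCountIncrement when the call site goes monomorphic. This is an illustrative model only, assuming simplified sentinel values and counter semantics; the names and constants below (FeedbackVectorModel, kUninitializedSentinel, kMegamorphicSentinel, and the increment value) are hypothetical stand-ins, not V8 API.

    // Illustrative model of the CallIC feedback-slot transitions shown in the
    // stub above. Not V8 code; sentinel values and the increment are assumed.
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    using Target = std::intptr_t;  // stand-in for a tagged JSFunction pointer

    constexpr Target kUninitializedSentinel = -1;  // models TypeFeedbackVector::UninitializedSentinel
    constexpr Target kMegamorphicSentinel = -2;    // models TypeFeedbackVector::MegamorphicSentinel
    constexpr std::intptr_t kCallCountIncrement = 1;  // models CallICNexus::kCallCountIncrement (value assumed)

    struct FeedbackVectorModel {
      // Element `slot` holds the recorded target (or a sentinel); element
      // `slot + 1` holds the call count, mirroring the stub's operands at
      // FixedArray::kHeaderSize and FixedArray::kHeaderSize + kPointerSize.
      std::vector<std::intptr_t> elements;

      void HandleCall(int slot, Target fn) {
        std::intptr_t& recorded = elements[slot];
        std::intptr_t& count = elements[slot + 1];
        if (recorded == fn) {
          count += kCallCountIncrement;  // monomorphic hit: bump the per-slot call count
          return;
        }
        if (recorded == kUninitializedSentinel) {
          // Going monomorphic: initialize the call counter, then record the
          // target. (The real stub defers the store to a helper stub because
          // it needs a frame for allocation; the Array() function and native
          // context checks are omitted in this model.)
          count = kCallCountIncrement;
          recorded = fn;
          return;
        }
        // A recorded target that no longer matches: go megamorphic. After this
        // patch, no with_types/generic counters are updated on this transition.
        recorded = kMegamorphicSentinel;
      }
    };

    int main() {
      FeedbackVectorModel vector{std::vector<std::intptr_t>(4, kUninitializedSentinel)};
      const Target f = 0x1000, g = 0x2000;
      vector.HandleCall(0, f);  // uninitialized -> monomorphic, count = 1
      vector.HandleCall(0, f);  // monomorphic hit, count = 2
      vector.HandleCall(0, g);  // mismatch -> megamorphic
      std::printf("slot state: %ld, call count: %ld\n",
                  static_cast<long>(vector.elements[0]),
                  static_cast<long>(vector.elements[1]));
    }

The point the model tries to make visible is the one in the diff itself: both the megamorphic and the monomorphic transitions now touch only the per-slot entries, with the vector-wide with_types/generic counter updates removed.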