| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 1947 matching lines...) |
| 1958 StringCharLoadGenerator::Generate(masm, | 1958 StringCharLoadGenerator::Generate(masm, |
| 1959 object_, | 1959 object_, |
| 1960 index_, | 1960 index_, |
| 1961 result_, | 1961 result_, |
| 1962 &call_runtime_); | 1962 &call_runtime_); |
| 1963 | 1963 |
| 1964 __ SmiTag(result_); | 1964 __ SmiTag(result_); |
| 1965 __ bind(&exit_); | 1965 __ bind(&exit_); |
| 1966 } | 1966 } |
| 1967 | 1967 |
| 1968 // Note: feedback_vector and slot are clobbered after the call. |
| 1969 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector, |
| 1970 Register slot) { |
| 1971 __ dsrl(t0, slot, 32 - kPointerSizeLog2); |
| 1972 __ Daddu(slot, feedback_vector, Operand(t0)); |
| 1973 __ ld(t0, FieldMemOperand(slot, FixedArray::kHeaderSize + kPointerSize)); |
| 1974 __ Daddu(t0, t0, Operand(Smi::FromInt(1))); |
| 1975 __ sd(t0, FieldMemOperand(slot, FixedArray::kHeaderSize + kPointerSize)); |
| 1976 } |
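Note: the shift-and-add above turns a Smi-tagged slot index directly into a byte offset. A minimal self-contained sketch of that arithmetic, assuming MIPS64-style Smis (32-bit payload in the upper word); SmiTag and SlotByteOffset are illustrative names, not V8 API:

    #include <cassert>
    #include <cstdint>

    constexpr int kPointerSizeLog2 = 3;  // 8-byte pointers on MIPS64

    // A 64-bit Smi keeps its 32-bit payload in the upper word.
    uint64_t SmiTag(int32_t value) {
      return static_cast<uint64_t>(value) << 32;
    }

    // dsrl by (32 - kPointerSizeLog2) maps the Smi straight to
    // index * kPointerSize, i.e. the element's byte offset.
    uint64_t SlotByteOffset(uint64_t smi_slot) {
      return smi_slot >> (32 - kPointerSizeLog2);
    }

    int main() {
      assert(SlotByteOffset(SmiTag(5)) == 5 * 8);
      // Adding raw Smi words also bumps the payload by one, which is why
      // Daddu(t0, t0, Operand(Smi::FromInt(1))) needs no untagging.
      assert(SmiTag(7) + SmiTag(1) == SmiTag(8));
      return 0;
    }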
| 1968 | 1977 |
| 1969 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | 1978 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
| 1970 // a1 - function | 1979 // a1 - function |
| 1971 // a3 - slot id | 1980 // a3 - slot id |
| 1972 // a2 - vector | 1981 // a2 - vector |
| 1973 // a4 - allocation site (loaded from vector[slot]) | 1982 // a4 - allocation site (loaded from vector[slot]) |
| 1974 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at); | 1983 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at); |
| 1975 __ Branch(miss, ne, a1, Operand(at)); | 1984 __ Branch(miss, ne, a1, Operand(at)); |
| 1976 | 1985 |
| 1977 __ li(a0, Operand(arg_count())); | 1986 __ li(a0, Operand(arg_count())); |
| 1978 | 1987 |
| 1979 // Increment the call count for monomorphic function calls. | 1988 // Increment the call count for monomorphic function calls. |
| 1980 __ dsrl(t0, a3, 32 - kPointerSizeLog2); | 1989 IncrementCallCount(masm, a2, a3); |
| 1981 __ Daddu(a3, a2, Operand(t0)); | |
| 1982 __ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize)); | |
| 1983 __ Daddu(t0, t0, Operand(Smi::FromInt(1))); | |
| 1984 __ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize)); | |
| 1985 | 1990 |
| 1986 __ mov(a2, a4); | 1991 __ mov(a2, a4); |
| 1987 __ mov(a3, a1); | 1992 __ mov(a3, a1); |
| 1988 ArrayConstructorStub stub(masm->isolate(), arg_count()); | 1993 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
| 1989 __ TailCallStub(&stub); | 1994 __ TailCallStub(&stub); |
| 1990 } | 1995 } |
| 1991 | 1996 |
| 1992 | 1997 |
| 1993 void CallICStub::Generate(MacroAssembler* masm) { | 1998 void CallICStub::Generate(MacroAssembler* masm) { |
| 1994 // a1 - function | 1999 // a1 - function |
| 1995 // a3 - slot id (Smi) | 2000 // a3 - slot id (Smi) |
| 1996 // a2 - vector | 2001 // a2 - vector |
| 1997 Label extra_checks_or_miss, call, call_function; | 2002 Label extra_checks_or_miss, call, call_function, call_count_incremented; |
| 1998 int argc = arg_count(); | 2003 int argc = arg_count(); |
| 1999 ParameterCount actual(argc); | 2004 ParameterCount actual(argc); |
| 2000 | 2005 |
| 2001 // The checks. First, does a1 match the recorded monomorphic target? | 2006 // The checks. First, does a1 match the recorded monomorphic target? |
| 2002 __ dsrl(a4, a3, 32 - kPointerSizeLog2); | 2007 __ dsrl(a4, a3, 32 - kPointerSizeLog2); |
| 2003 __ Daddu(a4, a2, Operand(a4)); | 2008 __ Daddu(a4, a2, Operand(a4)); |
| 2004 __ ld(a4, FieldMemOperand(a4, FixedArray::kHeaderSize)); | 2009 __ ld(a4, FieldMemOperand(a4, FixedArray::kHeaderSize)); |
| 2005 | 2010 |
| 2006 // We don't know that we have a weak cell. We might have a private symbol | 2011 // We don't know that we have a weak cell. We might have a private symbol |
| 2007 // or an AllocationSite, but the memory is safe to examine. | 2012 // or an AllocationSite, but the memory is safe to examine. |
| 2008 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | 2013 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |
| 2009 // FixedArray. | 2014 // FixedArray. |
| 2010 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | 2015 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) |
| 2011 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not | 2016 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not |
| 2012 // computed, meaning that it can't appear to be a pointer. If the low bit is | 2017 // computed, meaning that it can't appear to be a pointer. If the low bit is |
| 2013 // 0, then hash is computed, but the 0 bit prevents the field from appearing | 2018 // 0, then hash is computed, but the 0 bit prevents the field from appearing |
| 2014 // to be a pointer. | 2019 // to be a pointer. |
| 2015 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); | 2020 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); |
| 2016 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == | 2021 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == |
| 2017 WeakCell::kValueOffset && | 2022 WeakCell::kValueOffset && |
| 2018 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | 2023 WeakCell::kValueOffset == Symbol::kHashFieldSlot); |
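Note: the safety argument in the comment block above rests on all three candidate types placing a benign word at the same offset. A self-contained analogue of that layout check (illustrative struct layouts, not V8's real ones):

    #include <cstddef>

    // Three unrelated feedback kinds; each keeps a safely-loadable word at
    // the same offset, so a blind ld of that word never misreads memory.
    struct WeakCell       { void* value; };            // JSFunction* or Smi(0)
    struct AllocationSite { void* transition_info; };  // Smi or FixedArray*
    struct Symbol         { size_t hash_field; };      // never looks like a pointer

    static_assert(offsetof(WeakCell, value) ==
                      offsetof(AllocationSite, transition_info),
                  "the fields must overlap for the blind load to be safe");
    static_assert(offsetof(AllocationSite, transition_info) ==
                      offsetof(Symbol, hash_field),
                  "the fields must overlap for the blind load to be safe");

    int main() { return 0; }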
| 2019 | 2024 |
| 2020 __ ld(a5, FieldMemOperand(a4, WeakCell::kValueOffset)); | 2025 __ ld(a5, FieldMemOperand(a4, WeakCell::kValueOffset)); |
| 2021 __ Branch(&extra_checks_or_miss, ne, a1, Operand(a5)); | 2026 __ Branch(&extra_checks_or_miss, ne, a1, Operand(a5)); |
| 2022 | 2027 |
| 2023 // The compare above could have been a SMI/SMI comparison. Guard against this | 2028 // The compare above could have been a SMI/SMI comparison. Guard against this |
| 2024 // convincing us that we have a monomorphic JSFunction. | 2029 // convincing us that we have a monomorphic JSFunction. |
| 2025 __ JumpIfSmi(a1, &extra_checks_or_miss); | 2030 __ JumpIfSmi(a1, &extra_checks_or_miss); |
| 2026 | 2031 |
| 2032 __ bind(&call_function); |
| 2027 // Increment the call count for monomorphic function calls. | 2033 // Increment the call count for monomorphic function calls. |
| 2028 __ dsrl(t0, a3, 32 - kPointerSizeLog2); | 2034 IncrementCallCount(masm, a2, a3); |
| 2029 __ Daddu(a3, a2, Operand(t0)); | |
| 2030 __ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize)); | |
| 2031 __ Daddu(t0, t0, Operand(Smi::FromInt(1))); | |
| 2032 __ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize)); | |
| 2033 | 2035 |
| 2034 __ bind(&call_function); | |
| 2035 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), | 2036 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), |
| 2036 tail_call_mode()), | 2037 tail_call_mode()), |
| 2037 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg), | 2038 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg), |
| 2038 USE_DELAY_SLOT); | 2039 USE_DELAY_SLOT); |
| 2039 __ li(a0, Operand(argc)); // In delay slot. | 2040 __ li(a0, Operand(argc)); // In delay slot. |
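Note: USE_DELAY_SLOT asks the macro assembler to place the following li in the branch delay slot, so the argument count is materialized on the way into the builtin. A toy straight-line model of that ordering (stand-in names, not V8 code):

    #include <cstdio>

    static int a0 = 0;  // stand-in for the argument-count register

    static void CallFunctionBuiltin() { std::printf("argc = %d\n", a0); }

    int main() {
      const int argc = 2;     // stand-in for the stub's arg_count()
      // MIPS executes the delay-slot instruction before the branch lands,
      // so the li placed after the jump still takes effect at the target:
      a0 = argc;              // delay-slot instruction runs first
      CallFunctionBuiltin();  // then control reaches the branch target
      return 0;
    }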
| 2040 | 2041 |
| 2041 __ bind(&extra_checks_or_miss); | 2042 __ bind(&extra_checks_or_miss); |
| 2042 Label uninitialized, miss, not_allocation_site; | 2043 Label uninitialized, miss, not_allocation_site; |
| 2043 | 2044 |
| 2044 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 2045 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| (...skipping 21 matching lines...) |
| 2066 // to handle it here. More complex cases are dealt with in the runtime. | 2067 // to handle it here. More complex cases are dealt with in the runtime. |
| 2067 __ AssertNotSmi(a4); | 2068 __ AssertNotSmi(a4); |
| 2068 __ GetObjectType(a4, a5, a5); | 2069 __ GetObjectType(a4, a5, a5); |
| 2069 __ Branch(&miss, ne, a5, Operand(JS_FUNCTION_TYPE)); | 2070 __ Branch(&miss, ne, a5, Operand(JS_FUNCTION_TYPE)); |
| 2070 __ dsrl(a4, a3, 32 - kPointerSizeLog2); | 2071 __ dsrl(a4, a3, 32 - kPointerSizeLog2); |
| 2071 __ Daddu(a4, a2, Operand(a4)); | 2072 __ Daddu(a4, a2, Operand(a4)); |
| 2072 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 2073 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| 2073 __ sd(at, FieldMemOperand(a4, FixedArray::kHeaderSize)); | 2074 __ sd(at, FieldMemOperand(a4, FixedArray::kHeaderSize)); |
| 2074 | 2075 |
| 2075 __ bind(&call); | 2076 __ bind(&call); |
| 2077 IncrementCallCount(masm, a2, a3); |
| 2078 |
| 2079 __ bind(&call_count_incremented); |
| 2080 |
| 2076 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), | 2081 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), |
| 2077 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg), | 2082 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg), |
| 2078 USE_DELAY_SLOT); | 2083 USE_DELAY_SLOT); |
| 2079 __ li(a0, Operand(argc)); // In delay slot. | 2084 __ li(a0, Operand(argc)); // In delay slot. |
| 2080 | 2085 |
| 2081 __ bind(&uninitialized); | 2086 __ bind(&uninitialized); |
| 2082 | 2087 |
| 2083 // We are going monomorphic, provided we actually have a JSFunction. | 2088 // We are going monomorphic, provided we actually have a JSFunction. |
| 2084 __ JumpIfSmi(a1, &miss); | 2089 __ JumpIfSmi(a1, &miss); |
| 2085 | 2090 |
| 2086 // Go to the miss case if we do not have a function. | 2091 // Go to the miss case if we do not have a function. |
| 2087 __ GetObjectType(a1, a4, a4); | 2092 __ GetObjectType(a1, a4, a4); |
| 2088 __ Branch(&miss, ne, a4, Operand(JS_FUNCTION_TYPE)); | 2093 __ Branch(&miss, ne, a4, Operand(JS_FUNCTION_TYPE)); |
| 2089 | 2094 |
| 2090 // Make sure the function is not the Array() function, which requires special | 2095 // Make sure the function is not the Array() function, which requires special |
| 2091 // behavior on MISS. | 2096 // behavior on MISS. |
| 2092 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a4); | 2097 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a4); |
| 2093 __ Branch(&miss, eq, a1, Operand(a4)); | 2098 __ Branch(&miss, eq, a1, Operand(a4)); |
| 2094 | 2099 |
| 2095 // Make sure the function belongs to the same native context. | 2100 // Make sure the function belongs to the same native context. |
| 2096 __ ld(t0, FieldMemOperand(a1, JSFunction::kContextOffset)); | 2101 __ ld(t0, FieldMemOperand(a1, JSFunction::kContextOffset)); |
| 2097 __ ld(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX)); | 2102 __ ld(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX)); |
| 2098 __ ld(t1, NativeContextMemOperand()); | 2103 __ ld(t1, NativeContextMemOperand()); |
| 2099 __ Branch(&miss, ne, t0, Operand(t1)); | 2104 __ Branch(&miss, ne, t0, Operand(t1)); |
| 2100 | 2105 |
| 2101 // Initialize the call counter. | |
| 2102 __ dsrl(at, a3, 32 - kPointerSizeLog2); | |
| 2103 __ Daddu(at, a2, Operand(at)); | |
| 2104 __ li(t0, Operand(Smi::FromInt(1))); | |
| 2105 __ sd(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); | |
| 2106 | |
| 2107 // Store the function. Use a stub since we need a frame for allocation. | 2106 // Store the function. Use a stub since we need a frame for allocation. |
| 2108 // a2 - vector | 2107 // a2 - vector |
| 2109 // a3 - slot | 2108 // a3 - slot |
| 2110 // a1 - function | 2109 // a1 - function |
| 2111 { | 2110 { |
| 2112 FrameScope scope(masm, StackFrame::INTERNAL); | 2111 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2113 CreateWeakCellStub create_stub(masm->isolate()); | 2112 CreateWeakCellStub create_stub(masm->isolate()); |
| 2113 __ Push(a2, a3); |
| 2114 __ Push(cp, a1); | 2114 __ Push(cp, a1); |
| 2115 __ CallStub(&create_stub); | 2115 __ CallStub(&create_stub); |
| 2116 __ Pop(cp, a1); | 2116 __ Pop(cp, a1); |
| 2117 __ Pop(a2, a3); |
| 2117 } | 2118 } |
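Note: the vector (a2) and slot (a3) are saved around the stub call because CreateWeakCellStub may clobber them, and the IncrementCallCount at call_function still needs both. A toy model of the LIFO push/pop pairing (stand-in values, not V8 API):

    #include <cassert>
    #include <cstdint>
    #include <stack>

    int main() {
      std::stack<uint64_t> regs;
      uint64_t a2 = 0x1000, a3 = 0x2000;  // vector, slot (stand-in values)
      regs.push(a2); regs.push(a3);       // __ Push(a2, a3)
      a2 = a3 = 0;                        // the allocating call may clobber both
      a3 = regs.top(); regs.pop();        // __ Pop(a2, a3): restore in reverse
      a2 = regs.top(); regs.pop();
      assert(a2 == 0x1000 && a3 == 0x2000);
      return 0;
    }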
| 2118 | 2119 |
| 2119 __ Branch(&call_function); | 2120 __ Branch(&call_function); |
| 2120 | 2121 |
| 2121 // We are here because tracing is on or we encountered a MISS case we can't | 2122 // We are here because tracing is on or we encountered a MISS case we can't |
| 2122 // handle here. | 2123 // handle here. |
| 2123 __ bind(&miss); | 2124 __ bind(&miss); |
| 2124 GenerateMiss(masm); | 2125 GenerateMiss(masm); |
| 2125 | 2126 |
| 2126 __ Branch(&call); | 2127 __ Branch(&call_count_incremented); |
| 2127 } | 2128 } |
| 2128 | 2129 |
| 2129 | 2130 |
| 2130 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2131 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
| 2131 FrameScope scope(masm, StackFrame::INTERNAL); | 2132 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2132 | 2133 |
| 2133 // Push the receiver and the function and feedback info. | 2134 // Push the receiver and the function and feedback info. |
| 2134 __ Push(a1, a2, a3); | 2135 __ Push(a1, a2, a3); |
| 2135 | 2136 |
| 2136 // Call the entry. | 2137 // Call the entry. |
| (...skipping 3265 matching lines...) |
| 5402 kStackUnwindSpace, kInvalidStackOffset, | 5403 kStackUnwindSpace, kInvalidStackOffset, |
| 5403 return_value_operand, NULL); | 5404 return_value_operand, NULL); |
| 5404 } | 5405 } |
| 5405 | 5406 |
| 5406 #undef __ | 5407 #undef __ |
| 5407 | 5408 |
| 5408 } // namespace internal | 5409 } // namespace internal |
| 5409 } // namespace v8 | 5410 } // namespace v8 |
| 5410 | 5411 |
| 5411 #endif // V8_TARGET_ARCH_MIPS64 | 5412 #endif // V8_TARGET_ARCH_MIPS64 |