OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 1921 matching lines...) |
1932 StringCharLoadGenerator::Generate(masm, | 1932 StringCharLoadGenerator::Generate(masm, |
1933 object_, | 1933 object_, |
1934 index_, | 1934 index_, |
1935 result_, | 1935 result_, |
1936 &call_runtime_); | 1936 &call_runtime_); |
1937 | 1937 |
1938 __ SmiTag(result_); | 1938 __ SmiTag(result_); |
1939 __ bind(&exit_); | 1939 __ bind(&exit_); |
1940 } | 1940 } |
1941 | 1941 |
1942 // Note: feedback_vector and slot are clobbered when this helper returns. | 
1943 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector, | |
1944 Register slot) { | |
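     // On MIPS64 a Smi keeps its 32-bit payload in the upper word of the | 
     // register, so shifting the tagged slot index right by | 
     // (32 - kPointerSizeLog2) untags it and scales it into a byte offset | 
     // in a single instruction. The count lives one pointer past the | 
     // feedback entry (vector[slot + 1]); because the low tag bits of a | 
     // Smi stay zero, it can be bumped by raw-adding Smi::FromInt(1). | 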
1945 __ dsrl(t0, slot, 32 - kPointerSizeLog2); | |
1946 __ Daddu(slot, feedback_vector, Operand(t0)); | |
1947 __ ld(t0, FieldMemOperand(slot, FixedArray::kHeaderSize + kPointerSize)); | |
1948 __ Daddu(t0, t0, Operand(Smi::FromInt(1))); | |
1949 __ sd(t0, FieldMemOperand(slot, FixedArray::kHeaderSize + kPointerSize)); | |
1950 } | |
1951 | |
1952 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | |
1953 // a0 - number of arguments | |
1954 // a1 - function | |
1955 // a3 - slot id | |
1956 // a2 - vector | |
1957 // a4 - allocation site (loaded from vector[slot]) | |
1958 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at); | |
1959 __ Branch(miss, ne, a1, Operand(at)); | |
1960 | |
1961 // Increment the call count for monomorphic function calls. | |
1962 IncrementCallCount(masm, a2, a3); | |
1963 | |
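     // ArrayConstructorStub expects the AllocationSite in a2 and the new | 
     // target in a3; here the Array function itself serves as new target. | 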
1964 __ mov(a2, a4); | |
1965 __ mov(a3, a1); | |
1966 ArrayConstructorStub stub(masm->isolate()); | |
1967 __ TailCallStub(&stub); | |
1968 } | |
1969 | |
1970 | |
1971 void CallICStub::Generate(MacroAssembler* masm) { | |
1972 // a0 - number of arguments | |
1973 // a1 - function | |
1974 // a3 - slot id (Smi) | |
1975 // a2 - vector | |
1976 Label extra_checks_or_miss, call, call_function, call_count_incremented; | |
1977 | |
1978 // The checks. First, does a1 match the recorded monomorphic target? | 
1979 __ dsrl(a4, a3, 32 - kPointerSizeLog2); | |
1980 __ Daddu(a4, a2, Operand(a4)); | |
1981 __ ld(a4, FieldMemOperand(a4, FixedArray::kHeaderSize)); | |
1982 | |
1983 // We don't know that we have a weak cell. We might have a private symbol | |
1984 // or an AllocationSite, but the memory is safe to examine. | |
1985 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | |
1986 // FixedArray. | |
1987 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | |
1988 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not | |
1989 // computed, meaning that it can't appear to be a pointer. If the low bit is | |
1990 // 0, then hash is computed, but the 0 bit prevents the field from appearing | |
1991 // to be a pointer. | |
1992 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); | |
1993 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == | |
1994 WeakCell::kValueOffset && | |
1995 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | |
1996 | |
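     // With the layout pinned down by the asserts above, the load below is | 
     // well-defined for any of the three possible feedback kinds. | 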
1997 __ ld(a5, FieldMemOperand(a4, WeakCell::kValueOffset)); | |
1998 __ Branch(&extra_checks_or_miss, ne, a1, Operand(a5)); | |
1999 | |
2000 // The compare above could have been a SMI/SMI comparison. Guard against this | |
2001 // convincing us that we have a monomorphic JSFunction. | |
2002 __ JumpIfSmi(a1, &extra_checks_or_miss); | |
2003 | |
2004 __ bind(&call_function); | |
2005 // Increment the call count for monomorphic function calls. | |
2006 IncrementCallCount(masm, a2, a3); | |
2007 | |
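     // The trailing (al, zero_reg, Operand(zero_reg)) arguments encode an | 
     // always-taken condition, making this an unconditional jump. | 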
2008 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), | |
2009 tail_call_mode()), | |
2010 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg)); | |
2011 | |
2012 __ bind(&extra_checks_or_miss); | |
2013 Label uninitialized, miss, not_allocation_site; | |
2014 | |
2015 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | |
2016 __ Branch(&call, eq, a4, Operand(at)); | |
2017 | |
2018 // Verify that a4 contains an AllocationSite | |
2019 __ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset)); | |
2020 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | |
2021 __ Branch(¬_allocation_site, ne, a5, Operand(at)); | |
2022 | |
2023 HandleArrayCase(masm, &miss); | |
2024 | |
2025 __ bind(¬_allocation_site); | |
2026 | |
2027 // The following checks attempt to handle MISS cases without going to the | 
2028 // runtime. | |
2029 if (FLAG_trace_ic) { | |
2030 __ Branch(&miss); | |
2031 } | |
2032 | |
2033 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex); | |
2034 __ Branch(&uninitialized, eq, a4, Operand(at)); | |
2035 | |
2036 // We are going megamorphic. If the feedback is a JSFunction, it is fine | |
2037 // to handle it here. More complex cases are dealt with in the runtime. | |
2038 __ AssertNotSmi(a4); | |
2039 __ GetObjectType(a4, a5, a5); | |
2040 __ Branch(&miss, ne, a5, Operand(JS_FUNCTION_TYPE)); | |
2041 __ dsrl(a4, a3, 32 - kPointerSizeLog2); | |
2042 __ Daddu(a4, a2, Operand(a4)); | |
2043 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | |
2044 __ sd(at, FieldMemOperand(a4, FixedArray::kHeaderSize)); | |
2045 | |
2046 __ bind(&call); | |
2047 IncrementCallCount(masm, a2, a3); | |
2048 | |
2049 __ bind(&call_count_incremented); | |
2050 | |
2051 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), | |
2052 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg)); | |
2053 | |
2054 __ bind(&uninitialized); | |
2055 | |
2056 // We are going monomorphic, provided we actually have a JSFunction. | |
2057 __ JumpIfSmi(a1, &miss); | |
2058 | |
2059 // Go to the miss case if we do not have a function. | 
2060 __ GetObjectType(a1, a4, a4); | |
2061 __ Branch(&miss, ne, a4, Operand(JS_FUNCTION_TYPE)); | |
2062 | |
2063 // Make sure the function is not the Array() function, which requires special | |
2064 // behavior on MISS. | |
2065 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a4); | |
2066 __ Branch(&miss, eq, a1, Operand(a4)); | |
2067 | |
2068 // Make sure the function belongs to the same native context. | |
2069 __ ld(t0, FieldMemOperand(a1, JSFunction::kContextOffset)); | |
2070 __ ld(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX)); | |
2071 __ ld(t1, NativeContextMemOperand()); | |
2072 __ Branch(&miss, ne, t0, Operand(t1)); | |
2073 | |
2074 // Store the function. Use a stub since we need a frame for allocation. | |
2075 // a2 - vector | |
2076 // a3 - slot | |
2077 // a1 - function | |
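     // CreateWeakCellStub can allocate and thus trigger a GC, so all state | 
     // live across the call is pushed in tagged form; the raw argument | 
     // count in a0 is Smi-tagged first so the GC can walk the stack safely. | 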
2078 { | |
2079 FrameScope scope(masm, StackFrame::INTERNAL); | |
2080 CreateWeakCellStub create_stub(masm->isolate()); | |
2081 __ SmiTag(a0); | |
2082 __ Push(a0); | |
2083 __ Push(a2, a3); | |
2084 __ Push(cp, a1); | |
2085 __ CallStub(&create_stub); | |
2086 __ Pop(cp, a1); | |
2087 __ Pop(a2, a3); | |
2088 __ Pop(a0); | |
2089 __ SmiUntag(a0); | |
2090 } | |
2091 | |
2092 __ Branch(&call_function); | |
2093 | |
2094 // We are here because tracing is on or we encountered a MISS case we can't | |
2095 // handle here. | |
2096 __ bind(&miss); | |
2097 GenerateMiss(masm); | |
2098 | |
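     // The runtime miss handler is expected to have updated the call count | 
     // already, so rejoin the call path past the increment to avoid | 
     // counting this call twice. | 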
2099 __ Branch(&call_count_incremented); | |
2100 } | |
2101 | |
2102 | |
2103 void CallICStub::GenerateMiss(MacroAssembler* masm) { | |
2104 FrameScope scope(masm, StackFrame::INTERNAL); | |
2105 | |
2106 // Preserve number of arguments as Smi. | |
2107 __ SmiTag(a0); | |
2108 __ Push(a0); | |
2109 | |
2110 // Push the function and feedback info. | 
2111 __ Push(a1, a2, a3); | |
2112 | |
2113 // Call the entry. | |
2114 __ CallRuntime(Runtime::kCallIC_Miss); | |
2115 | |
2116 // Move result to a1 and exit the internal frame. | |
2117 __ mov(a1, v0); | |
2118 | |
2119 // Restore number of arguments. | |
2120 __ Pop(a0); | |
2121 __ SmiUntag(a0); | |
2122 } | |
2123 | |
2124 | |
2125 void StringCharCodeAtGenerator::GenerateSlow( | 1942 void StringCharCodeAtGenerator::GenerateSlow( |
2126 MacroAssembler* masm, EmbedMode embed_mode, | 1943 MacroAssembler* masm, EmbedMode embed_mode, |
2127 const RuntimeCallHelper& call_helper) { | 1944 const RuntimeCallHelper& call_helper) { |
2128 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); | 1945 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); |
2129 | 1946 |
2130 // Index is not a smi. | 1947 // Index is not a smi. |
2131 __ bind(&index_not_smi_); | 1948 __ bind(&index_not_smi_); |
2132 // If index is a heap number, try converting it to an integer. | 1949 // If index is a heap number, try converting it to an integer. |
2133 __ CheckMap(index_, | 1950 __ CheckMap(index_, |
2134 result_, | 1951 result_, |
(...skipping 2176 matching lines...) |
4311 kStackUnwindSpace, kInvalidStackOffset, | 4128 kStackUnwindSpace, kInvalidStackOffset, |
4312 return_value_operand, NULL); | 4129 return_value_operand, NULL); |
4313 } | 4130 } |
4314 | 4131 |
4315 #undef __ | 4132 #undef __ |
4316 | 4133 |
4317 } // namespace internal | 4134 } // namespace internal |
4318 } // namespace v8 | 4135 } // namespace v8 |
4319 | 4136 |
4320 #endif // V8_TARGET_ARCH_MIPS64 | 4137 #endif // V8_TARGET_ARCH_MIPS64 |