| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 1875 matching lines...) |
| 1886 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 1886 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 1887 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); | 1887 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); |
| 1888 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1888 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 1889 __ Jump(at); | 1889 __ Jump(at); |
| 1890 | 1890 |
| 1891 __ bind(&non_function); | 1891 __ bind(&non_function); |
| 1892 __ mov(a3, a1); | 1892 __ mov(a3, a1); |
| 1893 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1893 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 1894 } | 1894 } |
| 1895 | 1895 |
| 1896 // Note: feedback_vector and slot are clobbered after the call. | |
| 1897 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector, | |
| 1898 Register slot) { | |
| 1899 __ Lsa(at, feedback_vector, slot, kPointerSizeLog2 - kSmiTagSize); | |
| 1900 __ lw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); | |
| 1901 __ Addu(slot, slot, Operand(Smi::FromInt(1))); | |
| 1902 __ sw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); | |
| 1903 } | |
| 1904 | |
| 1905 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | |
| 1906 // a0 - number of arguments | |
| 1907 // a1 - function | |
| 1908 // a3 - slot id | |
| 1909 // a2 - vector | |
| 1910 // t0 - loaded from vector[slot] | |
| 1911 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at); | |
| 1912 __ Branch(miss, ne, a1, Operand(at)); | |
| 1913 | |
| 1914 // Increment the call count for monomorphic function calls. | |
| 1915 IncrementCallCount(masm, a2, a3); | |
| 1916 | |
| 1917 __ mov(a2, t0); | |
| 1918 __ mov(a3, a1); | |
| 1919 ArrayConstructorStub stub(masm->isolate()); | |
| 1920 __ TailCallStub(&stub); | |
| 1921 } | |
| 1922 | |
| 1923 | |
| 1924 void CallICStub::Generate(MacroAssembler* masm) { | |
| 1925 // a0 - number of arguments | |
| 1926 // a1 - function | |
| 1927 // a3 - slot id (Smi) | |
| 1928 // a2 - vector | |
| 1929 Label extra_checks_or_miss, call, call_function, call_count_incremented; | |
| 1930 | |
| 1931 // The checks. First, does a1 match the recorded monomorphic target? | |
| 1932 __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize); | |
| 1933 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); | |
| 1934 | |
| 1935 // We don't know that we have a weak cell. We might have a private symbol | |
| 1936 // or an AllocationSite, but the memory is safe to examine. | |
| 1937 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | |
| 1938 // FixedArray. | |
| 1939 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | |
| 1940 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not | |
| 1941 // computed, meaning that it can't appear to be a pointer. If the low bit is | |
| 1942 // 0, then hash is computed, but the 0 bit prevents the field from appearing | |
| 1943 // to be a pointer. | |
| 1944 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); | |
| 1945 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == | |
| 1946 WeakCell::kValueOffset && | |
| 1947 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | |
| 1948 | |
| 1949 __ lw(t1, FieldMemOperand(t0, WeakCell::kValueOffset)); | |
| 1950 __ Branch(&extra_checks_or_miss, ne, a1, Operand(t1)); | |
| 1951 | |
| 1952 // The compare above could have been a SMI/SMI comparison. Guard against this | |
| 1953 // convincing us that we have a monomorphic JSFunction. | |
| 1954 __ JumpIfSmi(a1, &extra_checks_or_miss); | |
| 1955 | |
| 1956 __ bind(&call_function); | |
| 1957 | |
| 1958 // Increment the call count for monomorphic function calls. | |
| 1959 IncrementCallCount(masm, a2, a3); | |
| 1960 | |
| 1961 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), | |
| 1962 tail_call_mode()), | |
| 1963 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg)); | |
| 1964 | |
| 1965 __ bind(&extra_checks_or_miss); | |
| 1966 Label uninitialized, miss, not_allocation_site; | |
| 1967 | |
| 1968 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | |
| 1969 __ Branch(&call, eq, t0, Operand(at)); | |
| 1970 | |
| 1971 // Verify that t0 contains an AllocationSite | |
| 1972 __ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset)); | |
| 1973 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | |
| 1974 __ Branch(¬_allocation_site, ne, t1, Operand(at)); | |
| 1975 | |
| 1976 HandleArrayCase(masm, &miss); | |
| 1977 | |
| 1978 __ bind(¬_allocation_site); | |
| 1979 | |
| 1980 // The following cases attempt to handle MISS cases without going to the | |
| 1981 // runtime. | |
| 1982 if (FLAG_trace_ic) { | |
| 1983 __ Branch(&miss); | |
| 1984 } | |
| 1985 | |
| 1986 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex); | |
| 1987 __ Branch(&uninitialized, eq, t0, Operand(at)); | |
| 1988 | |
| 1989 // We are going megamorphic. If the feedback is a JSFunction, it is fine | |
| 1990 // to handle it here. More complex cases are dealt with in the runtime. | |
| 1991 __ AssertNotSmi(t0); | |
| 1992 __ GetObjectType(t0, t1, t1); | |
| 1993 __ Branch(&miss, ne, t1, Operand(JS_FUNCTION_TYPE)); | |
| 1994 __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize); | |
| 1995 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | |
| 1996 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); | |
| 1997 | |
| 1998 __ bind(&call); | |
| 1999 IncrementCallCount(masm, a2, a3); | |
| 2000 | |
| 2001 __ bind(&call_count_incremented); | |
| 2002 | |
| 2003 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), | |
| 2004 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg)); | |
| 2005 | |
| 2006 __ bind(&uninitialized); | |
| 2007 | |
| 2008 // We are going monomorphic, provided we actually have a JSFunction. | |
| 2009 __ JumpIfSmi(a1, &miss); | |
| 2010 | |
| 2011 // Goto miss case if we do not have a function. | |
| 2012 __ GetObjectType(a1, t0, t0); | |
| 2013 __ Branch(&miss, ne, t0, Operand(JS_FUNCTION_TYPE)); | |
| 2014 | |
| 2015 // Make sure the function is not the Array() function, which requires special | |
| 2016 // behavior on MISS. | |
| 2017 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t0); | |
| 2018 __ Branch(&miss, eq, a1, Operand(t0)); | |
| 2019 | |
| 2020 // Make sure the function belongs to the same native context. | |
| 2021 __ lw(t0, FieldMemOperand(a1, JSFunction::kContextOffset)); | |
| 2022 __ lw(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX)); | |
| 2023 __ lw(t1, NativeContextMemOperand()); | |
| 2024 __ Branch(&miss, ne, t0, Operand(t1)); | |
| 2025 | |
| 2026 // Store the function. Use a stub since we need a frame for allocation. | |
| 2027 // a2 - vector | |
| 2028 // a3 - slot | |
| 2029 // a1 - function | |
| 2030 { | |
| 2031 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 2032 CreateWeakCellStub create_stub(masm->isolate()); | |
| 2033 __ SmiTag(a0); | |
| 2034 __ Push(a0); | |
| 2035 __ Push(a2, a3); | |
| 2036 __ Push(cp, a1); | |
| 2037 __ CallStub(&create_stub); | |
| 2038 __ Pop(cp, a1); | |
| 2039 __ Pop(a2, a3); | |
| 2040 __ Pop(a0); | |
| 2041 __ SmiUntag(a0); | |
| 2042 } | |
| 2043 | |
| 2044 __ Branch(&call_function); | |
| 2045 | |
| 2046 // We are here because tracing is on or we encountered a MISS case we can't | |
| 2047 // handle here. | |
| 2048 __ bind(&miss); | |
| 2049 GenerateMiss(masm); | |
| 2050 | |
| 2051 __ Branch(&call_count_incremented); | |
| 2052 } | |
| 2053 | |
| 2054 | |
| 2055 void CallICStub::GenerateMiss(MacroAssembler* masm) { | |
| 2056 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 2057 | |
| 2058 // Preserve the number of arguments as Smi. | |
| 2059 __ SmiTag(a0); | |
| 2060 __ Push(a0); | |
| 2061 | |
| 2062 // Push the receiver and the function and feedback info. | |
| 2063 __ Push(a1, a2, a3); | |
| 2064 | |
| 2065 // Call the entry. | |
| 2066 __ CallRuntime(Runtime::kCallIC_Miss); | |
| 2067 | |
| 2068 // Move result to a1 and exit the internal frame. | |
| 2069 __ mov(a1, v0); | |
| 2070 | |
| 2071 // Restore number of arguments. | |
| 2072 __ Pop(a0); | |
| 2073 __ SmiUntag(a0); | |
| 2074 } | |
| 2075 | |
| 2076 | |
| 2077 // StringCharCodeAtGenerator. | 1896 // StringCharCodeAtGenerator. |
| 2078 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 1897 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
| 2079 DCHECK(!t0.is(index_)); | 1898 DCHECK(!t0.is(index_)); |
| 2080 DCHECK(!t0.is(result_)); | 1899 DCHECK(!t0.is(result_)); |
| 2081 DCHECK(!t0.is(object_)); | 1900 DCHECK(!t0.is(object_)); |
| 2082 if (check_mode_ == RECEIVER_IS_UNKNOWN) { | 1901 if (check_mode_ == RECEIVER_IS_UNKNOWN) { |
| 2083 // If the receiver is a smi trigger the non-string case. | 1902 // If the receiver is a smi trigger the non-string case. |
| 2084 __ JumpIfSmi(object_, receiver_not_string_); | 1903 __ JumpIfSmi(object_, receiver_not_string_); |
| 2085 | 1904 |
| 2086 // Fetch the instance type of the receiver into result register. | 1905 // Fetch the instance type of the receiver into result register. |
| (...skipping 2199 matching lines...) |
| 4286 kStackUnwindSpace, kInvalidStackOffset, | 4105 kStackUnwindSpace, kInvalidStackOffset, |
| 4287 return_value_operand, NULL); | 4106 return_value_operand, NULL); |
| 4288 } | 4107 } |
| 4289 | 4108 |
| 4290 #undef __ | 4109 #undef __ |
| 4291 | 4110 |
| 4292 } // namespace internal | 4111 } // namespace internal |
| 4293 } // namespace v8 | 4112 } // namespace v8 |
| 4294 | 4113 |
| 4295 #endif // V8_TARGET_ARCH_MIPS | 4114 #endif // V8_TARGET_ARCH_MIPS |
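The IncrementCallCount helper in the deleted block bumps the Smi-tagged call count stored one pointer past the feedback slot (the `Lsa`/`lw`/`Addu`/`sw` sequence). A minimal sketch of the same logic in plain C++, assuming a 32-bit word with V8's one-bit Smi tag; the container type and names are illustrative, not the V8 API:

```cpp
// Sketch only, not V8 code: the semantics of IncrementCallCount from the
// removed MIPS stub, expressed in plain C++. The Smi encoding (32-bit word,
// value shifted left by one, low tag bit 0) matches 32-bit V8; everything
// else is a stand-in for illustration.
#include <cstdint>
#include <vector>

using SmiWord = int32_t;        // one 32-bit MIPS word holding a tagged Smi
constexpr int kSmiTagSize = 1;  // low bit 0 marks a Smi

inline SmiWord SmiFromInt(int value) {
  return static_cast<SmiWord>(value) << kSmiTagSize;
}

// feedback_vector[slot] holds the feedback object (WeakCell, AllocationSite,
// or a sentinel symbol); feedback_vector[slot + 1] holds the Smi call count.
void IncrementCallCount(std::vector<SmiWord>& feedback_vector, size_t slot) {
  feedback_vector[slot + 1] += SmiFromInt(1);  // sum of two Smis is a Smi
}
```

Because the count stays Smi-tagged, the increment is a plain integer add with no untagging or retagging, which is why the stub can emit it as a four-instruction load/add/store sequence.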