OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_S390 | 5 #if V8_TARGET_ARCH_S390 |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 1842 matching lines...)
1853 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); | 1853 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); |
1854 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset)); | 1854 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset)); |
1855 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1855 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); |
1856 __ JumpToJSEntry(ip); | 1856 __ JumpToJSEntry(ip); |
1857 | 1857 |
1858 __ bind(&non_function); | 1858 __ bind(&non_function); |
1859 __ LoadRR(r5, r3); | 1859 __ LoadRR(r5, r3); |
1860 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1860 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
1861 } | 1861 } |
1862 | 1862 |
1863 // Note: feedback_vector and slot are clobbered after the call. | |
1864 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector, | |
1865 Register slot, Register temp) { | |
1866 const int count_offset = FixedArray::kHeaderSize + kPointerSize; | |
1867 __ SmiToPtrArrayOffset(temp, slot); | |
1868 __ AddP(feedback_vector, feedback_vector, temp); | |
1869 __ LoadP(slot, FieldMemOperand(feedback_vector, count_offset)); | |
1870 __ AddSmiLiteral(slot, slot, Smi::FromInt(1), temp); | |
1871 __ StoreP(slot, FieldMemOperand(feedback_vector, count_offset), temp); | |
1872 } | |
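The helper above depends on the feedback vector layout: the feedback for a call site sits at the slot's element, and its call count is a Smi stored one pointer-sized element later, which is why count_offset is FixedArray::kHeaderSize + kPointerSize once the slot offset has been added to the vector base. A minimal C++ model of that layout and of the increment, assuming a flat array of words with Smis modeled as plain integers (FeedbackVectorModel and its members are illustrative names, not V8 API):

    // Illustrative model only, not V8 code: one word per element, Smis as ints.
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct FeedbackVectorModel {
      // elements[slot] holds the feedback value; elements[slot + 1] holds the
      // call count, i.e. the count lives one element past the slot itself.
      std::vector<std::intptr_t> elements;

      void IncrementCallCount(std::size_t slot) {
        // Mirror of the sequence above: read the count one element past the
        // slot, add Smi(1), and store it back.
        elements[slot + 1] += 1;
      }
    };

In the generated code the same arithmetic is done with SmiToPtrArrayOffset plus the fixed count_offset, and the addition uses AddSmiLiteral so the stored value stays Smi-tagged.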
1873 | |
1874 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | |
1875 // r2 - number of arguments | |
1876 // r3 - function | |
1877 // r5 - slot id | |
1878 // r4 - vector | |
1879 // r6 - allocation site (loaded from vector[slot]) | |
1880 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7); | |
1881 __ CmpP(r3, r7); | |
1882 __ bne(miss); | |
1883 | |
1884 // Increment the call count for monomorphic function calls. | |
1885 IncrementCallCount(masm, r4, r5, r1); | |
1886 | |
1887 __ LoadRR(r4, r6); | |
1888 __ LoadRR(r5, r3); | |
1889 ArrayConstructorStub stub(masm->isolate()); | |
1890 __ TailCallStub(&stub); | |
1891 } | |
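HandleArrayCase only applies when the callee is the Array() function cached in the native context; it bumps the call count and then hands the AllocationSite recorded in the feedback vector to ArrayConstructorStub (the two LoadRR moves above put the site and the target into the registers that stub expects). A rough C++ sketch of that decision, with hypothetical plain-pointer types standing in for tagged values (TryArrayFastPath and AllocationSiteModel are illustrative names, not V8 API):

    // Illustrative sketch, not V8 code: raw pointers stand in for tagged values.
    struct AllocationSiteModel;  // carries the ElementsKind feedback in V8

    // Returns false to signal the miss path; true means the array constructor
    // stub would be tail-called with 'site' as its feedback.
    inline bool TryArrayFastPath(const void* callee, const void* array_function,
                                 AllocationSiteModel* site, int* call_count) {
      if (callee != array_function) return false;  // CmpP / bne(miss)
      ++*call_count;                               // IncrementCallCount
      (void)site;  // forwarded to ArrayConstructorStub in the real code
      return true;
    }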
1892 | |
1893 void CallICStub::Generate(MacroAssembler* masm) { | |
1894 // r2 - number of arguments | |
1895 // r3 - function | |
1896 // r5 - slot id (Smi) | |
1897 // r4 - vector | |
1898 Label extra_checks_or_miss, call, call_function, call_count_incremented; | |
1899 | |
1900 // The checks. First, does r3 match the recorded monomorphic target? | |
1901 __ SmiToPtrArrayOffset(r8, r5); | |
1902 __ AddP(r8, r4, r8); | |
1903 __ LoadP(r6, FieldMemOperand(r8, FixedArray::kHeaderSize)); | |
1904 | |
1905 // We don't know that we have a weak cell. We might have a private symbol | |
1906 // or an AllocationSite, but the memory is safe to examine. | |
1907 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | |
1908 // FixedArray. | |
1909 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | |
1910 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not | |
1911 // computed, meaning that it can't appear to be a pointer. If the low bit is | |
1912 // 0, then hash is computed, but the 0 bit prevents the field from appearing | |
1913 // to be a pointer. | |
1914 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); | |
1915 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == | |
1916 WeakCell::kValueOffset && | |
1917 WeakCell::kValueOffset == Symbol::kHashFieldSlot); | |
1918 | |
1919 __ LoadP(r7, FieldMemOperand(r6, WeakCell::kValueOffset)); | |
1920 __ CmpP(r3, r7); | |
1921 __ bne(&extra_checks_or_miss, Label::kNear); | |
1922 | |
1923 // The compare above could have been a SMI/SMI comparison. Guard against this | |
1924 // convincing us that we have a monomorphic JSFunction. | |
1925 __ JumpIfSmi(r3, &extra_checks_or_miss); | |
1926 | |
1927 __ bind(&call_function); | |
1928 | |
1929 // Increment the call count for monomorphic function calls. | |
1930 IncrementCallCount(masm, r4, r5, r1); | |
1931 | |
1932 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), | |
1933 tail_call_mode()), | |
1934 RelocInfo::CODE_TARGET); | |
1935 | |
1936 __ bind(&extra_checks_or_miss); | |
1937 Label uninitialized, miss, not_allocation_site; | |
1938 | |
1939 __ CompareRoot(r6, Heap::kmegamorphic_symbolRootIndex); | |
1940 __ beq(&call); | |
1941 | |
1942 // Verify that r6 contains an AllocationSite | |
1943 __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset)); | |
1944 __ CompareRoot(r7, Heap::kAllocationSiteMapRootIndex); | |
1945 __ bne(¬_allocation_site); | |
1946 | |
1947 // We have an allocation site. | |
1948 HandleArrayCase(masm, &miss); | |
1949 | |
1950 __ bind(¬_allocation_site); | |
1951 | |
1952 // The following cases attempt to handle MISS cases without going to the | |
1953 // runtime. | |
1954 if (FLAG_trace_ic) { | |
1955 __ b(&miss); | |
1956 } | |
1957 | |
1958 __ CompareRoot(r6, Heap::kuninitialized_symbolRootIndex); | |
1959 __ beq(&uninitialized); | |
1960 | |
1961 // We are going megamorphic. If the feedback is a JSFunction, it is fine | |
1962 // to handle it here. More complex cases are dealt with in the runtime. | |
1963 __ AssertNotSmi(r6); | |
1964 __ CompareObjectType(r6, r7, r7, JS_FUNCTION_TYPE); | |
1965 __ bne(&miss); | |
1966 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); | |
1967 __ StoreP(ip, FieldMemOperand(r8, FixedArray::kHeaderSize), r0); | |
1968 | |
1969 __ bind(&call); | |
1970 | |
1971 // Increment the call count for megamorphic function calls. | |
1972 IncrementCallCount(masm, r4, r5, r1); | |
1973 | |
1974 __ bind(&call_count_incremented); | |
1975 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), | |
1976 RelocInfo::CODE_TARGET); | |
1977 | |
1978 __ bind(&uninitialized); | |
1979 | |
1980 // We are going monomorphic, provided we actually have a JSFunction. | |
1981 __ JumpIfSmi(r3, &miss); | |
1982 | |
1983 // Go to the miss case if we do not have a function. | |
1984 __ CompareObjectType(r3, r6, r6, JS_FUNCTION_TYPE); | |
1985 __ bne(&miss); | |
1986 | |
1987 // Make sure the function is not the Array() function, which requires special | |
1988 // behavior on MISS. | |
1989 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r6); | |
1990 __ CmpP(r3, r6); | |
1991 __ beq(&miss); | |
1992 | |
1993 // Make sure the function belongs to the same native context. | |
1994 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kContextOffset)); | |
1995 __ LoadP(r6, ContextMemOperand(r6, Context::NATIVE_CONTEXT_INDEX)); | |
1996 __ LoadP(ip, NativeContextMemOperand()); | |
1997 __ CmpP(r6, ip); | |
1998 __ bne(&miss); | |
1999 | |
2000 // Store the function. Use a stub since we need a frame for allocation. | |
2001 // r4 - vector | |
2002 // r5 - slot | |
2003 // r3 - function | |
2004 { | |
2005 FrameScope scope(masm, StackFrame::INTERNAL); | |
2006 CreateWeakCellStub create_stub(masm->isolate()); | |
2007 __ SmiTag(r2); | |
2008 __ Push(r2, r4, r5, cp, r3); | |
2009 __ CallStub(&create_stub); | |
2010 __ Pop(r4, r5, cp, r3); | |
2011 __ Pop(r2); | |
2012 __ SmiUntag(r2); | |
2013 } | |
2014 | |
2015 __ b(&call_function); | |
2016 | |
2017 // We are here because tracing is on or we encountered a MISS case we can't | |
2018 // handle here. | |
2019 __ bind(&miss); | |
2020 GenerateMiss(masm); | |
2021 | |
2022 __ b(&call_count_incremented); | |
2023 } | |
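Taken together, Generate treats vector[slot] as a small state machine: a WeakCell that still points at the callee means monomorphic, an AllocationSite selects the Array() special case, the two sentinel symbols mark the megamorphic and uninitialized states, stale JSFunction feedback is downgraded to megamorphic, and everything else misses. A condensed C++ model of that dispatch, assuming the individual checks have been reduced to booleans (every name below is illustrative, not V8 API; tracing via FLAG_trace_ic, which forces a miss, and the exact placement of the count increments are omitted):

    // Illustrative dispatch model of the control flow above, not V8 code.
    enum class Feedback {
      kWeakCellMatchingTarget,   // weak cell whose value is the callee
      kAllocationSite,           // Array() call site
      kMegamorphicSentinel,
      kUninitializedSentinel,
      kOtherJSFunction,          // stale JSFunction feedback
      kOther
    };

    enum class Outcome { kMonomorphicCall, kArrayConstructor, kGenericCall, kMiss };

    inline Outcome Dispatch(Feedback feedback, bool target_is_js_function,
                            bool target_is_array_function,
                            bool same_native_context) {
      switch (feedback) {
        case Feedback::kWeakCellMatchingTarget:
          return Outcome::kMonomorphicCall;          // call_function path
        case Feedback::kAllocationSite:
          return target_is_array_function ? Outcome::kArrayConstructor
                                          : Outcome::kMiss;
        case Feedback::kMegamorphicSentinel:
          return Outcome::kGenericCall;              // call path
        case Feedback::kUninitializedSentinel:
          // Go monomorphic only for a plain JSFunction from the same native
          // context that is not the Array() function; a weak cell for it is
          // created first (CreateWeakCellStub).
          if (target_is_js_function && same_native_context &&
              !target_is_array_function) {
            return Outcome::kMonomorphicCall;
          }
          return Outcome::kMiss;
        case Feedback::kOtherJSFunction:
          // Stale function feedback: overwrite the slot with the megamorphic
          // sentinel and do a generic call.
          return Outcome::kGenericCall;
        default:
          return Outcome::kMiss;
      }
    }

A miss falls through to GenerateMiss below, which preserves the Smi-tagged argument count across a Runtime::kCallIC_Miss call and puts the runtime's result back in the target register; the stub then jumps to the generic Call builtin via call_count_incremented.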
2024 | |
2025 void CallICStub::GenerateMiss(MacroAssembler* masm) { | |
2026 FrameScope scope(masm, StackFrame::INTERNAL); | |
2027 | |
2028 // Preserve the number of arguments as Smi. | |
2029 __ SmiTag(r2); | |
2030 | |
2031 // Push the number of arguments, the function, and the feedback info. | |
2032 __ Push(r2, r3, r4, r5); | |
2033 | |
2034 // Call the entry. | |
2035 __ CallRuntime(Runtime::kCallIC_Miss); | |
2036 | |
2037 // Move result to r3 and exit the internal frame. | |
2038 __ LoadRR(r3, r2); | |
2039 | |
2040 // Restore number of arguments. | |
2041 __ Pop(r2); | |
2042 __ SmiUntag(r2); | |
2043 } | |
2044 | |
2045 // StringCharCodeAtGenerator | 1863 // StringCharCodeAtGenerator |
2046 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 1864 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
2047 // If the receiver is a smi trigger the non-string case. | 1865 // If the receiver is a smi trigger the non-string case. |
2048 if (check_mode_ == RECEIVER_IS_UNKNOWN) { | 1866 if (check_mode_ == RECEIVER_IS_UNKNOWN) { |
2049 __ JumpIfSmi(object_, receiver_not_string_); | 1867 __ JumpIfSmi(object_, receiver_not_string_); |
2050 | 1868 |
2051 // Fetch the instance type of the receiver into result register. | 1869 // Fetch the instance type of the receiver into result register. |
2052 __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); | 1870 __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); |
2053 __ LoadlB(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); | 1871 __ LoadlB(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); |
2054 // If the receiver is not a string trigger the non-string case. | 1872 // If the receiver is not a string trigger the non-string case. |
(...skipping 2220 matching lines...)
4275 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 4093 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
4276 kStackUnwindSpace, NULL, return_value_operand, NULL); | 4094 kStackUnwindSpace, NULL, return_value_operand, NULL); |
4277 } | 4095 } |
4278 | 4096 |
4279 #undef __ | 4097 #undef __ |
4280 | 4098 |
4281 } // namespace internal | 4099 } // namespace internal |
4282 } // namespace v8 | 4100 } // namespace v8 |
4283 | 4101 |
4284 #endif // V8_TARGET_ARCH_S390 | 4102 #endif // V8_TARGET_ARCH_S390 |