OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stubs.h" | 5 #include "src/code-stubs.h" |
6 | 6 |
7 #include "src/bailout-reason.h" | 7 #include "src/bailout-reason.h" |
8 #include "src/crankshaft/hydrogen.h" | 8 #include "src/crankshaft/hydrogen.h" |
9 #include "src/crankshaft/lithium.h" | 9 #include "src/crankshaft/lithium.h" |
10 #include "src/field-index.h" | 10 #include "src/field-index.h" |
(...skipping 76 matching lines...)
87 HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value); | 87 HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value); |
88 HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key, | 88 HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key, |
89 HValue* value); | 89 HValue* value); |
90 | 90 |
91 HValue* BuildArrayConstructor(ElementsKind kind, | 91 HValue* BuildArrayConstructor(ElementsKind kind, |
92 AllocationSiteOverrideMode override_mode, | 92 AllocationSiteOverrideMode override_mode, |
93 ArgumentClass argument_class); | 93 ArgumentClass argument_class); |
94 HValue* BuildInternalArrayConstructor(ElementsKind kind, | 94 HValue* BuildInternalArrayConstructor(ElementsKind kind, |
95 ArgumentClass argument_class); | 95 ArgumentClass argument_class); |
96 | 96 |
| 97   // BuildCheckAndInstallOptimizedCode emits code to install the optimized |
| 98   // code found in the optimized code map at map_index into js_function, if |
| 99   // the entry at map_index matches the given native_context. The builder is |
| 100   // left in the "Then()" state after the install. |
| 101 void BuildCheckAndInstallOptimizedCode(HValue* js_function, |
| 102 HValue* native_context, |
| 103 IfBuilder* builder, |
| 104 HValue* optimized_map, |
| 105 HValue* map_index); |
| 106 void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context, |
| 107 HValue* code_object, HValue* literals); |
| 108 void BuildInstallCode(HValue* js_function, HValue* shared_info); |
| 109 |
| 110 HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map, |
| 111 HValue* iterator, |
| 112 int field_offset); |
| 113 void BuildInstallFromOptimizedCodeMap(HValue* js_function, |
| 114 HValue* shared_info, |
| 115 HValue* native_context); |
| 116 |
97 HValue* BuildToString(HValue* input, bool convert); | 117 HValue* BuildToString(HValue* input, bool convert); |
98 HValue* BuildToPrimitive(HValue* input, HValue* input_map); | 118 HValue* BuildToPrimitive(HValue* input, HValue* input_map); |
99 | 119 |
100 private: | 120 private: |
101 HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder); | 121 HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder); |
102 HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder, | 122 HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder, |
103 ElementsKind kind); | 123 ElementsKind kind); |
104 | 124 |
105 base::SmartArrayPointer<HParameter*> parameters_; | 125 base::SmartArrayPointer<HParameter*> parameters_; |
106 HValue* arguments_length_; | 126 HValue* arguments_length_; |
(...skipping 1728 matching lines...)
1835 template <> | 1855 template <> |
1836 HValue* CodeStubGraphBuilder<ToObjectStub>::BuildCodeStub() { | 1856 HValue* CodeStubGraphBuilder<ToObjectStub>::BuildCodeStub() { |
1837 HValue* receiver = GetParameter(ToObjectDescriptor::kReceiverIndex); | 1857 HValue* receiver = GetParameter(ToObjectDescriptor::kReceiverIndex); |
1838 return BuildToObject(receiver); | 1858 return BuildToObject(receiver); |
1839 } | 1859 } |
1840 | 1860 |
1841 | 1861 |
1842 Handle<Code> ToObjectStub::GenerateCode() { return DoGenerateCode(this); } | 1862 Handle<Code> ToObjectStub::GenerateCode() { return DoGenerateCode(this); } |
1843 | 1863 |
1844 | 1864 |
| 1865 void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode( |
| 1866 HValue* js_function, |
| 1867 HValue* native_context, |
| 1868 IfBuilder* builder, |
| 1869 HValue* optimized_map, |
| 1870 HValue* map_index) { |
| 1871 HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt()); |
| 1872 HValue* context_slot = LoadFromOptimizedCodeMap( |
| 1873 optimized_map, map_index, SharedFunctionInfo::kContextOffset); |
| 1874 context_slot = Add<HLoadNamedField>(context_slot, nullptr, |
| 1875 HObjectAccess::ForWeakCellValue()); |
| 1876 HValue* osr_ast_slot = LoadFromOptimizedCodeMap( |
| 1877 optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset); |
| 1878 HValue* code_object = LoadFromOptimizedCodeMap( |
| 1879 optimized_map, map_index, SharedFunctionInfo::kCachedCodeOffset); |
| 1880 code_object = Add<HLoadNamedField>(code_object, nullptr, |
| 1881 HObjectAccess::ForWeakCellValue()); |
| 1882 builder->If<HCompareObjectEqAndBranch>(native_context, |
| 1883 context_slot); |
| 1884 builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none); |
| 1885 builder->And(); |
| 1886 builder->IfNot<HCompareObjectEqAndBranch>(code_object, |
| 1887 graph()->GetConstant0()); |
| 1888 builder->Then(); |
| 1889 HValue* literals = LoadFromOptimizedCodeMap(optimized_map, |
| 1890 map_index, SharedFunctionInfo::kLiteralsOffset); |
| 1891 literals = Add<HLoadNamedField>(literals, nullptr, |
| 1892 HObjectAccess::ForWeakCellValue()); |
| 1893 IfBuilder maybe_deopt(this); |
| 1894 maybe_deopt.If<HCompareObjectEqAndBranch>(literals, graph()->GetConstant0()); |
| 1895 maybe_deopt.ThenDeopt(Deoptimizer::kLiteralsWereDisposed); |
| 1896 maybe_deopt.End(); |
| 1897 |
| 1898 BuildInstallOptimizedCode(js_function, native_context, code_object, literals); |
| 1899 |
| 1900 // The builder continues in the "then" after this function. |
| 1901 } |
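
As a reading aid, the IfBuilder calls above construct roughly the following per-entry check. This is a standalone sketch, not V8 code: Entry, Outcome, and CheckAndInstall are illustrative names, and a cleared weak cell is modeled as nullptr (the stub compares against Smi 0).

// Sketch of the condition graph built by BuildCheckAndInstallOptimizedCode.
struct Entry {
  const void* context;   // weak cell payload: the entry's native context
  int osr_ast_id;        // BailoutId::None() for non-OSR code
  const void* code;      // weak cell payload: the cached optimized code
  const void* literals;  // weak cell payload: the literals array
};

enum class Outcome { kInstalled, kDeopt, kNoMatch };

Outcome CheckAndInstall(const Entry& e, const void* native_context,
                        int osr_ast_id_none) {
  if (e.context == native_context && e.osr_ast_id == osr_ast_id_none &&
      e.code != nullptr) {
    if (e.literals == nullptr) {
      return Outcome::kDeopt;  // Deoptimizer::kLiteralsWereDisposed
    }
    // BuildInstallOptimizedCode(js_function, native_context, code, literals);
    return Outcome::kInstalled;
  }
  return Outcome::kNoMatch;  // the caller's loop advances to the next entry
}
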
| 1902 |
| 1903 |
| 1904 void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(HValue* js_function, |
| 1905 HValue* native_context, |
| 1906 HValue* code_object, |
| 1907 HValue* literals) { |
| 1908 Counters* counters = isolate()->counters(); |
| 1909 AddIncrementCounter(counters->fast_new_closure_install_optimized()); |
| 1910 |
| 1911 // TODO(fschneider): Idea: store proper code pointers in the optimized code |
| 1912 // map and either unmangle them on marking or do nothing as the whole map is |
| 1913 // discarded on major GC anyway. |
| 1914 Add<HStoreCodeEntry>(js_function, code_object); |
| 1915 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(), |
| 1916 literals); |
| 1917 |
| 1918   // Now link the function into the native context's list of optimized functions. |
| 1919 HValue* optimized_functions_list = Add<HLoadNamedField>( |
| 1920 native_context, nullptr, |
| 1921 HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST)); |
| 1922 Add<HStoreNamedField>(js_function, |
| 1923 HObjectAccess::ForNextFunctionLinkPointer(), |
| 1924 optimized_functions_list); |
| 1925 |
| 1926 // This store is the only one that should have a write barrier. |
| 1927 Add<HStoreNamedField>(native_context, |
| 1928 HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST), |
| 1929 js_function); |
| 1930 } |
| 1931 |
| 1932 |
| 1933 void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function, |
| 1934 HValue* shared_info) { |
| 1935 Add<HStoreNamedField>(js_function, |
| 1936 HObjectAccess::ForNextFunctionLinkPointer(), |
| 1937 graph()->GetConstantUndefined()); |
| 1938 HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr, |
| 1939 HObjectAccess::ForCodeOffset()); |
| 1940 Add<HStoreCodeEntry>(js_function, code_object); |
| 1941 } |
| 1942 |
| 1943 |
| 1944 HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap( |
| 1945 HValue* optimized_map, |
| 1946 HValue* iterator, |
| 1947 int field_offset) { |
| 1948 // By making sure to express these loads in the form [<hvalue> + constant] |
| 1949 // the keyed load can be hoisted. |
| 1950 DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength); |
| 1951 HValue* field_slot = iterator; |
| 1952 if (field_offset > 0) { |
| 1953 HValue* field_offset_value = Add<HConstant>(field_offset); |
| 1954 field_slot = AddUncasted<HAdd>(iterator, field_offset_value); |
| 1955 } |
| 1956 HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot, |
| 1957 nullptr, nullptr, FAST_ELEMENTS); |
| 1958 return field_entry; |
| 1959 } |
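
In other words, each field is the FixedArray element at iterator + field_offset, and keeping the index in that [base + constant] shape is what lets the keyed load be hoisted out of the search loop. A standalone model of the addressing follows; LoadField and the std::vector-backed map are illustrative stand-ins, with kEntryLength mirroring SharedFunctionInfo::kEntryLength (4, per the layout comment further down).

#include <cassert>
#include <vector>

// Illustrative model of LoadFromOptimizedCodeMap's addressing: the optimized
// code map is treated as a flat array of 4-element entries
// (context, code, literals, ast-id).
constexpr int kEntryLength = 4;

int LoadField(const std::vector<int>& optimized_map, int iterator,
              int field_offset) {
  assert(field_offset >= 0 && field_offset < kEntryLength);
  // [iterator + constant]: the same form the Hydrogen graph emits so the
  // keyed load can be hoisted.
  return optimized_map[iterator + field_offset];
}
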
| 1960 |
| 1961 |
| 1962 void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap( |
| 1963 HValue* js_function, |
| 1964 HValue* shared_info, |
| 1965 HValue* native_context) { |
| 1966 Counters* counters = isolate()->counters(); |
| 1967 Factory* factory = isolate()->factory(); |
| 1968 IfBuilder is_optimized(this); |
| 1969 HInstruction* optimized_map = Add<HLoadNamedField>( |
| 1970 shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap()); |
| 1971 HValue* null_constant = Add<HConstant>(0); |
| 1972 is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant); |
| 1973 is_optimized.Then(); |
| 1974 { |
| 1975 BuildInstallCode(js_function, shared_info); |
| 1976 } |
| 1977 is_optimized.Else(); |
| 1978 { |
| 1979 AddIncrementCounter(counters->fast_new_closure_try_optimized()); |
| 1980     // The {optimized_map} points to a fixed array of 4-element entries: |
| 1981 // (native context, optimized code, literals, ast-id). |
| 1982 // Iterate through the {optimized_map} backwards. After the loop, if no |
| 1983 // matching optimized code was found, install unoptimized code. |
| 1984 // for(i = map.length() - SharedFunctionInfo::kEntryLength; |
| 1985 // i >= SharedFunctionInfo::kEntriesStart; |
| 1986 // i -= SharedFunctionInfo::kEntryLength) { ... } |
| 1987 HValue* first_entry_index = |
| 1988 Add<HConstant>(SharedFunctionInfo::kEntriesStart); |
| 1989 HValue* shared_function_entry_length = |
| 1990 Add<HConstant>(SharedFunctionInfo::kEntryLength); |
| 1991 LoopBuilder loop_builder(this, context(), LoopBuilder::kPostDecrement, |
| 1992 shared_function_entry_length); |
| 1993 HValue* array_length = Add<HLoadNamedField>( |
| 1994 optimized_map, nullptr, HObjectAccess::ForFixedArrayLength()); |
| 1995 HValue* start_pos = |
| 1996 AddUncasted<HSub>(array_length, shared_function_entry_length); |
| 1997 HValue* slot_iterator = |
| 1998 loop_builder.BeginBody(start_pos, first_entry_index, Token::GTE); |
| 1999 { |
| 2000 IfBuilder done_check(this); |
| 2001 BuildCheckAndInstallOptimizedCode(js_function, native_context, |
| 2002 &done_check, optimized_map, |
| 2003 slot_iterator); |
| 2004 // Fall out of the loop |
| 2005 loop_builder.Break(); |
| 2006 } |
| 2007 loop_builder.EndBody(); |
| 2008 |
| 2009     // If {slot_iterator} is less than the first entry index, then we failed to |
| 2010     // find context-dependent code, so try the context-independent code next. |
| 2011 IfBuilder no_optimized_code_check(this); |
| 2012 no_optimized_code_check.If<HCompareNumericAndBranch>( |
| 2013 slot_iterator, first_entry_index, Token::LT); |
| 2014 no_optimized_code_check.Then(); |
| 2015 { |
| 2016 IfBuilder shared_code_check(this); |
| 2017 HValue* shared_code = |
| 2018 Add<HLoadNamedField>(optimized_map, nullptr, |
| 2019 HObjectAccess::ForOptimizedCodeMapSharedCode()); |
| 2020 shared_code = Add<HLoadNamedField>(shared_code, nullptr, |
| 2021 HObjectAccess::ForWeakCellValue()); |
| 2022 shared_code_check.IfNot<HCompareObjectEqAndBranch>( |
| 2023 shared_code, graph()->GetConstant0()); |
| 2024 shared_code_check.Then(); |
| 2025 { |
| 2026 // Store the context-independent optimized code. |
| 2027 HValue* literals = Add<HConstant>(factory->empty_fixed_array()); |
| 2028 BuildInstallOptimizedCode(js_function, native_context, shared_code, |
| 2029 literals); |
| 2030 } |
| 2031 shared_code_check.Else(); |
| 2032 { |
| 2033 // Store the unoptimized code. |
| 2034 BuildInstallCode(js_function, shared_info); |
| 2035 } |
| 2036 } |
| 2037 } |
| 2038 } |
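
Putting the pieces together, the install path built above searches the code map newest-to-oldest for a context-matching entry, then falls back to the context-independent (shared) code, and finally to the unoptimized code from the SharedFunctionInfo. A standalone sketch of that priority order; MapEntry, OptimizedCodeMap, and SelectCodeToInstall are assumed names, not V8 types, and a cleared weak cell is again modeled as nullptr.

#include <vector>

struct MapEntry {
  const void* context;
  const void* code;
};

struct OptimizedCodeMap {
  const void* shared_code = nullptr;  // context-independent code, may be cleared
  std::vector<MapEntry> entries;      // newer entries are appended at the end
};

// Returns the code to install: a context-matching entry if one exists,
// otherwise the shared code, otherwise nullptr, meaning "use the unoptimized
// code from the SharedFunctionInfo" (BuildInstallCode above).
const void* SelectCodeToInstall(const OptimizedCodeMap& map,
                                const void* native_context) {
  for (auto it = map.entries.rbegin(); it != map.entries.rend(); ++it) {
    // Per-entry OSR and literals handling is shown in the CheckAndInstall
    // sketch earlier and omitted here.
    if (it->context == native_context && it->code != nullptr) return it->code;
  }
  return map.shared_code;
}
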
| 2039 |
| 2040 |
1845 template<> | 2041 template<> |
1846 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() { | 2042 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() { |
1847 Counters* counters = isolate()->counters(); | 2043 Counters* counters = isolate()->counters(); |
1848 Factory* factory = isolate()->factory(); | 2044 Factory* factory = isolate()->factory(); |
1849 HInstruction* empty_fixed_array = | 2045 HInstruction* empty_fixed_array = |
1850 Add<HConstant>(factory->empty_fixed_array()); | 2046 Add<HConstant>(factory->empty_fixed_array()); |
1851 HInstruction* empty_literals_array = | |
1852 Add<HConstant>(factory->empty_literals_array()); | |
1853 HValue* shared_info = GetParameter(0); | 2047 HValue* shared_info = GetParameter(0); |
1854 | 2048 |
1855 AddIncrementCounter(counters->fast_new_closure_total()); | 2049 AddIncrementCounter(counters->fast_new_closure_total()); |
1856 | 2050 |
1857 // Create a new closure from the given function info in new space | 2051 // Create a new closure from the given function info in new space |
1858 HValue* size = Add<HConstant>(JSFunction::kSize); | 2052 HValue* size = Add<HConstant>(JSFunction::kSize); |
1859 HInstruction* js_function = | 2053 HInstruction* js_function = |
1860 Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE); | 2054 Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE); |
1861 | 2055 |
1862 int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(), | 2056 int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(), |
1863 casted_stub()->kind()); | 2057 casted_stub()->kind()); |
1864 | 2058 |
1865 // Compute the function map in the current native context and set that | 2059 // Compute the function map in the current native context and set that |
1866 // as the map of the allocated object. | 2060 // as the map of the allocated object. |
1867 HInstruction* native_context = BuildGetNativeContext(); | 2061 HInstruction* native_context = BuildGetNativeContext(); |
1868 HInstruction* map_slot_value = Add<HLoadNamedField>( | 2062 HInstruction* map_slot_value = Add<HLoadNamedField>( |
1869 native_context, nullptr, HObjectAccess::ForContextSlot(map_index)); | 2063 native_context, nullptr, HObjectAccess::ForContextSlot(map_index)); |
1870 Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value); | 2064 Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value); |
1871 | 2065 |
1872 // Initialize the rest of the function. | 2066 // Initialize the rest of the function. |
1873 Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(), | 2067 Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(), |
1874 empty_fixed_array); | 2068 empty_fixed_array); |
1875 Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(), | 2069 Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(), |
1876 empty_fixed_array); | 2070 empty_fixed_array); |
1877 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(), | 2071 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(), |
1878 empty_literals_array); | 2072 empty_fixed_array); |
1879 Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(), | 2073 Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(), |
1880 graph()->GetConstantHole()); | 2074 graph()->GetConstantHole()); |
1881 Add<HStoreNamedField>( | 2075 Add<HStoreNamedField>( |
1882 js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info); | 2076 js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info); |
1883 Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(), | 2077 Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(), |
1884 context()); | 2078 context()); |
1885 Handle<Code> lazy_builtin( | 2079 |
1886 isolate()->builtins()->builtin(Builtins::kCompileLazy)); | 2080 // Initialize the code pointer in the function to be the one found in the |
1887 HConstant* lazy = Add<HConstant>(lazy_builtin); | 2081 // shared function info object. But first check if there is an optimized |
1888 Add<HStoreCodeEntry>(js_function, lazy); | 2082 // version for our context. |
1889 Add<HStoreNamedField>(js_function, | 2083 BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context); |
1890 HObjectAccess::ForNextFunctionLinkPointer(), | 2084 |
1891 graph()->GetConstantUndefined()); | |
1892 return js_function; | 2085 return js_function; |
1893 } | 2086 } |
1894 | 2087 |
1895 | 2088 |
1896 Handle<Code> FastNewClosureStub::GenerateCode() { | 2089 Handle<Code> FastNewClosureStub::GenerateCode() { |
1897 return DoGenerateCode(this); | 2090 return DoGenerateCode(this); |
1898 } | 2091 } |
1899 | 2092 |
1900 | 2093 |
1901 template<> | 2094 template<> |
(...skipping 333 matching lines...)
2235 return Pop(); | 2428 return Pop(); |
2236 } | 2429 } |
2237 | 2430 |
2238 | 2431 |
2239 Handle<Code> KeyedLoadGenericStub::GenerateCode() { | 2432 Handle<Code> KeyedLoadGenericStub::GenerateCode() { |
2240 return DoGenerateCode(this); | 2433 return DoGenerateCode(this); |
2241 } | 2434 } |
2242 | 2435 |
2243 } // namespace internal | 2436 } // namespace internal |
2244 } // namespace v8 | 2437 } // namespace v8 |