OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stubs.h" | 5 #include "src/code-stubs.h" |
6 | 6 |
7 #include "src/bailout-reason.h" | 7 #include "src/bailout-reason.h" |
8 #include "src/crankshaft/hydrogen.h" | 8 #include "src/crankshaft/hydrogen.h" |
9 #include "src/crankshaft/lithium.h" | 9 #include "src/crankshaft/lithium.h" |
10 #include "src/field-index.h" | 10 #include "src/field-index.h" |
(...skipping 76 matching lines...)
87 HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value); | 87 HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value); |
88 HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key, | 88 HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key, |
89 HValue* value); | 89 HValue* value); |
90 | 90 |
91 HValue* BuildArrayConstructor(ElementsKind kind, | 91 HValue* BuildArrayConstructor(ElementsKind kind, |
92 AllocationSiteOverrideMode override_mode, | 92 AllocationSiteOverrideMode override_mode, |
93 ArgumentClass argument_class); | 93 ArgumentClass argument_class); |
94 HValue* BuildInternalArrayConstructor(ElementsKind kind, | 94 HValue* BuildInternalArrayConstructor(ElementsKind kind, |
95 ArgumentClass argument_class); | 95 ArgumentClass argument_class); |
96 | 96 |
| 97 // BuildCheckAndInstallOptimizedCode emits code to install the optimized |
| 98 // function found in the optimized code map at map_index in js_function, if |
| 99 // the function at map_index matches the given native_context. Builder is |
| 100 // left in the "Then()" state after the install. |
| 101 void BuildCheckAndInstallOptimizedCode(HValue* js_function, |
| 102 HValue* native_context, |
| 103 IfBuilder* builder, |
| 104 HValue* optimized_map, |
| 105 HValue* map_index); |
| 106 void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context, |
| 107 HValue* code_object, HValue* literals); |
| 108 void BuildInstallCode(HValue* js_function, HValue* shared_info); |
| 109 |
| 110 HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map, |
| 111 HValue* iterator, |
| 112 int field_offset); |
| 113 void BuildInstallFromOptimizedCodeMap(HValue* js_function, |
| 114 HValue* shared_info, |
| 115 HValue* native_context); |
| 116 |
97 HValue* BuildToString(HValue* input, bool convert); | 117 HValue* BuildToString(HValue* input, bool convert); |
98 HValue* BuildToPrimitive(HValue* input, HValue* input_map); | 118 HValue* BuildToPrimitive(HValue* input, HValue* input_map); |
99 | 119 |
100 private: | 120 private: |
101 HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder); | 121 HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder); |
102 HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder, | 122 HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder, |
103 ElementsKind kind); | 123 ElementsKind kind); |
104 | 124 |
105 base::SmartArrayPointer<HParameter*> parameters_; | 125 base::SmartArrayPointer<HParameter*> parameters_; |
106 HValue* arguments_length_; | 126 HValue* arguments_length_; |
(...skipping 1730 matching lines...)
1837 template <> | 1857 template <> |
1838 HValue* CodeStubGraphBuilder<ToObjectStub>::BuildCodeStub() { | 1858 HValue* CodeStubGraphBuilder<ToObjectStub>::BuildCodeStub() { |
1839 HValue* receiver = GetParameter(ToObjectDescriptor::kReceiverIndex); | 1859 HValue* receiver = GetParameter(ToObjectDescriptor::kReceiverIndex); |
1840 return BuildToObject(receiver); | 1860 return BuildToObject(receiver); |
1841 } | 1861 } |
1842 | 1862 |
1843 | 1863 |
1844 Handle<Code> ToObjectStub::GenerateCode() { return DoGenerateCode(this); } | 1864 Handle<Code> ToObjectStub::GenerateCode() { return DoGenerateCode(this); } |
1845 | 1865 |
1846 | 1866 |
| 1867 void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode( |
| 1868 HValue* js_function, |
| 1869 HValue* native_context, |
| 1870 IfBuilder* builder, |
| 1871 HValue* optimized_map, |
| 1872 HValue* map_index) { |
| 1873 HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt()); |
| 1874 HValue* context_slot = LoadFromOptimizedCodeMap( |
| 1875 optimized_map, map_index, SharedFunctionInfo::kContextOffset); |
| 1876 context_slot = Add<HLoadNamedField>(context_slot, nullptr, |
| 1877 HObjectAccess::ForWeakCellValue()); |
| 1878 HValue* osr_ast_slot = LoadFromOptimizedCodeMap( |
| 1879 optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset); |
| 1880 HValue* code_object = LoadFromOptimizedCodeMap( |
| 1881 optimized_map, map_index, SharedFunctionInfo::kCachedCodeOffset); |
| 1882 code_object = Add<HLoadNamedField>(code_object, nullptr, |
| 1883 HObjectAccess::ForWeakCellValue()); |
| 1884 builder->If<HCompareObjectEqAndBranch>(native_context, |
| 1885 context_slot); |
| 1886 builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none); |
| 1887 builder->And(); |
| 1888 builder->IfNot<HCompareObjectEqAndBranch>(code_object, |
| 1889 graph()->GetConstant0()); |
| 1890 builder->Then(); |
| 1891 HValue* literals = LoadFromOptimizedCodeMap(optimized_map, |
| 1892 map_index, SharedFunctionInfo::kLiteralsOffset); |
| 1893 literals = Add<HLoadNamedField>(literals, nullptr, |
| 1894 HObjectAccess::ForWeakCellValue()); |
| 1895 IfBuilder maybe_deopt(this); |
| 1896 maybe_deopt.If<HCompareObjectEqAndBranch>(literals, graph()->GetConstant0()); |
| 1897 maybe_deopt.ThenDeopt(Deoptimizer::kLiteralsWereDisposed); |
| 1898 maybe_deopt.End(); |
| 1899 |
| 1900 BuildInstallOptimizedCode(js_function, native_context, code_object, literals); |
| 1901 |
| 1902 // The builder continues in the "then" after this function. |
| 1903 } |
| 1904 |
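A note on the contract stated in the header comment: the caller owns the IfBuilder, and this helper only emits the compare-and-install logic, leaving the builder positioned after Then(). A minimal usage sketch, mirroring the loop body later in this CL (loop_builder and the value names come from that caller; this is not new API):

  IfBuilder done_check(this);
  BuildCheckAndInstallOptimizedCode(js_function, native_context, &done_check,
                                    optimized_map, slot_iterator);
  // Only reached when an entry matched and its code was installed.
  loop_builder.Break();
  // done_check is finalized by the caller; in the code below it is simply
  // left to go out of scope.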
| 1905 |
| 1906 void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(HValue* js_function, |
| 1907 HValue* native_context, |
| 1908 HValue* code_object, |
| 1909 HValue* literals) { |
| 1910 Counters* counters = isolate()->counters(); |
| 1911 AddIncrementCounter(counters->fast_new_closure_install_optimized()); |
| 1912 |
| 1913 // TODO(fschneider): Idea: store proper code pointers in the optimized code |
| 1914 // map and either unmangle them on marking or do nothing as the whole map is |
| 1915 // discarded on major GC anyway. |
| 1916 Add<HStoreCodeEntry>(js_function, code_object); |
| 1917 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(), |
| 1918 literals); |
| 1919 |
| 1920 // Now link a function into a list of optimized functions. |
| 1921 HValue* optimized_functions_list = Add<HLoadNamedField>( |
| 1922 native_context, nullptr, |
| 1923 HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST)); |
| 1924 Add<HStoreNamedField>(js_function, |
| 1925 HObjectAccess::ForNextFunctionLinkPointer(), |
| 1926 optimized_functions_list); |
| 1927 |
| 1928 // This store is the only one that should have a write barrier. |
| 1929 Add<HStoreNamedField>(native_context, |
| 1930 HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST), |
| 1931 js_function); |
| 1932 } |
| 1933 |
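The two stores above are the usual singly-linked-list prepend; a conceptual model in plain C++ (hypothetical field names, not V8's object layout):

  struct Function { Function* next_function_link = nullptr; };
  struct NativeContext { Function* optimized_functions_list = nullptr; };

  // Prepend fn to the per-context list of optimized functions, threaded
  // through each function's next-function-link field.
  void LinkIntoOptimizedList(Function* fn, NativeContext* cx) {
    // No barrier needed here: the closure was just allocated in new space.
    fn->next_function_link = cx->optimized_functions_list;
    // The one store that needs a write barrier (see the comment above).
    cx->optimized_functions_list = fn;
  }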
| 1934 |
| 1935 void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function, |
| 1936 HValue* shared_info) { |
| 1937 Add<HStoreNamedField>(js_function, |
| 1938 HObjectAccess::ForNextFunctionLinkPointer(), |
| 1939 graph()->GetConstantUndefined()); |
| 1940 HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr, |
| 1941 HObjectAccess::ForCodeOffset()); |
| 1942 Add<HStoreCodeEntry>(js_function, code_object); |
| 1943 } |
| 1944 |
| 1945 |
| 1946 HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap( |
| 1947 HValue* optimized_map, |
| 1948 HValue* iterator, |
| 1949 int field_offset) { |
| 1950 // By making sure to express these loads in the form [<hvalue> + constant] |
| 1951 // the keyed load can be hoisted. |
| 1952 DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength); |
| 1953 HValue* field_slot = iterator; |
| 1954 if (field_offset > 0) { |
| 1955 HValue* field_offset_value = Add<HConstant>(field_offset); |
| 1956 field_slot = AddUncasted<HAdd>(iterator, field_offset_value); |
| 1957 } |
| 1958 HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot, |
| 1959 nullptr, nullptr, FAST_ELEMENTS); |
| 1960 return field_entry; |
| 1961 } |
| 1962 |
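Restating the addressing scheme the comment above describes (descriptive only, no new code path):

  // The optimized code map is a flat FixedArray. Inside the lookup loop,
  // each entry field is loaded as map[iterator + kFieldOffset]:
  //   context    -> map[iterator + SharedFunctionInfo::kContextOffset]
  //   osr ast id -> map[iterator + SharedFunctionInfo::kOsrAstIdOffset]
  //   code       -> map[iterator + SharedFunctionInfo::kCachedCodeOffset]
  //   literals   -> map[iterator + SharedFunctionInfo::kLiteralsOffset]
  // Only 'iterator' changes per iteration; the base array and the constant
  // offsets are loop-invariant, which is the uniform pattern the comment
  // says allows the keyed load to be hoisted.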
| 1963 |
| 1964 void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap( |
| 1965 HValue* js_function, |
| 1966 HValue* shared_info, |
| 1967 HValue* native_context) { |
| 1968 Counters* counters = isolate()->counters(); |
| 1969 Factory* factory = isolate()->factory(); |
| 1970 IfBuilder is_optimized(this); |
| 1971 HInstruction* optimized_map = Add<HLoadNamedField>( |
| 1972 shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap()); |
| 1973 HValue* null_constant = Add<HConstant>(0); |
| 1974 is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant); |
| 1975 is_optimized.Then(); |
| 1976 { |
| 1977 BuildInstallCode(js_function, shared_info); |
| 1978 } |
| 1979 is_optimized.Else(); |
| 1980 { |
| 1981 AddIncrementCounter(counters->fast_new_closure_try_optimized()); |
| 1982 // The {optimized_map} points to fixed array of 4-element entries: |
| 1983 // (native context, optimized code, literals, ast-id). |
| 1984 // Iterate through the {optimized_map} backwards. After the loop, if no |
| 1985 // matching optimized code was found, install unoptimized code. |
| 1986 // for(i = map.length() - SharedFunctionInfo::kEntryLength; |
| 1987 // i >= SharedFunctionInfo::kEntriesStart; |
| 1988 // i -= SharedFunctionInfo::kEntryLength) { ... } |
| 1989 HValue* first_entry_index = |
| 1990 Add<HConstant>(SharedFunctionInfo::kEntriesStart); |
| 1991 HValue* shared_function_entry_length = |
| 1992 Add<HConstant>(SharedFunctionInfo::kEntryLength); |
| 1993 LoopBuilder loop_builder(this, context(), LoopBuilder::kPostDecrement, |
| 1994 shared_function_entry_length); |
| 1995 HValue* array_length = Add<HLoadNamedField>( |
| 1996 optimized_map, nullptr, HObjectAccess::ForFixedArrayLength()); |
| 1997 HValue* start_pos = |
| 1998 AddUncasted<HSub>(array_length, shared_function_entry_length); |
| 1999 HValue* slot_iterator = |
| 2000 loop_builder.BeginBody(start_pos, first_entry_index, Token::GTE); |
| 2001 { |
| 2002 IfBuilder done_check(this); |
| 2003 BuildCheckAndInstallOptimizedCode(js_function, native_context, |
| 2004 &done_check, optimized_map, |
| 2005 slot_iterator); |
| 2006 // Fall out of the loop |
| 2007 loop_builder.Break(); |
| 2008 } |
| 2009 loop_builder.EndBody(); |
| 2010 |
| 2011 // If {slot_iterator} is less than the first entry index, then we failed to |
| 2012 // find a context-dependent code and try context-independent code next. |
| 2013 IfBuilder no_optimized_code_check(this); |
| 2014 no_optimized_code_check.If<HCompareNumericAndBranch>( |
| 2015 slot_iterator, first_entry_index, Token::LT); |
| 2016 no_optimized_code_check.Then(); |
| 2017 { |
| 2018 IfBuilder shared_code_check(this); |
| 2019 HValue* shared_code = |
| 2020 Add<HLoadNamedField>(optimized_map, nullptr, |
| 2021 HObjectAccess::ForOptimizedCodeMapSharedCode()); |
| 2022 shared_code = Add<HLoadNamedField>(shared_code, nullptr, |
| 2023 HObjectAccess::ForWeakCellValue()); |
| 2024 shared_code_check.IfNot<HCompareObjectEqAndBranch>( |
| 2025 shared_code, graph()->GetConstant0()); |
| 2026 shared_code_check.Then(); |
| 2027 { |
| 2028 // Store the context-independent optimized code. |
| 2029 HValue* literals = Add<HConstant>(factory->empty_fixed_array()); |
| 2030 BuildInstallOptimizedCode(js_function, native_context, shared_code, |
| 2031 literals); |
| 2032 } |
| 2033 shared_code_check.Else(); |
| 2034 { |
| 2035 // Store the unoptimized code. |
| 2036 BuildInstallCode(js_function, shared_info); |
| 2037 } |
| 2038 } |
| 2039 } |
| 2040 } |
| 2041 |
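Restated as ordinary control flow, the code-map lookup that BuildInstallFromOptimizedCodeMap emits looks roughly like the self-contained model below. This is a hedged sketch over made-up types; the entry layout, the backwards walk, and the fallback order (context-matching entry, then context-independent shared code, then the unoptimized code from the SharedFunctionInfo) are taken from the code above.

  #include <vector>

  // One 4-field code-map entry: (native context, code, literals, osr ast id),
  // where the first three are weak cells whose values may have been cleared.
  struct Entry {
    const void* context;   // nullptr models a cleared weak cell
    const void* code;
    const void* literals;
    int osr_ast_id;        // kNoOsrAstId for a non-OSR entry
  };
  constexpr int kNoOsrAstId = -1;

  // Returns the code to install: a context-matching, non-OSR, non-cleared
  // entry if one exists (searched from the end, as the stub does), otherwise
  // the shared context-independent code, otherwise nullptr, which stands for
  // "install the unoptimized code from the SharedFunctionInfo".
  const void* LookUpCodeToInstall(const std::vector<Entry>& map,
                                  const void* shared_code,
                                  const void* native_context) {
    for (auto it = map.rbegin(); it != map.rend(); ++it) {
      if (it->context == native_context && it->osr_ast_id == kNoOsrAstId &&
          it->code != nullptr) {
        return it->code;
      }
    }
    return shared_code;  // may itself be nullptr if that weak cell was cleared
  }

The stub differs only in representation: the map is a flat FixedArray walked with explicit index arithmetic, and installing a matching entry also stores the literals, deopting (kLiteralsWereDisposed) if the literals weak cell was cleared.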
| 2042 |
1847 template<> | 2043 template<> |
1848 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() { | 2044 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() { |
1849 Counters* counters = isolate()->counters(); | 2045 Counters* counters = isolate()->counters(); |
1850 Factory* factory = isolate()->factory(); | 2046 Factory* factory = isolate()->factory(); |
1851 HInstruction* empty_fixed_array = | 2047 HInstruction* empty_fixed_array = |
1852 Add<HConstant>(factory->empty_fixed_array()); | 2048 Add<HConstant>(factory->empty_fixed_array()); |
1853 HInstruction* empty_literals_array = | |
1854 Add<HConstant>(factory->empty_literals_array()); | |
1855 HValue* shared_info = GetParameter(0); | 2049 HValue* shared_info = GetParameter(0); |
1856 | 2050 |
1857 AddIncrementCounter(counters->fast_new_closure_total()); | 2051 AddIncrementCounter(counters->fast_new_closure_total()); |
1858 | 2052 |
1859 // Create a new closure from the given function info in new space | 2053 // Create a new closure from the given function info in new space |
1860 HValue* size = Add<HConstant>(JSFunction::kSize); | 2054 HValue* size = Add<HConstant>(JSFunction::kSize); |
1861 HInstruction* js_function = | 2055 HInstruction* js_function = |
1862 Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE); | 2056 Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE); |
1863 | 2057 |
1864 int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(), | 2058 int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(), |
1865 casted_stub()->kind()); | 2059 casted_stub()->kind()); |
1866 | 2060 |
1867 // Compute the function map in the current native context and set that | 2061 // Compute the function map in the current native context and set that |
1868 // as the map of the allocated object. | 2062 // as the map of the allocated object. |
1869 HInstruction* native_context = BuildGetNativeContext(); | 2063 HInstruction* native_context = BuildGetNativeContext(); |
1870 HInstruction* map_slot_value = Add<HLoadNamedField>( | 2064 HInstruction* map_slot_value = Add<HLoadNamedField>( |
1871 native_context, nullptr, HObjectAccess::ForContextSlot(map_index)); | 2065 native_context, nullptr, HObjectAccess::ForContextSlot(map_index)); |
1872 Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value); | 2066 Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value); |
1873 | 2067 |
1874 // Initialize the rest of the function. | 2068 // Initialize the rest of the function. |
1875 Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(), | 2069 Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(), |
1876 empty_fixed_array); | 2070 empty_fixed_array); |
1877 Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(), | 2071 Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(), |
1878 empty_fixed_array); | 2072 empty_fixed_array); |
1879 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(), | 2073 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(), |
1880 empty_literals_array); | 2074 empty_fixed_array); |
1881 Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(), | 2075 Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(), |
1882 graph()->GetConstantHole()); | 2076 graph()->GetConstantHole()); |
1883 Add<HStoreNamedField>( | 2077 Add<HStoreNamedField>( |
1884 js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info); | 2078 js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info); |
1885 Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(), | 2079 Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(), |
1886 context()); | 2080 context()); |
1887 Handle<Code> lazy_builtin( | 2081 |
1888 isolate()->builtins()->builtin(Builtins::kCompileLazy)); | 2082 // Initialize the code pointer in the function to be the one found in the |
1889 HConstant* lazy = Add<HConstant>(lazy_builtin); | 2083 // shared function info object. But first check if there is an optimized |
1890 Add<HStoreCodeEntry>(js_function, lazy); | 2084 // version for our context. |
1891 Add<HStoreNamedField>(js_function, | 2085 BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context); |
1892 HObjectAccess::ForNextFunctionLinkPointer(), | 2086 |
1893 graph()->GetConstantUndefined()); | |
1894 return js_function; | 2087 return js_function; |
1895 } | 2088 } |
1896 | 2089 |
1897 | 2090 |
1898 Handle<Code> FastNewClosureStub::GenerateCode() { | 2091 Handle<Code> FastNewClosureStub::GenerateCode() { |
1899 return DoGenerateCode(this); | 2092 return DoGenerateCode(this); |
1900 } | 2093 } |
1901 | 2094 |
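For reference, the stores in FastNewClosureStub::BuildCodeStub above initialize the new closure in this order (descriptive summary only; names follow the HObjectAccess helpers used, not a literal object layout):

  // map                      <- native_context[FunctionMapIndex(language_mode, kind)]
  // properties, elements     <- empty_fixed_array
  // literals                 <- empty_fixed_array (this side of the diff drops
  //                             the old empty_literals_array constant)
  // prototype_or_initial_map <- the hole
  // shared_function_info     <- shared_info (parameter 0)
  // context                  <- the current context
  // code entry               <- BuildInstallFromOptimizedCodeMap: optimized code
  //                             for this native context if the code map has a
  //                             match, else context-independent code, else the
  //                             unoptimized code; the unoptimized path sets
  //                             next-function-link to undefined, the optimized
  //                             paths link the closure into the context's
  //                             optimized-functions list.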
1902 | 2095 |
1903 template<> | 2096 template<> |
(...skipping 333 matching lines...)
2237 return Pop(); | 2430 return Pop(); |
2238 } | 2431 } |
2239 | 2432 |
2240 | 2433 |
2241 Handle<Code> KeyedLoadGenericStub::GenerateCode() { | 2434 Handle<Code> KeyedLoadGenericStub::GenerateCode() { |
2242 return DoGenerateCode(this); | 2435 return DoGenerateCode(this); |
2243 } | 2436 } |
2244 | 2437 |
2245 } // namespace internal | 2438 } // namespace internal |
2246 } // namespace v8 | 2439 } // namespace v8 |