OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/bailout-reason.h" | 7 #include "src/bailout-reason.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/field-index.h" | 9 #include "src/field-index.h" |
10 #include "src/hydrogen.h" | 10 #include "src/hydrogen.h" |
(...skipping 1947 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1958 shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap()); | 1958 shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap()); |
1959 HValue* null_constant = Add<HConstant>(0); | 1959 HValue* null_constant = Add<HConstant>(0); |
1960 is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant); | 1960 is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant); |
1961 is_optimized.Then(); | 1961 is_optimized.Then(); |
1962 { | 1962 { |
1963 BuildInstallCode(js_function, shared_info); | 1963 BuildInstallCode(js_function, shared_info); |
1964 } | 1964 } |
1965 is_optimized.Else(); | 1965 is_optimized.Else(); |
1966 { | 1966 { |
1967 AddIncrementCounter(counters->fast_new_closure_try_optimized()); | 1967 AddIncrementCounter(counters->fast_new_closure_try_optimized()); |
1968 // optimized_map points to fixed array of 3-element entries | 1968 // The {optimized_map} points to a fixed array of 4-element entries: |
1969 // (native context, optimized code, literals). | 1969 // (native context, optimized code, literals, ast-id). |
1970 // Map must never be empty, so check the first elements. | 1970 // Iterate through the {optimized_map} backwards. After the loop, if no |
| 1971 // matching optimized code was found, install unoptimized code. |
| 1972 // for(i = map.length() - SharedFunctionInfo::kEntryLength; |
| 1973 // i >= SharedFunctionInfo::kEntriesStart; |
| 1974 // i -= SharedFunctionInfo::kEntryLength) { ... } |
1971 HValue* first_entry_index = | 1975 HValue* first_entry_index = |
1972 Add<HConstant>(SharedFunctionInfo::kEntriesStart); | 1976 Add<HConstant>(SharedFunctionInfo::kEntriesStart); |
1973 IfBuilder already_in(this); | 1977 HValue* shared_function_entry_length = |
1974 BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in, | 1978 Add<HConstant>(SharedFunctionInfo::kEntryLength); |
1975 optimized_map, first_entry_index); | 1979 LoopBuilder loop_builder(this, context(), LoopBuilder::kPostDecrement, |
1976 already_in.Else(); | 1980 shared_function_entry_length); |
| 1981 HValue* array_length = Add<HLoadNamedField>( |
| 1982 optimized_map, nullptr, HObjectAccess::ForFixedArrayLength()); |
| 1983 HValue* start_pos = |
| 1984 AddUncasted<HSub>(array_length, shared_function_entry_length); |
| 1985 HValue* slot_iterator = |
| 1986 loop_builder.BeginBody(start_pos, first_entry_index, Token::GTE); |
1977 { | 1987 { |
1978 // Iterate through the rest of map backwards. Do not double check first | 1988 IfBuilder done_check(this); |
1979 // entry. After the loop, if no matching optimized code was found, | 1989 BuildCheckAndInstallOptimizedCode(js_function, native_context, |
1980 // install unoptimized code. | 1990 &done_check, optimized_map, |
1981 // for(i = map.length() - SharedFunctionInfo::kEntryLength; | 1991 slot_iterator); |
1982 // i > SharedFunctionInfo::kEntriesStart; | 1992 // Fall out of the loop |
1983 // i -= SharedFunctionInfo::kEntryLength) { .. } | 1993 loop_builder.Break(); |
1984 HValue* shared_function_entry_length = | 1994 } |
1985 Add<HConstant>(SharedFunctionInfo::kEntryLength); | 1995 loop_builder.EndBody(); |
1986 LoopBuilder loop_builder(this, | 1996 |
1987 context(), | 1997 // If {slot_iterator} is less than the first entry index, then we failed to |
1988 LoopBuilder::kPostDecrement, | 1998 // find context-dependent code, so we try context-independent code next. |
1989 shared_function_entry_length); | 1999 IfBuilder no_optimized_code_check(this); |
1990 HValue* array_length = Add<HLoadNamedField>( | 2000 no_optimized_code_check.If<HCompareNumericAndBranch>( |
1991 optimized_map, nullptr, HObjectAccess::ForFixedArrayLength()); | 2001 slot_iterator, first_entry_index, Token::LT); |
1992 HValue* start_pos = AddUncasted<HSub>(array_length, | 2002 no_optimized_code_check.Then(); |
1993 shared_function_entry_length); | 2003 { |
1994 HValue* slot_iterator = loop_builder.BeginBody(start_pos, | 2004 IfBuilder shared_code_check(this); |
1995 first_entry_index, | 2005 HValue* shared_code = |
1996 Token::GT); | 2006 Add<HLoadNamedField>(optimized_map, nullptr, |
| 2007 HObjectAccess::ForOptimizedCodeMapSharedCode()); |
| 2008 shared_code_check.IfNot<HCompareObjectEqAndBranch>( |
| 2009 shared_code, graph()->GetConstantUndefined()); |
| 2010 shared_code_check.Then(); |
1997 { | 2011 { |
1998 IfBuilder done_check(this); | 2012 // Store the context-independent optimized code. |
1999 BuildCheckAndInstallOptimizedCode(js_function, native_context, | 2013 HValue* literals = Add<HConstant>(factory->empty_fixed_array()); |
2000 &done_check, | 2014 BuildInstallOptimizedCode(js_function, native_context, shared_code, |
2001 optimized_map, | 2015 literals); |
2002 slot_iterator); | |
2003 // Fall out of the loop | |
2004 loop_builder.Break(); | |
2005 } | 2016 } |
2006 loop_builder.EndBody(); | 2017 shared_code_check.Else(); |
2007 | |
2008 // If slot_iterator equals first entry index, then we failed to find a | |
2009 // context-dependent code and try context-independent code next. | |
2010 IfBuilder no_optimized_code_check(this); | |
2011 no_optimized_code_check.If<HCompareNumericAndBranch>( | |
2012 slot_iterator, first_entry_index, Token::EQ); | |
2013 no_optimized_code_check.Then(); | |
2014 { | 2018 { |
2015 IfBuilder shared_code_check(this); | 2019 // Store the unoptimized code. |
2016 HValue* shared_code = Add<HLoadNamedField>( | 2020 BuildInstallCode(js_function, shared_info); |
2017 optimized_map, nullptr, | |
2018 HObjectAccess::ForOptimizedCodeMapSharedCode()); | |
2019 shared_code_check.IfNot<HCompareObjectEqAndBranch>( | |
2020 shared_code, graph()->GetConstantUndefined()); | |
2021 shared_code_check.Then(); | |
2022 { | |
2023 // Store the context-independent optimized code. | |
2024 HValue* literals = Add<HConstant>(factory->empty_fixed_array()); | |
2025 BuildInstallOptimizedCode(js_function, native_context, shared_code, | |
2026 literals); | |
2027 } | |
2028 shared_code_check.Else(); | |
2029 { | |
2030 // Store the unoptimized code. | |
2031 BuildInstallCode(js_function, shared_info); | |
2032 } | |
2033 } | 2021 } |
2034 } | 2022 } |
2035 } | 2023 } |
2036 } | 2024 } |
2037 | 2025 |
2038 | 2026 |
2039 template<> | 2027 template<> |
2040 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() { | 2028 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() { |
2041 Counters* counters = isolate()->counters(); | 2029 Counters* counters = isolate()->counters(); |
2042 Factory* factory = isolate()->factory(); | 2030 Factory* factory = isolate()->factory(); |
(...skipping 444 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2487 return Pop(); | 2475 return Pop(); |
2488 } | 2476 } |
2489 | 2477 |
2490 | 2478 |
2491 Handle<Code> KeyedLoadGenericStub::GenerateCode() { | 2479 Handle<Code> KeyedLoadGenericStub::GenerateCode() { |
2492 return DoGenerateCode(this); | 2480 return DoGenerateCode(this); |
2493 } | 2481 } |
2494 | 2482 |
2495 } // namespace internal | 2483 } // namespace internal |
2496 } // namespace v8 | 2484 } // namespace v8 |
OLD | NEW |