| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler.h" | 5 #include "src/compiler.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 #include <memory> | 8 #include <memory> |
| 9 | 9 |
| 10 #include "src/asmjs/asm-js.h" | 10 #include "src/asmjs/asm-js.h" |
| (...skipping 613 matching lines...) |
| 624 return info->code(); | 624 return info->code(); |
| 625 } | 625 } |
| 626 | 626 |
| 627 MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap( | 627 MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap( |
| 628 Handle<JSFunction> function, BailoutId osr_ast_id) { | 628 Handle<JSFunction> function, BailoutId osr_ast_id) { |
| 629 RuntimeCallTimerScope runtimeTimer( | 629 RuntimeCallTimerScope runtimeTimer( |
| 630 function->GetIsolate(), | 630 function->GetIsolate(), |
| 631 &RuntimeCallStats::CompileGetFromOptimizedCodeMap); | 631 &RuntimeCallStats::CompileGetFromOptimizedCodeMap); |
| 632 Handle<SharedFunctionInfo> shared(function->shared()); | 632 Handle<SharedFunctionInfo> shared(function->shared()); |
| 633 DisallowHeapAllocation no_gc; | 633 DisallowHeapAllocation no_gc; |
| 634 CodeAndLiterals cached = shared->SearchOptimizedCodeMap( | 634 CodeAndVector cached = shared->SearchOptimizedCodeMap( |
| 635 function->context()->native_context(), osr_ast_id); | 635 function->context()->native_context(), osr_ast_id); |
| 636 if (cached.code != nullptr) { | 636 if (cached.code != nullptr) { |
| 637 // Caching of optimized code enabled and optimized code found. | 637 // Caching of optimized code enabled and optimized code found. |
| 638 if (cached.literals != nullptr) function->set_literals(cached.literals); | 638 if (cached.vector != nullptr) function->set_feedback_vector(cached.vector); |
| 639 DCHECK(!cached.code->marked_for_deoptimization()); | 639 DCHECK(!cached.code->marked_for_deoptimization()); |
| 640 DCHECK(function->shared()->is_compiled()); | 640 DCHECK(function->shared()->is_compiled()); |
| 641 return Handle<Code>(cached.code); | 641 return Handle<Code>(cached.code); |
| 642 } | 642 } |
| 643 return MaybeHandle<Code>(); | 643 return MaybeHandle<Code>(); |
| 644 } | 644 } |
| 645 | 645 |
| 646 void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) { | 646 void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) { |
| 647 Handle<Code> code = info->code(); | 647 Handle<Code> code = info->code(); |
| 648 if (code->kind() != Code::OPTIMIZED_FUNCTION) return; // Nothing to do. | 648 if (code->kind() != Code::OPTIMIZED_FUNCTION) return; // Nothing to do. |
| 649 | 649 |
| 650 // Function context specialization folds-in the function context, | 650 // Function context specialization folds-in the function context, |
| 651 // so no sharing can occur. | 651 // so no sharing can occur. |
| 652 if (info->is_function_context_specializing()) return; | 652 if (info->is_function_context_specializing()) return; |
| 653 // Frame specialization implies function context specialization. | 653 // Frame specialization implies function context specialization. |
| 654 DCHECK(!info->is_frame_specializing()); | 654 DCHECK(!info->is_frame_specializing()); |
| 655 | 655 |
| 656 // TODO(4764): When compiling for OSR from bytecode, BailoutId might derive | 656 // TODO(4764): When compiling for OSR from bytecode, BailoutId might derive |
| 657 // from bytecode offset and overlap with actual BailoutId. No caching! | 657 // from bytecode offset and overlap with actual BailoutId. No caching! |
| 658 if (info->is_osr() && info->is_optimizing_from_bytecode()) return; | 658 if (info->is_osr() && info->is_optimizing_from_bytecode()) return; |
| 659 | 659 |
| 660 // Cache optimized context-specific code. | 660 // Cache optimized context-specific code. |
| 661 Handle<JSFunction> function = info->closure(); | 661 Handle<JSFunction> function = info->closure(); |
| 662 Handle<SharedFunctionInfo> shared(function->shared()); | 662 Handle<SharedFunctionInfo> shared(function->shared()); |
| 663 Handle<LiteralsArray> literals(function->literals()); | 663 Handle<TypeFeedbackVector> vector(function->feedback_vector()); |
| 664 Handle<Context> native_context(function->context()->native_context()); | 664 Handle<Context> native_context(function->context()->native_context()); |
| 665 SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code, | 665 SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code, |
| 666 literals, info->osr_ast_id()); | 666 vector, info->osr_ast_id()); |
| 667 } | 667 } |
| 668 | 668 |
| 669 bool GetOptimizedCodeNow(CompilationJob* job) { | 669 bool GetOptimizedCodeNow(CompilationJob* job) { |
| 670 CompilationInfo* info = job->info(); | 670 CompilationInfo* info = job->info(); |
| 671 Isolate* isolate = info->isolate(); | 671 Isolate* isolate = info->isolate(); |
| 672 | 672 |
| 673 // Parsing is not required when optimizing from existing bytecode. | 673 // Parsing is not required when optimizing from existing bytecode. |
| 674 if (!info->is_optimizing_from_bytecode()) { | 674 if (!info->is_optimizing_from_bytecode()) { |
| 675 if (!Compiler::ParseAndAnalyze(info->parse_info())) return false; | 675 if (!Compiler::ParseAndAnalyze(info->parse_info())) return false; |
| 676 EnsureFeedbackMetadata(info); | 676 EnsureFeedbackMetadata(info); |
| (...skipping 1104 matching lines...) |
| 1781 void Compiler::PostInstantiation(Handle<JSFunction> function, | 1781 void Compiler::PostInstantiation(Handle<JSFunction> function, |
| 1782 PretenureFlag pretenure) { | 1782 PretenureFlag pretenure) { |
| 1783 Handle<SharedFunctionInfo> shared(function->shared()); | 1783 Handle<SharedFunctionInfo> shared(function->shared()); |
| 1784 | 1784 |
| 1785 if (FLAG_always_opt && shared->allows_lazy_compilation() && | 1785 if (FLAG_always_opt && shared->allows_lazy_compilation() && |
| 1786 !function->shared()->HasAsmWasmData() && | 1786 !function->shared()->HasAsmWasmData() && |
| 1787 function->shared()->is_compiled()) { | 1787 function->shared()->is_compiled()) { |
| 1788 function->MarkForOptimization(); | 1788 function->MarkForOptimization(); |
| 1789 } | 1789 } |
| 1790 | 1790 |
| 1791 CodeAndLiterals cached = shared->SearchOptimizedCodeMap( | 1791 CodeAndVector cached = shared->SearchOptimizedCodeMap( |
| 1792 function->context()->native_context(), BailoutId::None()); | 1792 function->context()->native_context(), BailoutId::None()); |
| 1793 if (cached.code != nullptr) { | 1793 if (cached.code != nullptr) { |
| 1794 // Caching of optimized code enabled and optimized code found. | 1794 // Caching of optimized code enabled and optimized code found. |
| 1795 DCHECK(!cached.code->marked_for_deoptimization()); | 1795 DCHECK(!cached.code->marked_for_deoptimization()); |
| 1796 DCHECK(function->shared()->is_compiled()); | 1796 DCHECK(function->shared()->is_compiled()); |
| 1797 function->ReplaceCode(cached.code); | 1797 function->ReplaceCode(cached.code); |
| 1798 } | 1798 } |
| 1799 | 1799 |
| 1800 if (cached.literals != nullptr) { | 1800 if (cached.vector != nullptr) { |
| 1801 DCHECK(shared->is_compiled()); | 1801 DCHECK(shared->is_compiled()); |
| 1802 function->set_literals(cached.literals); | 1802 function->set_feedback_vector(cached.vector); |
| 1803 } else if (shared->is_compiled()) { | 1803 } else if (shared->is_compiled()) { |
| 1804 // TODO(mvstanton): pass pretenure flag to EnsureLiterals. | 1804 // TODO(mvstanton): pass pretenure flag to EnsureLiterals. |
| 1805 JSFunction::EnsureLiterals(function); | 1805 JSFunction::EnsureLiterals(function); |
| 1806 } | 1806 } |
| 1807 } | 1807 } |
| 1808 | 1808 |
| 1809 } // namespace internal | 1809 } // namespace internal |
| 1810 } // namespace v8 | 1810 } // namespace v8 |
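
For reference, the pattern the callers move to in this change: the optimized code map is keyed by (native context, OSR BailoutId) and now pairs the optimized code with the function's feedback vector rather than a literals array. Below is a minimal sketch of that shape, assuming CodeAndVector is a plain pair of raw pointers (as the nullptr checks in GetCodeFromOptimizedCodeMap suggest); it is an illustration, not the actual V8 declarations.

// Sketch only: type and member names are taken from the diff above; the
// struct layout itself is an assumption based on the nullptr checks in
// GetCodeFromOptimizedCodeMap.
class Code;                 // forward declarations so the sketch is self-contained
class TypeFeedbackVector;

struct CodeAndVector {
  Code* code;                  // optimized code for this entry, or nullptr on a miss
  TypeFeedbackVector* vector;  // cached feedback vector, or nullptr
};

// Store side (InsertCodeIntoOptimizedCodeMap): cache the (code, vector) pair
// under the function's native context and OSR ast id:
//   SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
//                                             vector, osr_ast_id);
//
// Lookup side (GetCodeFromOptimizedCodeMap / PostInstantiation): a hit
// installs both pieces on the JSFunction:
//   CodeAndVector cached =
//       shared->SearchOptimizedCodeMap(native_context, osr_ast_id);
//   if (cached.vector != nullptr) function->set_feedback_vector(cached.vector);
//   if (cached.code != nullptr) function->ReplaceCode(cached.code);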