Chromium Code Reviews

Side by Side Diff: src/compiler.cc

Issue 2240463002: [Interpreter] Introduce InterpreterCompilationJob (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@offheap_peekhole
Patch Set: DISALLOW_COPY_AND_ASSIGN (created 4 years, 4 months ago)
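
This patch set generalizes the CompilationJob driver that the rest of the diff exercises: the phases CreateGraph/OptimizeGraph/GenerateCode become PrepareJob/ExecuteJob/FinalizeJob, progress is tracked with an explicit State enum instead of a last-status flag, and RegisterWeakObjectsInOptimizedCode moves from CompilationJob to Compiler, presumably so the same interface can also back the InterpreterCompilationJob named in the issue title. A minimal C++ sketch of that phase/state shape follows; apart from the phase names and State values taken from the diff, every identifier here is hypothetical and simplified, not the actual V8 header.

// Sketch only: mirrors the PrepareJob/ExecuteJob/FinalizeJob state machine
// introduced in this patch. CompilationJobSketch and Advance are made-up
// names; the real CompilationJob also carries CompilationInfo, timers, etc.
class CompilationJobSketch {
 public:
  enum class Status { kSucceeded, kFailed };
  enum class State {
    kReadyToPrepare,
    kReadyToExecute,
    kReadyToFinalize,
    kSucceeded,
    kFailed
  };

  virtual ~CompilationJobSketch() = default;

  // Each public phase delegates to its *Impl hook and, on success,
  // advances to the next state (the diff does this via UpdateState).
  Status PrepareJob() { return Advance(PrepareJobImpl(), State::kReadyToExecute); }
  Status ExecuteJob() { return Advance(ExecuteJobImpl(), State::kReadyToFinalize); }
  Status FinalizeJob() { return Advance(FinalizeJobImpl(), State::kSucceeded); }

  State state() const { return state_; }

 protected:
  virtual Status PrepareJobImpl() = 0;
  virtual Status ExecuteJobImpl() = 0;
  virtual Status FinalizeJobImpl() = 0;

 private:
  Status Advance(Status status, State next_state) {
    state_ = (status == Status::kSucceeded) ? next_state : State::kFailed;
    return status;
  }

  State state_ = State::kReadyToPrepare;
};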
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler.h" 5 #include "src/compiler.h"
6 6
7 #include <algorithm> 7 #include <algorithm>
8 #include <memory> 8 #include <memory>
9 9
10 #include "src/asmjs/asm-js.h" 10 #include "src/asmjs/asm-js.h"
(...skipping 222 matching lines...)
233 : SourcePositionTableBuilder::RECORD_SOURCE_POSITIONS; 233 : SourcePositionTableBuilder::RECORD_SOURCE_POSITIONS;
234 } 234 }
235 235
236 bool CompilationInfo::ExpectsJSReceiverAsReceiver() { 236 bool CompilationInfo::ExpectsJSReceiverAsReceiver() {
237 return is_sloppy(parse_info()->language_mode()) && !parse_info()->is_native(); 237 return is_sloppy(parse_info()->language_mode()) && !parse_info()->is_native();
238 } 238 }
239 239
240 // ---------------------------------------------------------------------------- 240 // ----------------------------------------------------------------------------
241 // Implementation of CompilationJob 241 // Implementation of CompilationJob
242 242
243 CompilationJob::Status CompilationJob::CreateGraph() { 243 CompilationJob::Status CompilationJob::PrepareJob() {
244 DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
244 DisallowJavascriptExecution no_js(isolate()); 245 DisallowJavascriptExecution no_js(isolate());
245 DCHECK(info()->IsOptimizing());
246 246
247 if (FLAG_trace_opt) { 247 if (FLAG_trace_opt && info()->IsOptimizing()) {
248 OFStream os(stdout); 248 OFStream os(stdout);
249 os << "[compiling method " << Brief(*info()->closure()) << " using " 249 os << "[compiling method " << Brief(*info()->closure()) << " using "
250 << compiler_name_; 250 << compiler_name_;
251 if (info()->is_osr()) os << " OSR"; 251 if (info()->is_osr()) os << " OSR";
252 os << "]" << std::endl; 252 os << "]" << std::endl;
253 } 253 }
254 254
255 // Delegate to the underlying implementation. 255 // Delegate to the underlying implementation.
256 DCHECK_EQ(SUCCEEDED, last_status()); 256 DCHECK(state() == State::kReadyToPrepare);
257 ScopedTimer t(&time_taken_to_create_graph_); 257 ScopedTimer t(&time_taken_to_prepare_);
258 return SetLastStatus(CreateGraphImpl()); 258 return UpdateState(PrepareJobImpl(), State::kReadyToExecute);
259 } 259 }
260 260
261 CompilationJob::Status CompilationJob::OptimizeGraph() { 261 CompilationJob::Status CompilationJob::ExecuteJob() {
262 DisallowHeapAllocation no_allocation; 262 DisallowHeapAllocation no_allocation;
263 DisallowHandleAllocation no_handles; 263 DisallowHandleAllocation no_handles;
264 DisallowHandleDereference no_deref; 264 DisallowHandleDereference no_deref;
265 DisallowCodeDependencyChange no_dependency_change; 265 DisallowCodeDependencyChange no_dependency_change;
266 266
267 // Delegate to the underlying implementation. 267 // Delegate to the underlying implementation.
268 DCHECK_EQ(SUCCEEDED, last_status()); 268 DCHECK(state() == State::kReadyToExecute);
269 ScopedTimer t(&time_taken_to_optimize_); 269 ScopedTimer t(&time_taken_to_execute_);
270 return SetLastStatus(OptimizeGraphImpl()); 270 return UpdateState(ExecuteJobImpl(), State::kReadyToFinalize);
271 } 271 }
272 272
273 CompilationJob::Status CompilationJob::GenerateCode() { 273 CompilationJob::Status CompilationJob::FinalizeJob() {
274 DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
274 DisallowCodeDependencyChange no_dependency_change; 275 DisallowCodeDependencyChange no_dependency_change;
275 DisallowJavascriptExecution no_js(isolate()); 276 DisallowJavascriptExecution no_js(isolate());
276 DCHECK(!info()->dependencies()->HasAborted()); 277 DCHECK(!info()->dependencies()->HasAborted());
277 278
278 // Delegate to the underlying implementation. 279 // Delegate to the underlying implementation.
279 DCHECK_EQ(SUCCEEDED, last_status()); 280 DCHECK(state() == State::kReadyToFinalize);
280 ScopedTimer t(&time_taken_to_codegen_); 281 ScopedTimer t(&time_taken_to_finalize_);
281 return SetLastStatus(GenerateCodeImpl()); 282 return UpdateState(FinalizeJobImpl(), State::kSucceeded);
282 }
283
284
285 namespace {
286
287 void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
288 Handle<Code> code) {
289 Handle<WeakCell> cell = Code::WeakCellFor(code);
290 Heap* heap = isolate->heap();
291 if (heap->InNewSpace(*object)) {
292 heap->AddWeakNewSpaceObjectToCodeDependency(object, cell);
293 } else {
294 Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
295 dep =
296 DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
297 heap->AddWeakObjectToCodeDependency(object, dep);
298 }
299 }
300
301 } // namespace
302
303 void CompilationJob::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) {
304 // TODO(turbofan): Move this to pipeline.cc once Crankshaft dies.
305 Isolate* const isolate = code->GetIsolate();
306 DCHECK(code->is_optimized_code());
307 std::vector<Handle<Map>> maps;
308 std::vector<Handle<HeapObject>> objects;
309 {
310 DisallowHeapAllocation no_gc;
311 int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
312 RelocInfo::ModeMask(RelocInfo::CELL);
313 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
314 RelocInfo::Mode mode = it.rinfo()->rmode();
315 if (mode == RelocInfo::CELL &&
316 code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
317 objects.push_back(handle(it.rinfo()->target_cell(), isolate));
318 } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
319 code->IsWeakObjectInOptimizedCode(
320 it.rinfo()->target_object())) {
321 Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
322 isolate);
323 if (object->IsMap()) {
324 maps.push_back(Handle<Map>::cast(object));
325 } else {
326 objects.push_back(object);
327 }
328 }
329 }
330 }
331 for (Handle<Map> map : maps) {
332 if (map->dependent_code()->IsEmpty(DependentCode::kWeakCodeGroup)) {
333 isolate->heap()->AddRetainedMap(map);
334 }
335 Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
336 }
337 for (Handle<HeapObject> object : objects) {
338 AddWeakObjectToCodeDependency(isolate, object, code);
339 }
340 code->set_can_have_weak_objects(true);
341 } 283 }
342 284
343 void CompilationJob::RecordOptimizationStats() { 285 void CompilationJob::RecordOptimizationStats() {
286 DCHECK(info()->IsOptimizing());
344 Handle<JSFunction> function = info()->closure(); 287 Handle<JSFunction> function = info()->closure();
345 if (!function->IsOptimized()) { 288 if (!function->IsOptimized()) {
346 // Concurrent recompilation and OSR may race. Increment only once. 289 // Concurrent recompilation and OSR may race. Increment only once.
347 int opt_count = function->shared()->opt_count(); 290 int opt_count = function->shared()->opt_count();
348 function->shared()->set_opt_count(opt_count + 1); 291 function->shared()->set_opt_count(opt_count + 1);
349 } 292 }
350 double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF(); 293 double ms_creategraph = time_taken_to_prepare_.InMillisecondsF();
351 double ms_optimize = time_taken_to_optimize_.InMillisecondsF(); 294 double ms_optimize = time_taken_to_execute_.InMillisecondsF();
352 double ms_codegen = time_taken_to_codegen_.InMillisecondsF(); 295 double ms_codegen = time_taken_to_finalize_.InMillisecondsF();
353 if (FLAG_trace_opt) { 296 if (FLAG_trace_opt) {
354 PrintF("[optimizing "); 297 PrintF("[optimizing ");
355 function->ShortPrint(); 298 function->ShortPrint();
356 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize, 299 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
357 ms_codegen); 300 ms_codegen);
358 } 301 }
359 if (FLAG_trace_opt_stats) { 302 if (FLAG_trace_opt_stats) {
360 static double compilation_time = 0.0; 303 static double compilation_time = 0.0;
361 static int compiled_functions = 0; 304 static int compiled_functions = 0;
362 static int code_size = 0; 305 static int code_size = 0;
363 306
364 compilation_time += (ms_creategraph + ms_optimize + ms_codegen); 307 compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
365 compiled_functions++; 308 compiled_functions++;
366 code_size += function->shared()->SourceSize(); 309 code_size += function->shared()->SourceSize();
367 PrintF("Compiled: %d functions with %d byte source size in %fms.\n", 310 PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
368 compiled_functions, 311 compiled_functions,
369 code_size, 312 code_size,
370 compilation_time); 313 compilation_time);
371 } 314 }
372 if (FLAG_hydrogen_stats) { 315 if (FLAG_hydrogen_stats) {
373 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_, 316 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_prepare_,
374 time_taken_to_optimize_, 317 time_taken_to_execute_,
375 time_taken_to_codegen_); 318 time_taken_to_finalize_);
376 } 319 }
377 } 320 }
378 321
379 // ---------------------------------------------------------------------------- 322 // ----------------------------------------------------------------------------
380 // Local helper methods that make up the compilation pipeline. 323 // Local helper methods that make up the compilation pipeline.
381 324
382 namespace { 325 namespace {
383 326
384 bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) { 327 bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) {
385 return shared->is_toplevel() && shared->script()->IsScript() && 328 return shared->is_toplevel() && shared->script()->IsScript() &&
(...skipping 279 matching lines...)
665 } 608 }
666 609
667 JSFunction::EnsureLiterals(info->closure()); 610 JSFunction::EnsureLiterals(info->closure());
668 611
669 TimerEventScope<TimerEventRecompileSynchronous> timer(isolate); 612 TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
670 RuntimeCallTimerScope runtimeTimer(isolate, 613 RuntimeCallTimerScope runtimeTimer(isolate,
671 &RuntimeCallStats::RecompileSynchronous); 614 &RuntimeCallStats::RecompileSynchronous);
672 TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED( 615 TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
673 isolate, &tracing::TraceEventStatsTable::RecompileSynchronous); 616 isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);
674 617
675 if (job->CreateGraph() != CompilationJob::SUCCEEDED || 618 if (job->PrepareJob() != CompilationJob::SUCCEEDED ||
676 job->OptimizeGraph() != CompilationJob::SUCCEEDED || 619 job->ExecuteJob() != CompilationJob::SUCCEEDED ||
677 job->GenerateCode() != CompilationJob::SUCCEEDED) { 620 job->FinalizeJob() != CompilationJob::SUCCEEDED) {
678 if (FLAG_trace_opt) { 621 if (FLAG_trace_opt) {
679 PrintF("[aborted optimizing "); 622 PrintF("[aborted optimizing ");
680 info->closure()->ShortPrint(); 623 info->closure()->ShortPrint();
681 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason())); 624 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
682 } 625 }
683 return false; 626 return false;
684 } 627 }
685 628
686 // Success! 629 // Success!
687 job->RecordOptimizationStats(); 630 job->RecordOptimizationStats();
(...skipping 40 matching lines...)
728 // Reopen handles in the new CompilationHandleScope. 671 // Reopen handles in the new CompilationHandleScope.
729 info->ReopenHandlesInNewHandleScope(); 672 info->ReopenHandlesInNewHandleScope();
730 info->parse_info()->ReopenHandlesInNewHandleScope(); 673 info->parse_info()->ReopenHandlesInNewHandleScope();
731 674
732 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate()); 675 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
733 RuntimeCallTimerScope runtimeTimer(info->isolate(), 676 RuntimeCallTimerScope runtimeTimer(info->isolate(),
734 &RuntimeCallStats::RecompileSynchronous); 677 &RuntimeCallStats::RecompileSynchronous);
735 TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED( 678 TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
736 isolate, &tracing::TraceEventStatsTable::RecompileSynchronous); 679 isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);
737 680
738 if (job->CreateGraph() != CompilationJob::SUCCEEDED) return false; 681 if (job->PrepareJob() != CompilationJob::SUCCEEDED) return false;
739 isolate->optimizing_compile_dispatcher()->QueueForOptimization(job); 682 isolate->optimizing_compile_dispatcher()->QueueForOptimization(job);
740 683
741 if (FLAG_trace_concurrent_recompilation) { 684 if (FLAG_trace_concurrent_recompilation) {
742 PrintF(" ** Queued "); 685 PrintF(" ** Queued ");
743 info->closure()->ShortPrint(); 686 info->closure()->ShortPrint();
744 PrintF(" for concurrent optimization.\n"); 687 PrintF(" for concurrent optimization.\n");
745 } 688 }
746 return true; 689 return true;
747 } 690 }
748 691
(...skipping 1145 matching lines...)
1894 Handle<SharedFunctionInfo> shared = info->shared_info(); 1837 Handle<SharedFunctionInfo> shared = info->shared_info();
1895 shared->code()->set_profiler_ticks(0); 1838 shared->code()->set_profiler_ticks(0);
1896 1839
1897 DCHECK(!shared->HasDebugInfo()); 1840 DCHECK(!shared->HasDebugInfo());
1898 1841
1899 // 1) Optimization on the concurrent thread may have failed. 1842 // 1) Optimization on the concurrent thread may have failed.
1900 // 2) The function may have already been optimized by OSR. Simply continue. 1843 // 2) The function may have already been optimized by OSR. Simply continue.
1901 // Except when OSR already disabled optimization for some reason. 1844 // Except when OSR already disabled optimization for some reason.
1902 // 3) The code may have already been invalidated due to dependency change. 1845 // 3) The code may have already been invalidated due to dependency change.
1903 // 4) Code generation may have failed. 1846 // 4) Code generation may have failed.
1904 if (job->last_status() == CompilationJob::SUCCEEDED) { 1847 if (job->state() == CompilationJob::State::kReadyToFinalize) {
1905 if (shared->optimization_disabled()) { 1848 if (shared->optimization_disabled()) {
1906 job->RetryOptimization(kOptimizationDisabled); 1849 job->RetryOptimization(kOptimizationDisabled);
1907 } else if (info->dependencies()->HasAborted()) { 1850 } else if (info->dependencies()->HasAborted()) {
1908 job->RetryOptimization(kBailedOutDueToDependencyChange); 1851 job->RetryOptimization(kBailedOutDueToDependencyChange);
1909 } else if (job->GenerateCode() == CompilationJob::SUCCEEDED) { 1852 } else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) {
1910 job->RecordOptimizationStats(); 1853 job->RecordOptimizationStats();
1911 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info); 1854 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
1912 if (shared->SearchOptimizedCodeMap(info->context()->native_context(), 1855 if (shared->SearchOptimizedCodeMap(info->context()->native_context(),
1913 info->osr_ast_id()).code == nullptr) { 1856 info->osr_ast_id()).code == nullptr) {
1914 InsertCodeIntoOptimizedCodeMap(info); 1857 InsertCodeIntoOptimizedCodeMap(info);
1915 } 1858 }
1916 if (FLAG_trace_opt) { 1859 if (FLAG_trace_opt) {
1917 PrintF("[completed optimizing "); 1860 PrintF("[completed optimizing ");
1918 info->closure()->ShortPrint(); 1861 info->closure()->ShortPrint();
1919 PrintF("]\n"); 1862 PrintF("]\n");
1920 } 1863 }
1921 info->closure()->ReplaceCode(*info->code()); 1864 info->closure()->ReplaceCode(*info->code());
1922 return; 1865 return;
1923 } 1866 }
1924 } 1867 }
1925 1868
1926 DCHECK(job->last_status() != CompilationJob::SUCCEEDED); 1869 DCHECK(job->state() == CompilationJob::State::kFailed);
1927 if (FLAG_trace_opt) { 1870 if (FLAG_trace_opt) {
1928 PrintF("[aborted optimizing "); 1871 PrintF("[aborted optimizing ");
1929 info->closure()->ShortPrint(); 1872 info->closure()->ShortPrint();
1930 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason())); 1873 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
1931 } 1874 }
1932 info->closure()->ReplaceCode(shared->code()); 1875 info->closure()->ReplaceCode(shared->code());
1933 } 1876 }
1934 1877
1935 void Compiler::PostInstantiation(Handle<JSFunction> function, 1878 void Compiler::PostInstantiation(Handle<JSFunction> function,
1936 PretenureFlag pretenure) { 1879 PretenureFlag pretenure) {
(...skipping 14 matching lines...)
1951 1894
1952 if (cached.literals != nullptr) { 1895 if (cached.literals != nullptr) {
1953 DCHECK(shared->is_compiled()); 1896 DCHECK(shared->is_compiled());
1954 function->set_literals(cached.literals); 1897 function->set_literals(cached.literals);
1955 } else if (shared->is_compiled()) { 1898 } else if (shared->is_compiled()) {
1956 // TODO(mvstanton): pass pretenure flag to EnsureLiterals. 1899 // TODO(mvstanton): pass pretenure flag to EnsureLiterals.
1957 JSFunction::EnsureLiterals(function); 1900 JSFunction::EnsureLiterals(function);
1958 } 1901 }
1959 } 1902 }
1960 1903
1904 namespace {
1905
1906 void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
1907 Handle<Code> code) {
1908 Handle<WeakCell> cell = Code::WeakCellFor(code);
1909 Heap* heap = isolate->heap();
1910 if (heap->InNewSpace(*object)) {
1911 heap->AddWeakNewSpaceObjectToCodeDependency(object, cell);
1912 } else {
1913 Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
1914 dep =
1915 DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
1916 heap->AddWeakObjectToCodeDependency(object, dep);
1917 }
1918 }
1919
1920 } // namespace
1921
1922 // static
1923 void Compiler::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) {
1924 // TODO(turbofan): Move this to pipeline.cc once Crankshaft dies.
1925 Isolate* const isolate = code->GetIsolate();
1926 DCHECK(code->is_optimized_code());
1927 std::vector<Handle<Map>> maps;
1928 std::vector<Handle<HeapObject>> objects;
1929 {
1930 DisallowHeapAllocation no_gc;
1931 int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
1932 RelocInfo::ModeMask(RelocInfo::CELL);
1933 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
1934 RelocInfo::Mode mode = it.rinfo()->rmode();
1935 if (mode == RelocInfo::CELL &&
1936 code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
1937 objects.push_back(handle(it.rinfo()->target_cell(), isolate));
1938 } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
1939 code->IsWeakObjectInOptimizedCode(
1940 it.rinfo()->target_object())) {
1941 Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
1942 isolate);
1943 if (object->IsMap()) {
1944 maps.push_back(Handle<Map>::cast(object));
1945 } else {
1946 objects.push_back(object);
1947 }
1948 }
1949 }
1950 }
1951 for (Handle<Map> map : maps) {
1952 if (map->dependent_code()->IsEmpty(DependentCode::kWeakCodeGroup)) {
1953 isolate->heap()->AddRetainedMap(map);
1954 }
1955 Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
1956 }
1957 for (Handle<HeapObject> object : objects) {
1958 AddWeakObjectToCodeDependency(isolate, object, code);
1959 }
1960 code->set_can_have_weak_objects(true);
1961 }
1962
1961 } // namespace internal 1963 } // namespace internal
1962 } // namespace v8 1964 } // namespace v8
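
For reference, the two call patterns visible in this diff reduce to a small driver shape: the synchronous path runs all three phases back to back on the main thread, while the concurrent path runs PrepareJob on the main thread, queues the job on the optimizing compile dispatcher (which executes it off the main thread), and later finalizes on the main thread only if the job reached State::kReadyToFinalize. The helper functions below are hypothetical illustrations, not code from this patch, and they reuse the CompilationJobSketch class sketched under the patch header.

// Hypothetical drivers illustrating the call order used in this diff;
// they assume the CompilationJobSketch class from the earlier sketch.
bool CompileJobSynchronously(CompilationJobSketch* job) {
  using Status = CompilationJobSketch::Status;
  // Synchronous path: all three phases in order, bailing out on the
  // first phase that does not succeed.
  return job->PrepareJob() == Status::kSucceeded &&
         job->ExecuteJob() == Status::kSucceeded &&
         job->FinalizeJob() == Status::kSucceeded;
}

bool FinalizeJobAfterConcurrentExecute(CompilationJobSketch* job) {
  using Status = CompilationJobSketch::Status;
  using State = CompilationJobSketch::State;
  // Finalization path: the background execution must have left the job
  // ready to finalize; otherwise the failed job is discarded.
  if (job->state() != State::kReadyToFinalize) return false;
  return job->FinalizeJob() == Status::kSucceeded;
}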
