OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler.h" | 5 #include "src/compiler.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <memory> | 8 #include <memory> |
9 | 9 |
10 #include "src/asmjs/asm-js.h" | 10 #include "src/asmjs/asm-js.h" |
(...skipping 242 matching lines...) |
253 os << "]" << std::endl; | 253 os << "]" << std::endl; |
254 } | 254 } |
255 | 255 |
256 // Delegate to the underlying implementation. | 256 // Delegate to the underlying implementation. |
257 DCHECK(state() == State::kReadyToPrepare); | 257 DCHECK(state() == State::kReadyToPrepare); |
258 ScopedTimer t(&time_taken_to_prepare_); | 258 ScopedTimer t(&time_taken_to_prepare_); |
259 return UpdateState(PrepareJobImpl(), State::kReadyToExecute); | 259 return UpdateState(PrepareJobImpl(), State::kReadyToExecute); |
260 } | 260 } |
261 | 261 |
262 CompilationJob::Status CompilationJob::ExecuteJob() { | 262 CompilationJob::Status CompilationJob::ExecuteJob() { |
263 DisallowHeapAllocation no_allocation; | 263 std::unique_ptr<DisallowHeapAllocation> no_allocation; |
264 DisallowHandleAllocation no_handles; | 264 std::unique_ptr<DisallowHandleAllocation> no_handles; |
265 DisallowHandleDereference no_deref; | 265 std::unique_ptr<DisallowHandleDereference> no_deref; |
266 DisallowCodeDependencyChange no_dependency_change; | 266 std::unique_ptr<DisallowCodeDependencyChange> no_dependency_change; |
| 267 if (can_execute_on_background_thread()) { |
| 268 no_allocation.reset(new DisallowHeapAllocation()); |
| 269 no_handles.reset(new DisallowHandleAllocation()); |
| 270 no_deref.reset(new DisallowHandleDereference()); |
| 271 no_dependency_change.reset(new DisallowCodeDependencyChange()); |
| 272 } else { |
| 273 DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id())); |
| 274 } |
267 | 275 |
268 // Delegate to the underlying implementation. | 276 // Delegate to the underlying implementation. |
269 DCHECK(state() == State::kReadyToExecute); | 277 DCHECK(state() == State::kReadyToExecute); |
270 ScopedTimer t(&time_taken_to_execute_); | 278 ScopedTimer t(&time_taken_to_execute_); |
271 return UpdateState(ExecuteJobImpl(), State::kReadyToFinalize); | 279 return UpdateState(ExecuteJobImpl(), State::kReadyToFinalize); |
272 } | 280 } |
273 | 281 |
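Note on the change above: ExecuteJob now holds the Disallow* guards in std::unique_ptr and engages them only when the job may run on a background thread. Below is a minimal, self-contained sketch of that conditionally-engaged RAII pattern; the names ScopedRestriction and RunPhase are hypothetical stand-ins, not V8's actual classes.

#include <cassert>
#include <memory>

// Hypothetical stand-in for V8's Disallow* assertion scopes: the restriction
// counts as active for as long as an instance is alive.
class ScopedRestriction {
 public:
  ScopedRestriction() { ++depth_; }
  ~ScopedRestriction() { --depth_; }
  static bool IsActive() { return depth_ > 0; }

 private:
  static int depth_;
};
int ScopedRestriction::depth_ = 0;

// Engage the restriction only when the work may run off the main thread,
// mirroring the can_execute_on_background_thread() check above.
void RunPhase(bool may_run_on_background_thread) {
  std::unique_ptr<ScopedRestriction> restriction;   // empty: nothing enforced
  if (may_run_on_background_thread) {
    restriction.reset(new ScopedRestriction());     // enforce for this scope
  }
  assert(ScopedRestriction::IsActive() == may_run_on_background_thread);
  // ... the phase's work runs here under the (possibly engaged) restriction ...
}

int main() {
  RunPhase(true);   // guards engaged, as on a background thread
  RunPhase(false);  // guards skipped, as on the main thread
}

The guard lives until the enclosing scope exits, so the restriction still covers the whole phase exactly as the stack-allocated scopes did before, but it can now be skipped for main-thread execution.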
274 CompilationJob::Status CompilationJob::FinalizeJob() { | 282 CompilationJob::Status CompilationJob::FinalizeJob() { |
275 DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id())); | 283 DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id())); |
276 DisallowCodeDependencyChange no_dependency_change; | 284 DisallowCodeDependencyChange no_dependency_change; |
277 DisallowJavascriptExecution no_js(isolate()); | 285 DisallowJavascriptExecution no_js(isolate()); |
278 DCHECK(!info()->dependencies()->HasAborted()); | 286 DCHECK(!info()->dependencies()->HasAborted()); |
279 | 287 |
280 // Delegate to the underlying implementation. | 288 // Delegate to the underlying implementation. |
281 DCHECK(state() == State::kReadyToFinalize); | 289 DCHECK(state() == State::kReadyToFinalize); |
282 ScopedTimer t(&time_taken_to_finalize_); | 290 ScopedTimer t(&time_taken_to_finalize_); |
283 return UpdateState(FinalizeJobImpl(), State::kSucceeded); | 291 return UpdateState(FinalizeJobImpl(), State::kSucceeded); |
284 } | 292 } |
285 | 293 |
| 294 void CompilationJob::RecordUnoptimizedCompilationStats() const { |
| 295 DCHECK(!info()->IsOptimizing()); |
| 296 |
| 297 int code_size; |
| 298 if (info()->has_bytecode_array()) { |
| 299 code_size = info()->bytecode_array()->SizeIncludingMetadata(); |
| 300 } else { |
| 301 code_size = info()->code()->SizeIncludingMetadata(); |
| 302 } |
| 303 |
| 304 Counters* counters = isolate()->counters(); |
| 305 // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually. |
| 306 counters->total_baseline_code_size()->Increment(code_size); |
| 307 counters->total_baseline_compile_count()->Increment(1); |
| 308 |
| 309 // TODO(5203): Add timers for each phase of compilation. |
| 310 } |
| 311 |
| 312 void CompilationJob::RecordOptimizedCompilationStats() const { |
| 313 DCHECK(info()->IsOptimizing()); |
| 314 Handle<JSFunction> function = info()->closure(); |
| 315 if (!function->IsOptimized()) { |
| 316 // Concurrent recompilation and OSR may race. Increment only once. |
| 317 int opt_count = function->shared()->opt_count(); |
| 318 function->shared()->set_opt_count(opt_count + 1); |
| 319 } |
| 320 double ms_creategraph = time_taken_to_prepare_.InMillisecondsF(); |
| 321 double ms_optimize = time_taken_to_execute_.InMillisecondsF(); |
| 322 double ms_codegen = time_taken_to_finalize_.InMillisecondsF(); |
| 323 if (FLAG_trace_opt) { |
| 324 PrintF("[optimizing "); |
| 325 function->ShortPrint(); |
| 326 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize, |
| 327 ms_codegen); |
| 328 } |
| 329 if (FLAG_trace_opt_stats) { |
| 330 static double compilation_time = 0.0; |
| 331 static int compiled_functions = 0; |
| 332 static int code_size = 0; |
| 333 |
| 334 compilation_time += (ms_creategraph + ms_optimize + ms_codegen); |
| 335 compiled_functions++; |
| 336 code_size += function->shared()->SourceSize(); |
| 337 PrintF("Compiled: %d functions with %d byte source size in %fms.\n", |
| 338 compiled_functions, code_size, compilation_time); |
| 339 } |
| 340 if (FLAG_hydrogen_stats) { |
| 341 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_prepare_, |
| 342 time_taken_to_execute_, |
| 343 time_taken_to_finalize_); |
| 344 } |
| 345 } |
| 346 |
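The new RecordOptimizedCompilationStats above keeps process-wide running totals in function-local statics that are only updated when the tracing flag is set. A small illustrative sketch of that accumulation pattern follows; the flag and function names here are made up for the example and are not V8's.

#include <cstdio>

static bool trace_stats = true;  // stands in for a flag like FLAG_trace_opt_stats

void RecordCompile(double prepare_ms, double execute_ms, double finalize_ms,
                   int source_size) {
  if (!trace_stats) return;
  // Function-local statics persist across calls, so they act as running
  // totals for the whole process.
  static double total_time_ms = 0.0;
  static int total_functions = 0;
  static int total_source_size = 0;

  total_time_ms += prepare_ms + execute_ms + finalize_ms;
  total_functions++;
  total_source_size += source_size;
  std::printf("Compiled: %d functions with %d byte source size in %fms.\n",
              total_functions, total_source_size, total_time_ms);
}

int main() {
  RecordCompile(1.0, 2.5, 0.5, 120);
  RecordCompile(0.7, 3.1, 0.4, 80);
}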
286 namespace { | 347 namespace { |
287 | 348 |
288 void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object, | 349 void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object, |
289 Handle<Code> code) { | 350 Handle<Code> code) { |
290 Handle<WeakCell> cell = Code::WeakCellFor(code); | 351 Handle<WeakCell> cell = Code::WeakCellFor(code); |
291 Heap* heap = isolate->heap(); | 352 Heap* heap = isolate->heap(); |
292 if (heap->InNewSpace(*object)) { | 353 if (heap->InNewSpace(*object)) { |
293 heap->AddWeakNewSpaceObjectToCodeDependency(object, cell); | 354 heap->AddWeakNewSpaceObjectToCodeDependency(object, cell); |
294 } else { | 355 } else { |
295 Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object)); | 356 Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object)); |
(...skipping 38 matching lines...) |
334 isolate->heap()->AddRetainedMap(map); | 395 isolate->heap()->AddRetainedMap(map); |
335 } | 396 } |
336 Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code); | 397 Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code); |
337 } | 398 } |
338 for (Handle<HeapObject> object : objects) { | 399 for (Handle<HeapObject> object : objects) { |
339 AddWeakObjectToCodeDependency(isolate, object, code); | 400 AddWeakObjectToCodeDependency(isolate, object, code); |
340 } | 401 } |
341 code->set_can_have_weak_objects(true); | 402 code->set_can_have_weak_objects(true); |
342 } | 403 } |
343 | 404 |
344 void CompilationJob::RecordOptimizationStats() { | |
345 DCHECK(info()->IsOptimizing()); | |
346 Handle<JSFunction> function = info()->closure(); | |
347 if (!function->IsOptimized()) { | |
348 // Concurrent recompilation and OSR may race. Increment only once. | |
349 int opt_count = function->shared()->opt_count(); | |
350 function->shared()->set_opt_count(opt_count + 1); | |
351 } | |
352 double ms_creategraph = time_taken_to_prepare_.InMillisecondsF(); | |
353 double ms_optimize = time_taken_to_execute_.InMillisecondsF(); | |
354 double ms_codegen = time_taken_to_finalize_.InMillisecondsF(); | |
355 if (FLAG_trace_opt) { | |
356 PrintF("[optimizing "); | |
357 function->ShortPrint(); | |
358 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize, | |
359 ms_codegen); | |
360 } | |
361 if (FLAG_trace_opt_stats) { | |
362 static double compilation_time = 0.0; | |
363 static int compiled_functions = 0; | |
364 static int code_size = 0; | |
365 | |
366 compilation_time += (ms_creategraph + ms_optimize + ms_codegen); | |
367 compiled_functions++; | |
368 code_size += function->shared()->SourceSize(); | |
369 PrintF("Compiled: %d functions with %d byte source size in %fms.\n", | |
370 compiled_functions, code_size, compilation_time); | |
371 } | |
372 if (FLAG_hydrogen_stats) { | |
373 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_prepare_, | |
374 time_taken_to_execute_, | |
375 time_taken_to_finalize_); | |
376 } | |
377 } | |
378 | |
379 // ---------------------------------------------------------------------------- | 405 // ---------------------------------------------------------------------------- |
380 // Local helper methods that make up the compilation pipeline. | 406 // Local helper methods that make up the compilation pipeline. |
381 | 407 |
382 namespace { | 408 namespace { |
383 | 409 |
384 bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) { | 410 bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) { |
385 return shared->is_toplevel() && shared->script()->IsScript() && | 411 return shared->is_toplevel() && shared->script()->IsScript() && |
386 Script::cast(shared->script())->compilation_type() == | 412 Script::cast(shared->script())->compilation_type() == |
387 Script::COMPILATION_TYPE_EVAL; | 413 Script::COMPILATION_TYPE_EVAL; |
388 } | 414 } |
(...skipping 70 matching lines...) |
459 // Checks whether top level functions should be passed by the filter. | 485 // Checks whether top level functions should be passed by the filter. |
460 if (info->shared_info()->is_toplevel()) { | 486 if (info->shared_info()->is_toplevel()) { |
461 Vector<const char> filter = CStrVector(FLAG_ignition_filter); | 487 Vector<const char> filter = CStrVector(FLAG_ignition_filter); |
462 return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*'); | 488 return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*'); |
463 } | 489 } |
464 | 490 |
465 // Finally respect the filter. | 491 // Finally respect the filter. |
466 return info->shared_info()->PassesFilter(FLAG_ignition_filter); | 492 return info->shared_info()->PassesFilter(FLAG_ignition_filter); |
467 } | 493 } |
468 | 494 |
469 int CodeAndMetadataSize(CompilationInfo* info) { | 495 CompilationJob* GetUnoptimizedCompilationJob(CompilationInfo* info) { |
470 if (info->has_bytecode_array()) { | 496 // Function should have been parsed and analyzed before creating a compilation |
471 return info->bytecode_array()->SizeIncludingMetadata(); | 497 // job. |
| 498 DCHECK_NOT_NULL(info->literal()); |
| 499 DCHECK_NOT_NULL(info->scope()); |
| 500 DCHECK(!(FLAG_validate_asm && info->scope()->asm_module())); |
| 501 |
| 502 EnsureFeedbackMetadata(info); |
| 503 |
| 504 if (ShouldUseIgnition(info)) { |
| 505 return interpreter::Interpreter::NewCompilationJob(info); |
| 506 } else { |
| 507 return FullCodeGenerator::NewCompilationJob(info); |
472 } | 508 } |
473 return info->code()->SizeIncludingMetadata(); | |
474 } | 509 } |
475 | 510 |
476 bool GenerateUnoptimizedCode(CompilationInfo* info) { | 511 bool GenerateUnoptimizedCode(CompilationInfo* info) { |
477 bool success; | |
478 EnsureFeedbackMetadata(info); | |
479 if (FLAG_validate_asm && info->scope()->asm_module() && | 512 if (FLAG_validate_asm && info->scope()->asm_module() && |
480 !info->shared_info()->is_asm_wasm_broken()) { | 513 !info->shared_info()->is_asm_wasm_broken()) { |
481 MaybeHandle<FixedArray> wasm_data; | 514 MaybeHandle<FixedArray> wasm_data; |
482 wasm_data = AsmJs::ConvertAsmToWasm(info->parse_info()); | 515 wasm_data = AsmJs::ConvertAsmToWasm(info->parse_info()); |
483 if (!wasm_data.is_null()) { | 516 if (!wasm_data.is_null()) { |
484 info->shared_info()->set_asm_wasm_data(*wasm_data.ToHandleChecked()); | 517 info->shared_info()->set_asm_wasm_data(*wasm_data.ToHandleChecked()); |
485 info->SetCode(info->isolate()->builtins()->InstantiateAsmJs()); | 518 info->SetCode(info->isolate()->builtins()->InstantiateAsmJs()); |
486 return true; | 519 return true; |
487 } | 520 } |
488 } | 521 } |
489 if (ShouldUseIgnition(info)) { | 522 |
490 success = interpreter::Interpreter::MakeBytecode(info); | 523 std::unique_ptr<CompilationJob> job(GetUnoptimizedCompilationJob(info)); |
491 } else { | 524 if (job->PrepareJob() != CompilationJob::SUCCEEDED) return false; |
492 success = FullCodeGenerator::MakeCode(info); | 525 if (job->ExecuteJob() != CompilationJob::SUCCEEDED) return false; |
493 } | 526 if (job->FinalizeJob() != CompilationJob::SUCCEEDED) return false; |
494 if (success) { | 527 return true; |
495 Isolate* isolate = info->isolate(); | |
496 Counters* counters = isolate->counters(); | |
497 // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually. | |
498 counters->total_baseline_code_size()->Increment(CodeAndMetadataSize(info)); | |
499 counters->total_baseline_compile_count()->Increment(1); | |
500 } | |
501 return success; | |
502 } | 528 } |
503 | 529 |
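GenerateUnoptimizedCode now obtains a CompilationJob and drives it through its three phases, bailing out at the first phase that does not report SUCCEEDED. The sketch below is a rough, self-contained rendering of that protocol under hypothetical names (Status, MiniJob, NoopJob); it is not V8's CompilationJob interface.

#include <memory>

enum class Status { SUCCEEDED, FAILED };

// Hypothetical three-phase job, mirroring PrepareJob/ExecuteJob/FinalizeJob.
class MiniJob {
 public:
  virtual ~MiniJob() = default;
  virtual Status Prepare() = 0;   // main-thread setup (heap access allowed)
  virtual Status Execute() = 0;   // heavy work, possibly off the main thread
  virtual Status Finalize() = 0;  // main-thread installation of the result

  // Drive the phases in order, stopping at the first failure.
  bool RunToCompletion() {
    if (Prepare() != Status::SUCCEEDED) return false;
    if (Execute() != Status::SUCCEEDED) return false;
    return Finalize() == Status::SUCCEEDED;
  }
};

// Trivial job that always succeeds, just to exercise the driver.
class NoopJob final : public MiniJob {
 public:
  Status Prepare() override { return Status::SUCCEEDED; }
  Status Execute() override { return Status::SUCCEEDED; }
  Status Finalize() override { return Status::SUCCEEDED; }
};

int main() {
  std::unique_ptr<MiniJob> job(new NoopJob());
  return job->RunToCompletion() ? 0 : 1;
}

Keeping the prepare and finalize phases on the main thread while the execute phase avoids heap and handle access is what the guarded ExecuteJob earlier in this change appears to be setting up for off-thread execution.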
504 bool CompileUnoptimizedCode(CompilationInfo* info) { | 530 bool CompileUnoptimizedCode(CompilationInfo* info) { |
505 DCHECK(AllowCompilation::IsAllowed(info->isolate())); | 531 DCHECK(AllowCompilation::IsAllowed(info->isolate())); |
506 if (!Compiler::Analyze(info->parse_info()) || | 532 if (!Compiler::Analyze(info->parse_info()) || |
507 !GenerateUnoptimizedCode(info)) { | 533 !GenerateUnoptimizedCode(info)) { |
508 Isolate* isolate = info->isolate(); | 534 Isolate* isolate = info->isolate(); |
509 if (!isolate->has_pending_exception()) isolate->StackOverflow(); | 535 if (!isolate->has_pending_exception()) isolate->StackOverflow(); |
510 return false; | 536 return false; |
511 } | 537 } |
(...skipping 15 matching lines...) |
527 shared->ClearBytecodeArray(); | 553 shared->ClearBytecodeArray(); |
528 } | 554 } |
529 DCHECK(!info->code().is_null()); | 555 DCHECK(!info->code().is_null()); |
530 shared->ReplaceCode(*info->code()); | 556 shared->ReplaceCode(*info->code()); |
531 if (info->has_bytecode_array()) { | 557 if (info->has_bytecode_array()) { |
532 DCHECK(!shared->HasBytecodeArray()); // Only compiled once. | 558 DCHECK(!shared->HasBytecodeArray()); // Only compiled once. |
533 shared->set_bytecode_array(*info->bytecode_array()); | 559 shared->set_bytecode_array(*info->bytecode_array()); |
534 } | 560 } |
535 } | 561 } |
536 | 562 |
| 563 void InstallUnoptimizedCode(CompilationInfo* info) { |
| 564 Handle<SharedFunctionInfo> shared = info->shared_info(); |
| 565 |
| 566 // Update the shared function info with the scope info. |
| 567 InstallSharedScopeInfo(info, shared); |
| 568 |
| 569 // Install compilation result on the shared function info |
| 570 InstallSharedCompilationResult(info, shared); |
| 571 |
| 572 // Record the function compilation event. |
| 573 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info); |
| 574 } |
| 575 |
537 MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(CompilationInfo* info) { | 576 MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(CompilationInfo* info) { |
538 VMState<COMPILER> state(info->isolate()); | 577 VMState<COMPILER> state(info->isolate()); |
539 PostponeInterruptsScope postpone(info->isolate()); | 578 PostponeInterruptsScope postpone(info->isolate()); |
540 | 579 |
541 // Create a canonical handle scope before internalizing parsed values if | 580 // Create a canonical handle scope before internalizing parsed values if |
542 // compiling bytecode. This is required for off-thread bytecode generation. | 581 // compiling bytecode. This is required for off-thread bytecode generation. |
543 std::unique_ptr<CanonicalHandleScope> canonical; | 582 std::unique_ptr<CanonicalHandleScope> canonical; |
544 if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info->isolate())); | 583 if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info->isolate())); |
545 | 584 |
546 // Parse and update CompilationInfo with the results. | 585 // Parse and update CompilationInfo with the results. |
547 if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>(); | 586 if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>(); |
548 Handle<SharedFunctionInfo> shared = info->shared_info(); | 587 DCHECK_EQ(info->shared_info()->language_mode(), |
549 DCHECK_EQ(shared->language_mode(), info->literal()->language_mode()); | 588 info->literal()->language_mode()); |
550 | 589 |
551 // Compile either unoptimized code or bytecode for the interpreter. | 590 // Compile either unoptimized code or bytecode for the interpreter. |
552 if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>(); | 591 if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>(); |
553 | 592 |
554 // Update the shared function info with the scope info. | 593 InstallUnoptimizedCode(info); |
555 InstallSharedScopeInfo(info, shared); | |
556 | |
557 // Install compilation result on the shared function info | |
558 InstallSharedCompilationResult(info, shared); | |
559 | |
560 // Record the function compilation event. | |
561 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info); | |
562 | 594 |
563 return info->code(); | 595 return info->code(); |
564 } | 596 } |
565 | 597 |
| 598 CompilationJob::Status FinalizeUnoptimizedCompilationJob(CompilationJob* job) { |
| 599 CompilationJob::Status status = job->FinalizeJob(); |
| 600 if (status == CompilationJob::SUCCEEDED) { |
| 601 InstallUnoptimizedCode(job->info()); |
| 602 } |
| 603 return status; |
| 604 } |
| 605 |
566 MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap( | 606 MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap( |
567 Handle<JSFunction> function, BailoutId osr_ast_id) { | 607 Handle<JSFunction> function, BailoutId osr_ast_id) { |
568 Handle<SharedFunctionInfo> shared(function->shared()); | 608 Handle<SharedFunctionInfo> shared(function->shared()); |
569 DisallowHeapAllocation no_gc; | 609 DisallowHeapAllocation no_gc; |
570 CodeAndLiterals cached = shared->SearchOptimizedCodeMap( | 610 CodeAndLiterals cached = shared->SearchOptimizedCodeMap( |
571 function->context()->native_context(), osr_ast_id); | 611 function->context()->native_context(), osr_ast_id); |
572 if (cached.code != nullptr) { | 612 if (cached.code != nullptr) { |
573 // Caching of optimized code enabled and optimized code found. | 613 // Caching of optimized code enabled and optimized code found. |
574 if (cached.literals != nullptr) function->set_literals(cached.literals); | 614 if (cached.literals != nullptr) function->set_literals(cached.literals); |
575 DCHECK(!cached.code->marked_for_deoptimization()); | 615 DCHECK(!cached.code->marked_for_deoptimization()); |
(...skipping 101 matching lines...) |
677 job->FinalizeJob() != CompilationJob::SUCCEEDED) { | 717 job->FinalizeJob() != CompilationJob::SUCCEEDED) { |
678 if (FLAG_trace_opt) { | 718 if (FLAG_trace_opt) { |
679 PrintF("[aborted optimizing "); | 719 PrintF("[aborted optimizing "); |
680 info->closure()->ShortPrint(); | 720 info->closure()->ShortPrint(); |
681 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason())); | 721 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason())); |
682 } | 722 } |
683 return false; | 723 return false; |
684 } | 724 } |
685 | 725 |
686 // Success! | 726 // Success! |
687 job->RecordOptimizationStats(); | |
688 DCHECK(!isolate->has_pending_exception()); | 727 DCHECK(!isolate->has_pending_exception()); |
689 InsertCodeIntoOptimizedCodeMap(info); | 728 InsertCodeIntoOptimizedCodeMap(info); |
690 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info); | 729 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info); |
691 return true; | 730 return true; |
692 } | 731 } |
693 | 732 |
694 bool GetOptimizedCodeLater(CompilationJob* job) { | 733 bool GetOptimizedCodeLater(CompilationJob* job) { |
695 CompilationInfo* info = job->info(); | 734 CompilationInfo* info = job->info(); |
696 Isolate* isolate = info->isolate(); | 735 Isolate* isolate = info->isolate(); |
697 | 736 |
(...skipping 139 matching lines...) |
837 return isolate->builtins()->InOptimizationQueue(); | 876 return isolate->builtins()->InOptimizationQueue(); |
838 } | 877 } |
839 } else { | 878 } else { |
840 if (GetOptimizedCodeNow(job.get())) return info->code(); | 879 if (GetOptimizedCodeNow(job.get())) return info->code(); |
841 } | 880 } |
842 | 881 |
843 if (isolate->has_pending_exception()) isolate->clear_pending_exception(); | 882 if (isolate->has_pending_exception()) isolate->clear_pending_exception(); |
844 return MaybeHandle<Code>(); | 883 return MaybeHandle<Code>(); |
845 } | 884 } |
846 | 885 |
| 886 CompilationJob::Status FinalizeOptimizedCompilationJob(CompilationJob* job) { |
| 887 CompilationInfo* info = job->info(); |
| 888 Isolate* isolate = info->isolate(); |
| 889 |
| 890 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate()); |
| 891 RuntimeCallTimerScope runtimeTimer(isolate, |
| 892 &RuntimeCallStats::RecompileSynchronous); |
| 893 TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED( |
| 894 isolate, &tracing::TraceEventStatsTable::RecompileSynchronous); |
| 895 |
| 896 Handle<SharedFunctionInfo> shared = info->shared_info(); |
| 897 shared->code()->set_profiler_ticks(0); |
| 898 |
| 899 DCHECK(!shared->HasDebugInfo()); |
| 900 |
| 901 // 1) Optimization on the concurrent thread may have failed. |
| 902 // 2) The function may have already been optimized by OSR. Simply continue. |
| 903 // Except when OSR already disabled optimization for some reason. |
| 904 // 3) The code may have already been invalidated due to dependency change. |
| 905 // 4) Code generation may have failed. |
| 906 if (job->state() == CompilationJob::State::kReadyToFinalize) { |
| 907 if (shared->optimization_disabled()) { |
| 908 job->RetryOptimization(kOptimizationDisabled); |
| 909 } else if (info->dependencies()->HasAborted()) { |
| 910 job->RetryOptimization(kBailedOutDueToDependencyChange); |
| 911 } else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) { |
| 912 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info); |
| 913 if (shared |
| 914 ->SearchOptimizedCodeMap(info->context()->native_context(), |
| 915 info->osr_ast_id()) |
| 916 .code == nullptr) { |
| 917 InsertCodeIntoOptimizedCodeMap(info); |
| 918 } |
| 919 if (FLAG_trace_opt) { |
| 920 PrintF("[completed optimizing "); |
| 921 info->closure()->ShortPrint(); |
| 922 PrintF("]\n"); |
| 923 } |
| 924 info->closure()->ReplaceCode(*info->code()); |
| 925 return CompilationJob::SUCCEEDED; |
| 926 } |
| 927 } |
| 928 |
| 929 DCHECK(job->state() == CompilationJob::State::kFailed); |
| 930 if (FLAG_trace_opt) { |
| 931 PrintF("[aborted optimizing "); |
| 932 info->closure()->ShortPrint(); |
| 933 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason())); |
| 934 } |
| 935 info->closure()->ReplaceCode(shared->code()); |
| 936 return CompilationJob::FAILED; |
| 937 } |
| 938 |
847 class InterpreterActivationsFinder : public ThreadVisitor, | 939 class InterpreterActivationsFinder : public ThreadVisitor, |
848 public OptimizedFunctionVisitor { | 940 public OptimizedFunctionVisitor { |
849 public: | 941 public: |
850 explicit InterpreterActivationsFinder(SharedFunctionInfo* shared) | 942 explicit InterpreterActivationsFinder(SharedFunctionInfo* shared) |
851 : shared_(shared), has_activations_(false) {} | 943 : shared_(shared), has_activations_(false) {} |
852 | 944 |
853 void VisitThread(Isolate* isolate, ThreadLocalTop* top) { | 945 void VisitThread(Isolate* isolate, ThreadLocalTop* top) { |
854 Address* activation_pc_address = nullptr; | 946 Address* activation_pc_address = nullptr; |
855 JavaScriptFrameIterator it(isolate, top); | 947 JavaScriptFrameIterator it(isolate, top); |
856 for (; !it.done(); it.Advance()) { | 948 for (; !it.done(); it.Advance()) { |
(...skipping 1027 matching lines...) |
1884 } | 1976 } |
1885 | 1977 |
1886 MaybeHandle<Code> Compiler::GetOptimizedCodeForOSR(Handle<JSFunction> function, | 1978 MaybeHandle<Code> Compiler::GetOptimizedCodeForOSR(Handle<JSFunction> function, |
1887 BailoutId osr_ast_id, | 1979 BailoutId osr_ast_id, |
1888 JavaScriptFrame* osr_frame) { | 1980 JavaScriptFrame* osr_frame) { |
1889 DCHECK(!osr_ast_id.IsNone()); | 1981 DCHECK(!osr_ast_id.IsNone()); |
1890 DCHECK_NOT_NULL(osr_frame); | 1982 DCHECK_NOT_NULL(osr_frame); |
1891 return GetOptimizedCode(function, NOT_CONCURRENT, osr_ast_id, osr_frame); | 1983 return GetOptimizedCode(function, NOT_CONCURRENT, osr_ast_id, osr_frame); |
1892 } | 1984 } |
1893 | 1985 |
1894 void Compiler::FinalizeCompilationJob(CompilationJob* raw_job) { | 1986 CompilationJob* Compiler::PrepareUnoptimizedCompilationJob( |
| 1987 CompilationInfo* info) { |
| 1988 VMState<COMPILER> state(info->isolate()); |
| 1989 std::unique_ptr<CompilationJob> job(GetUnoptimizedCompilationJob(info)); |
| 1990 if (job->PrepareJob() != CompilationJob::SUCCEEDED) { |
| 1991 return nullptr; |
| 1992 } |
| 1993 return job.release(); |
| 1994 } |
| 1995 |
| 1996 bool Compiler::FinalizeCompilationJob(CompilationJob* raw_job) { |
1895 // Take ownership of compilation job. Deleting job also tears down the zone. | 1997 // Take ownership of compilation job. Deleting job also tears down the zone. |
1896 std::unique_ptr<CompilationJob> job(raw_job); | 1998 std::unique_ptr<CompilationJob> job(raw_job); |
1897 CompilationInfo* info = job->info(); | |
1898 Isolate* isolate = info->isolate(); | |
1899 | 1999 |
1900 VMState<COMPILER> state(isolate); | 2000 VMState<COMPILER> state(job->info()->isolate()); |
1901 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate()); | 2001 CompilationJob::Status status; |
1902 RuntimeCallTimerScope runtimeTimer(isolate, | 2002 if (job->info()->IsOptimizing()) { |
1903 &RuntimeCallStats::RecompileSynchronous); | 2003 status = FinalizeOptimizedCompilationJob(job.get()); |
1904 TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED( | 2004 } else { |
1905 isolate, &tracing::TraceEventStatsTable::RecompileSynchronous); | 2005 status = FinalizeUnoptimizedCompilationJob(job.get()); |
1906 | |
1907 Handle<SharedFunctionInfo> shared = info->shared_info(); | |
1908 shared->code()->set_profiler_ticks(0); | |
1909 | |
1910 DCHECK(!shared->HasDebugInfo()); | |
1911 | |
1912 // 1) Optimization on the concurrent thread may have failed. | |
1913 // 2) The function may have already been optimized by OSR. Simply continue. | |
1914 // Except when OSR already disabled optimization for some reason. | |
1915 // 3) The code may have already been invalidated due to dependency change. | |
1916 // 4) Code generation may have failed. | |
1917 if (job->state() == CompilationJob::State::kReadyToFinalize) { | |
1918 if (shared->optimization_disabled()) { | |
1919 job->RetryOptimization(kOptimizationDisabled); | |
1920 } else if (info->dependencies()->HasAborted()) { | |
1921 job->RetryOptimization(kBailedOutDueToDependencyChange); | |
1922 } else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) { | |
1923 job->RecordOptimizationStats(); | |
1924 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info); | |
1925 if (shared->SearchOptimizedCodeMap(info->context()->native_context(), | |
1926 info->osr_ast_id()).code == nullptr) { | |
1927 InsertCodeIntoOptimizedCodeMap(info); | |
1928 } | |
1929 if (FLAG_trace_opt) { | |
1930 PrintF("[completed optimizing "); | |
1931 info->closure()->ShortPrint(); | |
1932 PrintF("]\n"); | |
1933 } | |
1934 info->closure()->ReplaceCode(*info->code()); | |
1935 return; | |
1936 } | |
1937 } | 2006 } |
1938 | 2007 if (status == CompilationJob::SUCCEEDED) { |
1939 DCHECK(job->state() == CompilationJob::State::kFailed); | 2008 job->RecordCompilationStats(); |
1940 if (FLAG_trace_opt) { | |
1941 PrintF("[aborted optimizing "); | |
1942 info->closure()->ShortPrint(); | |
1943 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason())); | |
1944 } | 2009 } |
1945 info->closure()->ReplaceCode(shared->code()); | 2010 return status == CompilationJob::SUCCEEDED; |
1946 } | 2011 } |
1947 | 2012 |
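The rewritten Compiler::FinalizeCompilationJob takes ownership of the raw job pointer, dispatches to the optimized or unoptimized finalizer, and records stats only on success. Below is a hedged, self-contained sketch of that take-ownership-and-dispatch shape, using stand-in types (Job, FinalizeOptimized, FinalizeUnoptimized) that are not V8's.

#include <memory>

enum class Status { SUCCEEDED, FAILED };

// Hypothetical job type; IsOptimizing() and RecordStats() stand in for the
// corresponding CompilationInfo/CompilationJob calls used above.
struct Job {
  bool optimizing = false;
  bool IsOptimizing() const { return optimizing; }
  void RecordStats() const { /* counters and timers would go here */ }
};

Status FinalizeOptimized(Job*) { return Status::SUCCEEDED; }
Status FinalizeUnoptimized(Job*) { return Status::SUCCEEDED; }

// Take ownership of the heap-allocated job so it is freed on every return
// path, dispatch on the job kind, and record stats only when finalization
// succeeds.
bool FinalizeJob(Job* raw_job) {
  std::unique_ptr<Job> job(raw_job);
  Status status = job->IsOptimizing() ? FinalizeOptimized(job.get())
                                      : FinalizeUnoptimized(job.get());
  if (status == Status::SUCCEEDED) job->RecordStats();
  return status == Status::SUCCEEDED;
}

int main() { return FinalizeJob(new Job()) ? 0 : 1; }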
1948 void Compiler::PostInstantiation(Handle<JSFunction> function, | 2013 void Compiler::PostInstantiation(Handle<JSFunction> function, |
1949 PretenureFlag pretenure) { | 2014 PretenureFlag pretenure) { |
1950 Handle<SharedFunctionInfo> shared(function->shared()); | 2015 Handle<SharedFunctionInfo> shared(function->shared()); |
1951 | 2016 |
1952 if (FLAG_always_opt && shared->allows_lazy_compilation()) { | 2017 if (FLAG_always_opt && shared->allows_lazy_compilation()) { |
1953 function->MarkForOptimization(); | 2018 function->MarkForOptimization(); |
1954 } | 2019 } |
1955 | 2020 |
(...skipping 10 matching lines...) |
1966 DCHECK(shared->is_compiled()); | 2031 DCHECK(shared->is_compiled()); |
1967 function->set_literals(cached.literals); | 2032 function->set_literals(cached.literals); |
1968 } else if (shared->is_compiled()) { | 2033 } else if (shared->is_compiled()) { |
1969 // TODO(mvstanton): pass pretenure flag to EnsureLiterals. | 2034 // TODO(mvstanton): pass pretenure flag to EnsureLiterals. |
1970 JSFunction::EnsureLiterals(function); | 2035 JSFunction::EnsureLiterals(function); |
1971 } | 2036 } |
1972 } | 2037 } |
1973 | 2038 |
1974 } // namespace internal | 2039 } // namespace internal |
1975 } // namespace v8 | 2040 } // namespace v8 |