Chromium Code Reviews

Side by Side Diff: src/compiler.cc

Issue 2251713002: [Compiler] Add compile to CompilerDispatcherJob. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@offheap_compilerdispatcher
Patch Set: Fix comment Created 4 years, 3 months ago
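[Reviewer note, not part of the patch] This CL moves unoptimized compilation onto the existing three-phase CompilationJob lifecycle (PrepareJob -> ExecuteJob -> FinalizeJob), adds RecordUnoptimizedCompilationStats(), and exposes Compiler::PrepareUnoptimizedCompilationJob() plus a bool-returning Compiler::FinalizeCompilationJob() so the phases can be driven separately, e.g. by a CompilerDispatcherJob. The sketch below is a self-contained model of that phase/state machine for orientation only; the phase and state names mirror the diff, while the mock class layout is an assumption, not V8 code.

// Minimal stand-alone model of the CompilationJob phase/state machine used in
// this patch. Real V8 types (CompilationInfo, ScopedTimer, the Disallow*
// scopes) are intentionally omitted.
#include <memory>

class CompilationJob {
 public:
  enum class Status { SUCCEEDED, FAILED };
  enum class State {
    kReadyToPrepare, kReadyToExecute, kReadyToFinalize, kSucceeded, kFailed
  };
  virtual ~CompilationJob() = default;

  // Each public phase delegates to its *Impl hook and, on success, advances
  // the state to the next phase (the UpdateState pattern in compiler.cc).
  Status PrepareJob()  { return UpdateState(PrepareJobImpl(),  State::kReadyToExecute); }
  Status ExecuteJob()  { return UpdateState(ExecuteJobImpl(),  State::kReadyToFinalize); }
  Status FinalizeJob() { return UpdateState(FinalizeJobImpl(), State::kSucceeded); }
  State state() const { return state_; }

 protected:
  virtual Status PrepareJobImpl() = 0;
  virtual Status ExecuteJobImpl() = 0;
  virtual Status FinalizeJobImpl() = 0;

 private:
  Status UpdateState(Status status, State next_state) {
    state_ = (status == Status::SUCCEEDED) ? next_state : State::kFailed;
    return status;
  }
  State state_ = State::kReadyToPrepare;
};

// Synchronous driver, shaped like the new GenerateUnoptimizedCode():
bool RunJobSynchronously(CompilationJob* job) {
  if (job->PrepareJob()  != CompilationJob::Status::SUCCEEDED) return false;
  if (job->ExecuteJob()  != CompilationJob::Status::SUCCEEDED) return false;
  if (job->FinalizeJob() != CompilationJob::Status::SUCCEEDED) return false;
  return true;
}
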
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler.h" 5 #include "src/compiler.h"
6 6
7 #include <algorithm> 7 #include <algorithm>
8 #include <memory> 8 #include <memory>
9 9
10 #include "src/asmjs/asm-js.h" 10 #include "src/asmjs/asm-js.h"
(...skipping 242 matching lines...)
253 os << "]" << std::endl; 253 os << "]" << std::endl;
254 } 254 }
255 255
256 // Delegate to the underlying implementation. 256 // Delegate to the underlying implementation.
257 DCHECK(state() == State::kReadyToPrepare); 257 DCHECK(state() == State::kReadyToPrepare);
258 ScopedTimer t(&time_taken_to_prepare_); 258 ScopedTimer t(&time_taken_to_prepare_);
259 return UpdateState(PrepareJobImpl(), State::kReadyToExecute); 259 return UpdateState(PrepareJobImpl(), State::kReadyToExecute);
260 } 260 }
261 261
262 CompilationJob::Status CompilationJob::ExecuteJob() { 262 CompilationJob::Status CompilationJob::ExecuteJob() {
263 DisallowHeapAllocation no_allocation; 263 std::unique_ptr<DisallowHeapAllocation> no_allocation;
264 DisallowHandleAllocation no_handles; 264 std::unique_ptr<DisallowHandleAllocation> no_handles;
265 DisallowHandleDereference no_deref; 265 std::unique_ptr<DisallowHandleDereference> no_deref;
266 DisallowCodeDependencyChange no_dependency_change; 266 std::unique_ptr<DisallowCodeDependencyChange> no_dependency_change;
267 if (can_execute_on_background_thread()) {
268 no_allocation.reset(new DisallowHeapAllocation());
269 no_handles.reset(new DisallowHandleAllocation());
270 no_deref.reset(new DisallowHandleDereference());
271 no_dependency_change.reset(new DisallowCodeDependencyChange());
272 } else {
273 DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
274 }
267 275
268 // Delegate to the underlying implementation. 276 // Delegate to the underlying implementation.
269 DCHECK(state() == State::kReadyToExecute); 277 DCHECK(state() == State::kReadyToExecute);
270 ScopedTimer t(&time_taken_to_execute_); 278 ScopedTimer t(&time_taken_to_execute_);
271 return UpdateState(ExecuteJobImpl(), State::kReadyToFinalize); 279 return UpdateState(ExecuteJobImpl(), State::kReadyToFinalize);
272 } 280 }
273 281
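[Reviewer note] The ExecuteJob() change above only engages the Disallow* scopes when the job is allowed to run off the main thread; otherwise it asserts that it is running on the isolate's thread. Below is a self-contained sketch of that conditional-RAII idiom with stand-in scope classes; the real DisallowHeapAllocation etc. are V8 assert scopes, so this is an illustration of the pattern, not the actual code.

// Stand-in scope types; in V8 these are assert scopes that forbid GC, handle
// creation, etc., for their lifetime.
#include <cassert>
#include <memory>

struct DisallowHeapAllocation {};
struct DisallowHandleAllocation {};

bool ExecuteJobSketch(bool can_execute_on_background_thread,
                      bool on_main_thread) {
  std::unique_ptr<DisallowHeapAllocation> no_allocation;
  std::unique_ptr<DisallowHandleAllocation> no_handles;
  if (can_execute_on_background_thread) {
    // Jobs that may run off-thread must not allocate on the heap or create
    // handles during the execute phase, so the guards are installed here.
    no_allocation.reset(new DisallowHeapAllocation());
    no_handles.reset(new DisallowHandleAllocation());
  } else {
    // Main-thread-only jobs keep full access but must stay on the main thread.
    assert(on_main_thread);
  }
  // ... ExecuteJobImpl() would run here, guarded by the scopes above ...
  return true;
}
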
274 CompilationJob::Status CompilationJob::FinalizeJob() { 282 CompilationJob::Status CompilationJob::FinalizeJob() {
275 DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id())); 283 DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
276 DisallowCodeDependencyChange no_dependency_change; 284 DisallowCodeDependencyChange no_dependency_change;
277 DisallowJavascriptExecution no_js(isolate()); 285 DisallowJavascriptExecution no_js(isolate());
278 DCHECK(!info()->dependencies()->HasAborted()); 286 DCHECK(!info()->dependencies()->HasAborted());
279 287
280 // Delegate to the underlying implementation. 288 // Delegate to the underlying implementation.
281 DCHECK(state() == State::kReadyToFinalize); 289 DCHECK(state() == State::kReadyToFinalize);
282 ScopedTimer t(&time_taken_to_finalize_); 290 ScopedTimer t(&time_taken_to_finalize_);
283 return UpdateState(FinalizeJobImpl(), State::kSucceeded); 291 return UpdateState(FinalizeJobImpl(), State::kSucceeded);
284 } 292 }
285 293
294 void CompilationJob::RecordUnoptimizedCompilationStats() const {
295 int code_size;
296 if (info()->has_bytecode_array()) {
297 code_size = info()->bytecode_array()->SizeIncludingMetadata();
298 } else {
299 code_size = info()->code()->SizeIncludingMetadata();
300 }
301
302 Counters* counters = isolate()->counters();
303 // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually.
304 counters->total_baseline_code_size()->Increment(code_size);
305 counters->total_baseline_compile_count()->Increment(1);
306
307 // TODO(5203): Add timers for each phase of compilation.
308 }
309
310 void CompilationJob::RecordOptimizedCompilationStats() const {
311 DCHECK(info()->IsOptimizing());
312 Handle<JSFunction> function = info()->closure();
313 if (!function->IsOptimized()) {
314 // Concurrent recompilation and OSR may race. Increment only once.
315 int opt_count = function->shared()->opt_count();
316 function->shared()->set_opt_count(opt_count + 1);
317 }
318 double ms_creategraph = time_taken_to_prepare_.InMillisecondsF();
319 double ms_optimize = time_taken_to_execute_.InMillisecondsF();
320 double ms_codegen = time_taken_to_finalize_.InMillisecondsF();
321 if (FLAG_trace_opt) {
322 PrintF("[optimizing ");
323 function->ShortPrint();
324 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
325 ms_codegen);
326 }
327 if (FLAG_trace_opt_stats) {
328 static double compilation_time = 0.0;
329 static int compiled_functions = 0;
330 static int code_size = 0;
331
332 compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
333 compiled_functions++;
334 code_size += function->shared()->SourceSize();
335 PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
336 compiled_functions, code_size, compilation_time);
337 }
338 if (FLAG_hydrogen_stats) {
339 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_prepare_,
340 time_taken_to_execute_,
341 time_taken_to_finalize_);
342 }
343 }
344
286 namespace { 345 namespace {
287 346
288 void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object, 347 void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
289 Handle<Code> code) { 348 Handle<Code> code) {
290 Handle<WeakCell> cell = Code::WeakCellFor(code); 349 Handle<WeakCell> cell = Code::WeakCellFor(code);
291 Heap* heap = isolate->heap(); 350 Heap* heap = isolate->heap();
292 if (heap->InNewSpace(*object)) { 351 if (heap->InNewSpace(*object)) {
293 heap->AddWeakNewSpaceObjectToCodeDependency(object, cell); 352 heap->AddWeakNewSpaceObjectToCodeDependency(object, cell);
294 } else { 353 } else {
295 Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object)); 354 Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
(...skipping 38 matching lines...)
334 isolate->heap()->AddRetainedMap(map); 393 isolate->heap()->AddRetainedMap(map);
335 } 394 }
336 Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code); 395 Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
337 } 396 }
338 for (Handle<HeapObject> object : objects) { 397 for (Handle<HeapObject> object : objects) {
339 AddWeakObjectToCodeDependency(isolate, object, code); 398 AddWeakObjectToCodeDependency(isolate, object, code);
340 } 399 }
341 code->set_can_have_weak_objects(true); 400 code->set_can_have_weak_objects(true);
342 } 401 }
343 402
344 void CompilationJob::RecordOptimizationStats() {
345 DCHECK(info()->IsOptimizing());
346 Handle<JSFunction> function = info()->closure();
347 if (!function->IsOptimized()) {
348 // Concurrent recompilation and OSR may race. Increment only once.
349 int opt_count = function->shared()->opt_count();
350 function->shared()->set_opt_count(opt_count + 1);
351 }
352 double ms_creategraph = time_taken_to_prepare_.InMillisecondsF();
353 double ms_optimize = time_taken_to_execute_.InMillisecondsF();
354 double ms_codegen = time_taken_to_finalize_.InMillisecondsF();
355 if (FLAG_trace_opt) {
356 PrintF("[optimizing ");
357 function->ShortPrint();
358 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
359 ms_codegen);
360 }
361 if (FLAG_trace_opt_stats) {
362 static double compilation_time = 0.0;
363 static int compiled_functions = 0;
364 static int code_size = 0;
365
366 compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
367 compiled_functions++;
368 code_size += function->shared()->SourceSize();
369 PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
370 compiled_functions, code_size, compilation_time);
371 }
372 if (FLAG_hydrogen_stats) {
373 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_prepare_,
374 time_taken_to_execute_,
375 time_taken_to_finalize_);
376 }
377 }
378
379 // ---------------------------------------------------------------------------- 403 // ----------------------------------------------------------------------------
380 // Local helper methods that make up the compilation pipeline. 404 // Local helper methods that make up the compilation pipeline.
381 405
382 namespace { 406 namespace {
383 407
384 bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) { 408 bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) {
385 return shared->is_toplevel() && shared->script()->IsScript() && 409 return shared->is_toplevel() && shared->script()->IsScript() &&
386 Script::cast(shared->script())->compilation_type() == 410 Script::cast(shared->script())->compilation_type() ==
387 Script::COMPILATION_TYPE_EVAL; 411 Script::COMPILATION_TYPE_EVAL;
388 } 412 }
(...skipping 70 matching lines...)
459 // Checks whether top level functions should be passed by the filter. 483 // Checks whether top level functions should be passed by the filter.
460 if (info->shared_info()->is_toplevel()) { 484 if (info->shared_info()->is_toplevel()) {
461 Vector<const char> filter = CStrVector(FLAG_ignition_filter); 485 Vector<const char> filter = CStrVector(FLAG_ignition_filter);
462 return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*'); 486 return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*');
463 } 487 }
464 488
465 // Finally respect the filter. 489 // Finally respect the filter.
466 return info->shared_info()->PassesFilter(FLAG_ignition_filter); 490 return info->shared_info()->PassesFilter(FLAG_ignition_filter);
467 } 491 }
468 492
469 int CodeAndMetadataSize(CompilationInfo* info) { 493 CompilationJob* GetUnoptimizedCompilationJob(CompilationInfo* info) {
470 if (info->has_bytecode_array()) { 494 // Function should have been parsed and analyzed before creating a compilation
471 return info->bytecode_array()->SizeIncludingMetadata(); 495 // job.
496 DCHECK_NOT_NULL(info->literal());
497 DCHECK_NOT_NULL(info->scope());
498
499 EnsureFeedbackMetadata(info);
500 if (ShouldUseIgnition(info)) {
501 return interpreter::Interpreter::NewCompilationJob(info);
502 } else {
503 return FullCodeGenerator::NewCompilationJob(info);
472 } 504 }
473 return info->code()->SizeIncludingMetadata();
474 } 505 }
475 506
476 bool GenerateUnoptimizedCode(CompilationInfo* info) { 507 bool GenerateUnoptimizedCode(CompilationInfo* info) {
477 bool success;
478 EnsureFeedbackMetadata(info);
479 if (FLAG_validate_asm && info->scope()->asm_module() && 508 if (FLAG_validate_asm && info->scope()->asm_module() &&
480 !info->shared_info()->is_asm_wasm_broken()) { 509 !info->shared_info()->is_asm_wasm_broken()) {
510 EnsureFeedbackMetadata(info);
481 MaybeHandle<FixedArray> wasm_data; 511 MaybeHandle<FixedArray> wasm_data;
482 wasm_data = AsmJs::ConvertAsmToWasm(info->parse_info()); 512 wasm_data = AsmJs::ConvertAsmToWasm(info->parse_info());
483 if (!wasm_data.is_null()) { 513 if (!wasm_data.is_null()) {
484 info->shared_info()->set_asm_wasm_data(*wasm_data.ToHandleChecked()); 514 info->shared_info()->set_asm_wasm_data(*wasm_data.ToHandleChecked());
485 info->SetCode(info->isolate()->builtins()->InstantiateAsmJs()); 515 info->SetCode(info->isolate()->builtins()->InstantiateAsmJs());
486 return true; 516 return true;
487 } 517 }
488 } 518 }
489 if (ShouldUseIgnition(info)) { 519
490 success = interpreter::Interpreter::MakeBytecode(info); 520 std::unique_ptr<CompilationJob> job(GetUnoptimizedCompilationJob(info));
491 } else { 521 if (job->PrepareJob() != CompilationJob::SUCCEEDED) return false;
492 success = FullCodeGenerator::MakeCode(info); 522 if (job->ExecuteJob() != CompilationJob::SUCCEEDED) return false;
493 } 523 if (job->FinalizeJob() != CompilationJob::SUCCEEDED) return false;
494 if (success) { 524 job->RecordUnoptimizedCompilationStats();
495 Isolate* isolate = info->isolate(); 525 return true;
496 Counters* counters = isolate->counters();
497 // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually.
498 counters->total_baseline_code_size()->Increment(CodeAndMetadataSize(info));
499 counters->total_baseline_compile_count()->Increment(1);
500 }
501 return success;
502 } 526 }
503 527
504 bool CompileUnoptimizedCode(CompilationInfo* info) { 528 bool CompileUnoptimizedCode(CompilationInfo* info) {
505 DCHECK(AllowCompilation::IsAllowed(info->isolate())); 529 DCHECK(AllowCompilation::IsAllowed(info->isolate()));
506 if (!Compiler::Analyze(info->parse_info()) || 530 if (!Compiler::Analyze(info->parse_info()) ||
507 !GenerateUnoptimizedCode(info)) { 531 !GenerateUnoptimizedCode(info)) {
508 Isolate* isolate = info->isolate(); 532 Isolate* isolate = info->isolate();
509 if (!isolate->has_pending_exception()) isolate->StackOverflow(); 533 if (!isolate->has_pending_exception()) isolate->StackOverflow();
510 return false; 534 return false;
511 } 535 }
(...skipping 15 matching lines...)
527 shared->ClearBytecodeArray(); 551 shared->ClearBytecodeArray();
528 } 552 }
529 DCHECK(!info->code().is_null()); 553 DCHECK(!info->code().is_null());
530 shared->ReplaceCode(*info->code()); 554 shared->ReplaceCode(*info->code());
531 if (info->has_bytecode_array()) { 555 if (info->has_bytecode_array()) {
532 DCHECK(!shared->HasBytecodeArray()); // Only compiled once. 556 DCHECK(!shared->HasBytecodeArray()); // Only compiled once.
533 shared->set_bytecode_array(*info->bytecode_array()); 557 shared->set_bytecode_array(*info->bytecode_array());
534 } 558 }
535 } 559 }
536 560
561 void InstallUnoptimizedCode(CompilationInfo* info) {
562 Handle<SharedFunctionInfo> shared = info->shared_info();
563
564 // Update the shared function info with the scope info.
565 InstallSharedScopeInfo(info, shared);
566
567 // Install compilation result on the shared function info
568 InstallSharedCompilationResult(info, shared);
569
570 // Record the function compilation event.
571 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
572 }
573
537 MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(CompilationInfo* info) { 574 MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(CompilationInfo* info) {
538 VMState<COMPILER> state(info->isolate()); 575 VMState<COMPILER> state(info->isolate());
539 PostponeInterruptsScope postpone(info->isolate()); 576 PostponeInterruptsScope postpone(info->isolate());
540 577
541 // Create a canonical handle scope before internalizing parsed values if 578 // Create a canonical handle scope before internalizing parsed values if
542 // compiling bytecode. This is required for off-thread bytecode generation. 579 // compiling bytecode. This is required for off-thread bytecode generation.
543 std::unique_ptr<CanonicalHandleScope> canonical; 580 std::unique_ptr<CanonicalHandleScope> canonical;
544 if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info->isolate())); 581 if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info->isolate()));
545 582
546 // Parse and update CompilationInfo with the results. 583 // Parse and update CompilationInfo with the results.
547 if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>(); 584 if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>();
548 Handle<SharedFunctionInfo> shared = info->shared_info(); 585 DCHECK_EQ(info->shared_info()->language_mode(),
549 DCHECK_EQ(shared->language_mode(), info->literal()->language_mode()); 586 info->literal()->language_mode());
550 587
551 // Compile either unoptimized code or bytecode for the interpreter. 588 // Compile either unoptimized code or bytecode for the interpreter.
552 if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>(); 589 if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
553 590
554 // Update the shared function info with the scope info. 591 InstallUnoptimizedCode(info);
555 InstallSharedScopeInfo(info, shared);
556
557 // Install compilation result on the shared function info
558 InstallSharedCompilationResult(info, shared);
559
560 // Record the function compilation event.
561 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
562 592
563 return info->code(); 593 return info->code();
564 } 594 }
565 595
596 CompilationJob::Status FinalizeUnoptimizedCompilationJob(CompilationJob* job) {
597 CompilationJob::Status status = job->FinalizeJob();
598 if (status == CompilationJob::SUCCEEDED) {
599 DCHECK(!job->info()->shared_info()->is_compiled());
600 InstallUnoptimizedCode(job->info());
601 job->RecordUnoptimizedCompilationStats();
602 }
603 return status;
604 }
605
566 MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap( 606 MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
567 Handle<JSFunction> function, BailoutId osr_ast_id) { 607 Handle<JSFunction> function, BailoutId osr_ast_id) {
568 Handle<SharedFunctionInfo> shared(function->shared()); 608 Handle<SharedFunctionInfo> shared(function->shared());
569 DisallowHeapAllocation no_gc; 609 DisallowHeapAllocation no_gc;
570 CodeAndLiterals cached = shared->SearchOptimizedCodeMap( 610 CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
571 function->context()->native_context(), osr_ast_id); 611 function->context()->native_context(), osr_ast_id);
572 if (cached.code != nullptr) { 612 if (cached.code != nullptr) {
573 // Caching of optimized code enabled and optimized code found. 613 // Caching of optimized code enabled and optimized code found.
574 if (cached.literals != nullptr) function->set_literals(cached.literals); 614 if (cached.literals != nullptr) function->set_literals(cached.literals);
575 DCHECK(!cached.code->marked_for_deoptimization()); 615 DCHECK(!cached.code->marked_for_deoptimization());
(...skipping 101 matching lines...)
677 job->FinalizeJob() != CompilationJob::SUCCEEDED) { 717 job->FinalizeJob() != CompilationJob::SUCCEEDED) {
678 if (FLAG_trace_opt) { 718 if (FLAG_trace_opt) {
679 PrintF("[aborted optimizing "); 719 PrintF("[aborted optimizing ");
680 info->closure()->ShortPrint(); 720 info->closure()->ShortPrint();
681 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason())); 721 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
682 } 722 }
683 return false; 723 return false;
684 } 724 }
685 725
686 // Success! 726 // Success!
687 job->RecordOptimizationStats(); 727 job->RecordOptimizedCompilationStats();
688 DCHECK(!isolate->has_pending_exception()); 728 DCHECK(!isolate->has_pending_exception());
689 InsertCodeIntoOptimizedCodeMap(info); 729 InsertCodeIntoOptimizedCodeMap(info);
690 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info); 730 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
691 return true; 731 return true;
692 } 732 }
693 733
694 bool GetOptimizedCodeLater(CompilationJob* job) { 734 bool GetOptimizedCodeLater(CompilationJob* job) {
695 CompilationInfo* info = job->info(); 735 CompilationInfo* info = job->info();
696 Isolate* isolate = info->isolate(); 736 Isolate* isolate = info->isolate();
697 737
(...skipping 139 matching lines...)
837 return isolate->builtins()->InOptimizationQueue(); 877 return isolate->builtins()->InOptimizationQueue();
838 } 878 }
839 } else { 879 } else {
840 if (GetOptimizedCodeNow(job.get())) return info->code(); 880 if (GetOptimizedCodeNow(job.get())) return info->code();
841 } 881 }
842 882
843 if (isolate->has_pending_exception()) isolate->clear_pending_exception(); 883 if (isolate->has_pending_exception()) isolate->clear_pending_exception();
844 return MaybeHandle<Code>(); 884 return MaybeHandle<Code>();
845 } 885 }
846 886
887 CompilationJob::Status FinalizeOptimizedCompilationJob(CompilationJob* job) {
888 CompilationInfo* info = job->info();
889 Isolate* isolate = info->isolate();
890
891 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
892 RuntimeCallTimerScope runtimeTimer(isolate,
893 &RuntimeCallStats::RecompileSynchronous);
894 TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
895 isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);
896
897 Handle<SharedFunctionInfo> shared = info->shared_info();
898 shared->code()->set_profiler_ticks(0);
899
900 DCHECK(!shared->HasDebugInfo());
901
902 // 1) Optimization on the concurrent thread may have failed.
903 // 2) The function may have already been optimized by OSR. Simply continue.
904 // Except when OSR already disabled optimization for some reason.
905 // 3) The code may have already been invalidated due to dependency change.
906 // 4) Code generation may have failed.
907 if (job->state() == CompilationJob::State::kReadyToFinalize) {
908 if (shared->optimization_disabled()) {
909 job->RetryOptimization(kOptimizationDisabled);
910 } else if (info->dependencies()->HasAborted()) {
911 job->RetryOptimization(kBailedOutDueToDependencyChange);
912 } else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) {
913 job->RecordOptimizedCompilationStats();
914 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
915 if (shared
916 ->SearchOptimizedCodeMap(info->context()->native_context(),
917 info->osr_ast_id())
918 .code == nullptr) {
919 InsertCodeIntoOptimizedCodeMap(info);
920 }
921 if (FLAG_trace_opt) {
922 PrintF("[completed optimizing ");
923 info->closure()->ShortPrint();
924 PrintF("]\n");
925 }
926 info->closure()->ReplaceCode(*info->code());
927 return CompilationJob::SUCCEEDED;
928 }
929 }
930
931 DCHECK(job->state() == CompilationJob::State::kFailed);
932 if (FLAG_trace_opt) {
933 PrintF("[aborted optimizing ");
934 info->closure()->ShortPrint();
935 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
936 }
937 info->closure()->ReplaceCode(shared->code());
938 return CompilationJob::FAILED;
939 }
940
847 class InterpreterActivationsFinder : public ThreadVisitor, 941 class InterpreterActivationsFinder : public ThreadVisitor,
848 public OptimizedFunctionVisitor { 942 public OptimizedFunctionVisitor {
849 public: 943 public:
850 explicit InterpreterActivationsFinder(SharedFunctionInfo* shared) 944 explicit InterpreterActivationsFinder(SharedFunctionInfo* shared)
851 : shared_(shared), has_activations_(false) {} 945 : shared_(shared), has_activations_(false) {}
852 946
853 void VisitThread(Isolate* isolate, ThreadLocalTop* top) { 947 void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
854 Address* activation_pc_address = nullptr; 948 Address* activation_pc_address = nullptr;
855 JavaScriptFrameIterator it(isolate, top); 949 JavaScriptFrameIterator it(isolate, top);
856 for (; !it.done(); it.Advance()) { 950 for (; !it.done(); it.Advance()) {
(...skipping 1031 matching lines...)
1888 } 1982 }
1889 1983
1890 MaybeHandle<Code> Compiler::GetOptimizedCodeForOSR(Handle<JSFunction> function, 1984 MaybeHandle<Code> Compiler::GetOptimizedCodeForOSR(Handle<JSFunction> function,
1891 BailoutId osr_ast_id, 1985 BailoutId osr_ast_id,
1892 JavaScriptFrame* osr_frame) { 1986 JavaScriptFrame* osr_frame) {
1893 DCHECK(!osr_ast_id.IsNone()); 1987 DCHECK(!osr_ast_id.IsNone());
1894 DCHECK_NOT_NULL(osr_frame); 1988 DCHECK_NOT_NULL(osr_frame);
1895 return GetOptimizedCode(function, NOT_CONCURRENT, osr_ast_id, osr_frame); 1989 return GetOptimizedCode(function, NOT_CONCURRENT, osr_ast_id, osr_frame);
1896 } 1990 }
1897 1991
1898 void Compiler::FinalizeCompilationJob(CompilationJob* raw_job) { 1992 CompilationJob* Compiler::PrepareUnoptimizedCompilationJob(
1993 CompilationInfo* info) {
1994 VMState<COMPILER> state(info->isolate());
1995 std::unique_ptr<CompilationJob> job(GetUnoptimizedCompilationJob(info));
1996 if (job->PrepareJob() != CompilationJob::SUCCEEDED) {
1997 return nullptr;
1998 }
1999 return job.release();
2000 }
2001
2002 bool Compiler::FinalizeCompilationJob(CompilationJob* raw_job) {
1899 // Take ownership of compilation job. Deleting job also tears down the zone. 2003 // Take ownership of compilation job. Deleting job also tears down the zone.
1900 std::unique_ptr<CompilationJob> job(raw_job); 2004 std::unique_ptr<CompilationJob> job(raw_job);
1901 CompilationInfo* info = job->info();
1902 Isolate* isolate = info->isolate();
1903 2005
1904 VMState<COMPILER> state(isolate); 2006 VMState<COMPILER> state(job->info()->isolate());
1905 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate()); 2007 if (job->info()->IsOptimizing()) {
1906 RuntimeCallTimerScope runtimeTimer(isolate, 2008 return FinalizeOptimizedCompilationJob(job.get()) ==
1907 &RuntimeCallStats::RecompileSynchronous); 2009 CompilationJob::SUCCEEDED;
1908 TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED( 2010 } else {
1909 isolate, &tracing::TraceEventStatsTable::RecompileSynchronous); 2011 return FinalizeUnoptimizedCompilationJob(job.get()) ==
1910 2012 CompilationJob::SUCCEEDED;
1911 Handle<SharedFunctionInfo> shared = info->shared_info();
1912 shared->code()->set_profiler_ticks(0);
1913
1914 DCHECK(!shared->HasDebugInfo());
1915
1916 // 1) Optimization on the concurrent thread may have failed.
1917 // 2) The function may have already been optimized by OSR. Simply continue.
1918 // Except when OSR already disabled optimization for some reason.
1919 // 3) The code may have already been invalidated due to dependency change.
1920 // 4) Code generation may have failed.
1921 if (job->state() == CompilationJob::State::kReadyToFinalize) {
1922 if (shared->optimization_disabled()) {
1923 job->RetryOptimization(kOptimizationDisabled);
1924 } else if (info->dependencies()->HasAborted()) {
1925 job->RetryOptimization(kBailedOutDueToDependencyChange);
1926 } else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) {
1927 job->RecordOptimizationStats();
1928 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
1929 if (shared->SearchOptimizedCodeMap(info->context()->native_context(),
1930 info->osr_ast_id()).code == nullptr) {
1931 InsertCodeIntoOptimizedCodeMap(info);
1932 }
1933 if (FLAG_trace_opt) {
1934 PrintF("[completed optimizing ");
1935 info->closure()->ShortPrint();
1936 PrintF("]\n");
1937 }
1938 info->closure()->ReplaceCode(*info->code());
1939 return;
1940 }
1941 } 2013 }
1942
1943 DCHECK(job->state() == CompilationJob::State::kFailed);
1944 if (FLAG_trace_opt) {
1945 PrintF("[aborted optimizing ");
1946 info->closure()->ShortPrint();
1947 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
1948 }
1949 info->closure()->ReplaceCode(shared->code());
1950 } 2014 }
1951 2015
1952 void Compiler::PostInstantiation(Handle<JSFunction> function, 2016 void Compiler::PostInstantiation(Handle<JSFunction> function,
1953 PretenureFlag pretenure) { 2017 PretenureFlag pretenure) {
1954 Handle<SharedFunctionInfo> shared(function->shared()); 2018 Handle<SharedFunctionInfo> shared(function->shared());
1955 2019
1956 if (FLAG_always_opt && shared->allows_lazy_compilation()) { 2020 if (FLAG_always_opt && shared->allows_lazy_compilation()) {
1957 function->MarkForOptimization(); 2021 function->MarkForOptimization();
1958 } 2022 }
1959 2023
(...skipping 10 matching lines...)
1970 DCHECK(shared->is_compiled()); 2034 DCHECK(shared->is_compiled());
1971 function->set_literals(cached.literals); 2035 function->set_literals(cached.literals);
1972 } else if (shared->is_compiled()) { 2036 } else if (shared->is_compiled()) {
1973 // TODO(mvstanton): pass pretenure flag to EnsureLiterals. 2037 // TODO(mvstanton): pass pretenure flag to EnsureLiterals.
1974 JSFunction::EnsureLiterals(function); 2038 JSFunction::EnsureLiterals(function);
1975 } 2039 }
1976 } 2040 }
1977 2041
1978 } // namespace internal 2042 } // namespace internal
1979 } // namespace v8 2043 } // namespace v8
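
[Reviewer note] Taken together with the CompilerDispatcherJob change referenced in the issue title, the new entry points let prepare and finalize stay on the main thread while the execute phase runs elsewhere. A hypothetical end-to-end illustration follows, using a stand-in Job type and std::thread in place of the dispatcher's task runner; none of this is V8 code.

#include <iostream>
#include <memory>
#include <thread>

enum class Status { SUCCEEDED, FAILED };

// Stand-in for a prepared unoptimized CompilationJob.
struct Job {
  Status ExecuteJob()  { std::cout << "execute (background thread ok)\n"; return Status::SUCCEEDED; }
  Status FinalizeJob() { std::cout << "finalize (main thread)\n";         return Status::SUCCEEDED; }
};

int main() {
  // Analogue of PrepareUnoptimizedCompilationJob(): runs on the main thread
  // and yields a job ready to execute (nullptr on failure in the real code).
  std::unique_ptr<Job> job(new Job());

  // The dispatcher may run the execute phase on a worker thread; the real
  // ExecuteJob() forbids heap/handle access in that case (see the note above).
  std::thread worker([&] { job->ExecuteJob(); });
  worker.join();

  // Analogue of FinalizeCompilationJob(): back on the main thread, installs
  // the result and records stats, reporting success as a bool.
  return job->FinalizeJob() == Status::SUCCEEDED ? 0 : 1;
}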