Chromium Code Reviews

Unified Diff: src/heap/incremental-marking.cc

Issue 2293883002: Make incremental marking tracing format consistent with GC tracing. (Closed)
Patch Set: Move marking restart function to inl file (created 4 years, 3 months ago)
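
All of the hunks below apply one pattern: bare PrintF / PrintIsolate tracing calls are replaced with Isolate::PrintWithTimestamp, and every message carries the "[IncrementalMarking]" prefix, so the output lines up with the GC tracer's. A condensed illustration of the pattern, taken from the SpeedUp hunk further down:

-      PrintIsolate(heap()->isolate(), "Speed up marking after %d steps\n",
-                   static_cast<int>(kMarkingSpeedAccellerationInterval));
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Speed up marking after %d steps\n",
+          static_cast<int>(kMarkingSpeedAccellerationInterval));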
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -1,10 +1,10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/incremental-marking.h"

 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
 #include "src/conversions.h"
 #include "src/heap/gc-idle-time-handler.h"
@@ -464,24 +464,24 @@
   } else {
     RecordWriteStub::Patch(stub, RecordWriteStub::INCREMENTAL);
   }
 }


 void IncrementalMarking::NotifyOfHighPromotionRate() {
   if (IsMarking()) {
     if (marking_speed_ < kFastMarking) {
       if (FLAG_trace_gc) {
-        PrintIsolate(heap()->isolate(),
-                     "Increasing marking speed to %d "
-                     "due to high promotion rate\n",
-                     static_cast<int>(kFastMarking));
+        heap()->isolate()->PrintWithTimestamp(
+            "Increasing marking speed to %d "
+            "due to high promotion rate\n",
+            static_cast<int>(kFastMarking));
       }
       marking_speed_ = kFastMarking;
     }
   }
 }


 static void PatchIncrementalMarkingRecordWriteStubs(
     Heap* heap, RecordWriteStub::Mode mode) {
   UnseededNumberDictionary* stubs = heap->code_stubs();
@@ -499,62 +499,67 @@
           RecordWriteStub::Patch(Code::cast(e), mode);
         }
       }
     }
   }
 }


 void IncrementalMarking::Start(const char* reason) {
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Start (%s)\n",
-           (reason == nullptr) ? "unknown reason" : reason);
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Start (%s)\n",
+        (reason == nullptr) ? "unknown reason" : reason);
   }
   DCHECK(FLAG_incremental_marking);
   DCHECK(state_ == STOPPED);
   DCHECK(heap_->gc_state() == Heap::NOT_IN_GC);
   DCHECK(!heap_->isolate()->serializer_enabled());

   HistogramTimerScope incremental_marking_scope(
       heap_->isolate()->counters()->gc_incremental_marking_start());
   TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart");
   ResetStepCounters();
+  heap_->tracer()->NotifyIncrementalMarkingStart();

   was_activated_ = true;

   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
     StartMarking();
   } else {
     if (FLAG_trace_incremental_marking) {
-      PrintF("[IncrementalMarking] Start sweeping.\n");
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Start sweeping.\n");
     }
     state_ = SWEEPING;
   }

   heap_->new_space()->AddAllocationObserver(&observer_);

   incremental_marking_job()->Start(heap_);
 }


 void IncrementalMarking::StartMarking() {
   if (heap_->isolate()->serializer_enabled()) {
     // Black allocation currently starts when we start incremental marking,
     // but we cannot enable black allocation while deserializing. Hence, we
     // have to delay the start of incremental marking in that case.
     if (FLAG_trace_incremental_marking) {
-      PrintF("[IncrementalMarking] Start delayed - serializer\n");
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Start delayed - serializer\n");
     }
     return;
   }
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Start marking\n");
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Start marking\n");
   }

   is_compacting_ = !FLAG_never_compact &&
                    heap_->mark_compact_collector()->StartCompaction(
                        MarkCompactCollector::INCREMENTAL_COMPACTION);

   state_ = MARKING;

   if (heap_->UsingEmbedderHeapTracer()) {
     TRACE_GC(heap()->tracer(),
@@ -582,41 +587,43 @@

   heap_->CompletelyClearInstanceofCache();
   heap_->isolate()->compilation_cache()->MarkCompactPrologue();

   // Mark strong roots grey.
   IncrementalMarkingRootMarkingVisitor visitor(this);
   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);

   // Ready to start incremental marking.
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Running\n");
+    heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Running\n");
   }
 }

 void IncrementalMarking::StartBlackAllocation() {
   DCHECK(FLAG_black_allocation);
   DCHECK(IsMarking());
   black_allocation_ = true;
   heap()->old_space()->MarkAllocationInfoBlack();
   heap()->map_space()->MarkAllocationInfoBlack();
   heap()->code_space()->MarkAllocationInfoBlack();
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Black allocation started\n");
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Black allocation started\n");
   }
 }

 void IncrementalMarking::FinishBlackAllocation() {
   if (black_allocation_) {
     black_allocation_ = false;
     if (FLAG_trace_incremental_marking) {
-      PrintF("[IncrementalMarking] Black allocation finished\n");
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Black allocation finished\n");
     }
   }
 }

 void IncrementalMarking::MarkRoots() {
   DCHECK(!finalize_marking_completed_);
   DCHECK(IsMarking());

   IncrementalMarkingRootMarkingVisitor visitor(this);
   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
@@ -772,21 +779,21 @@
   ProcessWeakCells();

   int marking_progress =
       abs(old_marking_deque_top -
           heap_->mark_compact_collector()->marking_deque()->top());

   double end = heap_->MonotonicallyIncreasingTimeInMs();
   double delta = end - start;
   heap_->tracer()->AddMarkingTime(delta);
   if (FLAG_trace_incremental_marking) {
-    PrintF(
+    heap()->isolate()->PrintWithTimestamp(
         "[IncrementalMarking] Finalize incrementally round %d, "
         "spent %d ms, marking progress %d.\n",
         static_cast<int>(delta), incremental_marking_finalization_rounds_,
         marking_progress);
   }

   ++incremental_marking_finalization_rounds_;
   if ((incremental_marking_finalization_rounds_ >=
        FLAG_max_incremental_marking_finalization_rounds) ||
       (marking_progress <
@@ -922,107 +929,109 @@
   // A scavenge may have pushed new objects on the marking deque (due to black
   // allocation) even in COMPLETE state. This may happen if scavenges are
   // forced e.g. in tests. It should not happen when COMPLETE was set when
   // incremental marking finished and a regular GC was triggered after that
   // because should_hurry_ will force a full GC.
   if (!heap_->mark_compact_collector()->marking_deque()->IsEmpty()) {
     double start = 0.0;
     if (FLAG_trace_incremental_marking || FLAG_print_cumulative_gc_stat) {
       start = heap_->MonotonicallyIncreasingTimeInMs();
       if (FLAG_trace_incremental_marking) {
-        PrintF("[IncrementalMarking] Hurry\n");
+        heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Hurry\n");
       }
     }
     // TODO(gc) hurry can mark objects it encounters black as mutator
     // was stopped.
     ProcessMarkingDeque(0, FORCE_COMPLETION);
     state_ = COMPLETE;
     if (FLAG_trace_incremental_marking || FLAG_print_cumulative_gc_stat) {
       double end = heap_->MonotonicallyIncreasingTimeInMs();
       double delta = end - start;
       heap_->tracer()->AddMarkingTime(delta);
       if (FLAG_trace_incremental_marking) {
-        PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
-               static_cast<int>(delta));
+        heap()->isolate()->PrintWithTimestamp(
+            "[IncrementalMarking] Complete (hurry), spent %d ms.\n",
+            static_cast<int>(delta));
       }
     }
   }

   Object* context = heap_->native_contexts_list();
   while (!context->IsUndefined(heap_->isolate())) {
     // GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
     HeapObject* cache = HeapObject::cast(
         Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
     if (!cache->IsUndefined(heap_->isolate())) {
       MarkBit mark_bit = ObjectMarking::MarkBitFrom(cache);
       if (Marking::IsGrey(mark_bit)) {
         Marking::GreyToBlack(mark_bit);
         MemoryChunk::IncrementLiveBytesFromGC(cache, cache->Size());
       }
     }
     context = Context::cast(context)->next_context_link();
   }
 }


 void IncrementalMarking::Stop() {
   if (IsStopped()) return;
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Stopping.\n");
+    heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Stopping.\n");
   }

   heap_->new_space()->RemoveAllocationObserver(&observer_);
   IncrementalMarking::set_should_hurry(false);
   ResetStepCounters();
   if (IsMarking()) {
     PatchIncrementalMarkingRecordWriteStubs(heap_,
                                             RecordWriteStub::STORE_BUFFER_ONLY);
     DeactivateIncrementalWriteBarrier();
   }
   heap_->isolate()->stack_guard()->ClearGC();
   state_ = STOPPED;
   is_compacting_ = false;
   FinishBlackAllocation();
 }


 void IncrementalMarking::Finalize() {
   Hurry();
   Stop();
 }


 void IncrementalMarking::FinalizeMarking(CompletionAction action) {
   DCHECK(!finalize_marking_completed_);
   if (FLAG_trace_incremental_marking) {
-    PrintF(
+    heap()->isolate()->PrintWithTimestamp(
         "[IncrementalMarking] requesting finalization of incremental "
         "marking.\n");
   }
   request_type_ = FINALIZATION;
   if (action == GC_VIA_STACK_GUARD) {
     heap_->isolate()->stack_guard()->RequestGC();
   }
 }


 void IncrementalMarking::MarkingComplete(CompletionAction action) {
   state_ = COMPLETE;
   // We will set the stack guard to request a GC now. This will mean the rest
   // of the GC gets performed as soon as possible (we can't do a GC here in a
   // record-write context). If a few things get allocated between now and then
   // that shouldn't make us do a scavenge and keep being incremental, so we set
   // the should-hurry flag to indicate that there can't be much work left to do.
   set_should_hurry(true);
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Complete (normal).\n");
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Complete (normal).\n");
   }
   request_type_ = COMPLETE_MARKING;
   if (action == GC_VIA_STACK_GUARD) {
     heap_->isolate()->stack_guard()->RequestGC();
   }
 }


 void IncrementalMarking::Epilogue() {
   was_activated_ = false;
@@ -1065,81 +1074,86 @@
     Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD);
   }
 }


 void IncrementalMarking::SpeedUp() {
   bool speed_up = false;

   if ((steps_count_ % kMarkingSpeedAccellerationInterval) == 0) {
     if (FLAG_trace_incremental_marking) {
-      PrintIsolate(heap()->isolate(), "Speed up marking after %d steps\n",
-                   static_cast<int>(kMarkingSpeedAccellerationInterval));
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Speed up marking after %d steps\n",
+          static_cast<int>(kMarkingSpeedAccellerationInterval));
     }
     speed_up = true;
   }

   bool space_left_is_very_small =
       (old_generation_space_available_at_start_of_incremental_ < 10 * MB);

   bool only_1_nth_of_space_that_was_available_still_left =
       (SpaceLeftInOldSpace() * (marking_speed_ + 1) <
        old_generation_space_available_at_start_of_incremental_);

   if (space_left_is_very_small ||
       only_1_nth_of_space_that_was_available_still_left) {
     if (FLAG_trace_incremental_marking)
-      PrintIsolate(heap()->isolate(),
-                   "Speed up marking because of low space left\n");
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Speed up marking because of low space left\n");
     speed_up = true;
   }

   bool size_of_old_space_multiplied_by_n_during_marking =
       (heap_->PromotedTotalSize() >
        (marking_speed_ + 1) *
           old_generation_space_used_at_start_of_incremental_);
   if (size_of_old_space_multiplied_by_n_during_marking) {
     speed_up = true;
     if (FLAG_trace_incremental_marking) {
-      PrintIsolate(heap()->isolate(),
-                   "Speed up marking because of heap size increase\n");
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Speed up marking because of heap size "
+          "increase\n");
     }
   }

   int64_t promoted_during_marking =
       heap_->PromotedTotalSize() -
       old_generation_space_used_at_start_of_incremental_;
   intptr_t delay = marking_speed_ * MB;
   intptr_t scavenge_slack = heap_->MaxSemiSpaceSize();

   // We try to scan at at least twice the speed that we are allocating.
   if (promoted_during_marking > bytes_scanned_ / 2 + scavenge_slack + delay) {
     if (FLAG_trace_incremental_marking) {
-      PrintIsolate(heap()->isolate(),
-                   "Speed up marking because marker was not keeping up\n");
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Speed up marking because marker was not "
+          "keeping up\n");
     }
     speed_up = true;
   }

   if (speed_up) {
     if (state_ != MARKING) {
       if (FLAG_trace_incremental_marking) {
-        PrintIsolate(heap()->isolate(),
-                     "Postponing speeding up marking until marking starts\n");
+        heap()->isolate()->PrintWithTimestamp(
+            "[IncrementalMarking] Postponing speeding up marking until marking "
+            "starts\n");
       }
     } else {
       marking_speed_ += kMarkingSpeedAccelleration;
       marking_speed_ = static_cast<int>(
           Min(kMaxMarkingSpeed, static_cast<intptr_t>(marking_speed_ * 1.3)));
       if (FLAG_trace_incremental_marking) {
-        PrintIsolate(heap()->isolate(), "Marking speed increased to %d\n",
-                     marking_speed_);
+        heap()->isolate()->PrintWithTimestamp(
+            "[IncrementalMarking] Marking speed increased to %d\n",
+            marking_speed_);
       }
     }
   }
 }

 void IncrementalMarking::FinalizeSweeping() {
   DCHECK(state_ == SWEEPING);
   if (heap_->mark_compact_collector()->sweeping_in_progress() &&
       (heap_->mark_compact_collector()->sweeper().IsSweepingCompleted() ||
        !FLAG_concurrent_sweeping)) {
@@ -1277,12 +1291,13 @@


 void IncrementalMarking::IncrementIdleMarkingDelayCounter() {
   idle_marking_delay_counter_++;
 }


 void IncrementalMarking::ClearIdleMarkingDelayCounter() {
   idle_marking_delay_counter_ = 0;
 }
+
 }  // namespace internal
 }  // namespace v8
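
For readers outside V8: the convention this patch standardizes on is to route every trace line through a single helper that prepends a timestamp, so incremental marking output has the same shape as the rest of the GC tracing. The snippet below is a minimal, self-contained analogue of that idea; it is an illustrative sketch only, not V8's Isolate::PrintWithTimestamp, whose actual prefix format is defined elsewhere and is not part of this change.

#include <chrono>
#include <cstdarg>
#include <cstdio>

namespace {

// Fixed once at startup; all trace lines report time relative to this point.
const std::chrono::steady_clock::time_point g_start =
    std::chrono::steady_clock::now();

// Prints "<elapsed ms> ms: <message>", mirroring the role that
// Isolate::PrintWithTimestamp plays for per-isolate output in V8.
void PrintWithTimestamp(const char* format, ...) {
  const double ms = std::chrono::duration<double, std::milli>(
                        std::chrono::steady_clock::now() - g_start)
                        .count();
  std::printf("%8.0f ms: ", ms);
  va_list args;
  va_start(args, format);
  std::vprintf(format, args);
  va_end(args);
}

}  // namespace

int main() {
  // Same message shapes as the calls introduced in this patch.
  PrintWithTimestamp("[IncrementalMarking] Start marking\n");
  PrintWithTimestamp("[IncrementalMarking] Marking speed increased to %d\n", 3);
  return 0;
}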