| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 472 matching lines...) |
| 483 | 483 |
| 484 | 484 |
| 485 bool IncrementalMarking::WorthActivating() { | 485 bool IncrementalMarking::WorthActivating() { |
| 486 #ifndef DEBUG | 486 #ifndef DEBUG |
| 487 static const intptr_t kActivationThreshold = 8 * MB; | 487 static const intptr_t kActivationThreshold = 8 * MB; |
| 488 #else | 488 #else |
| 489 // TODO(gc) consider setting this to some low level so that some | 489 // TODO(gc) consider setting this to some low level so that some |
| 490 // debug tests run with incremental marking and some without. | 490 // debug tests run with incremental marking and some without. |
| 491 static const intptr_t kActivationThreshold = 0; | 491 static const intptr_t kActivationThreshold = 0; |
| 492 #endif | 492 #endif |
| 493 | 493 // Only start incremental marking in a safe state: 1) when expose GC is |
| 494 // deactivated, 2) when incremental marking is turned on, 3) when we are |
| 495 // currently not in a GC, and 4) when we are currently not serializing |
| 496 // or deserializing the heap. |
| 494 return !FLAG_expose_gc && | 497 return !FLAG_expose_gc && |
| 495 FLAG_incremental_marking && | 498 FLAG_incremental_marking && |
| 499 FLAG_incremental_marking_steps && |
| 500 heap_->gc_state() == Heap::NOT_IN_GC && |
| 496 !Serializer::enabled() && | 501 !Serializer::enabled() && |
| 502 heap_->isolate()->IsInitialized() && |
| 497 heap_->PromotedSpaceSizeOfObjects() > kActivationThreshold; | 503 heap_->PromotedSpaceSizeOfObjects() > kActivationThreshold; |
| 498 } | 504 } |
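The conditions added to WorthActivating() here are the same safety checks that OldSpaceStep() used to repeat inline (see the hunk further down), so after this change a caller is expected to need only the single predicate before starting. A minimal sketch of the intended caller-side pattern, using only names that appear later in this same diff:

    // Sketch of the intended call pattern after this refactoring; the
    // PREVENT_COMPACTION flag is the one used in the OldSpaceStep hunk below.
    if (IsStopped() && WorthActivating() && heap_->NextGCIsLikelyToBeFull()) {
      Start(PREVENT_COMPACTION);
    }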
| 499 | 505 |
| 500 | 506 |
| 501 void IncrementalMarking::ActivateGeneratedStub(Code* stub) { | 507 void IncrementalMarking::ActivateGeneratedStub(Code* stub) { |
| 502 ASSERT(RecordWriteStub::GetMode(stub) == | 508 ASSERT(RecordWriteStub::GetMode(stub) == |
| 503 RecordWriteStub::STORE_BUFFER_ONLY); | 509 RecordWriteStub::STORE_BUFFER_ONLY); |
| 504 | 510 |
| 505 if (!IsMarking()) { | 511 if (!IsMarking()) { |
| 506 // Initially stub is generated in STORE_BUFFER_ONLY mode thus | 512 // Initially stub is generated in STORE_BUFFER_ONLY mode thus |
| (...skipping 47 matching lines...) |
| 554 if (state_ == STOPPED && marking_deque_memory_committed_) { | 560 if (state_ == STOPPED && marking_deque_memory_committed_) { |
| 555 bool success = marking_deque_memory_->Uncommit( | 561 bool success = marking_deque_memory_->Uncommit( |
| 556 reinterpret_cast<Address>(marking_deque_memory_->address()), | 562 reinterpret_cast<Address>(marking_deque_memory_->address()), |
| 557 marking_deque_memory_->size()); | 563 marking_deque_memory_->size()); |
| 558 CHECK(success); | 564 CHECK(success); |
| 559 marking_deque_memory_committed_ = false; | 565 marking_deque_memory_committed_ = false; |
| 560 } | 566 } |
| 561 } | 567 } |
| 562 | 568 |
| 563 | 569 |
| 564 void IncrementalMarking::Start() { | 570 void IncrementalMarking::Start(CompactionFlag flag) { |
| 565 if (FLAG_trace_incremental_marking) { | 571 if (FLAG_trace_incremental_marking) { |
| 566 PrintF("[IncrementalMarking] Start\n"); | 572 PrintF("[IncrementalMarking] Start\n"); |
| 567 } | 573 } |
| 568 ASSERT(FLAG_incremental_marking); | 574 ASSERT(FLAG_incremental_marking); |
| 569 ASSERT(FLAG_incremental_marking_steps); | 575 ASSERT(FLAG_incremental_marking_steps); |
| 570 ASSERT(state_ == STOPPED); | 576 ASSERT(state_ == STOPPED); |
| 571 ASSERT(heap_->gc_state() == Heap::NOT_IN_GC); | 577 ASSERT(heap_->gc_state() == Heap::NOT_IN_GC); |
| 572 ASSERT(!Serializer::enabled()); | 578 ASSERT(!Serializer::enabled()); |
| 573 ASSERT(heap_->isolate()->IsInitialized()); | 579 ASSERT(heap_->isolate()->IsInitialized()); |
| 574 | 580 |
| 575 ResetStepCounters(); | 581 ResetStepCounters(); |
| 576 | 582 |
| 577 if (heap_->IsSweepingComplete()) { | 583 if (heap_->IsSweepingComplete()) { |
| 578 StartMarking(ALLOW_COMPACTION); | 584 StartMarking(flag); |
| 579 } else { | 585 } else { |
| 580 if (FLAG_trace_incremental_marking) { | 586 if (FLAG_trace_incremental_marking) { |
| 581 PrintF("[IncrementalMarking] Start sweeping.\n"); | 587 PrintF("[IncrementalMarking] Start sweeping.\n"); |
| 582 } | 588 } |
| 583 state_ = SWEEPING; | 589 state_ = SWEEPING; |
| 584 } | 590 } |
| 585 | 591 |
| 586 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold); | 592 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold); |
| 587 } | 593 } |
| 588 | 594 |
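Only the .cc side of the Start() signature change is visible here; the matching declaration in incremental-marking.h is not part of this section. A hedged sketch of what the header side presumably looks like, inferring the CompactionFlag values from the existing StartMarking(ALLOW_COMPACTION) call and assuming a default argument so existing Start() call sites keep compiling:

    // Hypothetical header-side counterpart (incremental-marking.h), not shown
    // in this diff. The enum values are inferred from the existing
    // StartMarking(ALLOW_COMPACTION) call; the default argument is an
    // assumption, not something this diff confirms.
    enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };

    void Start(CompactionFlag flag = ALLOW_COMPACTION);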
| (...skipping 270 matching lines...) |
| 859 PrintF("[IncrementalMarking] Complete (normal).\n"); | 865 PrintF("[IncrementalMarking] Complete (normal).\n"); |
| 860 } | 866 } |
| 861 if (action == GC_VIA_STACK_GUARD) { | 867 if (action == GC_VIA_STACK_GUARD) { |
| 862 heap_->isolate()->stack_guard()->RequestGC(); | 868 heap_->isolate()->stack_guard()->RequestGC(); |
| 863 } | 869 } |
| 864 } | 870 } |
| 865 | 871 |
| 866 | 872 |
| 867 void IncrementalMarking::OldSpaceStep(intptr_t allocated) { | 873 void IncrementalMarking::OldSpaceStep(intptr_t allocated) { |
| 868 if (IsStopped() && WorthActivating() && heap_->NextGCIsLikelyToBeFull()) { | 874 if (IsStopped() && WorthActivating() && heap_->NextGCIsLikelyToBeFull()) { |
| 869 // Only start incremental marking in a save state: 1) when we are not in | 875 // TODO(hpayer): Let's play safe for now, but compaction should be |
| 870 // a GC, 2) when we turned-on incremental marking, 3) when we are | 876 // in principle possible. |
| 871 // currently not serializing or deserializing the heap. | 877 Start(PREVENT_COMPACTION); |
| 872 if (heap_->gc_state() != Heap::NOT_IN_GC || | |
| 873 !FLAG_incremental_marking || | |
| 874 !FLAG_incremental_marking_steps || | |
| 875 Serializer::enabled() || | |
| 876 !heap_->isolate()->IsInitialized()) { | |
| 877 return; | |
| 878 } | |
| 879 Start(); | |
| 880 } else { | 878 } else { |
| 881 Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD); | 879 Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD); |
| 882 } | 880 } |
| 883 } | 881 } |
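Passing PREVENT_COMPACTION here means that incremental marking triggered from an old-space allocation never sets up incremental compaction, per the TODO above. How the flag is consumed inside StartMarking() falls outside the shown hunks; one plausible reading, offered only as a sketch, is that it simply gates the StartCompaction call:

    // Sketch only: StartMarking()'s body is not part of this diff, so this is
    // an assumption about how the flag is consumed, not the actual change.
    void IncrementalMarking::StartMarking(CompactionFlag flag) {
      is_compacting_ = !FLAG_never_compact &&
                       (flag == ALLOW_COMPACTION) &&
                       heap_->mark_compact_collector()->StartCompaction(
                           MarkCompactCollector::INCREMENTAL_COMPACTION);
      // ... rest of StartMarking unchanged ...
    }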
| 884 | 882 |
| 885 | 883 |
| 886 void IncrementalMarking::Step(intptr_t allocated_bytes, | 884 void IncrementalMarking::Step(intptr_t allocated_bytes, |
| 887 CompletionAction action) { | 885 CompletionAction action) { |
| 888 if (heap_->gc_state() != Heap::NOT_IN_GC || | 886 if (heap_->gc_state() != Heap::NOT_IN_GC || |
| 889 !FLAG_incremental_marking || | 887 !FLAG_incremental_marking || |
| (...skipping 135 matching lines...) |
| 1025 bytes_scanned_ = 0; | 1023 bytes_scanned_ = 0; |
| 1026 write_barriers_invoked_since_last_step_ = 0; | 1024 write_barriers_invoked_since_last_step_ = 0; |
| 1027 } | 1025 } |
| 1028 | 1026 |
| 1029 | 1027 |
| 1030 int64_t IncrementalMarking::SpaceLeftInOldSpace() { | 1028 int64_t IncrementalMarking::SpaceLeftInOldSpace() { |
| 1031 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); | 1029 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); |
| 1032 } | 1030 } |
| 1033 | 1031 |
| 1034 } } // namespace v8::internal | 1032 } } // namespace v8::internal |