Chromium Code Reviews

Unified Diff: src/heap/spaces.h

Issue 1625753002: Allocation sampling for paged/lo spaces (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: WIP: Allocation sampling for paged/lo spaces (created 4 years, 11 months ago)
Index: src/heap/spaces.h
diff --git a/src/heap/spaces.h b/src/heap/spaces.h
index 6f470e1f71d1c40dd24ebaef62753971fbdcade3..d390efa3264f84a2cd0e02566686752416ed8383 100644
--- a/src/heap/spaces.h
+++ b/src/heap/spaces.h
@@ -19,8 +19,8 @@
namespace v8 {
namespace internal {
+class AllocationObserver;
class CompactionSpaceCollection;
-class InlineAllocationObserver;
class Isolate;
// -----------------------------------------------------------------------------
@@ -957,7 +957,8 @@ class LargePage : public MemoryChunk {
class Space : public Malloced {
public:
Space(Heap* heap, AllocationSpace id, Executability executable)
- : heap_(heap),
+ : allocation_observers_paused_(false),
ofrobots 2016/01/26 00:38:25 As per suggestion above, it might make sense for H
mattloring 2016/01/30 00:38:03 This is currently enforced by the PauseAllocationO
+ heap_(heap),
id_(id),
executable_(executable),
committed_(0),
@@ -973,6 +974,26 @@ class Space : public Malloced {
// Identity used in error reporting.
AllocationSpace identity() { return id_; }
+ virtual void AddAllocationObserver(AllocationObserver* observer) {
+ allocation_observers_->Add(observer);
+ }
+
+ virtual void RemoveAllocationObserver(AllocationObserver* observer) {
+ bool removed = allocation_observers_->RemoveElement(observer);
+ static_cast<void>(removed);
+ DCHECK(removed);
+ }
+
+ virtual void PauseAllocationObservers() {
+ allocation_observers_paused_ = true;
+ }
+
+ virtual void ResumeAllocationObservers() {
+ allocation_observers_paused_ = false;
+ }
+
+ void AllocationStep(Address soon_object, int size);
+
// Return the total amount committed memory for this space, i.e., allocatable
// memory and page headers.
virtual intptr_t CommittedMemory() { return committed_; }
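
The Space-level API added above generalizes what NewSpace previously exposed as AddInlineAllocationObserver/RemoveInlineAllocationObserver (removed further down in this diff). As a hedged illustration only, a concrete observer might look like the sketch below; AllocationObserver is only forward-declared in this file, so its constructor and Step() signature, as well as the old_space() attachment point, are assumptions rather than part of this patch.

class SamplingObserver : public AllocationObserver {
 public:
  // Assumed constructor: the base class is presumed to take a step size,
  // i.e. roughly how many allocated bytes should separate Step() calls.
  explicit SamplingObserver(intptr_t step_size)
      : AllocationObserver(step_size) {}

  // Assumed callback: invoked (approximately) every step_size bytes
  // allocated in the space the observer is attached to; soon_object and
  // size describe the allocation that triggered the step.
  void Step(int bytes_allocated, Address soon_object, int size) override {
    // Record a sample for the object about to be allocated at soon_object.
  }
};

// With this patch, observers can be attached to any Space, not just NewSpace.
void AttachSampler(Heap* heap, SamplingObserver* observer) {
  heap->old_space()->AddAllocationObserver(observer);
}
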
@@ -1019,6 +1040,10 @@ class Space : public Malloced {
DCHECK_GE(committed_, 0);
}
+ List<AllocationObserver*>* allocation_observers_ =
+ new List<AllocationObserver*>();
+ bool allocation_observers_paused_;
+
private:
Heap* heap_;
AllocationSpace id_;
@@ -2574,8 +2599,7 @@ class NewSpace : public Space {
to_space_(heap, kToSpace),
from_space_(heap, kFromSpace),
reservation_(),
- top_on_previous_step_(0),
- inline_allocation_observers_paused_(false) {}
+ top_on_previous_step_(0) {}
// Sets up the new space using the given chunk.
bool SetUp(int reserved_semispace_size_, int max_semi_space_size);
@@ -2752,20 +2776,15 @@ class NewSpace : public Space {
void UpdateInlineAllocationLimit(int size_in_bytes);
- // Allows observation of inline allocation. The observer->Step() method gets
- // called after every step_size bytes have been allocated (approximately).
- // This works by adjusting the allocation limit to a lower value and adjusting
- // it after each step.
- void AddInlineAllocationObserver(InlineAllocationObserver* observer);
-
- // Removes a previously installed observer.
- void RemoveInlineAllocationObserver(InlineAllocationObserver* observer);
-
void DisableInlineAllocationSteps() {
top_on_previous_step_ = 0;
UpdateInlineAllocationLimit(0);
}
+ void AddAllocationObserver(AllocationObserver* observer) override;
+
+ void RemoveAllocationObserver(AllocationObserver* observer) override;
+
// Get the extent of the inactive semispace (for use as a marking stack,
// or to zap it). Notice: space-addresses are not necessarily on the
// same page, so FromSpaceStart() might be above FromSpaceEnd().
@@ -2861,14 +2880,7 @@ class NewSpace : public Space {
// mark-compact collection.
AllocationInfo allocation_info_;
- // When inline allocation stepping is active, either because of incremental
- // marking or because of idle scavenge, we 'interrupt' inline allocation every
- // once in a while. This is done by setting allocation_info_.limit to be lower
- // than the actual limit and and increasing it in steps to guarantee that the
- // observers are notified periodically.
- List<InlineAllocationObserver*> inline_allocation_observers_;
Address top_on_previous_step_;
- bool inline_allocation_observers_paused_;
HistogramInfo* allocated_histogram_;
HistogramInfo* promoted_histogram_;
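
The comment removed in the hunk above documents how inline allocation stepping works: the space advertises an allocation limit lower than the real one, so allocation periodically hits it, at which point the observers are notified and the limit is raised by the next step size. Below is a standalone, simplified sketch of that scheme; all names in it are illustrative and none come from this file.

#include <algorithm>
#include <cstdint>

struct StepState {
  uintptr_t top_on_previous_step = 0;  // top at the time of the last step
  uintptr_t advertised_limit = 0;      // limit handed out to allocators
};

// Called when allocation reaches the advertised (artificially low) limit.
void Step(StepState* state, uintptr_t top, uintptr_t real_limit,
          uintptr_t next_step_size) {
  uintptr_t bytes_since_last_step = top - state->top_on_previous_step;
  // ... notify observers with bytes_since_last_step ...
  state->top_on_previous_step = top;
  // Advertise a limit next_step_size bytes ahead, capped at the real limit,
  // so the next step fires after roughly next_step_size more bytes.
  state->advertised_limit = std::min(top + next_step_size, real_limit);
}
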
@@ -2885,26 +2897,23 @@ class NewSpace : public Space {
size_t size);
intptr_t GetNextInlineAllocationStepSize();
void StartNextInlineAllocationStep();
- void PauseInlineAllocationObservers();
- void ResumeInlineAllocationObservers();
+ void PauseAllocationObservers() override;
+ void ResumeAllocationObservers() override;
friend class PauseInlineAllocationObserversScope;
friend class SemiSpaceIterator;
};
-class PauseInlineAllocationObserversScope {
+class PauseAllocationObserversScope {
public:
- explicit PauseInlineAllocationObserversScope(NewSpace* new_space)
- : new_space_(new_space) {
- new_space_->PauseInlineAllocationObservers();
- }
- ~PauseInlineAllocationObserversScope() {
- new_space_->ResumeInlineAllocationObservers();
+ explicit PauseAllocationObserversScope(Space* space) : space_(space) {
+ space_->PauseAllocationObservers();
}
+ ~PauseAllocationObserversScope() { space_->ResumeAllocationObservers(); }
private:
- NewSpace* new_space_;
- DISALLOW_COPY_AND_ASSIGN(PauseInlineAllocationObserversScope);
+ Space* space_;
+ DISALLOW_COPY_AND_ASSIGN(PauseAllocationObserversScope);
};
// -----------------------------------------------------------------------------
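
The generalized RAII scope above now works on any Space rather than only on NewSpace. A minimal, hypothetical usage sketch follows; the surrounding function is not part of the patch.

// Observers on `space` are paused for the lifetime of the scope object and
// resumed when it goes out of scope, even on early return.
void DoWorkWithoutAllocationSteps(Space* space) {
  PauseAllocationObserversScope pause(space);
  // ... allocations here do not trigger observer steps ...
}  // ~PauseAllocationObserversScope() resumes the observers.
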
