Chromium Code Reviews

Side by Side Diff: src/store-buffer.h

Issue 6880010: Merge (7265, 7271] from bleeding_edge to experimental/gc branch.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 8 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 22 matching lines...)
33 #include "globals.h" 33 #include "globals.h"
34 #include "platform.h" 34 #include "platform.h"
35 #include "v8globals.h" 35 #include "v8globals.h"
36 36
37 namespace v8 { 37 namespace v8 {
38 namespace internal { 38 namespace internal {
39 39
40 40
41 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to); 41 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);
42 42
43 43 // TODO(gc) ISOLATESMERGE convert to non static class
Erik Corry 2011/04/20 20:07:40 Is this not done?
Vyacheslav Egorov (Chromium) 2011/04/24 11:24:08 indeed.
44 // Used to implement the write barrier by collecting addresses of pointers 44 // Used to implement the write barrier by collecting addresses of pointers
45 // between spaces. 45 // between spaces.
46 class StoreBuffer : public AllStatic { 46 class StoreBuffer {
47 public: 47 public:
48 static inline Address TopAddress(); 48 explicit StoreBuffer(Heap* heap);
49 49
50 static void Setup(); 50 static void StoreBufferOverflow(Isolate* isolate);
51 static void TearDown(); 51
52 inline Address TopAddress();
53
54 void Setup();
55 void TearDown();
52 56
53 // This is used by the mutator to enter addresses into the store buffer. 57 // This is used by the mutator to enter addresses into the store buffer.
54 static inline void Mark(Address addr); 58 inline void Mark(Address addr);
55 59
56 // This is used by the heap traversal to enter the addresses into the store 60 // This is used by the heap traversal to enter the addresses into the store
57 // buffer that should still be in the store buffer after GC. It enters 61 // buffer that should still be in the store buffer after GC. It enters
58 // addresses directly into the old buffer because the GC starts by wiping the 62 // addresses directly into the old buffer because the GC starts by wiping the
59 // old buffer and thereafter only visits each cell once so there is no need 63 // old buffer and thereafter only visits each cell once so there is no need
60 // to attempt to remove any dupes. During the first part of a GC we 64 // to attempt to remove any dupes. During the first part of a GC we
61 // are using the store buffer to access the old spaces and at the same time 65 // are using the store buffer to access the old spaces and at the same time
62 // we are rebuilding the store buffer using this function. There is, however 66 // we are rebuilding the store buffer using this function. There is, however
63 // no issue of overwriting the buffer we are iterating over, because this 67 // no issue of overwriting the buffer we are iterating over, because this
64 // stage of the scavenge can only reduce the number of addresses in the store 68 // stage of the scavenge can only reduce the number of addresses in the store
65 // buffer (some objects are promoted so pointers to them do not need to be in 69 // buffer (some objects are promoted so pointers to them do not need to be in
66 // the store buffer). The later parts of the GC scan the pages that are 70 // the store buffer). The later parts of the GC scan the pages that are
67 // exempt from the store buffer and process the promotion queue. These steps 71 // exempt from the store buffer and process the promotion queue. These steps
68 // can overflow this buffer. We check for this and on overflow we call the 72 // can overflow this buffer. We check for this and on overflow we call the
69 // callback set up with the StoreBufferRebuildScope object. 73 // callback set up with the StoreBufferRebuildScope object.
70 static inline void EnterDirectlyIntoStoreBuffer(Address addr); 74 inline void EnterDirectlyIntoStoreBuffer(Address addr);
71 75
72 // Iterates over all pointers that go from old space to new space. It will 76 // Iterates over all pointers that go from old space to new space. It will
73 // delete the store buffer as it starts so the callback should reenter 77 // delete the store buffer as it starts so the callback should reenter
74 // surviving old-to-new pointers into the store buffer to rebuild it. 78 // surviving old-to-new pointers into the store buffer to rebuild it.
75 static void IteratePointersToNewSpace(ObjectSlotCallback callback); 79 void IteratePointersToNewSpace(ObjectSlotCallback callback);
76 80
77 static const int kStoreBufferOverflowBit = 1 << 16; 81 static const int kStoreBufferOverflowBit = 1 << 16;
78 static const int kStoreBufferSize = kStoreBufferOverflowBit; 82 static const int kStoreBufferSize = kStoreBufferOverflowBit;
79 static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address); 83 static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address);
80 static const int kOldStoreBufferLength = kStoreBufferLength * 64; 84 static const int kOldStoreBufferLength = kStoreBufferLength * 64;
81 static const int kHashMapLengthLog2 = 12; 85 static const int kHashMapLengthLog2 = 12;
82 static const int kHashMapLength = 1 << kHashMapLengthLog2; 86 static const int kHashMapLength = 1 << kHashMapLengthLog2;
83 87
84 static void Compact(); 88 void Compact();
85 static void GCPrologue(GCType type, GCCallbackFlags flags); 89 static void GCPrologue(GCType type, GCCallbackFlags flags);
86 static void GCEpilogue(GCType type, GCCallbackFlags flags); 90 static void GCEpilogue(GCType type, GCCallbackFlags flags);
87 91
88 static Object*** Limit() { return reinterpret_cast<Object***>(old_limit_); } 92 Object*** Limit() { return reinterpret_cast<Object***>(old_limit_); }
89 static Object*** Start() { return reinterpret_cast<Object***>(old_start_); } 93 Object*** Start() { return reinterpret_cast<Object***>(old_start_); }
90 static Object*** Top() { return reinterpret_cast<Object***>(old_top_); } 94 Object*** Top() { return reinterpret_cast<Object***>(old_top_); }
91 static void SetTop(Object*** top) { 95 void SetTop(Object*** top) {
92 ASSERT(top >= Start()); 96 ASSERT(top >= Start());
93 ASSERT(top <= Limit()); 97 ASSERT(top <= Limit());
94 old_top_ = reinterpret_cast<Address*>(top); 98 old_top_ = reinterpret_cast<Address*>(top);
95 } 99 }
96 100
97 static bool old_buffer_is_sorted() { return old_buffer_is_sorted_; } 101 bool old_buffer_is_sorted() { return old_buffer_is_sorted_; }
98 static bool old_buffer_is_filtered() { return old_buffer_is_filtered_; } 102 bool old_buffer_is_filtered() { return old_buffer_is_filtered_; }
99 103
100 // Goes through the store buffer removing pointers to things that have 104 // Goes through the store buffer removing pointers to things that have
101 // been promoted. Rebuilds the store buffer completely if it overflowed. 105 // been promoted. Rebuilds the store buffer completely if it overflowed.
102 static void SortUniq(); 106 void SortUniq();
103 107
104 static void HandleFullness(); 108 void HandleFullness();
105 static void Verify(); 109 void Verify();
106 110
107 static bool PrepareForIteration(); 111 bool PrepareForIteration();
108 112
109 #ifdef DEBUG 113 #ifdef DEBUG
110 static void Clean(); 114 void Clean();
111 // Slow, for asserts only. 115 // Slow, for asserts only.
112 static bool CellIsInStoreBuffer(Address cell); 116 bool CellIsInStoreBuffer(Address cell);
113 #endif 117 #endif
114 118
115 private: 119 private:
120 Heap* heap_;
121
116 // The store buffer is divided up into a new buffer that is constantly being 122 // The store buffer is divided up into a new buffer that is constantly being
117 // filled by mutator activity and an old buffer that is filled with the data 123 // filled by mutator activity and an old buffer that is filled with the data
118 // from the new buffer after compression. 124 // from the new buffer after compression.
119 static Address* start_; 125 Address* start_;
120 static Address* limit_; 126 Address* limit_;
121 127
122 static Address* old_start_; 128 Address* old_start_;
123 static Address* old_limit_; 129 Address* old_limit_;
124 static Address* old_top_; 130 Address* old_top_;
125 131
126 static bool old_buffer_is_sorted_; 132 bool old_buffer_is_sorted_;
127 static bool old_buffer_is_filtered_; 133 bool old_buffer_is_filtered_;
128 static bool during_gc_; 134 bool during_gc_;
129 // The garbage collector iterates over many pointers to new space that are not 135 // The garbage collector iterates over many pointers to new space that are not
130 // handled by the store buffer. This flag indicates whether the pointers 136 // handled by the store buffer. This flag indicates whether the pointers
131 // found by the callbacks should be added to the store buffer or not. 137 // found by the callbacks should be added to the store buffer or not.
132 static bool store_buffer_rebuilding_enabled_; 138 bool store_buffer_rebuilding_enabled_;
133 static StoreBufferCallback callback_; 139 StoreBufferCallback callback_;
134 static bool may_move_store_buffer_entries_; 140 bool may_move_store_buffer_entries_;
135 141
136 static VirtualMemory* virtual_memory_; 142 VirtualMemory* virtual_memory_;
137 static uintptr_t* hash_map_1_; 143 uintptr_t* hash_map_1_;
138 static uintptr_t* hash_map_2_; 144 uintptr_t* hash_map_2_;
139 145
140 static void CheckForFullBuffer(); 146 void CheckForFullBuffer();
141 static void Uniq(); 147 void Uniq();
142 static void ZapHashTables(); 148 void ZapHashTables();
143 static bool HashTablesAreZapped(); 149 bool HashTablesAreZapped();
144 static void FilterScanOnScavengeEntries(); 150 void FilterScanOnScavengeEntries();
145 static void ExemptPopularPages(int prime_sample_step, int threshold); 151 void ExemptPopularPages(int prime_sample_step, int threshold);
146 152
147 friend class StoreBufferRebuildScope; 153 friend class StoreBufferRebuildScope;
148 friend class DontMoveStoreBufferEntriesScope; 154 friend class DontMoveStoreBufferEntriesScope;
149 }; 155 };
150 156
151 157
152 class StoreBufferRebuildScope { 158 class StoreBufferRebuildScope {
153 public: 159 public:
154 explicit StoreBufferRebuildScope(StoreBufferCallback callback) 160 explicit StoreBufferRebuildScope(Heap* heap,
155 : stored_state_(StoreBuffer::store_buffer_rebuilding_enabled_), 161 StoreBuffer* store_buffer,
156 stored_callback_(StoreBuffer::callback_) { 162 StoreBufferCallback callback)
157 StoreBuffer::store_buffer_rebuilding_enabled_ = true; 163 : heap_(heap),
158 StoreBuffer::callback_ = callback; 164 store_buffer_(store_buffer),
159 (*callback)(NULL, kStoreBufferScanningPageEvent); 165 stored_state_(store_buffer->store_buffer_rebuilding_enabled_),
166 stored_callback_(store_buffer->callback_) {
167 store_buffer_->store_buffer_rebuilding_enabled_ = true;
168 store_buffer_->callback_ = callback;
169 (*callback)(heap, NULL, kStoreBufferScanningPageEvent);
160 } 170 }
161 171
162 ~StoreBufferRebuildScope() { 172 ~StoreBufferRebuildScope() {
163 StoreBuffer::callback_ = stored_callback_; 173 store_buffer_->callback_ = stored_callback_;
164 StoreBuffer::store_buffer_rebuilding_enabled_ = stored_state_; 174 store_buffer_->store_buffer_rebuilding_enabled_ = stored_state_;
165 StoreBuffer::CheckForFullBuffer(); 175 store_buffer_->CheckForFullBuffer();
166 } 176 }
167 177
168 private: 178 private:
179 Heap* heap_;
180 StoreBuffer* store_buffer_;
169 bool stored_state_; 181 bool stored_state_;
170 StoreBufferCallback stored_callback_; 182 StoreBufferCallback stored_callback_;
171 }; 183 };
172 184
173 185
174 class DontMoveStoreBufferEntriesScope { 186 class DontMoveStoreBufferEntriesScope {
175 public: 187 public:
176 DontMoveStoreBufferEntriesScope() : 188 explicit DontMoveStoreBufferEntriesScope(StoreBuffer* store_buffer)
177 stored_state_(StoreBuffer::may_move_store_buffer_entries_) { 189 : store_buffer_(store_buffer),
178 StoreBuffer::may_move_store_buffer_entries_ = false; 190 stored_state_(store_buffer->may_move_store_buffer_entries_) {
191 store_buffer_->may_move_store_buffer_entries_ = false;
179 } 192 }
180 193
181 ~DontMoveStoreBufferEntriesScope() { 194 ~DontMoveStoreBufferEntriesScope() {
182 StoreBuffer::may_move_store_buffer_entries_ = stored_state_; 195 store_buffer_->may_move_store_buffer_entries_ = stored_state_;
183 } 196 }
184 197
185 private: 198 private:
199 StoreBuffer* store_buffer_;
186 bool stored_state_; 200 bool stored_state_;
187 }; 201 };
188 202
189 } } // namespace v8::internal 203 } } // namespace v8::internal
190 204
191 #endif // V8_STORE_BUFFER_H_ 205 #endif // V8_STORE_BUFFER_H_
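
Note on the API change in this patch set: StoreBuffer loses its AllStatic base and becomes a per-Heap instance, so former StoreBuffer::Foo() call sites now have to go through a StoreBuffer* (typically obtained from the owning Heap), and both RAII scope helpers take the buffer explicitly. The sketch below is illustrative only, not part of the patch; it assumes a Heap::store_buffer() accessor and a StoreBufferCallback of the shape (Heap*, MemoryChunk*, StoreBufferEvent), neither of which is declared in this header (the callback shape is inferred from the (*callback)(heap, NULL, kStoreBufferScanningPageEvent) call in the new StoreBufferRebuildScope constructor).

// Illustrative sketch only -- not part of this change.  It assumes the
// surrounding v8::internal tree: a Heap::store_buffer() accessor, a
// MemoryChunk page type, and a StoreBufferCallback of the shape
// (Heap*, MemoryChunk*, StoreBufferEvent), none of which are declared in
// this header.
#include "heap.h"
#include "store-buffer.h"

namespace v8 {
namespace internal {

// Matches the ObjectSlotCallback typedef at the top of this header.
static void ExampleSlotCallback(HeapObject** from, HeapObject* to) {
  // A real callback would re-enter surviving old-to-new pointers into the
  // store buffer, as the comment on IteratePointersToNewSpace requires.
}

// Hypothetical callback with the signature implied by
// (*callback)(heap, NULL, kStoreBufferScanningPageEvent) in
// StoreBufferRebuildScope.
static void ExampleRebuildCallback(Heap* heap,
                                   MemoryChunk* page,
                                   StoreBufferEvent event) {
  // React to scanning-page / overflow events here.
}

static void ExampleCallSites(Heap* heap, Address slot) {
  StoreBuffer* store_buffer = heap->store_buffer();  // assumed accessor

  // Previously: StoreBuffer::Mark(slot);
  store_buffer->Mark(slot);

  {
    // The scope now needs the heap and the buffer instance in addition to
    // the callback; its destructor restores the previous callback and flag.
    StoreBufferRebuildScope scope(heap, store_buffer, &ExampleRebuildCallback);
    store_buffer->IteratePointersToNewSpace(&ExampleSlotCallback);
  }

  {
    // Likewise, the "don't move entries" scope is now bound to one buffer.
    DontMoveStoreBufferEntriesScope no_move(store_buffer);
    store_buffer->Verify();
  }
}

} }  // namespace v8::internal

Because both scopes save and restore the previous callback and flag values in their destructors, they can be nested, as long as every scope in the nest is bound to the same StoreBuffer instance.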