Chromium Code Reviews

Side by Side Diff: runtime/vm/gc_marker.cc

Issue 1176183002: Refactor delay set to allow more than one marking visitor (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Remove DrainTask (created 5 years, 6 months ago)
OLD | NEW
1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/gc_marker.h" 5 #include "vm/gc_marker.h"
6 6
7 #include <map> 7 #include <map>
8 #include <utility> 8 #include <utility>
9 #include <vector> 9 #include <vector>
10 10
11 #include "vm/allocation.h" 11 #include "vm/allocation.h"
12 #include "vm/dart_api_state.h" 12 #include "vm/dart_api_state.h"
13 #include "vm/isolate.h" 13 #include "vm/isolate.h"
14 #include "vm/log.h" 14 #include "vm/log.h"
15 #include "vm/pages.h" 15 #include "vm/pages.h"
16 #include "vm/raw_object.h" 16 #include "vm/raw_object.h"
17 #include "vm/stack_frame.h" 17 #include "vm/stack_frame.h"
18 #include "vm/thread_pool.h"
18 #include "vm/visitor.h" 19 #include "vm/visitor.h"
19 #include "vm/object_id_ring.h" 20 #include "vm/object_id_ring.h"
20 21
21 namespace dart { 22 namespace dart {
22 23
23 // A simple chunked marking stack. 24 // A simple chunked marking stack.
24 class MarkingStack : public ValueObject { 25 class MarkingStack : public ValueObject {
25 public: 26 public:
26 MarkingStack() 27 MarkingStack()
27 : head_(new MarkingStackChunk()), 28 : head_(new MarkingStackChunk()),
(...skipping 85 matching lines...)
113 114
114 MarkingStackChunk* head_; 115 MarkingStackChunk* head_;
115 MarkingStackChunk* empty_chunks_; 116 MarkingStackChunk* empty_chunks_;
116 RawObject** marking_stack_; 117 RawObject** marking_stack_;
117 uint32_t top_; 118 uint32_t top_;
118 119
119 DISALLOW_COPY_AND_ASSIGN(MarkingStack); 120 DISALLOW_COPY_AND_ASSIGN(MarkingStack);
120 }; 121 };
121 122
122 123
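Note: the chunk bookkeeping of MarkingStack is elided in the hunk above. For orientation only, here is a minimal standalone sketch of a chunked mark stack; the chunk size, std::vector storage, and class name are illustrative assumptions, not the VM's implementation (which recycles raw chunks via empty_chunks_):

#include <cstddef>
#include <vector>

// Illustrative chunked stack: objects are pushed into fixed-size chunks so
// that growth never moves previously pushed entries.
class SimpleChunkedStack {
 public:
  void Push(void* obj) {
    if (chunks_.empty() || chunks_.back().size() == kChunkSize) {
      chunks_.push_back(std::vector<void*>());  // Start a new chunk when full.
      chunks_.back().reserve(kChunkSize);
    }
    chunks_.back().push_back(obj);
  }

  void* Pop() {
    void* obj = chunks_.back().back();
    chunks_.back().pop_back();
    if (chunks_.back().empty()) {
      chunks_.pop_back();  // Drop the exhausted chunk.
    }
    return obj;
  }

  bool IsEmpty() const { return chunks_.empty(); }

 private:
  static const size_t kChunkSize = 64;  // Assumed size, not the VM's.
  std::vector<std::vector<void*> > chunks_;
};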
124 class DelaySet {
125 private:
126 typedef std::multimap<RawObject*, RawWeakProperty*> Map;
127 typedef std::pair<RawObject*, RawWeakProperty*> MapEntry;
128
129 public:
130 DelaySet() : mutex_(new Mutex()) {}
131 ~DelaySet() { delete mutex_; }
132
133 // Returns 'true' if this inserted a new key (not just added a value).
134 bool Insert(RawWeakProperty* raw_weak) {
135 MutexLocker ml(mutex_);
136 RawObject* raw_key = raw_weak->ptr()->key_;
137 bool new_key = (delay_set_.find(raw_key) == delay_set_.end());
138 delay_set_.insert(std::make_pair(raw_key, raw_weak));
139 return new_key;
140 }
141
142 void ClearReferences() {
143 MutexLocker ml(mutex_);
144 for (Map::iterator it = delay_set_.begin(); it != delay_set_.end(); ++it) {
145 WeakProperty::Clear(it->second);
146 }
147 }
148
149 // Visit all values with a key equal to raw_obj.
150 void VisitValuesForKey(RawObject* raw_obj, ObjectPointerVisitor* visitor) {
151 // Extract the range into a temporary vector to iterate over it
152 // while delay_set_ may be modified.
153 std::vector<MapEntry> temp_copy;
154 {
155 MutexLocker ml(mutex_);
156 std::pair<Map::iterator, Map::iterator> ret =
157 delay_set_.equal_range(raw_obj);
158 temp_copy.insert(temp_copy.end(), ret.first, ret.second);
159 delay_set_.erase(ret.first, ret.second);
160 }
161 for (std::vector<MapEntry>::iterator it = temp_copy.begin();
162 it != temp_copy.end(); ++it) {
163 it->second->VisitPointers(visitor);
164 }
165 }
166
167 private:
168 Map delay_set_;
169 Mutex* mutex_;
170 };
171
172
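The reason DelaySet is hoisted out of MarkingVisitor (per the issue title, to allow more than one marking visitor) is that several visitors can now share one mutex-protected set. A toy standalone illustration of that sharing, with std::thread and ints standing in for the VM's ThreadPool and RawObject*/RawWeakProperty* (all names below are hypothetical, not part of this CL):

#include <map>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>

class ToyDelaySet {
 public:
  // Mirrors DelaySet::Insert: returns true when the key is new.
  bool Insert(int key, int weak_property) {
    std::lock_guard<std::mutex> lock(mutex_);
    bool new_key = (map_.find(key) == map_.end());
    map_.insert(std::make_pair(key, weak_property));
    return new_key;
  }
  std::size_t Size() {
    std::lock_guard<std::mutex> lock(mutex_);
    return map_.size();
  }

 private:
  std::mutex mutex_;
  std::multimap<int, int> map_;
};

int main() {
  ToyDelaySet delay_set;
  std::vector<std::thread> visitors;
  // Two "marking visitors" delay weak properties concurrently; the mutex
  // inside the shared set keeps the multimap consistent.
  for (int v = 0; v < 2; ++v) {
    visitors.push_back(std::thread([&delay_set, v]() {
      for (int key = 0; key < 100; ++key) {
        delay_set.Insert(key, /*weak property id*/ v * 1000 + key);
      }
    }));
  }
  for (auto& t : visitors) t.join();
  return delay_set.Size() == 200 ? 0 : 1;
}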
123 class MarkingVisitor : public ObjectPointerVisitor { 173 class MarkingVisitor : public ObjectPointerVisitor {
124 public: 174 public:
125 MarkingVisitor(Isolate* isolate, 175 MarkingVisitor(Isolate* isolate,
126 Heap* heap, 176 Heap* heap,
127 PageSpace* page_space, 177 PageSpace* page_space,
128 MarkingStack* marking_stack, 178 MarkingStack* marking_stack,
179 DelaySet* delay_set,
129 bool visit_function_code) 180 bool visit_function_code)
130 : ObjectPointerVisitor(isolate), 181 : ObjectPointerVisitor(isolate),
131 thread_(Thread::Current()), 182 thread_(Thread::Current()),
132 heap_(heap), 183 heap_(heap),
133 vm_heap_(Dart::vm_isolate()->heap()), 184 vm_heap_(Dart::vm_isolate()->heap()),
134 class_table_(isolate->class_table()), 185 class_table_(isolate->class_table()),
135 page_space_(page_space), 186 page_space_(page_space),
136 marking_stack_(marking_stack), 187 marking_stack_(marking_stack),
188 delay_set_(delay_set),
137 visiting_old_object_(NULL), 189 visiting_old_object_(NULL),
138 visit_function_code_(visit_function_code) { 190 visit_function_code_(visit_function_code) {
139 ASSERT(heap_ != vm_heap_); 191 ASSERT(heap_ != vm_heap_);
140 ASSERT(thread_->isolate() == isolate); 192 ASSERT(thread_->isolate() == isolate);
141 } 193 }
142 194
195 ~MarkingVisitor() {
196 // 'Finalize' should be explicitly called before destruction.
197 ASSERT(marking_stack_ == NULL);
198 }
199
143 MarkingStack* marking_stack() const { return marking_stack_; } 200 MarkingStack* marking_stack() const { return marking_stack_; }
144 201
145 void VisitPointers(RawObject** first, RawObject** last) { 202 void VisitPointers(RawObject** first, RawObject** last) {
146 for (RawObject** current = first; current <= last; current++) { 203 for (RawObject** current = first; current <= last; current++) {
147 MarkObject(*current, current); 204 MarkObject(*current, current);
148 } 205 }
149 } 206 }
150 207
151 bool visit_function_code() const { return visit_function_code_; } 208 bool visit_function_code() const { return visit_function_code_; }
152 209
153 virtual GrowableArray<RawFunction*>* skipped_code_functions() { 210 virtual MallocGrowableArray<RawFunction*>* skipped_code_functions() {
154 return &skipped_code_functions_; 211 return &skipped_code_functions_;
155 } 212 }
156 213
157 void DelayWeakProperty(RawWeakProperty* raw_weak) { 214 // Returns the mark bit. Sets the watch bit if unmarked. (The prior value of
158 RawObject* raw_key = raw_weak->ptr()->key_; 215 // the watched bit is returned in 'watched_before' for validation purposes.)
159 DelaySet::iterator it = delay_set_.find(raw_key); 216 // TODO(koda): When synchronizing header bits, this goes in a single CAS loop.
160 if (it != delay_set_.end()) { 217 static bool EnsureWatchedIfWhite(RawObject* obj, bool* watched_before) {
161 ASSERT(raw_key->IsWatched()); 218 if (obj->IsMarked()) {
219 return false;
220 }
221 if (!obj->IsWatched()) {
222 *watched_before = false;
223 obj->SetWatchedBitUnsynchronized();
162 } else { 224 } else {
163 ASSERT(!raw_key->IsWatched()); 225 *watched_before = true;
164 raw_key->SetWatchedBitUnsynchronized();
165 } 226 }
166 delay_set_.insert(std::make_pair(raw_key, raw_weak)); 227 return true;
167 } 228 }
168 229
230 void ProcessWeakProperty(RawWeakProperty* raw_weak) {
231 // The fate of the weak property is determined by its key.
232 RawObject* raw_key = raw_weak->ptr()->key_;
233 bool watched_before = false;
234 if (raw_key->IsHeapObject() &&
235 raw_key->IsOldObject() &&
236 EnsureWatchedIfWhite(raw_key, &watched_before)) {
237 // Key is white. Delay the weak property.
238 bool new_key = delay_set_->Insert(raw_weak);
239 ASSERT(new_key == !watched_before);
240 } else {
241 // Key is gray or black. Make the weak property black.
242 raw_weak->VisitPointers(this);
243 }
244 }
245
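On the TODO above about collapsing this into a single CAS loop once the header bits are synchronized: a rough sketch of that shape is below, using std::atomic and made-up bit masks. kMarkBit and kWatchedBit do not reflect the VM's real tag layout, and this is not part of the CL:

#include <atomic>
#include <cstdint>

// Hypothetical header bit layout; the VM's RawObject tags differ.
const uint32_t kMarkBit = 1u << 0;
const uint32_t kWatchedBit = 1u << 1;

// Returns true if the object is still unmarked (white); in that case the
// watched bit has been set atomically and its previous value is reported.
bool EnsureWatchedIfWhiteCAS(std::atomic<uint32_t>* header,
                             bool* watched_before) {
  uint32_t old_bits = header->load(std::memory_order_relaxed);
  while (true) {
    if ((old_bits & kMarkBit) != 0) {
      return false;  // Already marked: nothing to watch.
    }
    *watched_before = (old_bits & kWatchedBit) != 0;
    if (*watched_before) {
      return true;  // Already watched; no update needed.
    }
    uint32_t new_bits = old_bits | kWatchedBit;
    if (header->compare_exchange_weak(old_bits, new_bits,
                                      std::memory_order_acq_rel,
                                      std::memory_order_relaxed)) {
      return true;  // We set the watched bit on a white object.
    }
    // CAS failed: old_bits now holds the refreshed header; retry.
  }
}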
246 // Called when all marking is complete.
169 void Finalize() { 247 void Finalize() {
170 DelaySet::iterator it = delay_set_.begin();
171 for (; it != delay_set_.end(); ++it) {
172 WeakProperty::Clear(it->second);
173 }
174 if (!visit_function_code_) { 248 if (!visit_function_code_) {
175 DetachCode(); 249 DetachCode();
176 } 250 }
251 // Fail fast on attempts to mark after finalizing.
252 marking_stack_ = NULL;
177 } 253 }
178 254
179 void VisitingOldObject(RawObject* obj) { 255 void VisitingOldObject(RawObject* obj) {
180 ASSERT((obj == NULL) || obj->IsOldObject()); 256 ASSERT((obj == NULL) || obj->IsOldObject());
181 visiting_old_object_ = obj; 257 visiting_old_object_ = obj;
182 } 258 }
183 259
184 private: 260 private:
185 void MarkAndPush(RawObject* raw_obj) { 261 void MarkAndPush(RawObject* raw_obj) {
186 ASSERT(raw_obj->IsHeapObject()); 262 ASSERT(raw_obj->IsHeapObject());
187 ASSERT((FLAG_verify_before_gc || FLAG_verify_before_gc) ? 263 ASSERT((FLAG_verify_before_gc || FLAG_verify_before_gc) ?
188 page_space_->Contains(RawObject::ToAddr(raw_obj)) : 264 page_space_->Contains(RawObject::ToAddr(raw_obj)) :
189 true); 265 true);
190 266
191 // Mark the object and push it on the marking stack. 267 // Mark the object and push it on the marking stack.
192 ASSERT(!raw_obj->IsMarked()); 268 ASSERT(!raw_obj->IsMarked());
193 const bool is_watched = raw_obj->IsWatched(); 269 const bool is_watched = raw_obj->IsWatched();
194 raw_obj->SetMarkBitUnsynchronized(); 270 raw_obj->SetMarkBitUnsynchronized();
195 raw_obj->ClearRememberedBitUnsynchronized(); 271 raw_obj->ClearRememberedBitUnsynchronized();
196 raw_obj->ClearWatchedBitUnsynchronized(); 272 raw_obj->ClearWatchedBitUnsynchronized();
197 if (is_watched) { 273 if (is_watched) {
198 std::pair<DelaySet::iterator, DelaySet::iterator> ret; 274 delay_set_->VisitValuesForKey(raw_obj, this);
199 // Visit all elements with a key equal to raw_obj.
200 ret = delay_set_.equal_range(raw_obj);
201 // Create a copy of the range in a temporary vector to iterate over it
202 // while delay_set_ may be modified.
203 std::vector<DelaySetEntry> temp_copy(ret.first, ret.second);
204 delay_set_.erase(ret.first, ret.second);
205 for (std::vector<DelaySetEntry>::iterator it = temp_copy.begin();
206 it != temp_copy.end(); ++it) {
207 it->second->VisitPointers(this);
208 }
209 } 275 }
210 marking_stack_->Push(raw_obj); 276 marking_stack_->Push(raw_obj);
211 } 277 }
212 278
213 void MarkObject(RawObject* raw_obj, RawObject** p) { 279 void MarkObject(RawObject* raw_obj, RawObject** p) {
214 // Fast exit if the raw object is a Smi. 280 // Fast exit if the raw object is a Smi.
215 if (!raw_obj->IsHeapObject()) { 281 if (!raw_obj->IsHeapObject()) {
216 return; 282 return;
217 } 283 }
218 284
(...skipping 59 matching lines...)
278 if (FLAG_log_code_drop) { 344 if (FLAG_log_code_drop) {
279 unoptimized_code_count++; 345 unoptimized_code_count++;
280 } 346 }
281 } 347 }
282 } 348 }
283 if (FLAG_log_code_drop) { 349 if (FLAG_log_code_drop) {
284 ISL_Print(" total detached current: %" Pd "\n", current_code_count); 350 ISL_Print(" total detached current: %" Pd "\n", current_code_count);
285 ISL_Print(" total detached unoptimized: %" Pd "\n", 351 ISL_Print(" total detached unoptimized: %" Pd "\n",
286 unoptimized_code_count); 352 unoptimized_code_count);
287 } 353 }
354 // Clean up.
355 skipped_code_functions_.Clear();
288 } 356 }
289 357
290 Thread* thread_; 358 Thread* thread_;
291 Heap* heap_; 359 Heap* heap_;
292 Heap* vm_heap_; 360 Heap* vm_heap_;
293 ClassTable* class_table_; 361 ClassTable* class_table_;
294 PageSpace* page_space_; 362 PageSpace* page_space_;
295 MarkingStack* marking_stack_; 363 MarkingStack* marking_stack_;
364 DelaySet* delay_set_;
296 RawObject* visiting_old_object_; 365 RawObject* visiting_old_object_;
297 typedef std::multimap<RawObject*, RawWeakProperty*> DelaySet;
298 typedef std::pair<RawObject*, RawWeakProperty*> DelaySetEntry;
299 DelaySet delay_set_;
300 const bool visit_function_code_; 366 const bool visit_function_code_;
301 GrowableArray<RawFunction*> skipped_code_functions_; 367 MallocGrowableArray<RawFunction*> skipped_code_functions_;
302 368
303 DISALLOW_IMPLICIT_CONSTRUCTORS(MarkingVisitor); 369 DISALLOW_IMPLICIT_CONSTRUCTORS(MarkingVisitor);
304 }; 370 };
305 371
306 372
307 static bool IsUnreachable(const RawObject* raw_obj) { 373 static bool IsUnreachable(const RawObject* raw_obj) {
308 if (!raw_obj->IsHeapObject()) { 374 if (!raw_obj->IsHeapObject()) {
309 return false; 375 return false;
310 } 376 }
311 if (raw_obj == Object::null()) { 377 if (raw_obj == Object::null()) {
(...skipping 130 matching lines...)
442 visitor->VisitingOldObject(raw_obj); 508 visitor->VisitingOldObject(raw_obj);
443 const intptr_t class_id = raw_obj->GetClassId(); 509 const intptr_t class_id = raw_obj->GetClassId();
444 // Currently, classes are considered roots (see issue 18284), so at this 510 // Currently, classes are considered roots (see issue 18284), so at this
445 // point, they should all be marked. 511 // point, they should all be marked.
446 ASSERT(isolate->class_table()->At(class_id)->IsMarked()); 512 ASSERT(isolate->class_table()->At(class_id)->IsMarked());
447 if (class_id != kWeakPropertyCid) { 513 if (class_id != kWeakPropertyCid) {
448 marked_bytes_ += raw_obj->VisitPointers(visitor); 514 marked_bytes_ += raw_obj->VisitPointers(visitor);
449 } else { 515 } else {
450 RawWeakProperty* raw_weak = reinterpret_cast<RawWeakProperty*>(raw_obj); 516 RawWeakProperty* raw_weak = reinterpret_cast<RawWeakProperty*>(raw_obj);
451 marked_bytes_ += raw_weak->Size(); 517 marked_bytes_ += raw_weak->Size();
452 ProcessWeakProperty(raw_weak, visitor); 518 visitor->ProcessWeakProperty(raw_weak);
453 } 519 }
454 } 520 }
455 visitor->VisitingOldObject(NULL); 521 visitor->VisitingOldObject(NULL);
456 } 522 }
457 523
458 524
459 void GCMarker::ProcessWeakProperty(RawWeakProperty* raw_weak,
460 MarkingVisitor* visitor) {
461 // The fate of the weak property is determined by its key.
462 RawObject* raw_key = raw_weak->ptr()->key_;
463 if (raw_key->IsHeapObject() &&
464 raw_key->IsOldObject() &&
465 !raw_key->IsMarked()) {
466 // Key is white. Delay the weak property.
467 visitor->DelayWeakProperty(raw_weak);
468 } else {
469 // Key is gray or black. Make the weak property black.
470 raw_weak->VisitPointers(visitor);
471 }
472 }
473
474
475 void GCMarker::ProcessWeakTables(PageSpace* page_space) { 525 void GCMarker::ProcessWeakTables(PageSpace* page_space) {
476 for (int sel = 0; 526 for (int sel = 0;
477 sel < Heap::kNumWeakSelectors; 527 sel < Heap::kNumWeakSelectors;
478 sel++) { 528 sel++) {
479 WeakTable* table = heap_->GetWeakTable( 529 WeakTable* table = heap_->GetWeakTable(
480 Heap::kOld, static_cast<Heap::WeakSelector>(sel)); 530 Heap::kOld, static_cast<Heap::WeakSelector>(sel));
481 intptr_t size = table->size(); 531 intptr_t size = table->size();
482 for (intptr_t i = 0; i < size; i++) { 532 for (intptr_t i = 0; i < size; i++) {
483 if (table->IsValidEntryAt(i)) { 533 if (table->IsValidEntryAt(i)) {
484 RawObject* raw_obj = table->ObjectAt(i); 534 RawObject* raw_obj = table->ObjectAt(i);
(...skipping 38 matching lines...)
523 PageSpace* page_space, 573 PageSpace* page_space,
524 bool invoke_api_callbacks, 574 bool invoke_api_callbacks,
525 bool collect_code) { 575 bool collect_code) {
526 const bool visit_function_code = !collect_code; 576 const bool visit_function_code = !collect_code;
527 Prologue(isolate, invoke_api_callbacks); 577 Prologue(isolate, invoke_api_callbacks);
528 // The API prologue/epilogue may create/destroy zones, so we must not 578 // The API prologue/epilogue may create/destroy zones, so we must not
529 // depend on zone allocations surviving beyond the epilogue callback. 579 // depend on zone allocations surviving beyond the epilogue callback.
530 { 580 {
531 StackZone zone(isolate); 581 StackZone zone(isolate);
532 MarkingStack marking_stack; 582 MarkingStack marking_stack;
533 MarkingVisitor mark( 583 DelaySet delay_set;
534 isolate, heap_, page_space, &marking_stack, visit_function_code); 584 MarkingVisitor mark(isolate, heap_, page_space, &marking_stack,
585 &delay_set, visit_function_code);
535 IterateRoots(isolate, &mark, !invoke_api_callbacks); 586 IterateRoots(isolate, &mark, !invoke_api_callbacks);
536 DrainMarkingStack(isolate, &mark); 587 DrainMarkingStack(isolate, &mark);
537 IterateWeakReferences(isolate, &mark); 588 IterateWeakReferences(isolate, &mark);
538 MarkingWeakVisitor mark_weak; 589 MarkingWeakVisitor mark_weak;
539 IterateWeakRoots(isolate, &mark_weak, invoke_api_callbacks); 590 IterateWeakRoots(isolate, &mark_weak, invoke_api_callbacks);
540 mark.Finalize(); 591 mark.Finalize();
592 delay_set.ClearReferences();
541 ProcessWeakTables(page_space); 593 ProcessWeakTables(page_space);
542 ProcessObjectIdTable(isolate); 594 ProcessObjectIdTable(isolate);
543 } 595 }
544 Epilogue(isolate, invoke_api_callbacks); 596 Epilogue(isolate, invoke_api_callbacks);
545 } 597 }
546 598
547 } // namespace dart 599 } // namespace dart
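For reviewers tracing the new flow in MarkObjects: delayed weak properties now pass through three phases, Insert while the key is still white, VisitValuesForKey when the key gets marked, and ClearReferences for keys that were never marked. A tiny standalone walkthrough with ints in place of raw pointers (illustration only, not VM code):

#include <iostream>
#include <map>
#include <utility>
#include <vector>

int main() {
  // key object -> delayed weak property (ints stand in for raw pointers).
  std::multimap<int, int> delay_set;

  // Phase 1: during marking, properties whose keys are still white are delayed.
  delay_set.insert(std::make_pair(7, 700));  // key 7 will be marked later
  delay_set.insert(std::make_pair(7, 701));
  delay_set.insert(std::make_pair(9, 900));  // key 9 is never reached

  // Phase 2: key 7 is marked; its delayed properties are pulled out and their
  // values are visited (this is DelaySet::VisitValuesForKey).
  std::pair<std::multimap<int, int>::iterator,
            std::multimap<int, int>::iterator> range = delay_set.equal_range(7);
  std::vector<std::pair<int, int> > revived(range.first, range.second);
  delay_set.erase(range.first, range.second);
  for (std::vector<std::pair<int, int> >::const_iterator it = revived.begin();
       it != revived.end(); ++it) {
    std::cout << "mark value of weak property " << it->second << "\n";
  }

  // Phase 3: marking is complete; whatever remains had unreachable keys and is
  // cleared (this is DelaySet::ClearReferences).
  std::cout << delay_set.size() << " entries left to clear (unreachable keys)\n";
  return 0;
}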