OLD | NEW |
---|---|
1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/gc_marker.h" | 5 #include "vm/gc_marker.h" |
6 | 6 |
7 #include <map> | 7 #include <map> |
8 #include <utility> | 8 #include <utility> |
9 #include <vector> | 9 #include <vector> |
10 | 10 |
(...skipping 56 matching lines...) | |
67 it->second->VisitPointers(visitor); | 67 it->second->VisitPointers(visitor); |
68 } | 68 } |
69 } | 69 } |
70 | 70 |
71 private: | 71 private: |
72 Map delay_set_; | 72 Map delay_set_; |
73 Mutex* mutex_; | 73 Mutex* mutex_; |
74 }; | 74 }; |
75 | 75 |
76 | 76 |
77 class SkippedCodeFunctions { | |
78 public: | |
79 SkippedCodeFunctions() {} | |
80 | |
81 void Add(RawFunction* func) { | |
82 skipped_code_functions_.Add(func); | |
83 } | |
84 | |
85 void DetachCode() { | |
86 intptr_t unoptimized_code_count = 0; | |
87 intptr_t current_code_count = 0; | |
88 for (int i = 0; i < skipped_code_functions_.length(); i++) { | |
89 RawFunction* func = skipped_code_functions_[i]; | |
90 RawCode* code = func->ptr()->instructions_->ptr()->code_; | |
91 if (!code->IsMarked()) { | |
92 // If the code wasn't strongly visited through other references | |
93 // after skipping the function's code pointer, then we disconnect the | |
94 // code from the function. | |
95 func->StorePointer( | |
96 &(func->ptr()->instructions_), | |
97 StubCode::LazyCompile_entry()->code()->ptr()->instructions_); | |
98 if (FLAG_log_code_drop) { | |
99 // NOTE: This code runs while GC is in progress and runs within | |
100 // a NoHandleScope block. Hence it is not okay to use a regular Zone | |
101 // or Scope handle. We use a direct stack handle so the raw pointer in | |
102 // this handle is not traversed. The use of a handle is mainly to | |
103 // be able to reuse the handle based code and avoid having to add | |
104 // helper functions to the raw object interface. | |
105 String name; | |
106 name = func->ptr()->name_; | |
107 ISL_Print("Detaching code: %s\n", name.ToCString()); | |
108 current_code_count++; | |
109 } | |
110 } | |
111 | |
112 code = func->ptr()->unoptimized_code_; | |
113 if (!code->IsMarked()) { | |
114 // If the code wasn't strongly visited through other references | |
115 // after skipping the function's code pointer, then we disconnect the | |
116 // code from the function. | |
117 func->StorePointer(&(func->ptr()->unoptimized_code_), Code::null()); | |
118 if (FLAG_log_code_drop) { | |
119 unoptimized_code_count++; | |
120 } | |
121 } | |
122 } | |
123 if (FLAG_log_code_drop) { | |
124 ISL_Print(" total detached current: %" Pd "\n", current_code_count); | |
125 ISL_Print(" total detached unoptimized: %" Pd "\n", | |
126 unoptimized_code_count); | |
127 } | |
128 // Clean up. | |
129 skipped_code_functions_.Clear(); | |
130 } | |
131 | |
132 private: | |
133 GrowableArray<RawFunction*> skipped_code_functions_; | |
Ivan Posva 2015/08/27 16:41:30: DISALLOW*
koda 2015/08/27 17:51:32: Done.
| |
134 }; | |
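A minimal sketch of what the DISALLOW* request above likely resolves to, following the macro pattern MarkingVisitor already uses further down in this file; choosing DISALLOW_COPY_AND_ASSIGN rather than DISALLOW_IMPLICIT_CONSTRUCTORS is an assumption, since the class declares its own default constructor:

  // Hypothetical resolution of the review comment above (not shown in this
  // diff): forbid copying while keeping the explicit default constructor.
  class SkippedCodeFunctions {
   public:
    SkippedCodeFunctions() {}
    // ... Add() and DetachCode() as above ...
   private:
    GrowableArray<RawFunction*> skipped_code_functions_;

    DISALLOW_COPY_AND_ASSIGN(SkippedCodeFunctions);
  };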
135 | |
136 | |
77 class MarkingVisitor : public ObjectPointerVisitor { | 137 class MarkingVisitor : public ObjectPointerVisitor { |
78 public: | 138 public: |
79 MarkingVisitor(Isolate* isolate, | 139 MarkingVisitor(Isolate* isolate, |
80 Heap* heap, | 140 Heap* heap, |
81 PageSpace* page_space, | 141 PageSpace* page_space, |
82 MarkingStack* marking_stack, | 142 MarkingStack* marking_stack, |
83 DelaySet* delay_set, | 143 DelaySet* delay_set, |
84 bool visit_function_code) | 144 SkippedCodeFunctions* skipped_code_functions) |
85 : ObjectPointerVisitor(isolate), | 145 : ObjectPointerVisitor(isolate), |
86 thread_(Thread::Current()), | 146 thread_(Thread::Current()), |
87 heap_(heap), | 147 heap_(heap), |
88 vm_heap_(Dart::vm_isolate()->heap()), | 148 vm_heap_(Dart::vm_isolate()->heap()), |
89 class_table_(isolate->class_table()), | 149 class_table_(isolate->class_table()), |
90 page_space_(page_space), | 150 page_space_(page_space), |
91 work_list_(marking_stack), | 151 work_list_(marking_stack), |
92 delay_set_(delay_set), | 152 delay_set_(delay_set), |
93 visiting_old_object_(NULL), | 153 visiting_old_object_(NULL), |
94 visit_function_code_(visit_function_code), | 154 skipped_code_functions_(skipped_code_functions), |
95 marked_bytes_(0) { | 155 marked_bytes_(0) { |
96 ASSERT(heap_ != vm_heap_); | 156 ASSERT(heap_ != vm_heap_); |
97 ASSERT(thread_->isolate() == isolate); | 157 ASSERT(thread_->isolate() == isolate); |
98 } | 158 } |
99 | 159 |
100 uintptr_t marked_bytes() const { return marked_bytes_; } | 160 uintptr_t marked_bytes() const { return marked_bytes_; } |
101 | 161 |
102 // Returns true if some non-zero amount of work was performed. | 162 // Returns true if some non-zero amount of work was performed. |
103 bool DrainMarkingStack() { | 163 bool DrainMarkingStack() { |
104 RawObject* raw_obj = work_list_.Pop(); | 164 RawObject* raw_obj = work_list_.Pop(); |
(...skipping 19 matching lines...) |
124 VisitingOldObject(NULL); | 184 VisitingOldObject(NULL); |
125 return true; | 185 return true; |
126 } | 186 } |
127 | 187 |
128 void VisitPointers(RawObject** first, RawObject** last) { | 188 void VisitPointers(RawObject** first, RawObject** last) { |
129 for (RawObject** current = first; current <= last; current++) { | 189 for (RawObject** current = first; current <= last; current++) { |
130 MarkObject(*current, current); | 190 MarkObject(*current, current); |
131 } | 191 } |
132 } | 192 } |
133 | 193 |
134 bool visit_function_code() const { return visit_function_code_; } | 194 bool visit_function_code() const { |
195 return skipped_code_functions_ == NULL; | |
196 } | |
135 | 197 |
136 virtual MallocGrowableArray<RawFunction*>* skipped_code_functions() { | 198 virtual void add_skipped_code_function(RawFunction* func) { |
137 return &skipped_code_functions_; | 199 ASSERT(!visit_function_code()); |
200 isolate()->counters()->Increment("skip", 1); | |
201 skipped_code_functions_->Add(func); | |
138 } | 202 } |
139 | 203 |
140 // Returns the mark bit. Sets the watch bit if unmarked. (The prior value of | 204 // Returns the mark bit. Sets the watch bit if unmarked. (The prior value of |
141 // the watched bit is returned in 'watched_before' for validation purposes.) | 205 // the watched bit is returned in 'watched_before' for validation purposes.) |
142 // TODO(koda): When synchronizing header bits, this goes in a single CAS loop. | 206 // TODO(koda): When synchronizing header bits, this goes in a single CAS loop. |
143 static bool EnsureWatchedIfWhite(RawObject* obj, bool* watched_before) { | 207 static bool EnsureWatchedIfWhite(RawObject* obj, bool* watched_before) { |
144 if (obj->IsMarked()) { | 208 if (obj->IsMarked()) { |
145 return false; | 209 return false; |
146 } | 210 } |
147 if (!obj->IsWatched()) { | 211 if (!obj->IsWatched()) { |
(...skipping 16 matching lines...) |
164 bool new_key = delay_set_->Insert(raw_weak); | 228 bool new_key = delay_set_->Insert(raw_weak); |
165 ASSERT(new_key == !watched_before); | 229 ASSERT(new_key == !watched_before); |
166 } else { | 230 } else { |
167 // Key is gray or black. Make the weak property black. | 231 // Key is gray or black. Make the weak property black. |
168 raw_weak->VisitPointers(this); | 232 raw_weak->VisitPointers(this); |
169 } | 233 } |
170 } | 234 } |
171 | 235 |
172 // Called when all marking is complete. | 236 // Called when all marking is complete. |
173 void Finalize() { | 237 void Finalize() { |
174 if (!visit_function_code_) { | |
Ivan Posva 2015/08/27 16:41:30: Why was this taken out of the marker? Shouldn't th
koda 2015/08/27 17:51:31: Moved back, as discussed offline.
| |
175 DetachCode(); | |
176 } | |
177 work_list_.Finalize(); | 238 work_list_.Finalize(); |
178 } | 239 } |
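Since koda reports above that the detaching was moved back into the marker, the restored Finalize presumably delegates to the new helper object rather than the removed DetachCode member; a hedged sketch of that post-review shape, not part of the diff as shown:

  // Called when all marking is complete. (Sketch only; assumes the helper
  // is still owned by the caller, as in MarkObjects below.)
  void Finalize() {
    if (skipped_code_functions_ != NULL) {
      skipped_code_functions_->DetachCode();
    }
    work_list_.Finalize();
  }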
179 | 240 |
180 void VisitingOldObject(RawObject* obj) { | 241 void VisitingOldObject(RawObject* obj) { |
181 ASSERT((obj == NULL) || obj->IsOldObject()); | 242 ASSERT((obj == NULL) || obj->IsOldObject()); |
182 visiting_old_object_ = obj; | 243 visiting_old_object_ = obj; |
183 } | 244 } |
184 | 245 |
185 private: | 246 private: |
186 class WorkList : public ValueObject { | 247 class WorkList : public ValueObject { |
(...skipping 89 matching lines...) | |
276 } | 337 } |
277 if (RawObject::IsVariableSizeClassId(raw_obj->GetClassId())) { | 338 if (RawObject::IsVariableSizeClassId(raw_obj->GetClassId())) { |
278 class_table_->UpdateLiveOld(raw_obj->GetClassId(), raw_obj->Size()); | 339 class_table_->UpdateLiveOld(raw_obj->GetClassId(), raw_obj->Size()); |
279 } else { | 340 } else { |
280 class_table_->UpdateLiveOld(raw_obj->GetClassId(), 0); | 341 class_table_->UpdateLiveOld(raw_obj->GetClassId(), 0); |
281 } | 342 } |
282 | 343 |
283 MarkAndPush(raw_obj); | 344 MarkAndPush(raw_obj); |
284 } | 345 } |
285 | 346 |
286 void DetachCode() { | |
287 intptr_t unoptimized_code_count = 0; | |
288 intptr_t current_code_count = 0; | |
289 for (int i = 0; i < skipped_code_functions_.length(); i++) { | |
290 RawFunction* func = skipped_code_functions_[i]; | |
291 RawCode* code = func->ptr()->instructions_->ptr()->code_; | |
292 if (!code->IsMarked()) { | |
293 // If the code wasn't strongly visited through other references | |
294 // after skipping the function's code pointer, then we disconnect the | |
295 // code from the function. | |
296 func->StorePointer( | |
297 &(func->ptr()->instructions_), | |
298 StubCode::LazyCompile_entry()->code()->ptr()->instructions_); | |
299 if (FLAG_log_code_drop) { | |
300 // NOTE: This code runs while GC is in progress and runs within | |
301 // a NoHandleScope block. Hence it is not okay to use a regular Zone | |
302 // or Scope handle. We use a direct stack handle so the raw pointer in | |
303 // this handle is not traversed. The use of a handle is mainly to | |
304 // be able to reuse the handle based code and avoid having to add | |
305 // helper functions to the raw object interface. | |
306 String name; | |
307 name = func->ptr()->name_; | |
308 ISL_Print("Detaching code: %s\n", name.ToCString()); | |
309 current_code_count++; | |
310 } | |
311 } | |
312 | |
313 code = func->ptr()->unoptimized_code_; | |
314 if (!code->IsMarked()) { | |
315 // If the code wasn't strongly visited through other references | |
316 // after skipping the function's code pointer, then we disconnect the | |
317 // code from the function. | |
318 func->StorePointer(&(func->ptr()->unoptimized_code_), Code::null()); | |
319 if (FLAG_log_code_drop) { | |
320 unoptimized_code_count++; | |
321 } | |
322 } | |
323 } | |
324 if (FLAG_log_code_drop) { | |
325 ISL_Print(" total detached current: %" Pd "\n", current_code_count); | |
326 ISL_Print(" total detached unoptimized: %" Pd "\n", | |
327 unoptimized_code_count); | |
328 } | |
329 // Clean up. | |
330 skipped_code_functions_.Clear(); | |
331 } | |
332 | |
333 Thread* thread_; | 347 Thread* thread_; |
334 Heap* heap_; | 348 Heap* heap_; |
335 Heap* vm_heap_; | 349 Heap* vm_heap_; |
336 ClassTable* class_table_; | 350 ClassTable* class_table_; |
337 PageSpace* page_space_; | 351 PageSpace* page_space_; |
338 WorkList work_list_; | 352 WorkList work_list_; |
339 DelaySet* delay_set_; | 353 DelaySet* delay_set_; |
340 RawObject* visiting_old_object_; | 354 RawObject* visiting_old_object_; |
341 const bool visit_function_code_; | 355 SkippedCodeFunctions* skipped_code_functions_; |
342 MallocGrowableArray<RawFunction*> skipped_code_functions_; | |
343 uintptr_t marked_bytes_; | 356 uintptr_t marked_bytes_; |
344 | 357 |
345 DISALLOW_IMPLICIT_CONSTRUCTORS(MarkingVisitor); | 358 DISALLOW_IMPLICIT_CONSTRUCTORS(MarkingVisitor); |
346 }; | 359 }; |
347 | 360 |
348 | 361 |
349 static bool IsUnreachable(const RawObject* raw_obj) { | 362 static bool IsUnreachable(const RawObject* raw_obj) { |
350 if (!raw_obj->IsHeapObject()) { | 363 if (!raw_obj->IsHeapObject()) { |
351 return false; | 364 return false; |
352 } | 365 } |
(...skipping 166 matching lines...) | |
519 ObjectIdRing* ring = isolate->object_id_ring(); | 532 ObjectIdRing* ring = isolate->object_id_ring(); |
520 ASSERT(ring != NULL); | 533 ASSERT(ring != NULL); |
521 ring->VisitPointers(&visitor); | 534 ring->VisitPointers(&visitor); |
522 } | 535 } |
523 | 536 |
524 | 537 |
525 void GCMarker::MarkObjects(Isolate* isolate, | 538 void GCMarker::MarkObjects(Isolate* isolate, |
526 PageSpace* page_space, | 539 PageSpace* page_space, |
527 bool invoke_api_callbacks, | 540 bool invoke_api_callbacks, |
528 bool collect_code) { | 541 bool collect_code) { |
529 const bool visit_function_code = !collect_code; | |
530 Prologue(isolate, invoke_api_callbacks); | 542 Prologue(isolate, invoke_api_callbacks); |
531 // The API prologue/epilogue may create/destroy zones, so we must not | 543 // The API prologue/epilogue may create/destroy zones, so we must not |
532 // depend on zone allocations surviving beyond the epilogue callback. | 544 // depend on zone allocations surviving beyond the epilogue callback. |
533 { | 545 { |
534 StackZone zone(isolate); | 546 StackZone zone(isolate); |
535 MarkingStack marking_stack; | 547 MarkingStack marking_stack; |
536 DelaySet delay_set; | 548 DelaySet delay_set; |
549 SkippedCodeFunctions* skipped_code_functions = | |
550 collect_code ? new SkippedCodeFunctions() : NULL; | |
Ivan Posva 2015/08/27 16:41:30: Why is this not stack or zone allocated?
koda 2015/08/27 17:51:32: Done.
| |
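One way to satisfy the allocation question above, assuming SkippedCodeFunctions is made ZoneAllocated so the enclosing StackZone owns it (a sketch; the final patch may differ):

    // Zone-allocate the helper so no explicit delete is needed; the zone
    // reclaims it when the StackZone declared above goes out of scope.
    SkippedCodeFunctions* skipped_code_functions = collect_code
        ? new(zone.GetZone()) SkippedCodeFunctions()
        : NULL;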
537 MarkingVisitor mark(isolate, heap_, page_space, &marking_stack, | 551 MarkingVisitor mark(isolate, heap_, page_space, &marking_stack, |
538 &delay_set, visit_function_code); | 552 &delay_set, skipped_code_functions); |
539 IterateRoots(isolate, &mark, !invoke_api_callbacks); | 553 IterateRoots(isolate, &mark, !invoke_api_callbacks); |
540 mark.DrainMarkingStack(); | 554 mark.DrainMarkingStack(); |
541 IterateWeakReferences(isolate, &mark); | 555 IterateWeakReferences(isolate, &mark); |
542 MarkingWeakVisitor mark_weak; | 556 MarkingWeakVisitor mark_weak; |
543 IterateWeakRoots(isolate, &mark_weak, invoke_api_callbacks); | 557 IterateWeakRoots(isolate, &mark_weak, invoke_api_callbacks); |
544 // TODO(koda): Add hand-over callback and centralize skipped code functions. | 558 // TODO(koda): Add hand-over callback. |
545 marked_bytes_ = mark.marked_bytes(); | 559 marked_bytes_ = mark.marked_bytes(); |
546 mark.Finalize(); | 560 mark.Finalize(); |
561 if (collect_code) { | |
Ivan Posva 2015/08/27 16:41:30: if (skipped_code_functions != null) { To protect
koda 2015/08/27 17:51:31: Done.
| |
562 skipped_code_functions->DetachCode(); | |
563 delete skipped_code_functions; | |
564 } | |
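The guard Ivan suggests, keyed off the pointer rather than the collect_code flag; with zone allocation as sketched earlier, the explicit delete also goes away:

    if (skipped_code_functions != NULL) {
      skipped_code_functions->DetachCode();
    }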
547 delay_set.ClearReferences(); | 565 delay_set.ClearReferences(); |
548 ProcessWeakTables(page_space); | 566 ProcessWeakTables(page_space); |
549 ProcessObjectIdTable(isolate); | 567 ProcessObjectIdTable(isolate); |
550 } | 568 } |
551 Epilogue(isolate, invoke_api_callbacks); | 569 Epilogue(isolate, invoke_api_callbacks); |
552 } | 570 } |
553 | 571 |
554 } // namespace dart | 572 } // namespace dart |