| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/heap.h" | 5 #include "vm/heap.h" |
| 6 | 6 |
| 7 #include "platform/assert.h" | 7 #include "platform/assert.h" |
| 8 #include "platform/utils.h" | 8 #include "platform/utils.h" |
| 9 #include "vm/flags.h" | 9 #include "vm/flags.h" |
| 10 #include "vm/isolate.h" | 10 #include "vm/isolate.h" |
| (...skipping 29 matching lines...) |
| 40 gc_new_space_in_progress_(false), | 40 gc_new_space_in_progress_(false), |
| 41 gc_old_space_in_progress_(false) { | 41 gc_old_space_in_progress_(false) { |
| 42 UpdateGlobalMaxUsed(); | 42 UpdateGlobalMaxUsed(); |
| 43 for (int sel = 0; sel < kNumWeakSelectors; sel++) { | 43 for (int sel = 0; sel < kNumWeakSelectors; sel++) { |
| 44 new_weak_tables_[sel] = new WeakTable(); | 44 new_weak_tables_[sel] = new WeakTable(); |
| 45 old_weak_tables_[sel] = new WeakTable(); | 45 old_weak_tables_[sel] = new WeakTable(); |
| 46 } | 46 } |
| 47 stats_.num_ = 0; | 47 stats_.num_ = 0; |
| 48 } | 48 } |
| 49 | 49 |
| 50 | |
| 51 Heap::~Heap() { | 50 Heap::~Heap() { |
| 52 delete barrier_; | 51 delete barrier_; |
| 53 delete barrier_done_; | 52 delete barrier_done_; |
| 54 | 53 |
| 55 for (int sel = 0; sel < kNumWeakSelectors; sel++) { | 54 for (int sel = 0; sel < kNumWeakSelectors; sel++) { |
| 56 delete new_weak_tables_[sel]; | 55 delete new_weak_tables_[sel]; |
| 57 delete old_weak_tables_[sel]; | 56 delete old_weak_tables_[sel]; |
| 58 } | 57 } |
| 59 } | 58 } |
| 60 | 59 |
| 61 | |
| 62 uword Heap::AllocateNew(intptr_t size) { | 60 uword Heap::AllocateNew(intptr_t size) { |
| 63 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); | 61 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); |
| 64 // Currently, only the Dart thread may allocate in new space. | 62 // Currently, only the Dart thread may allocate in new space. |
| 65 isolate()->AssertCurrentThreadIsMutator(); | 63 isolate()->AssertCurrentThreadIsMutator(); |
| 66 uword addr = new_space_.TryAllocate(size); | 64 uword addr = new_space_.TryAllocate(size); |
| 67 if (addr == 0) { | 65 if (addr == 0) { |
| 68 // This call to CollectGarbage might end up "reusing" a collection spawned | 66 // This call to CollectGarbage might end up "reusing" a collection spawned |
| 69 // by a different thread, and will then race with the other threads that | 67 // by a different thread, and will then race with the other threads that |
| 70 // are released after the collection to allocate the requested memory. | 68 // are released after the collection to allocate the requested memory. |
| 71 CollectGarbage(kNew); | 69 CollectGarbage(kNew); |
| 72 addr = new_space_.TryAllocate(size); | 70 addr = new_space_.TryAllocate(size); |
| 73 if (addr == 0) { | 71 if (addr == 0) { |
| 74 return AllocateOld(size, HeapPage::kData); | 72 return AllocateOld(size, HeapPage::kData); |
| 75 } | 73 } |
| 76 } | 74 } |
| 77 return addr; | 75 return addr; |
| 78 } | 76 } |
| 79 | 77 |
| 80 | |
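The path above is a classic try/collect/retry shape: bump-allocate in new space, scavenge on failure, retry once, then fall back to old space. A minimal sketch of that shape, using a stand-in bump allocator rather than the VM's real Scavenger/PageSpace classes:

```cpp
// Stand-in bump allocator; 0 signals allocation failure, as in the VM code.
#include <cstdint>

struct Space {
  uintptr_t top = 0;
  uintptr_t end = 0;
  uintptr_t TryAllocate(intptr_t size) {
    if (top + size > end) return 0;  // no room: signal failure
    uintptr_t addr = top;
    top += size;
    return addr;
  }
};

uintptr_t AllocateNewSketch(Space& new_space, Space& old_space, intptr_t size) {
  uintptr_t addr = new_space.TryAllocate(size);
  if (addr == 0) {
    // The VM scavenges here (CollectGarbage(kNew)) and retries once.
    addr = new_space.TryAllocate(size);
    if (addr == 0) {
      return old_space.TryAllocate(size);  // final fallback: old space
    }
  }
  return addr;
}
```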
| 81 uword Heap::AllocateOld(intptr_t size, HeapPage::PageType type) { | 78 uword Heap::AllocateOld(intptr_t size, HeapPage::PageType type) { |
| 82 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); | 79 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); |
| 83 uword addr = old_space_.TryAllocate(size, type); | 80 uword addr = old_space_.TryAllocate(size, type); |
| 84 if (addr != 0) { | 81 if (addr != 0) { |
| 85 return addr; | 82 return addr; |
| 86 } | 83 } |
| 87 // If we are in the process of running a sweep, wait for the sweeper to free | 84 // If we are in the process of running a sweep, wait for the sweeper to free |
| 88 // memory. | 85 // memory. |
| 89 Thread* thread = Thread::Current(); | 86 Thread* thread = Thread::Current(); |
| 90 if (thread->CanCollectGarbage()) { | 87 if (thread->CanCollectGarbage()) { |
| (...skipping 27 matching lines...) |
| 118 addr = old_space_.TryAllocate(size, type, PageSpace::kForceGrowth); | 115 addr = old_space_.TryAllocate(size, type, PageSpace::kForceGrowth); |
| 119 if (addr != 0) { | 116 if (addr != 0) { |
| 120 return addr; | 117 return addr; |
| 121 } | 118 } |
| 122 // Give up allocating this object. | 119 // Give up allocating this object. |
| 123 OS::PrintErr("Exhausted heap space, trying to allocate %" Pd " bytes.\n", | 120 OS::PrintErr("Exhausted heap space, trying to allocate %" Pd " bytes.\n", |
| 124 size); | 121 size); |
| 125 return 0; | 122 return 0; |
| 126 } | 123 } |
| 127 | 124 |
| 128 | |
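Between the two visible attempts, the elided 27 lines run a collection and wait for the concurrent sweeper. As a reading aid, the whole ladder can be paraphrased like this; the middle comment stands in for the elided code, and this is not the actual source:

```cpp
// Paraphrase of the old-space allocation escalation ladder (reading aid only;
// assumes the surrounding VM headers for PageSpace, HeapPage, OS, Pd, uword).
uword AllocateOldSketch(PageSpace& old_space, intptr_t size,
                        HeapPage::PageType type) {
  uword addr = old_space.TryAllocate(size, type);  // 1. plain attempt
  if (addr != 0) return addr;
  // 2. (elided in the hunk) collect garbage and/or wait for the concurrent
  //    sweeper to free memory, retrying TryAllocate along the way.
  addr = old_space.TryAllocate(size, type, PageSpace::kForceGrowth);  // 3. grow
  if (addr != 0) return addr;
  OS::PrintErr("Exhausted heap space, trying to allocate %" Pd " bytes.\n",
               size);
  return 0;  // 4. give up
}
```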
| 129 void Heap::AllocateExternal(intptr_t size, Space space) { | 125 void Heap::AllocateExternal(intptr_t size, Space space) { |
| 130 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); | 126 ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0); |
| 131 if (space == kNew) { | 127 if (space == kNew) { |
| 132 isolate()->AssertCurrentThreadIsMutator(); | 128 isolate()->AssertCurrentThreadIsMutator(); |
| 133 new_space_.AllocateExternal(size); | 129 new_space_.AllocateExternal(size); |
| 134 if (new_space_.ExternalInWords() > (FLAG_new_gen_ext_limit * MBInWords)) { | 130 if (new_space_.ExternalInWords() > (FLAG_new_gen_ext_limit * MBInWords)) { |
| 135 // Attempt to free some external allocation by a scavenge. (If the total | 131 // Attempt to free some external allocation by a scavenge. (If the total |
| 136 // remains above the limit, next external alloc will trigger another.) | 132 // remains above the limit, next external alloc will trigger another.) |
| 137 CollectGarbage(kNew); | 133 CollectGarbage(kNew); |
| 138 } | 134 } |
| (...skipping 17 matching lines...) |
| 156 | 152 |
| 157 void Heap::PromoteExternal(intptr_t size) { | 153 void Heap::PromoteExternal(intptr_t size) { |
| 158 new_space_.FreeExternal(size); | 154 new_space_.FreeExternal(size); |
| 159 old_space_.AllocateExternal(size); | 155 old_space_.AllocateExternal(size); |
| 160 } | 156 } |
| 161 | 157 |
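PromoteExternal simply moves externally-allocated bytes from the new-space ledger to the old-space ledger when the owning object is promoted, so the heap-wide external total is unchanged. A small illustration of that invariant, using the ExternalInWords accessor defined later in this file (and assuming the surrounding VM headers):

```cpp
// Promotion moves external bytes between per-space ledgers; the total is
// invariant across PromoteExternal.
int64_t TotalExternalWords(const Heap& heap) {
  return heap.ExternalInWords(Heap::kNew) + heap.ExternalInWords(Heap::kOld);
}
// Before heap.PromoteExternal(size): new ledger = N, old ledger = M.
// After:  new ledger = N - words(size), old ledger = M + words(size).
// TotalExternalWords(heap) is the same before and after.
```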
| 162 bool Heap::Contains(uword addr) const { | 158 bool Heap::Contains(uword addr) const { |
| 163 return new_space_.Contains(addr) || old_space_.Contains(addr); | 159 return new_space_.Contains(addr) || old_space_.Contains(addr); |
| 164 } | 160 } |
| 165 | 161 |
| 166 | |
| 167 bool Heap::NewContains(uword addr) const { | 162 bool Heap::NewContains(uword addr) const { |
| 168 return new_space_.Contains(addr); | 163 return new_space_.Contains(addr); |
| 169 } | 164 } |
| 170 | 165 |
| 171 | |
| 172 bool Heap::OldContains(uword addr) const { | 166 bool Heap::OldContains(uword addr) const { |
| 173 return old_space_.Contains(addr); | 167 return old_space_.Contains(addr); |
| 174 } | 168 } |
| 175 | 169 |
| 176 | |
| 177 bool Heap::CodeContains(uword addr) const { | 170 bool Heap::CodeContains(uword addr) const { |
| 178 return old_space_.Contains(addr, HeapPage::kExecutable); | 171 return old_space_.Contains(addr, HeapPage::kExecutable); |
| 179 } | 172 } |
| 180 | 173 |
| 181 | |
| 182 bool Heap::DataContains(uword addr) const { | 174 bool Heap::DataContains(uword addr) const { |
| 183 return old_space_.DataContains(addr); | 175 return old_space_.DataContains(addr); |
| 184 } | 176 } |
| 185 | 177 |
| 186 | |
| 187 void Heap::VisitObjects(ObjectVisitor* visitor) const { | 178 void Heap::VisitObjects(ObjectVisitor* visitor) const { |
| 188 new_space_.VisitObjects(visitor); | 179 new_space_.VisitObjects(visitor); |
| 189 old_space_.VisitObjects(visitor); | 180 old_space_.VisitObjects(visitor); |
| 190 } | 181 } |
| 191 | 182 |
| 192 | |
| 193 void Heap::VisitObjectsNoImagePages(ObjectVisitor* visitor) const { | 183 void Heap::VisitObjectsNoImagePages(ObjectVisitor* visitor) const { |
| 194 new_space_.VisitObjects(visitor); | 184 new_space_.VisitObjects(visitor); |
| 195 old_space_.VisitObjectsNoImagePages(visitor); | 185 old_space_.VisitObjectsNoImagePages(visitor); |
| 196 } | 186 } |
| 197 | 187 |
| 198 | |
| 199 void Heap::VisitObjectsImagePages(ObjectVisitor* visitor) const { | 188 void Heap::VisitObjectsImagePages(ObjectVisitor* visitor) const { |
| 200 old_space_.VisitObjectsImagePages(visitor); | 189 old_space_.VisitObjectsImagePages(visitor); |
| 201 } | 190 } |
| 202 | 191 |
| 203 | |
| 204 HeapIterationScope::HeapIterationScope(bool writable) | 192 HeapIterationScope::HeapIterationScope(bool writable) |
| 205 : StackResource(Thread::Current()), | 193 : StackResource(Thread::Current()), |
| 206 old_space_(isolate()->heap()->old_space()), | 194 old_space_(isolate()->heap()->old_space()), |
| 207 writable_(writable) { | 195 writable_(writable) { |
| 208 { | 196 { |
| 209 // It's not yet safe to iterate over a paged space while it's concurrently | 197 // It's not yet safe to iterate over a paged space while it's concurrently |
| 210 // sweeping, so wait for any such task to complete first. | 198 // sweeping, so wait for any such task to complete first. |
| 211 MonitorLocker ml(old_space_->tasks_lock()); | 199 MonitorLocker ml(old_space_->tasks_lock()); |
| 212 #if defined(DEBUG) | 200 #if defined(DEBUG) |
| 213 // We currently don't support nesting of HeapIterationScopes. | 201 // We currently don't support nesting of HeapIterationScopes. |
| 214 ASSERT(old_space_->iterating_thread_ != thread()); | 202 ASSERT(old_space_->iterating_thread_ != thread()); |
| 215 #endif | 203 #endif |
| 216 while (old_space_->tasks() > 0) { | 204 while (old_space_->tasks() > 0) { |
| 217 ml.WaitWithSafepointCheck(thread()); | 205 ml.WaitWithSafepointCheck(thread()); |
| 218 } | 206 } |
| 219 #if defined(DEBUG) | 207 #if defined(DEBUG) |
| 220 ASSERT(old_space_->iterating_thread_ == NULL); | 208 ASSERT(old_space_->iterating_thread_ == NULL); |
| 221 old_space_->iterating_thread_ = thread(); | 209 old_space_->iterating_thread_ = thread(); |
| 222 #endif | 210 #endif |
| 223 old_space_->set_tasks(1); | 211 old_space_->set_tasks(1); |
| 224 } | 212 } |
| 225 | 213 |
| 226 if (writable_) { | 214 if (writable_) { |
| 227 thread()->heap()->WriteProtectCode(false); | 215 thread()->heap()->WriteProtectCode(false); |
| 228 } | 216 } |
| 229 } | 217 } |
| 230 | 218 |
| 231 | |
| 232 HeapIterationScope::~HeapIterationScope() { | 219 HeapIterationScope::~HeapIterationScope() { |
| 233 if (writable_) { | 220 if (writable_) { |
| 234 thread()->heap()->WriteProtectCode(true); | 221 thread()->heap()->WriteProtectCode(true); |
| 235 } | 222 } |
| 236 | 223 |
| 237 MonitorLocker ml(old_space_->tasks_lock()); | 224 MonitorLocker ml(old_space_->tasks_lock()); |
| 238 #if defined(DEBUG) | 225 #if defined(DEBUG) |
| 239 ASSERT(old_space_->iterating_thread_ == thread()); | 226 ASSERT(old_space_->iterating_thread_ == thread()); |
| 240 old_space_->iterating_thread_ = NULL; | 227 old_space_->iterating_thread_ = NULL; |
| 241 #endif | 228 #endif |
| 242 ASSERT(old_space_->tasks() == 1); | 229 ASSERT(old_space_->tasks() == 1); |
| 243 old_space_->set_tasks(0); | 230 old_space_->set_tasks(0); |
| 244 ml.NotifyAll(); | 231 ml.NotifyAll(); |
| 245 } | 232 } |
| 246 | 233 |
| 247 | |
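HeapIterationScope is an RAII guard: the constructor waits out any concurrent sweeper task, claims the page space (tasks = 1), and optionally unprotects code pages; the destructor reverses all of that and notifies waiters. A hypothetical usage sketch follows; CountingVisitor and the exact VisitObject hook signature are assumptions, not code from this CL:

```cpp
// Hypothetical visitor: counts objects in old space under the guard.
class CountingVisitor : public ObjectVisitor {
 public:
  void VisitObject(RawObject* obj) { ++count_; }  // signature assumed
  intptr_t count() const { return count_; }

 private:
  intptr_t count_ = 0;
};

void CountOldObjects(Heap* heap) {
  HeapIterationScope scope;              // waits out any concurrent sweeper
  CountingVisitor visitor;
  heap->old_space()->VisitObjects(&visitor);  // safe while the scope is alive
}
```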
| 248 void Heap::IterateObjects(ObjectVisitor* visitor) const { | 234 void Heap::IterateObjects(ObjectVisitor* visitor) const { |
| 249 // The visitor must not allocate from the heap. | 235 // The visitor must not allocate from the heap. |
| 250 NoSafepointScope no_safepoint_scope_; | 236 NoSafepointScope no_safepoint_scope_; |
| 251 new_space_.VisitObjects(visitor); | 237 new_space_.VisitObjects(visitor); |
| 252 IterateOldObjects(visitor); | 238 IterateOldObjects(visitor); |
| 253 } | 239 } |
| 254 | 240 |
| 255 | |
| 256 void Heap::IterateOldObjects(ObjectVisitor* visitor) const { | 241 void Heap::IterateOldObjects(ObjectVisitor* visitor) const { |
| 257 HeapIterationScope heap_iteration_scope; | 242 HeapIterationScope heap_iteration_scope; |
| 258 old_space_.VisitObjects(visitor); | 243 old_space_.VisitObjects(visitor); |
| 259 } | 244 } |
| 260 | 245 |
| 261 | |
| 262 void Heap::IterateOldObjectsNoImagePages(ObjectVisitor* visitor) const { | 246 void Heap::IterateOldObjectsNoImagePages(ObjectVisitor* visitor) const { |
| 263 HeapIterationScope heap_iteration_scope; | 247 HeapIterationScope heap_iteration_scope; |
| 264 old_space_.VisitObjectsNoImagePages(visitor); | 248 old_space_.VisitObjectsNoImagePages(visitor); |
| 265 } | 249 } |
| 266 | 250 |
| 267 | |
| 268 void Heap::VisitObjectPointers(ObjectPointerVisitor* visitor) const { | 251 void Heap::VisitObjectPointers(ObjectPointerVisitor* visitor) const { |
| 269 new_space_.VisitObjectPointers(visitor); | 252 new_space_.VisitObjectPointers(visitor); |
| 270 old_space_.VisitObjectPointers(visitor); | 253 old_space_.VisitObjectPointers(visitor); |
| 271 } | 254 } |
| 272 | 255 |
| 273 | |
| 274 RawInstructions* Heap::FindObjectInCodeSpace(FindObjectVisitor* visitor) const { | 256 RawInstructions* Heap::FindObjectInCodeSpace(FindObjectVisitor* visitor) const { |
| 275 // Only executable pages can have RawInstructions objects. | 257 // Only executable pages can have RawInstructions objects. |
| 276 RawObject* raw_obj = old_space_.FindObject(visitor, HeapPage::kExecutable); | 258 RawObject* raw_obj = old_space_.FindObject(visitor, HeapPage::kExecutable); |
| 277 ASSERT((raw_obj == Object::null()) || | 259 ASSERT((raw_obj == Object::null()) || |
| 278 (raw_obj->GetClassId() == kInstructionsCid)); | 260 (raw_obj->GetClassId() == kInstructionsCid)); |
| 279 return reinterpret_cast<RawInstructions*>(raw_obj); | 261 return reinterpret_cast<RawInstructions*>(raw_obj); |
| 280 } | 262 } |
| 281 | 263 |
| 282 | |
| 283 RawObject* Heap::FindOldObject(FindObjectVisitor* visitor) const { | 264 RawObject* Heap::FindOldObject(FindObjectVisitor* visitor) const { |
| 284 HeapIterationScope heap_iteration_scope; | 265 HeapIterationScope heap_iteration_scope; |
| 285 return old_space_.FindObject(visitor, HeapPage::kData); | 266 return old_space_.FindObject(visitor, HeapPage::kData); |
| 286 } | 267 } |
| 287 | 268 |
| 288 | |
| 289 RawObject* Heap::FindNewObject(FindObjectVisitor* visitor) const { | 269 RawObject* Heap::FindNewObject(FindObjectVisitor* visitor) const { |
| 290 return new_space_.FindObject(visitor); | 270 return new_space_.FindObject(visitor); |
| 291 } | 271 } |
| 292 | 272 |
| 293 | |
| 294 RawObject* Heap::FindObject(FindObjectVisitor* visitor) const { | 273 RawObject* Heap::FindObject(FindObjectVisitor* visitor) const { |
| 295 // The visitor must not allocate from the heap. | 274 // The visitor must not allocate from the heap. |
| 296 NoSafepointScope no_safepoint_scope; | 275 NoSafepointScope no_safepoint_scope; |
| 297 RawObject* raw_obj = FindNewObject(visitor); | 276 RawObject* raw_obj = FindNewObject(visitor); |
| 298 if (raw_obj != Object::null()) { | 277 if (raw_obj != Object::null()) { |
| 299 return raw_obj; | 278 return raw_obj; |
| 300 } | 279 } |
| 301 raw_obj = FindOldObject(visitor); | 280 raw_obj = FindOldObject(visitor); |
| 302 if (raw_obj != Object::null()) { | 281 if (raw_obj != Object::null()) { |
| 303 return raw_obj; | 282 return raw_obj; |
| 304 } | 283 } |
| 305 raw_obj = FindObjectInCodeSpace(visitor); | 284 raw_obj = FindObjectInCodeSpace(visitor); |
| 306 return raw_obj; | 285 return raw_obj; |
| 307 } | 286 } |
| 308 | 287 |
| 309 | |
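FindObject probes the spaces in a fixed order, new space, then old data pages, then executable pages, and returns the first non-null hit. A sketch of a matching visitor; the `bool FindObject(RawObject*) const` hook signature is assumed here:

```cpp
// Hypothetical FindObjectVisitor that matches the object containing `addr_`
// (assumes the VM's RawObject helpers; the hook signature is an assumption).
class ContainsAddressVisitor : public FindObjectVisitor {
 public:
  explicit ContainsAddressVisitor(uword addr) : addr_(addr) {}
  bool FindObject(RawObject* obj) const {
    uword start = RawObject::ToAddr(obj);
    return (start <= addr_) && (addr_ < start + obj->Size());
  }

 private:
  uword addr_;
};
// Usage: heap->FindObject(&visitor) probes new, then old, then code pages.
```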
| 310 bool Heap::BeginNewSpaceGC(Thread* thread) { | 288 bool Heap::BeginNewSpaceGC(Thread* thread) { |
| 311 MonitorLocker ml(&gc_in_progress_monitor_); | 289 MonitorLocker ml(&gc_in_progress_monitor_); |
| 312 bool start_gc_on_thread = true; | 290 bool start_gc_on_thread = true; |
| 313 while (gc_new_space_in_progress_ || gc_old_space_in_progress_) { | 291 while (gc_new_space_in_progress_ || gc_old_space_in_progress_) { |
| 314 start_gc_on_thread = !gc_new_space_in_progress_; | 292 start_gc_on_thread = !gc_new_space_in_progress_; |
| 315 ml.WaitWithSafepointCheck(thread); | 293 ml.WaitWithSafepointCheck(thread); |
| 316 } | 294 } |
| 317 if (start_gc_on_thread) { | 295 if (start_gc_on_thread) { |
| 318 gc_new_space_in_progress_ = true; | 296 gc_new_space_in_progress_ = true; |
| 319 return true; | 297 return true; |
| 320 } | 298 } |
| 321 return false; | 299 return false; |
| 322 } | 300 } |
| 323 | 301 |
| 324 | |
| 325 void Heap::EndNewSpaceGC() { | 302 void Heap::EndNewSpaceGC() { |
| 326 MonitorLocker ml(&gc_in_progress_monitor_); | 303 MonitorLocker ml(&gc_in_progress_monitor_); |
| 327 ASSERT(gc_new_space_in_progress_); | 304 ASSERT(gc_new_space_in_progress_); |
| 328 gc_new_space_in_progress_ = false; | 305 gc_new_space_in_progress_ = false; |
| 329 ml.NotifyAll(); | 306 ml.NotifyAll(); |
| 330 } | 307 } |
| 331 | 308 |
| 332 | |
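The Begin/End pair above implements a small handshake: a thread waits while any GC runs, and if a new-space GC was in progress during its last wait, it returns false, treating that collection as having already done its work. BeginOldSpaceGC below repeats the shape with the roles swapped. A minimal std::condition_variable rendering of the same logic, not the VM's MonitorLocker API:

```cpp
#include <condition_variable>
#include <mutex>

std::mutex gc_mutex;
std::condition_variable gc_cv;
bool new_gc_running = false;
bool old_gc_running = false;

bool BeginNewGC() {
  std::unique_lock<std::mutex> lock(gc_mutex);
  bool start_here = true;
  while (new_gc_running || old_gc_running) {
    // If a new-space GC was running while we waited, it did our work for us.
    start_here = !new_gc_running;
    gc_cv.wait(lock);
  }
  if (start_here) new_gc_running = true;
  return start_here;  // false means "reuse" the other thread's collection
}

void EndNewGC() {
  std::lock_guard<std::mutex> lock(gc_mutex);
  new_gc_running = false;
  gc_cv.notify_all();
}
```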
| 333 bool Heap::BeginOldSpaceGC(Thread* thread) { | 309 bool Heap::BeginOldSpaceGC(Thread* thread) { |
| 334 MonitorLocker ml(&gc_in_progress_monitor_); | 310 MonitorLocker ml(&gc_in_progress_monitor_); |
| 335 bool start_gc_on_thread = true; | 311 bool start_gc_on_thread = true; |
| 336 while (gc_new_space_in_progress_ || gc_old_space_in_progress_) { | 312 while (gc_new_space_in_progress_ || gc_old_space_in_progress_) { |
| 337 start_gc_on_thread = !gc_old_space_in_progress_; | 313 start_gc_on_thread = !gc_old_space_in_progress_; |
| 338 ml.WaitWithSafepointCheck(thread); | 314 ml.WaitWithSafepointCheck(thread); |
| 339 } | 315 } |
| 340 if (start_gc_on_thread) { | 316 if (start_gc_on_thread) { |
| 341 gc_old_space_in_progress_ = true; | 317 gc_old_space_in_progress_ = true; |
| 342 return true; | 318 return true; |
| 343 } | 319 } |
| 344 return false; | 320 return false; |
| 345 } | 321 } |
| 346 | 322 |
| 347 | |
| 348 void Heap::EndOldSpaceGC() { | 323 void Heap::EndOldSpaceGC() { |
| 349 MonitorLocker ml(&gc_in_progress_monitor_); | 324 MonitorLocker ml(&gc_in_progress_monitor_); |
| 350 ASSERT(gc_old_space_in_progress_); | 325 ASSERT(gc_old_space_in_progress_); |
| 351 gc_old_space_in_progress_ = false; | 326 gc_old_space_in_progress_ = false; |
| 352 ml.NotifyAll(); | 327 ml.NotifyAll(); |
| 353 } | 328 } |
| 354 | 329 |
| 355 | |
| 356 #ifndef PRODUCT | 330 #ifndef PRODUCT |
| 357 void Heap::UpdateClassHeapStatsBeforeGC(Heap::Space space) { | 331 void Heap::UpdateClassHeapStatsBeforeGC(Heap::Space space) { |
| 358 ClassTable* class_table = isolate()->class_table(); | 332 ClassTable* class_table = isolate()->class_table(); |
| 359 if (space == kNew) { | 333 if (space == kNew) { |
| 360 class_table->ResetCountersNew(); | 334 class_table->ResetCountersNew(); |
| 361 } else { | 335 } else { |
| 362 class_table->ResetCountersOld(); | 336 class_table->ResetCountersOld(); |
| 363 } | 337 } |
| 364 } | 338 } |
| 365 #endif | 339 #endif |
| 366 | 340 |
| 367 | |
| 368 void Heap::EvacuateNewSpace(Thread* thread, GCReason reason) { | 341 void Heap::EvacuateNewSpace(Thread* thread, GCReason reason) { |
| 369 ASSERT(reason == kFull); | 342 ASSERT(reason == kFull); |
| 370 if (BeginNewSpaceGC(thread)) { | 343 if (BeginNewSpaceGC(thread)) { |
| 371 RecordBeforeGC(kNew, kFull); | 344 RecordBeforeGC(kNew, kFull); |
| 372 VMTagScope tagScope(thread, VMTag::kGCNewSpaceTagId); | 345 VMTagScope tagScope(thread, VMTag::kGCNewSpaceTagId); |
| 373 TIMELINE_FUNCTION_GC_DURATION(thread, "EvacuateNewGeneration"); | 346 TIMELINE_FUNCTION_GC_DURATION(thread, "EvacuateNewGeneration"); |
| 374 NOT_IN_PRODUCT(UpdateClassHeapStatsBeforeGC(kNew)); | 347 NOT_IN_PRODUCT(UpdateClassHeapStatsBeforeGC(kNew)); |
| 375 new_space_.Evacuate(); | 348 new_space_.Evacuate(); |
| 376 NOT_IN_PRODUCT(isolate()->class_table()->UpdatePromoted()); | 349 NOT_IN_PRODUCT(isolate()->class_table()->UpdatePromoted()); |
| 377 RecordAfterGC(kNew); | 350 RecordAfterGC(kNew); |
| 378 PrintStats(); | 351 PrintStats(); |
| 379 NOT_IN_PRODUCT(PrintStatsToTimeline(&tds)); | 352 NOT_IN_PRODUCT(PrintStatsToTimeline(&tds)); |
| 380 EndNewSpaceGC(); | 353 EndNewSpaceGC(); |
| 381 } | 354 } |
| 382 } | 355 } |
| 383 | 356 |
| 384 | |
| 385 void Heap::CollectNewSpaceGarbage(Thread* thread, | 357 void Heap::CollectNewSpaceGarbage(Thread* thread, |
| 386 ApiCallbacks api_callbacks, | 358 ApiCallbacks api_callbacks, |
| 387 GCReason reason) { | 359 GCReason reason) { |
| 388 ASSERT((reason == kNewSpace) || (reason == kFull)); | 360 ASSERT((reason == kNewSpace) || (reason == kFull)); |
| 389 if (BeginNewSpaceGC(thread)) { | 361 if (BeginNewSpaceGC(thread)) { |
| 390 bool invoke_api_callbacks = (api_callbacks == kInvokeApiCallbacks); | 362 bool invoke_api_callbacks = (api_callbacks == kInvokeApiCallbacks); |
| 391 RecordBeforeGC(kNew, reason); | 363 RecordBeforeGC(kNew, reason); |
| 392 VMTagScope tagScope(thread, VMTag::kGCNewSpaceTagId); | 364 VMTagScope tagScope(thread, VMTag::kGCNewSpaceTagId); |
| 393 TIMELINE_FUNCTION_GC_DURATION(thread, "CollectNewGeneration"); | 365 TIMELINE_FUNCTION_GC_DURATION(thread, "CollectNewGeneration"); |
| 394 NOT_IN_PRODUCT(UpdateClassHeapStatsBeforeGC(kNew)); | 366 NOT_IN_PRODUCT(UpdateClassHeapStatsBeforeGC(kNew)); |
| 395 new_space_.Scavenge(invoke_api_callbacks); | 367 new_space_.Scavenge(invoke_api_callbacks); |
| 396 NOT_IN_PRODUCT(isolate()->class_table()->UpdatePromoted()); | 368 NOT_IN_PRODUCT(isolate()->class_table()->UpdatePromoted()); |
| 397 RecordAfterGC(kNew); | 369 RecordAfterGC(kNew); |
| 398 PrintStats(); | 370 PrintStats(); |
| 399 NOT_IN_PRODUCT(PrintStatsToTimeline(&tds)); | 371 NOT_IN_PRODUCT(PrintStatsToTimeline(&tds)); |
| 400 EndNewSpaceGC(); | 372 EndNewSpaceGC(); |
| 401 if ((reason == kNewSpace) && old_space_.NeedsGarbageCollection()) { | 373 if ((reason == kNewSpace) && old_space_.NeedsGarbageCollection()) { |
| 402 // Old collections should call the API callbacks. | 374 // Old collections should call the API callbacks. |
| 403 CollectOldSpaceGarbage(thread, kInvokeApiCallbacks, kPromotion); | 375 CollectOldSpaceGarbage(thread, kInvokeApiCallbacks, kPromotion); |
| 404 } | 376 } |
| 405 } | 377 } |
| 406 } | 378 } |
| 407 | 379 |
| 408 | |
| 409 void Heap::CollectOldSpaceGarbage(Thread* thread, | 380 void Heap::CollectOldSpaceGarbage(Thread* thread, |
| 410 ApiCallbacks api_callbacks, | 381 ApiCallbacks api_callbacks, |
| 411 GCReason reason) { | 382 GCReason reason) { |
| 412 ASSERT((reason != kNewSpace)); | 383 ASSERT((reason != kNewSpace)); |
| 413 if (BeginOldSpaceGC(thread)) { | 384 if (BeginOldSpaceGC(thread)) { |
| 414 bool invoke_api_callbacks = (api_callbacks == kInvokeApiCallbacks); | 385 bool invoke_api_callbacks = (api_callbacks == kInvokeApiCallbacks); |
| 415 RecordBeforeGC(kOld, reason); | 386 RecordBeforeGC(kOld, reason); |
| 416 VMTagScope tagScope(thread, VMTag::kGCOldSpaceTagId); | 387 VMTagScope tagScope(thread, VMTag::kGCOldSpaceTagId); |
| 417 TIMELINE_FUNCTION_GC_DURATION(thread, "CollectOldGeneration"); | 388 TIMELINE_FUNCTION_GC_DURATION(thread, "CollectOldGeneration"); |
| 418 NOT_IN_PRODUCT(UpdateClassHeapStatsBeforeGC(kOld)); | 389 NOT_IN_PRODUCT(UpdateClassHeapStatsBeforeGC(kOld)); |
| 419 old_space_.MarkSweep(invoke_api_callbacks); | 390 old_space_.MarkSweep(invoke_api_callbacks); |
| 420 RecordAfterGC(kOld); | 391 RecordAfterGC(kOld); |
| 421 PrintStats(); | 392 PrintStats(); |
| 422 NOT_IN_PRODUCT(PrintStatsToTimeline(&tds)); | 393 NOT_IN_PRODUCT(PrintStatsToTimeline(&tds)); |
| 423 // Some Code objects may have been collected, so invalidate the handler caches. | 394 // Some Code objects may have been collected, so invalidate the handler caches. |
| 424 thread->isolate()->handler_info_cache()->Clear(); | 395 thread->isolate()->handler_info_cache()->Clear(); |
| 425 thread->isolate()->catch_entry_state_cache()->Clear(); | 396 thread->isolate()->catch_entry_state_cache()->Clear(); |
| 426 EndOldSpaceGC(); | 397 EndOldSpaceGC(); |
| 427 } | 398 } |
| 428 } | 399 } |
| 429 | 400 |
| 430 | |
| 431 void Heap::CollectGarbage(Space space, | 401 void Heap::CollectGarbage(Space space, |
| 432 ApiCallbacks api_callbacks, | 402 ApiCallbacks api_callbacks, |
| 433 GCReason reason) { | 403 GCReason reason) { |
| 434 Thread* thread = Thread::Current(); | 404 Thread* thread = Thread::Current(); |
| 435 switch (space) { | 405 switch (space) { |
| 436 case kNew: { | 406 case kNew: { |
| 437 CollectNewSpaceGarbage(thread, api_callbacks, reason); | 407 CollectNewSpaceGarbage(thread, api_callbacks, reason); |
| 438 break; | 408 break; |
| 439 } | 409 } |
| 440 case kOld: | 410 case kOld: |
| 441 case kCode: { | 411 case kCode: { |
| 442 CollectOldSpaceGarbage(thread, api_callbacks, reason); | 412 CollectOldSpaceGarbage(thread, api_callbacks, reason); |
| 443 break; | 413 break; |
| 444 } | 414 } |
| 445 default: | 415 default: |
| 446 UNREACHABLE(); | 416 UNREACHABLE(); |
| 447 } | 417 } |
| 448 } | 418 } |
| 449 | 419 |
| 450 | |
| 451 void Heap::CollectGarbage(Space space) { | 420 void Heap::CollectGarbage(Space space) { |
| 452 Thread* thread = Thread::Current(); | 421 Thread* thread = Thread::Current(); |
| 453 if (space == kOld) { | 422 if (space == kOld) { |
| 454 CollectOldSpaceGarbage(thread, kInvokeApiCallbacks, kOldSpace); | 423 CollectOldSpaceGarbage(thread, kInvokeApiCallbacks, kOldSpace); |
| 455 } else { | 424 } else { |
| 456 ASSERT(space == kNew); | 425 ASSERT(space == kNew); |
| 457 CollectNewSpaceGarbage(thread, kInvokeApiCallbacks, kNewSpace); | 426 CollectNewSpaceGarbage(thread, kInvokeApiCallbacks, kNewSpace); |
| 458 } | 427 } |
| 459 } | 428 } |
| 460 | 429 |
| 461 | |
| 462 void Heap::CollectAllGarbage() { | 430 void Heap::CollectAllGarbage() { |
| 463 Thread* thread = Thread::Current(); | 431 Thread* thread = Thread::Current(); |
| 464 | 432 |
| 465 // New space is evacuated first, so this GC also collects dead objects | 433 // New space is evacuated first, so this GC also collects dead objects |
| 466 // that were kept alive only by a cross-generational pointer. | 434 // that were kept alive only by a cross-generational pointer. |
| 467 EvacuateNewSpace(thread, kFull); | 435 EvacuateNewSpace(thread, kFull); |
| 468 CollectOldSpaceGarbage(thread, kInvokeApiCallbacks, kFull); | 436 CollectOldSpaceGarbage(thread, kInvokeApiCallbacks, kFull); |
| 469 } | 437 } |
| 470 | 438 |
| 471 | |
| 472 void Heap::WaitForSweeperTasks(Thread* thread) { | 439 void Heap::WaitForSweeperTasks(Thread* thread) { |
| 473 MonitorLocker ml(old_space_.tasks_lock()); | 440 MonitorLocker ml(old_space_.tasks_lock()); |
| 474 while (old_space_.tasks() > 0) { | 441 while (old_space_.tasks() > 0) { |
| 475 ml.WaitWithSafepointCheck(thread); | 442 ml.WaitWithSafepointCheck(thread); |
| 476 } | 443 } |
| 477 } | 444 } |
| 478 | 445 |
| 479 | |
| 480 void Heap::UpdateGlobalMaxUsed() { | 446 void Heap::UpdateGlobalMaxUsed() { |
| 481 ASSERT(isolate_ != NULL); | 447 ASSERT(isolate_ != NULL); |
| 482 // We access the used-in-words counts for both new and old space without | 448 // We access the used-in-words counts for both new and old space without |
| 483 // synchronizing; the value of this metric is approximate. | 449 // synchronizing; the value of this metric is approximate. |
| 484 isolate_->GetHeapGlobalUsedMaxMetric()->SetValue( | 450 isolate_->GetHeapGlobalUsedMaxMetric()->SetValue( |
| 485 (UsedInWords(Heap::kNew) * kWordSize) + | 451 (UsedInWords(Heap::kNew) * kWordSize) + |
| 486 (UsedInWords(Heap::kOld) * kWordSize)); | 452 (UsedInWords(Heap::kOld) * kWordSize)); |
| 487 } | 453 } |
| 488 | 454 |
| 489 | |
| 490 void Heap::InitGrowthControl() { | 455 void Heap::InitGrowthControl() { |
| 491 old_space_.InitGrowthControl(); | 456 old_space_.InitGrowthControl(); |
| 492 } | 457 } |
| 493 | 458 |
| 494 | |
| 495 void Heap::SetGrowthControlState(bool state) { | 459 void Heap::SetGrowthControlState(bool state) { |
| 496 old_space_.SetGrowthControlState(state); | 460 old_space_.SetGrowthControlState(state); |
| 497 } | 461 } |
| 498 | 462 |
| 499 | |
| 500 bool Heap::GrowthControlState() { | 463 bool Heap::GrowthControlState() { |
| 501 return old_space_.GrowthControlState(); | 464 return old_space_.GrowthControlState(); |
| 502 } | 465 } |
| 503 | 466 |
| 504 | |
| 505 void Heap::WriteProtect(bool read_only) { | 467 void Heap::WriteProtect(bool read_only) { |
| 506 read_only_ = read_only; | 468 read_only_ = read_only; |
| 507 new_space_.WriteProtect(read_only); | 469 new_space_.WriteProtect(read_only); |
| 508 old_space_.WriteProtect(read_only); | 470 old_space_.WriteProtect(read_only); |
| 509 } | 471 } |
| 510 | 472 |
| 511 | |
| 512 intptr_t Heap::TopOffset(Heap::Space space) { | 473 intptr_t Heap::TopOffset(Heap::Space space) { |
| 513 if (space == kNew) { | 474 if (space == kNew) { |
| 514 return OFFSET_OF(Heap, new_space_) + Scavenger::top_offset(); | 475 return OFFSET_OF(Heap, new_space_) + Scavenger::top_offset(); |
| 515 } else { | 476 } else { |
| 516 ASSERT(space == kOld); | 477 ASSERT(space == kOld); |
| 517 return OFFSET_OF(Heap, old_space_) + PageSpace::top_offset(); | 478 return OFFSET_OF(Heap, old_space_) + PageSpace::top_offset(); |
| 518 } | 479 } |
| 519 } | 480 } |
| 520 | 481 |
| 521 | |
| 522 intptr_t Heap::EndOffset(Heap::Space space) { | 482 intptr_t Heap::EndOffset(Heap::Space space) { |
| 523 if (space == kNew) { | 483 if (space == kNew) { |
| 524 return OFFSET_OF(Heap, new_space_) + Scavenger::end_offset(); | 484 return OFFSET_OF(Heap, new_space_) + Scavenger::end_offset(); |
| 525 } else { | 485 } else { |
| 526 ASSERT(space == kOld); | 486 ASSERT(space == kOld); |
| 527 return OFFSET_OF(Heap, old_space_) + PageSpace::end_offset(); | 487 return OFFSET_OF(Heap, old_space_) + PageSpace::end_offset(); |
| 528 } | 488 } |
| 529 } | 489 } |
| 530 | 490 |
| 531 | |
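TopOffset and EndOffset compose the offset of the embedded space object within Heap with the offset of the top/end word inside that space, so generated code can address the allocation fast-path fields from a single Heap pointer. The same composition with plain structs and standard offsetof, as a sketch (FakeHeap/Bump are stand-ins, not VM types):

```cpp
#include <cstddef>

struct Bump { char* top; char* end; };
struct FakeHeap { int other_state; Bump new_space; };

// Offset of the nested `top` field, composed exactly as TopOffset composes
// OFFSET_OF(Heap, new_space_) + Scavenger::top_offset().
constexpr size_t kNewTopOffset =
    offsetof(FakeHeap, new_space) + offsetof(Bump, top);
// A fast path can then load the allocation top as
//   *(char**)(reinterpret_cast<char*>(heap) + kNewTopOffset)
// given a cached FakeHeap* heap.
```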
| 532 void Heap::Init(Isolate* isolate, | 491 void Heap::Init(Isolate* isolate, |
| 533 intptr_t max_new_gen_words, | 492 intptr_t max_new_gen_words, |
| 534 intptr_t max_old_gen_words, | 493 intptr_t max_old_gen_words, |
| 535 intptr_t max_external_words) { | 494 intptr_t max_external_words) { |
| 536 ASSERT(isolate->heap() == NULL); | 495 ASSERT(isolate->heap() == NULL); |
| 537 Heap* heap = new Heap(isolate, max_new_gen_words, max_old_gen_words, | 496 Heap* heap = new Heap(isolate, max_new_gen_words, max_old_gen_words, |
| 538 max_external_words); | 497 max_external_words); |
| 539 isolate->set_heap(heap); | 498 isolate->set_heap(heap); |
| 540 } | 499 } |
| 541 | 500 |
| 542 | |
| 543 void Heap::RegionName(Heap* heap, Space space, char* name, intptr_t name_size) { | 501 void Heap::RegionName(Heap* heap, Space space, char* name, intptr_t name_size) { |
| 544 const bool no_isolate_name = (heap == NULL) || (heap->isolate() == NULL) || | 502 const bool no_isolate_name = (heap == NULL) || (heap->isolate() == NULL) || |
| 545 (heap->isolate()->debugger_name() == NULL); | 503 (heap->isolate()->debugger_name() == NULL); |
| 546 const char* isolate_name = | 504 const char* isolate_name = |
| 547 no_isolate_name ? "<unknown>" : heap->isolate()->debugger_name(); | 505 no_isolate_name ? "<unknown>" : heap->isolate()->debugger_name(); |
| 548 const char* space_name = NULL; | 506 const char* space_name = NULL; |
| 549 switch (space) { | 507 switch (space) { |
| 550 case kNew: | 508 case kNew: |
| 551 space_name = "newspace"; | 509 space_name = "newspace"; |
| 552 break; | 510 break; |
| 553 case kOld: | 511 case kOld: |
| 554 space_name = "oldspace"; | 512 space_name = "oldspace"; |
| 555 break; | 513 break; |
| 556 case kCode: | 514 case kCode: |
| 557 space_name = "codespace"; | 515 space_name = "codespace"; |
| 558 break; | 516 break; |
| 559 default: | 517 default: |
| 560 UNREACHABLE(); | 518 UNREACHABLE(); |
| 561 } | 519 } |
| 562 OS::SNPrint(name, name_size, "dart-%s %s", space_name, isolate_name); | 520 OS::SNPrint(name, name_size, "dart-%s %s", space_name, isolate_name); |
| 563 } | 521 } |
| 564 | 522 |
| 565 | |
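RegionName formats a label such as "dart-newspace <isolate name>" into a caller-provided buffer, falling back to "<unknown>" when no isolate name is available. A call-shape sketch, assuming the member is static, as its Heap* parameter suggests:

```cpp
// Call shape only; the buffer size and the output string are assumed examples.
void NameNewSpaceRegion(Heap* heap) {
  char name[64];  // buffer sizing is the caller's choice
  Heap::RegionName(heap, Heap::kNew, name, sizeof(name));
  // name now holds something like "dart-newspace main_isolate".
}
```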
| 566 void Heap::AddRegionsToObjectSet(ObjectSet* set) const { | 523 void Heap::AddRegionsToObjectSet(ObjectSet* set) const { |
| 567 new_space_.AddRegionsToObjectSet(set); | 524 new_space_.AddRegionsToObjectSet(set); |
| 568 old_space_.AddRegionsToObjectSet(set); | 525 old_space_.AddRegionsToObjectSet(set); |
| 569 } | 526 } |
| 570 | 527 |
| 571 | |
| 572 ObjectSet* Heap::CreateAllocatedObjectSet( | 528 ObjectSet* Heap::CreateAllocatedObjectSet( |
| 573 Zone* zone, | 529 Zone* zone, |
| 574 MarkExpectation mark_expectation) const { | 530 MarkExpectation mark_expectation) const { |
| 575 ObjectSet* allocated_set = new (zone) ObjectSet(zone); | 531 ObjectSet* allocated_set = new (zone) ObjectSet(zone); |
| 576 | 532 |
| 577 this->AddRegionsToObjectSet(allocated_set); | 533 this->AddRegionsToObjectSet(allocated_set); |
| 578 { | 534 { |
| 579 VerifyObjectVisitor object_visitor(isolate(), allocated_set, | 535 VerifyObjectVisitor object_visitor(isolate(), allocated_set, |
| 580 mark_expectation); | 536 mark_expectation); |
| 581 this->VisitObjectsNoImagePages(&object_visitor); | 537 this->VisitObjectsNoImagePages(&object_visitor); |
| 582 } | 538 } |
| 583 { | 539 { |
| 584 VerifyObjectVisitor object_visitor(isolate(), allocated_set, | 540 VerifyObjectVisitor object_visitor(isolate(), allocated_set, |
| 585 kRequireMarked); | 541 kRequireMarked); |
| 586 this->VisitObjectsImagePages(&object_visitor); | 542 this->VisitObjectsImagePages(&object_visitor); |
| 587 } | 543 } |
| 588 | 544 |
| 589 Isolate* vm_isolate = Dart::vm_isolate(); | 545 Isolate* vm_isolate = Dart::vm_isolate(); |
| 590 vm_isolate->heap()->AddRegionsToObjectSet(allocated_set); | 546 vm_isolate->heap()->AddRegionsToObjectSet(allocated_set); |
| 591 { | 547 { |
| 592 // VM isolate heap is premarked. | 548 // VM isolate heap is premarked. |
| 593 VerifyObjectVisitor vm_object_visitor(isolate(), allocated_set, | 549 VerifyObjectVisitor vm_object_visitor(isolate(), allocated_set, |
| 594 kRequireMarked); | 550 kRequireMarked); |
| 595 vm_isolate->heap()->VisitObjects(&vm_object_visitor); | 551 vm_isolate->heap()->VisitObjects(&vm_object_visitor); |
| 596 } | 552 } |
| 597 | 553 |
| 598 return allocated_set; | 554 return allocated_set; |
| 599 } | 555 } |
| 600 | 556 |
| 601 | |
| 602 bool Heap::Verify(MarkExpectation mark_expectation) const { | 557 bool Heap::Verify(MarkExpectation mark_expectation) const { |
| 603 HeapIterationScope heap_iteration_scope; | 558 HeapIterationScope heap_iteration_scope; |
| 604 return VerifyGC(mark_expectation); | 559 return VerifyGC(mark_expectation); |
| 605 } | 560 } |
| 606 | 561 |
| 607 | |
| 608 bool Heap::VerifyGC(MarkExpectation mark_expectation) const { | 562 bool Heap::VerifyGC(MarkExpectation mark_expectation) const { |
| 609 StackZone stack_zone(Thread::Current()); | 563 StackZone stack_zone(Thread::Current()); |
| 610 ObjectSet* allocated_set = | 564 ObjectSet* allocated_set = |
| 611 CreateAllocatedObjectSet(stack_zone.GetZone(), mark_expectation); | 565 CreateAllocatedObjectSet(stack_zone.GetZone(), mark_expectation); |
| 612 VerifyPointersVisitor visitor(isolate(), allocated_set); | 566 VerifyPointersVisitor visitor(isolate(), allocated_set); |
| 613 VisitObjectPointers(&visitor); | 567 VisitObjectPointers(&visitor); |
| 614 | 568 |
| 615 // Only returning a value so that Heap::Verify can be called from an ASSERT. | 569 // Only returning a value so that Heap::Verify can be called from an ASSERT. |
| 616 return true; | 570 return true; |
| 617 } | 571 } |
| 618 | 572 |
| 619 | |
| 620 void Heap::PrintSizes() const { | 573 void Heap::PrintSizes() const { |
| 621 OS::PrintErr( | 574 OS::PrintErr( |
| 622 "New space (%" Pd64 "k of %" Pd64 | 575 "New space (%" Pd64 "k of %" Pd64 |
| 623 "k) " | 576 "k) " |
| 624 "Old space (%" Pd64 "k of %" Pd64 "k)\n", | 577 "Old space (%" Pd64 "k of %" Pd64 "k)\n", |
| 625 (UsedInWords(kNew) / KBInWords), (CapacityInWords(kNew) / KBInWords), | 578 (UsedInWords(kNew) / KBInWords), (CapacityInWords(kNew) / KBInWords), |
| 626 (UsedInWords(kOld) / KBInWords), (CapacityInWords(kOld) / KBInWords)); | 579 (UsedInWords(kOld) / KBInWords), (CapacityInWords(kOld) / KBInWords)); |
| 627 } | 580 } |
| 628 | 581 |
| 629 | |
| 630 int64_t Heap::UsedInWords(Space space) const { | 582 int64_t Heap::UsedInWords(Space space) const { |
| 631 return space == kNew ? new_space_.UsedInWords() : old_space_.UsedInWords(); | 583 return space == kNew ? new_space_.UsedInWords() : old_space_.UsedInWords(); |
| 632 } | 584 } |
| 633 | 585 |
| 634 | |
| 635 int64_t Heap::CapacityInWords(Space space) const { | 586 int64_t Heap::CapacityInWords(Space space) const { |
| 636 return space == kNew ? new_space_.CapacityInWords() | 587 return space == kNew ? new_space_.CapacityInWords() |
| 637 : old_space_.CapacityInWords(); | 588 : old_space_.CapacityInWords(); |
| 638 } | 589 } |
| 639 | 590 |
| 640 | |
| 641 int64_t Heap::ExternalInWords(Space space) const { | 591 int64_t Heap::ExternalInWords(Space space) const { |
| 642 return space == kNew ? new_space_.ExternalInWords() | 592 return space == kNew ? new_space_.ExternalInWords() |
| 643 : old_space_.ExternalInWords(); | 593 : old_space_.ExternalInWords(); |
| 644 } | 594 } |
| 645 | 595 |
| 646 | |
| 647 int64_t Heap::GCTimeInMicros(Space space) const { | 596 int64_t Heap::GCTimeInMicros(Space space) const { |
| 648 if (space == kNew) { | 597 if (space == kNew) { |
| 649 return new_space_.gc_time_micros(); | 598 return new_space_.gc_time_micros(); |
| 650 } | 599 } |
| 651 return old_space_.gc_time_micros(); | 600 return old_space_.gc_time_micros(); |
| 652 } | 601 } |
| 653 | 602 |
| 654 | |
| 655 intptr_t Heap::Collections(Space space) const { | 603 intptr_t Heap::Collections(Space space) const { |
| 656 if (space == kNew) { | 604 if (space == kNew) { |
| 657 return new_space_.collections(); | 605 return new_space_.collections(); |
| 658 } | 606 } |
| 659 return old_space_.collections(); | 607 return old_space_.collections(); |
| 660 } | 608 } |
| 661 | 609 |
| 662 | |
| 663 const char* Heap::GCReasonToString(GCReason gc_reason) { | 610 const char* Heap::GCReasonToString(GCReason gc_reason) { |
| 664 switch (gc_reason) { | 611 switch (gc_reason) { |
| 665 case kNewSpace: | 612 case kNewSpace: |
| 666 return "new space"; | 613 return "new space"; |
| 667 case kPromotion: | 614 case kPromotion: |
| 668 return "promotion"; | 615 return "promotion"; |
| 669 case kOldSpace: | 616 case kOldSpace: |
| 670 return "old space"; | 617 return "old space"; |
| 671 case kFull: | 618 case kFull: |
| 672 return "full"; | 619 return "full"; |
| 673 case kGCAtAlloc: | 620 case kGCAtAlloc: |
| 674 return "debugging"; | 621 return "debugging"; |
| 675 case kGCTestCase: | 622 case kGCTestCase: |
| 676 return "test case"; | 623 return "test case"; |
| 677 default: | 624 default: |
| 678 UNREACHABLE(); | 625 UNREACHABLE(); |
| 679 return ""; | 626 return ""; |
| 680 } | 627 } |
| 681 } | 628 } |
| 682 | 629 |
| 683 | |
| 684 int64_t Heap::PeerCount() const { | 630 int64_t Heap::PeerCount() const { |
| 685 return new_weak_tables_[kPeers]->count() + old_weak_tables_[kPeers]->count(); | 631 return new_weak_tables_[kPeers]->count() + old_weak_tables_[kPeers]->count(); |
| 686 } | 632 } |
| 687 | 633 |
| 688 #if !defined(HASH_IN_OBJECT_HEADER) | 634 #if !defined(HASH_IN_OBJECT_HEADER) |
| 689 int64_t Heap::HashCount() const { | 635 int64_t Heap::HashCount() const { |
| 690 return new_weak_tables_[kHashes]->count() + | 636 return new_weak_tables_[kHashes]->count() + |
| 691 old_weak_tables_[kHashes]->count(); | 637 old_weak_tables_[kHashes]->count(); |
| 692 } | 638 } |
| 693 #endif | 639 #endif |
| 694 | 640 |
| 695 | |
| 696 int64_t Heap::ObjectIdCount() const { | 641 int64_t Heap::ObjectIdCount() const { |
| 697 return new_weak_tables_[kObjectIds]->count() + | 642 return new_weak_tables_[kObjectIds]->count() + |
| 698 old_weak_tables_[kObjectIds]->count(); | 643 old_weak_tables_[kObjectIds]->count(); |
| 699 } | 644 } |
| 700 | 645 |
| 701 | |
| 702 void Heap::ResetObjectIdTable() { | 646 void Heap::ResetObjectIdTable() { |
| 703 new_weak_tables_[kObjectIds]->Reset(); | 647 new_weak_tables_[kObjectIds]->Reset(); |
| 704 old_weak_tables_[kObjectIds]->Reset(); | 648 old_weak_tables_[kObjectIds]->Reset(); |
| 705 } | 649 } |
| 706 | 650 |
| 707 | |
| 708 intptr_t Heap::GetWeakEntry(RawObject* raw_obj, WeakSelector sel) const { | 651 intptr_t Heap::GetWeakEntry(RawObject* raw_obj, WeakSelector sel) const { |
| 709 if (raw_obj->IsNewObject()) { | 652 if (raw_obj->IsNewObject()) { |
| 710 return new_weak_tables_[sel]->GetValue(raw_obj); | 653 return new_weak_tables_[sel]->GetValue(raw_obj); |
| 711 } | 654 } |
| 712 ASSERT(raw_obj->IsOldObject()); | 655 ASSERT(raw_obj->IsOldObject()); |
| 713 return old_weak_tables_[sel]->GetValue(raw_obj); | 656 return old_weak_tables_[sel]->GetValue(raw_obj); |
| 714 } | 657 } |
| 715 | 658 |
| 716 | |
| 717 void Heap::SetWeakEntry(RawObject* raw_obj, WeakSelector sel, intptr_t val) { | 659 void Heap::SetWeakEntry(RawObject* raw_obj, WeakSelector sel, intptr_t val) { |
| 718 if (raw_obj->IsNewObject()) { | 660 if (raw_obj->IsNewObject()) { |
| 719 new_weak_tables_[sel]->SetValue(raw_obj, val); | 661 new_weak_tables_[sel]->SetValue(raw_obj, val); |
| 720 } else { | 662 } else { |
| 721 ASSERT(raw_obj->IsOldObject()); | 663 ASSERT(raw_obj->IsOldObject()); |
| 722 old_weak_tables_[sel]->SetValue(raw_obj, val); | 664 old_weak_tables_[sel]->SetValue(raw_obj, val); |
| 723 } | 665 } |
| 724 } | 666 } |
| 725 | 667 |
| 726 | |
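Get/SetWeakEntry pick the per-generation table from the object's address, so an entry is always looked up in the table matching where the object currently lives. A sketch of the peer-pointer use case; the Heap::kPeers spelling is assumed from the table arrays used in PeerCount above:

```cpp
// Attaching and reading back a native peer via the weak tables
// (assumes the surrounding VM headers; Heap::kPeers spelling is assumed).
void AttachPeer(Heap* heap, RawObject* obj, void* peer) {
  heap->SetWeakEntry(obj, Heap::kPeers, reinterpret_cast<intptr_t>(peer));
}

void* PeerOf(const Heap* heap, RawObject* obj) {
  return reinterpret_cast<void*>(heap->GetWeakEntry(obj, Heap::kPeers));
}
```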
| 727 #ifndef PRODUCT | 668 #ifndef PRODUCT |
| 728 void Heap::PrintToJSONObject(Space space, JSONObject* object) const { | 669 void Heap::PrintToJSONObject(Space space, JSONObject* object) const { |
| 729 if (space == kNew) { | 670 if (space == kNew) { |
| 730 new_space_.PrintToJSONObject(object); | 671 new_space_.PrintToJSONObject(object); |
| 731 } else { | 672 } else { |
| 732 old_space_.PrintToJSONObject(object); | 673 old_space_.PrintToJSONObject(object); |
| 733 } | 674 } |
| 734 } | 675 } |
| 735 #endif // PRODUCT | 676 #endif // PRODUCT |
| 736 | 677 |
| 737 | |
| 738 void Heap::RecordBeforeGC(Space space, GCReason reason) { | 678 void Heap::RecordBeforeGC(Space space, GCReason reason) { |
| 739 ASSERT((space == kNew && gc_new_space_in_progress_) || | 679 ASSERT((space == kNew && gc_new_space_in_progress_) || |
| 740 (space == kOld && gc_old_space_in_progress_)); | 680 (space == kOld && gc_old_space_in_progress_)); |
| 741 stats_.num_++; | 681 stats_.num_++; |
| 742 stats_.space_ = space; | 682 stats_.space_ = space; |
| 743 stats_.reason_ = reason; | 683 stats_.reason_ = reason; |
| 744 stats_.before_.micros_ = OS::GetCurrentMonotonicMicros(); | 684 stats_.before_.micros_ = OS::GetCurrentMonotonicMicros(); |
| 745 stats_.before_.new_ = new_space_.GetCurrentUsage(); | 685 stats_.before_.new_ = new_space_.GetCurrentUsage(); |
| 746 stats_.before_.old_ = old_space_.GetCurrentUsage(); | 686 stats_.before_.old_ = old_space_.GetCurrentUsage(); |
| 747 for (int i = 0; i < GCStats::kTimeEntries; i++) | 687 for (int i = 0; i < GCStats::kTimeEntries; i++) |
| 748 stats_.times_[i] = 0; | 688 stats_.times_[i] = 0; |
| 749 for (int i = 0; i < GCStats::kDataEntries; i++) | 689 for (int i = 0; i < GCStats::kDataEntries; i++) |
| 750 stats_.data_[i] = 0; | 690 stats_.data_[i] = 0; |
| 751 } | 691 } |
| 752 | 692 |
| 753 | |
| 754 void Heap::RecordAfterGC(Space space) { | 693 void Heap::RecordAfterGC(Space space) { |
| 755 stats_.after_.micros_ = OS::GetCurrentMonotonicMicros(); | 694 stats_.after_.micros_ = OS::GetCurrentMonotonicMicros(); |
| 756 int64_t delta = stats_.after_.micros_ - stats_.before_.micros_; | 695 int64_t delta = stats_.after_.micros_ - stats_.before_.micros_; |
| 757 if (stats_.space_ == kNew) { | 696 if (stats_.space_ == kNew) { |
| 758 new_space_.AddGCTime(delta); | 697 new_space_.AddGCTime(delta); |
| 759 new_space_.IncrementCollections(); | 698 new_space_.IncrementCollections(); |
| 760 } else { | 699 } else { |
| 761 old_space_.AddGCTime(delta); | 700 old_space_.AddGCTime(delta); |
| 762 old_space_.IncrementCollections(); | 701 old_space_.IncrementCollections(); |
| 763 } | 702 } |
| 764 stats_.after_.new_ = new_space_.GetCurrentUsage(); | 703 stats_.after_.new_ = new_space_.GetCurrentUsage(); |
| 765 stats_.after_.old_ = old_space_.GetCurrentUsage(); | 704 stats_.after_.old_ = old_space_.GetCurrentUsage(); |
| 766 ASSERT((space == kNew && gc_new_space_in_progress_) || | 705 ASSERT((space == kNew && gc_new_space_in_progress_) || |
| 767 (space == kOld && gc_old_space_in_progress_)); | 706 (space == kOld && gc_old_space_in_progress_)); |
| 768 #ifndef PRODUCT | 707 #ifndef PRODUCT |
| 769 if (FLAG_support_service && Service::gc_stream.enabled() && | 708 if (FLAG_support_service && Service::gc_stream.enabled() && |
| 770 !ServiceIsolate::IsServiceIsolateDescendant(Isolate::Current())) { | 709 !ServiceIsolate::IsServiceIsolateDescendant(Isolate::Current())) { |
| 771 ServiceEvent event(Isolate::Current(), ServiceEvent::kGC); | 710 ServiceEvent event(Isolate::Current(), ServiceEvent::kGC); |
| 772 event.set_gc_stats(&stats_); | 711 event.set_gc_stats(&stats_); |
| 773 Service::HandleEvent(&event); | 712 Service::HandleEvent(&event); |
| 774 } | 713 } |
| 775 #endif // !PRODUCT | 714 #endif // !PRODUCT |
| 776 } | 715 } |
| 777 | 716 |
| 778 | |
| 779 void Heap::PrintStats() { | 717 void Heap::PrintStats() { |
| 780 if (!FLAG_verbose_gc) return; | 718 if (!FLAG_verbose_gc) return; |
| 781 | 719 |
| 782 if ((FLAG_verbose_gc_hdr != 0) && | 720 if ((FLAG_verbose_gc_hdr != 0) && |
| 783 (((stats_.num_ - 1) % FLAG_verbose_gc_hdr) == 0)) { | 721 (((stats_.num_ - 1) % FLAG_verbose_gc_hdr) == 0)) { |
| 784 OS::PrintErr( | 722 OS::PrintErr( |
| 785 "[ | | | | " | 723 "[ | | | | " |
| 786 "| new gen | new gen | new gen " | 724 "| new gen | new gen | new gen " |
| 787 "| old gen | old gen | old gen " | 725 "| old gen | old gen | old gen " |
| 788 "| sweep | safe- | roots/| stbuf/| tospc/| weaks/| ]\n" | 726 "| sweep | safe- | roots/| stbuf/| tospc/| weaks/| ]\n" |
| (...skipping 46 matching lines...) |
| 835 MicrosecondsToMilliseconds(stats_.times_[3]), | 773 MicrosecondsToMilliseconds(stats_.times_[3]), |
| 836 MicrosecondsToMilliseconds(stats_.times_[4]), | 774 MicrosecondsToMilliseconds(stats_.times_[4]), |
| 837 MicrosecondsToMilliseconds(stats_.times_[5]), | 775 MicrosecondsToMilliseconds(stats_.times_[5]), |
| 838 stats_.data_[0], | 776 stats_.data_[0], |
| 839 stats_.data_[1], | 777 stats_.data_[1], |
| 840 stats_.data_[2], | 778 stats_.data_[2], |
| 841 stats_.data_[3]); | 779 stats_.data_[3]); |
| 842 // clang-format on | 780 // clang-format on |
| 843 } | 781 } |
| 844 | 782 |
| 845 | |
| 846 void Heap::PrintStatsToTimeline(TimelineEventScope* event) { | 783 void Heap::PrintStatsToTimeline(TimelineEventScope* event) { |
| 847 #if !defined(PRODUCT) | 784 #if !defined(PRODUCT) |
| 848 if ((event == NULL) || !event->enabled()) { | 785 if ((event == NULL) || !event->enabled()) { |
| 849 return; | 786 return; |
| 850 } | 787 } |
| 851 event->SetNumArguments(12); | 788 event->SetNumArguments(12); |
| 852 event->FormatArgument(0, "Before.New.Used (kB)", "%" Pd "", | 789 event->FormatArgument(0, "Before.New.Used (kB)", "%" Pd "", |
| 853 RoundWordsToKB(stats_.before_.new_.used_in_words)); | 790 RoundWordsToKB(stats_.before_.new_.used_in_words)); |
| 854 event->FormatArgument(1, "After.New.Used (kB)", "%" Pd "", | 791 event->FormatArgument(1, "After.New.Used (kB)", "%" Pd "", |
| 855 RoundWordsToKB(stats_.after_.new_.used_in_words)); | 792 RoundWordsToKB(stats_.after_.new_.used_in_words)); |
| (...skipping 15 matching lines...) |
| 871 RoundWordsToKB(stats_.before_.new_.external_in_words)); | 808 RoundWordsToKB(stats_.before_.new_.external_in_words)); |
| 872 event->FormatArgument(9, "After.New.External (kB)", "%" Pd "", | 809 event->FormatArgument(9, "After.New.External (kB)", "%" Pd "", |
| 873 RoundWordsToKB(stats_.after_.new_.external_in_words)); | 810 RoundWordsToKB(stats_.after_.new_.external_in_words)); |
| 874 event->FormatArgument(10, "Before.Old.External (kB)", "%" Pd "", | 811 event->FormatArgument(10, "Before.Old.External (kB)", "%" Pd "", |
| 875 RoundWordsToKB(stats_.before_.old_.external_in_words)); | 812 RoundWordsToKB(stats_.before_.old_.external_in_words)); |
| 876 event->FormatArgument(11, "After.Old.External (kB)", "%" Pd "", | 813 event->FormatArgument(11, "After.Old.External (kB)", "%" Pd "", |
| 877 RoundWordsToKB(stats_.after_.old_.external_in_words)); | 814 RoundWordsToKB(stats_.after_.old_.external_in_words)); |
| 878 #endif // !defined(PRODUCT) | 815 #endif // !defined(PRODUCT) |
| 879 } | 816 } |
| 880 | 817 |
| 881 | |
| 882 NoHeapGrowthControlScope::NoHeapGrowthControlScope() | 818 NoHeapGrowthControlScope::NoHeapGrowthControlScope() |
| 883 : StackResource(Thread::Current()) { | 819 : StackResource(Thread::Current()) { |
| 884 Heap* heap = reinterpret_cast<Isolate*>(isolate())->heap(); | 820 Heap* heap = reinterpret_cast<Isolate*>(isolate())->heap(); |
| 885 current_growth_controller_state_ = heap->GrowthControlState(); | 821 current_growth_controller_state_ = heap->GrowthControlState(); |
| 886 heap->DisableGrowthControl(); | 822 heap->DisableGrowthControl(); |
| 887 } | 823 } |
| 888 | 824 |
| 889 | |
| 890 NoHeapGrowthControlScope::~NoHeapGrowthControlScope() { | 825 NoHeapGrowthControlScope::~NoHeapGrowthControlScope() { |
| 891 Heap* heap = reinterpret_cast<Isolate*>(isolate())->heap(); | 826 Heap* heap = reinterpret_cast<Isolate*>(isolate())->heap(); |
| 892 heap->SetGrowthControlState(current_growth_controller_state_); | 827 heap->SetGrowthControlState(current_growth_controller_state_); |
| 893 } | 828 } |
| 894 | 829 |
| 895 | |
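NoHeapGrowthControlScope is a save/disable/restore RAII wrapper over the old-space growth policy: while it is alive, allocations do not trigger growth-policy-driven collections. A usage sketch:

```cpp
// Suspending growth control for a critical region.
void AllocateWithoutGrowthPolicy() {
  NoHeapGrowthControlScope no_growth;  // saves and disables growth control
  // ... allocations here won't trigger growth-policy-driven old-space GCs ...
}  // destructor restores the previously saved growth-control state
```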
| 896 WritableVMIsolateScope::WritableVMIsolateScope(Thread* thread) | 830 WritableVMIsolateScope::WritableVMIsolateScope(Thread* thread) |
| 897 : StackResource(thread) { | 831 : StackResource(thread) { |
| 898 Dart::vm_isolate()->heap()->WriteProtect(false); | 832 Dart::vm_isolate()->heap()->WriteProtect(false); |
| 899 } | 833 } |
| 900 | 834 |
| 901 | |
| 902 WritableVMIsolateScope::~WritableVMIsolateScope() { | 835 WritableVMIsolateScope::~WritableVMIsolateScope() { |
| 903 ASSERT(Dart::vm_isolate()->heap()->UsedInWords(Heap::kNew) == 0); | 836 ASSERT(Dart::vm_isolate()->heap()->UsedInWords(Heap::kNew) == 0); |
| 904 Dart::vm_isolate()->heap()->WriteProtect(true); | 837 Dart::vm_isolate()->heap()->WriteProtect(true); |
| 905 } | 838 } |
| 906 | 839 |
| 907 } // namespace dart | 840 } // namespace dart |