Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright | 
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. | 
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above | 
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following | 
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided | 
| (...skipping 430 matching lines...) | (...skipping 430 matching lines...) |
| 441 // cause a full GC. | 441 // cause a full GC. | 
| 442 // Major GC would invoke weak handle callbacks on weakly reachable | 442 // Major GC would invoke weak handle callbacks on weakly reachable | 
| 443 // handles, but won't collect weakly reachable objects until next | 443 // handles, but won't collect weakly reachable objects until next | 
| 444 // major GC. Therefore if we collect aggressively and a weak handle callback | 444 // major GC. Therefore if we collect aggressively and a weak handle callback |
| 445 // has been invoked, we rerun major GC to release objects which become | 445 // has been invoked, we rerun major GC to release objects which become | 
| 446 // garbage. | 446 // garbage. | 
| 447 // Note: as weak callbacks can execute arbitrary code, we cannot | 447 // Note: as weak callbacks can execute arbitrary code, we cannot | 
| 448 // hope that eventually there will be no weak callback invocations. | 448 // hope that eventually there will be no weak callback invocations. |
| 449 // Therefore stop recollecting after several attempts. | 449 // Therefore stop recollecting after several attempts. | 
| 450 mark_compact_collector()->SetFlags(kMakeHeapIterableMask); | 450 mark_compact_collector()->SetFlags(kMakeHeapIterableMask); | 
| 451 const int kMaxNumberOfAttempts = 7; | 451 const int kMaxNumberOfAttempts = 7; | 

ulan
2011/09/29 08:36:58
Add isolate_->compilation_cache()->Clear() ?

Vyacheslav Egorov (Chromium)
2011/09/29 14:06:11
Yep, let's do that. Good catch.
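For context, a minimal sketch of how the enclosing function might read once the reviewer's suggestion is folded in. This assumes the function is `Heap::CollectAllAvailableGarbage`, as in V8 of this era; the diff itself starts mid-function, and the placement of the `Clear()` call directly after `SetFlags()` is an assumption, not something shown in the patch as uploaded. The patch's own change is the addition of `new_space_.Shrink()` and `incremental_marking()->UncommitMarkingDeque()` after the collection loop.

```cpp
// Sketch only, assembled from the diff hunks above plus ulan's suggestion.
// The exact position of the compilation-cache clear is an assumption.
void Heap::CollectAllAvailableGarbage() {
  // Force full mark-compact collections that leave the heap iterable.
  mark_compact_collector()->SetFlags(kMakeHeapIterableMask);
  isolate_->compilation_cache()->Clear();  // Suggested by ulan in review.
  // Weak callbacks run during a major GC can create new garbage, so retry
  // a bounded number of times rather than looping indefinitely.
  const int kMaxNumberOfAttempts = 7;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
    if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) {
      break;  // No weak callback was invoked; nothing more to reclaim.
    }
  }
  mark_compact_collector()->SetFlags(kNoGCFlags);
  new_space_.Shrink();                            // Added by this patch.
  incremental_marking()->UncommitMarkingDeque();  // Added by this patch.
}
```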
| OLD | NEW |
|---|---|
| 452 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { | 452 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { |
| 453 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) { | 453 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) { | 
| 454 break; | 454 break; | 
| 455 } | 455 } | 
| 456 } | 456 } | 
| 457 mark_compact_collector()->SetFlags(kNoGCFlags); | 457 mark_compact_collector()->SetFlags(kNoGCFlags); | 
| | 458 new_space_.Shrink(); |
| | 459 incremental_marking()->UncommitMarkingDeque(); |
| 458 } | 460 } | 
| 459 | 461 | 
| 460 | 462 | 
| 461 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) { | 463 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) { | 
| 462 // The VM is in the GC state until exiting this function. | 464 // The VM is in the GC state until exiting this function. | 
| 463 VMState state(isolate_, GC); | 465 VMState state(isolate_, GC); | 
| 464 | 466 | 
| 465 #ifdef DEBUG | 467 #ifdef DEBUG | 
| 466 // Reset the allocation timeout to the GC interval, but make sure to | 468 // Reset the allocation timeout to the GC interval, but make sure to | 
| 467 // allow at least a few allocations after a collection. The reason | 469 // allow at least a few allocations after a collection. The reason | 
| (...skipping 6004 matching lines...) | (...skipping 6004 matching lines...) |
| 6472 } | 6474 } | 
| 6473 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6475 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 
| 6474 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6476 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 
| 6475 next = chunk->next_chunk(); | 6477 next = chunk->next_chunk(); | 
| 6476 isolate_->memory_allocator()->Free(chunk); | 6478 isolate_->memory_allocator()->Free(chunk); | 
| 6477 } | 6479 } | 
| 6478 chunks_queued_for_free_ = NULL; | 6480 chunks_queued_for_free_ = NULL; | 
| 6479 } | 6481 } | 
| 6480 | 6482 | 
| 6481 } } // namespace v8::internal | 6483 } } // namespace v8::internal | 
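As an aside, a sketch of how an embedder of this era would typically reach the aggressive-collection path this patch touches; `OnSystemMemoryPressure` is a hypothetical hook name, and the routing of `LowMemoryNotification()` to the heap's all-available-garbage collection reflects the V8 public API of the time rather than anything shown in this diff.

```cpp
// Hypothetical embedder code (not part of this CL).
#include <v8.h>

void OnSystemMemoryPressure() {
  // In 2011-era V8 this public call routed to the heap's aggressive
  // collection path, which this patch extends to also shrink new space
  // and uncommit the incremental-marking deque.
  v8::V8::LowMemoryNotification();
}
```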