OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 429 matching lines...)
440 // cause a full GC. | 440 // cause a full GC. |
441 // Major GC would invoke weak handle callbacks on weakly reachable | 441 // Major GC would invoke weak handle callbacks on weakly reachable |
442 // handles, but won't collect weakly reachable objects until the next | 442 // handles, but won't collect weakly reachable objects until the next |
443 // major GC. Therefore if we collect aggressively and a weak handle callback | 443 // major GC. Therefore if we collect aggressively and a weak handle callback |
444 // has been invoked, we rerun major GC to release objects which become | 444 // has been invoked, we rerun major GC to release objects which become |
445 // garbage. | 445 // garbage. |
446 // Note: as weak callbacks can execute arbitrary code, we cannot | 446 // Note: as weak callbacks can execute arbitrary code, we cannot |
447 // hope that eventually there will be no weak callback invocations. | 447 // hope that eventually there will be no weak callback invocations. |
448 // Therefore stop recollecting after several attempts. | 448 // Therefore stop recollecting after several attempts. |
449 mark_compact_collector()->SetFlags(kMakeHeapIterableMask); | 449 mark_compact_collector()->SetFlags(kMakeHeapIterableMask); |
| 450 isolate_->compilation_cache()->Clear(); |
450 const int kMaxNumberOfAttempts = 7; | 451 const int kMaxNumberOfAttempts = 7; |
451 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { | 452 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { |
452 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) { | 453 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) { |
453 break; | 454 break; |
454 } | 455 } |
455 } | 456 } |
456 mark_compact_collector()->SetFlags(kNoGCFlags); | 457 mark_compact_collector()->SetFlags(kNoGCFlags); |
| 458 new_space_.Shrink(); |
| 459 incremental_marking()->UncommitMarkingDeque(); |
457 } | 460 } |
458 | 461 |
459 | 462 |
460 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) { | 463 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) { |
461 // The VM is in the GC state until exiting this function. | 464 // The VM is in the GC state until exiting this function. |
462 VMState state(isolate_, GC); | 465 VMState state(isolate_, GC); |
463 | 466 |
464 #ifdef DEBUG | 467 #ifdef DEBUG |
465 // Reset the allocation timeout to the GC interval, but make sure to | 468 // Reset the allocation timeout to the GC interval, but make sure to |
466 // allow at least a few allocations after a collection. The reason | 469 // allow at least a few allocations after a collection. The reason |
(...skipping 5943 matching lines...)
6410 isolate_->heap()->store_buffer()->Compact(); | 6413 isolate_->heap()->store_buffer()->Compact(); |
6411 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6414 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
6412 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6415 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
6413 next = chunk->next_chunk(); | 6416 next = chunk->next_chunk(); |
6414 isolate_->memory_allocator()->Free(chunk); | 6417 isolate_->memory_allocator()->Free(chunk); |
6415 } | 6418 } |
6416 chunks_queued_for_free_ = NULL; | 6419 chunks_queued_for_free_ = NULL; |
6417 } | 6420 } |
6418 | 6421 |
6419 } } // namespace v8::internal | 6422 } } // namespace v8::internal |
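
The retry logic in Heap::CollectAllAvailableGarbage above (new lines 451-456) is a bounded fixed-point loop: keep running major GCs while the previous pass invoked weak handle callbacks (which may have turned more objects into garbage), and give up after kMaxNumberOfAttempts because callbacks can run arbitrary code and keep producing work forever. Below is a minimal standalone C++ sketch of that pattern only; RunMajorGc() and the pending-callback counter are hypothetical stand-ins for CollectGarbage() and V8's real bookkeeping, not V8 API.

// Standalone illustration of the bounded "re-collect while weak callbacks
// fired" pattern used in Heap::CollectAllAvailableGarbage in the hunk above.
// RunMajorGc() is a hypothetical stand-in for CollectGarbage(): it returns
// true when a weak callback ran during the pass, i.e. another pass may
// release more objects.
#include <cstdio>

static int g_pending_weak_callbacks = 3;  // pretend three callbacks are queued

static bool RunMajorGc() {
  if (g_pending_weak_callbacks > 0) {
    --g_pending_weak_callbacks;  // a weak callback ran; it may create garbage
    return true;                 // caller should consider another pass
  }
  return false;                  // nothing weakly reachable left to release
}

int main() {
  const int kMaxNumberOfAttempts = 7;  // same bound as in the patch
  int passes = 0;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
    ++passes;
    if (!RunMajorGc()) break;  // stop once a pass fires no weak callbacks
  }
  std::printf("stopped after %d major GC pass(es)\n", passes);
  return 0;
}

The fixed cap trades completeness for a guaranteed upper bound on pause time, which is exactly the point made in the patch comment: since weak callbacks can execute arbitrary code, there is no guarantee the loop would otherwise terminate.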