OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 445 matching lines...)
456 mark_compact_collector()->SetFlags(kMakeHeapIterableMask); | 456 mark_compact_collector()->SetFlags(kMakeHeapIterableMask); |
457 isolate_->compilation_cache()->Clear(); | 457 isolate_->compilation_cache()->Clear(); |
458 const int kMaxNumberOfAttempts = 7; | 458 const int kMaxNumberOfAttempts = 7; |
459 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { | 459 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { |
460 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) { | 460 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) { |
461 break; | 461 break; |
462 } | 462 } |
463 } | 463 } |
464 mark_compact_collector()->SetFlags(kNoGCFlags); | 464 mark_compact_collector()->SetFlags(kNoGCFlags); |
465 new_space_.Shrink(); | 465 new_space_.Shrink(); |
| 466 UncommitFromSpace(); |
| 467 Shrink(); |
466 incremental_marking()->UncommitMarkingDeque(); | 468 incremental_marking()->UncommitMarkingDeque(); |
467 } | 469 } |
468 | 470 |
469 | 471 |
470 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) { | 472 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) { |
471 // The VM is in the GC state until exiting this function. | 473 // The VM is in the GC state until exiting this function. |
472 VMState state(isolate_, GC); | 474 VMState state(isolate_, GC); |
473 | 475 |
474 #ifdef DEBUG | 476 #ifdef DEBUG |
475 // Reset the allocation timeout to the GC interval, but make sure to | 477 // Reset the allocation timeout to the GC interval, but make sure to |
(...skipping 6125 matching lines...)
6601 isolate_->heap()->store_buffer()->Compact(); | 6603 isolate_->heap()->store_buffer()->Compact(); |
6602 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6604 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
6603 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6605 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
6604 next = chunk->next_chunk(); | 6606 next = chunk->next_chunk(); |
6605 isolate_->memory_allocator()->Free(chunk); | 6607 isolate_->memory_allocator()->Free(chunk); |
6606 } | 6608 } |
6607 chunks_queued_for_free_ = NULL; | 6609 chunks_queued_for_free_ = NULL; |
6608 } | 6610 } |
6609 | 6611 |
6610 } } // namespace v8::internal | 6612 } } // namespace v8::internal |
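For context, a minimal, self-contained sketch of the pattern this patch adds to Heap::CollectAllAvailableGarbage(): after the last-resort mark-compact loop, committed-but-unused memory is handed back to the OS rather than kept committed. The Space and Heap types below are hypothetical stand-ins, not V8's actual classes, and the assumption (based on the diff, not verified here) is that UncommitFromSpace() releases the unused half of the semispace-based new space while Heap::Shrink() trims the paged spaces.

// Hypothetical sketch of the shrink-after-last-resort-GC pattern; not V8 code.
#include <cstddef>
#include <vector>

struct Space {
  std::size_t committed = 0;  // bytes reserved from the OS
  std::size_t used = 0;       // bytes actually holding live objects
  // Release committed pages that no longer hold live data.
  void Shrink() { committed = used; }
};

struct Heap {
  Space new_space;                  // young generation (semispace pair)
  std::vector<Space> paged_spaces;  // old pointer/data/code/map spaces

  // Placeholder for a mark-compact pass; returns true if it freed anything.
  bool CollectGarbage() { return false; }

  void CollectAllAvailableGarbage() {
    const int kMaxNumberOfAttempts = 7;
    for (int attempt = 0; attempt < kMaxNumberOfAttempts; ++attempt) {
      if (!CollectGarbage()) break;  // stop once a pass frees nothing
    }
    new_space.Shrink();
    // The patch's addition: after the final pass, uncommit the unused part
    // of the young generation and shrink every paged space so the memory
    // goes back to the OS instead of staying committed.
    UncommitFromSpace();
    Shrink();
  }

  void UncommitFromSpace() { new_space.committed = new_space.used; }
  void Shrink() {
    for (Space& s : paged_spaces) s.Shrink();
  }
};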