| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
| (...skipping 1853 matching lines...) |
| 1864 // always room. | 1864 // always room. |
| 1865 UNREACHABLE(); | 1865 UNREACHABLE(); |
| 1866 } | 1866 } |
| 1867 allocation = new_space->AllocateRaw(size, alignment); | 1867 allocation = new_space->AllocateRaw(size, alignment); |
| 1868 DCHECK(!allocation.IsRetry()); | 1868 DCHECK(!allocation.IsRetry()); |
| 1869 } | 1869 } |
| 1870 Object* target = allocation.ToObjectChecked(); | 1870 Object* target = allocation.ToObjectChecked(); |
| 1871 | 1871 |
| 1872 MigrateObject(HeapObject::cast(target), object, size, NEW_SPACE); | 1872 MigrateObject(HeapObject::cast(target), object, size, NEW_SPACE); |
| 1873 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { | 1873 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { |
| 1874 heap()->RegisterLiveArrayBuffer( | 1874 heap()->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); |
| 1875 true, JSArrayBuffer::cast(target)->backing_store()); | |
| 1876 } | 1875 } |
| 1877 heap()->IncrementSemiSpaceCopiedObjectSize(size); | 1876 heap()->IncrementSemiSpaceCopiedObjectSize(size); |
| 1878 } | 1877 } |
| 1879 *cells = 0; | 1878 *cells = 0; |
| 1880 } | 1879 } |
| 1881 return survivors_size; | 1880 return survivors_size; |
| 1882 } | 1881 } |
| 1883 | 1882 |
| 1884 | 1883 |
| 1885 void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) { | 1884 void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) { |
| (...skipping 1205 matching lines...) |
| 3091 | 3090 |
| 3092 OldSpace* old_space = heap()->old_space(); | 3091 OldSpace* old_space = heap()->old_space(); |
| 3093 | 3092 |
| 3094 HeapObject* target; | 3093 HeapObject* target; |
| 3095 AllocationAlignment alignment = object->RequiredAlignment(); | 3094 AllocationAlignment alignment = object->RequiredAlignment(); |
| 3096 AllocationResult allocation = old_space->AllocateRaw(object_size, alignment); | 3095 AllocationResult allocation = old_space->AllocateRaw(object_size, alignment); |
| 3097 if (allocation.To(&target)) { | 3096 if (allocation.To(&target)) { |
| 3098 MigrateObject(target, object, object_size, old_space->identity()); | 3097 MigrateObject(target, object, object_size, old_space->identity()); |
| 3099 // If we end up needing more special cases, we should factor this out. | 3098 // If we end up needing more special cases, we should factor this out. |
| 3100 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { | 3099 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { |
| 3101 heap()->PromoteArrayBuffer(target); | 3100 heap()->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target)); |
| 3102 } | 3101 } |
| 3103 heap()->IncrementPromotedObjectsSize(object_size); | 3102 heap()->IncrementPromotedObjectsSize(object_size); |
| 3104 return true; | 3103 return true; |
| 3105 } | 3104 } |
| 3106 | 3105 |
| 3107 return false; | 3106 return false; |
| 3108 } | 3107 } |
| 3109 | 3108 |
| 3110 | 3109 |
| 3111 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot, | 3110 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot, |
| (...skipping 1324 matching lines...) |
| 4436 | 4435 |
| 4437 // Give pages that are queued to be freed back to the OS. Invalid store | 4436 // Give pages that are queued to be freed back to the OS. Invalid store |
| 4438 // buffer entries are already filtered out. We can just release the memory. | 4437 // buffer entries are already filtered out. We can just release the memory. |
| 4439 heap()->FreeQueuedChunks(); | 4438 heap()->FreeQueuedChunks(); |
| 4440 | 4439 |
| 4441 EvacuateNewSpaceAndCandidates(); | 4440 EvacuateNewSpaceAndCandidates(); |
| 4442 | 4441 |
| 4443 // EvacuateNewSpaceAndCandidates iterates over new space objects and, for | 4442 // EvacuateNewSpaceAndCandidates iterates over new space objects and, for |
| 4444 // ArrayBuffers, either re-registers them as live or promotes them. This is | 4443 // ArrayBuffers, either re-registers them as live or promotes them. This is |
| 4445 // needed to properly free them. | 4444 // needed to properly free them. |
| 4446 heap()->FreeDeadArrayBuffers(false); | 4445 heap()->array_buffer_tracker()->FreeDead(false); |
| 4447 | 4446 |
| 4448 // Clear the marking state of live large objects. | 4447 // Clear the marking state of live large objects. |
| 4449 heap_->lo_space()->ClearMarkingStateOfLiveObjects(); | 4448 heap_->lo_space()->ClearMarkingStateOfLiveObjects(); |
| 4450 | 4449 |
| 4451 // Deallocate evacuated candidate pages. | 4450 // Deallocate evacuated candidate pages. |
| 4452 ReleaseEvacuationCandidates(); | 4451 ReleaseEvacuationCandidates(); |
| 4453 CodeRange* code_range = heap()->isolate()->code_range(); | 4452 CodeRange* code_range = heap()->isolate()->code_range(); |
| 4454 if (code_range != NULL && code_range->valid()) { | 4453 if (code_range != NULL && code_range->valid()) { |
| 4455 code_range->ReserveEmergencyBlock(); | 4454 code_range->ReserveEmergencyBlock(); |
| 4456 } | 4455 } |
| (...skipping 359 matching lines...) |
| 4816 SlotsBuffer* buffer = *buffer_address; | 4815 SlotsBuffer* buffer = *buffer_address; |
| 4817 while (buffer != NULL) { | 4816 while (buffer != NULL) { |
| 4818 SlotsBuffer* next_buffer = buffer->next(); | 4817 SlotsBuffer* next_buffer = buffer->next(); |
| 4819 DeallocateBuffer(buffer); | 4818 DeallocateBuffer(buffer); |
| 4820 buffer = next_buffer; | 4819 buffer = next_buffer; |
| 4821 } | 4820 } |
| 4822 *buffer_address = NULL; | 4821 *buffer_address = NULL; |
| 4823 } | 4822 } |
| 4824 } // namespace internal | 4823 } // namespace internal |
| 4825 } // namespace v8 | 4824 } // namespace v8 |
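
Note on the change: the NEW side routes all ArrayBuffer bookkeeping through heap()->array_buffer_tracker() — MarkLive when a buffer survives new-space evacuation, Promote when its owner is moved to old space, and FreeDead after EvacuateNewSpaceAndCandidates to release backing stores that were not re-registered. The following is a minimal, self-contained sketch of that lifecycle only, not V8's actual ArrayBufferTracker: the Register helper, the map-based bookkeeping, and main() are invented for illustration; only the MarkLive/Promote/FreeDead names come from the call sites in this diff.

// Toy stand-in for the tracker lifecycle implied by this CL; not V8 code.
#include <cstdio>
#include <cstdlib>
#include <unordered_map>

struct JSArrayBuffer {
  void* backing_store;  // Heap-external memory owned by the buffer.
};

class ArrayBufferTracker {
 public:
  // Hypothetical helper: start tracking a new-space buffer, initially unmarked.
  void Register(JSArrayBuffer* buf) { new_space_[buf] = false; }

  // Buffer survived new-space evacuation: keep its backing store on FreeDead().
  void MarkLive(JSArrayBuffer* buf) { new_space_[buf] = true; }

  // Buffer's owner was promoted to old space: stop per-scavenge tracking.
  // (The real tracker moves it to old-space bookkeeping, omitted here.)
  void Promote(JSArrayBuffer* buf) { new_space_.erase(buf); }

  // Free backing stores of buffers that were not marked live, then clear
  // the marks for the next GC cycle.
  void FreeDead(bool /*from_scavenge*/) {
    for (auto it = new_space_.begin(); it != new_space_.end();) {
      if (!it->second) {
        std::free(it->first->backing_store);
        it = new_space_.erase(it);
      } else {
        it->second = false;
        ++it;
      }
    }
  }

 private:
  std::unordered_map<JSArrayBuffer*, bool> new_space_;
};

int main() {
  ArrayBufferTracker tracker;
  JSArrayBuffer live{std::malloc(16)};
  JSArrayBuffer dead{std::malloc(16)};
  tracker.Register(&live);
  tracker.Register(&dead);
  tracker.MarkLive(&live);  // Evacuation reached `live`; `dead` was not copied.
  tracker.FreeDead(false);  // Releases only `dead`'s backing store.
  std::printf("live backing store kept at %p\n", live.backing_store);
  std::free(live.backing_store);
  return 0;
}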