| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <algorithm> | 5 #include <algorithm> |
| 6 | 6 |
| 7 #include "src/v8.h" | 7 #include "src/v8.h" |
| 8 | 8 |
| 9 #include "src/counters.h" | 9 #include "src/counters.h" |
| 10 #include "src/heap/store-buffer-inl.h" | 10 #include "src/heap/store-buffer-inl.h" |
| (...skipping 345 matching lines...) | (...skipping 345 matching lines...) |
| 356 } | 356 } |
| 357 } | 357 } |
| 358 } | 358 } |
| 359 | 359 |
| 360 | 360 |
| 361 void StoreBuffer::ClearInvalidStoreBufferEntries() { | 361 void StoreBuffer::ClearInvalidStoreBufferEntries() { |
| 362 Compact(); | 362 Compact(); |
| 363 Address* new_top = old_start_; | 363 Address* new_top = old_start_; |
| 364 for (Address* current = old_start_; current < old_top_; current++) { | 364 for (Address* current = old_start_; current < old_top_; current++) { |
| 365 Address addr = *current; | 365 Address addr = *current; |
| 366 Object** slot = reinterpret_cast<Object**>(*current); | 366 Object** slot = reinterpret_cast<Object**>(addr); |
| 367 Object* object = *slot; | 367 Object* object = *slot; |
| 368 if (heap_->InNewSpace(object)) { | 368 if (heap_->InNewSpace(object) && object->IsHeapObject()) { |
| 369 if (heap_->mark_compact_collector()->IsSlotInLiveObject( | 369 // If the target object is not black, the source slot must be part |
| 370 reinterpret_cast<HeapObject**>(slot), | 370 // of a non-black (dead) object. |
| 371 reinterpret_cast<HeapObject*>(object))) { | 371 HeapObject* heap_object = HeapObject::cast(object); |
| | 372 if (Marking::IsBlack(Marking::MarkBitFrom(heap_object)) && |
| | 373 heap_->mark_compact_collector()->IsSlotInLiveObject(addr)) { |
| 372 *new_top++ = addr; | 374 *new_top++ = addr; |
| 373 } | 375 } |
| 374 } | 376 } |
| 375 } | 377 } |
| 376 old_top_ = new_top; | 378 old_top_ = new_top; |
| 377 ClearFilteringHashSets(); | 379 ClearFilteringHashSets(); |
| 378 | 380 |
| 379 // Don't scan on scavenge dead large objects. | 381 // Don't scan on scavenge dead large objects. |
| 380 LargeObjectIterator it(heap_->lo_space()); | 382 LargeObjectIterator it(heap_->lo_space()); |
| 381 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 383 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| 382 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | 384 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); |
| 383 if (chunk->scan_on_scavenge() && !Marking::MarkBitFrom(object).Get()) { | 385 if (chunk->scan_on_scavenge() && !Marking::MarkBitFrom(object).Get()) { |
| 384 chunk->set_scan_on_scavenge(false); | 386 chunk->set_scan_on_scavenge(false); |
| 385 } | 387 } |
| 386 } | 388 } |
| 387 } | 389 } |
| 388 | 390 |
| 389 | 391 |
| 390 void StoreBuffer::VerifyValidStoreBufferEntries() { | 392 void StoreBuffer::VerifyValidStoreBufferEntries() { |
| 391 for (Address* current = old_start_; current < old_top_; current++) { | 393 for (Address* current = old_start_; current < old_top_; current++) { |
| 392 Object** slot = reinterpret_cast<Object**>(*current); | 394 Object** slot = reinterpret_cast<Object**>(*current); |
| 393 Object* object = *slot; | 395 Object* object = *slot; |
| | 396 CHECK(object->IsHeapObject()); |
| 394 CHECK(heap_->InNewSpace(object)); | 397 CHECK(heap_->InNewSpace(object)); |
| 395 heap_->mark_compact_collector()->VerifyIsSlotInLiveObject( | 398 heap_->mark_compact_collector()->VerifyIsSlotInLiveObject( |
| 396 reinterpret_cast<HeapObject**>(slot), | 399 reinterpret_cast<Address>(slot), HeapObject::cast(object)); |
| 397 reinterpret_cast<HeapObject*>(object)); | |
| 398 } | 400 } |
| 399 } | 401 } |
| 400 | 402 |
| 401 | 403 |
| 402 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) { | 404 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) { |
| 403 // We do not sort or remove duplicated entries from the store buffer because | 405 // We do not sort or remove duplicated entries from the store buffer because |
| 404 // we expect that callback will rebuild the store buffer thus removing | 406 // we expect that callback will rebuild the store buffer thus removing |
| 405 // all duplicates and pointers to old space. | 407 // all duplicates and pointers to old space. |
| 406 bool some_pages_to_scan = PrepareForIteration(); | 408 bool some_pages_to_scan = PrepareForIteration(); |
| 407 | 409 |
| (...skipping 156 matching lines...) | (...skipping 156 matching lines...) |
| 564 } | 566 } |
| 565 old_buffer_is_sorted_ = false; | 567 old_buffer_is_sorted_ = false; |
| 566 old_buffer_is_filtered_ = false; | 568 old_buffer_is_filtered_ = false; |
| 567 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 569 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| 568 DCHECK(old_top_ <= old_limit_); | 570 DCHECK(old_top_ <= old_limit_); |
| 569 } | 571 } |
| 570 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 572 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
| 571 } | 573 } |
| 572 } | 574 } |
| 573 } // namespace v8::internal | 575 } // namespace v8::internal |
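
The substantive change above is the new filtering predicate in ClearInvalidStoreBufferEntries(): a recorded slot is kept only if it still points at a new-space heap object that the last marking pass marked black, and the slot itself lies inside a live object. Per the new comment, a non-black target implies the slot sits in a dead object, so the entry can be dropped without the slot-in-live-object check. Below is a minimal standalone sketch of that predicate and of the old_start_/new_top compaction walk, using simplified stand-in types (FakeObject, FakeSlot, ShouldKeepEntry and FilterEntries are illustrative names, not the V8 API).

```cpp
// Simplified sketch of the store-buffer filtering introduced above.
// FakeObject/FakeSlot stand in for V8's HeapObject and a recorded slot.
#include <cstddef>
#include <vector>

struct FakeObject {
  bool is_heap_object;  // false would correspond to a Smi (immediate integer)
  bool in_new_space;    // target lives in the young generation
  bool is_black;        // marked live by the last mark-compact marking pass
};

struct FakeSlot {
  const FakeObject* target;  // what the recorded slot currently points to
  bool inside_live_object;   // the slot itself lies inside a live object
};

// Keep an entry only if it is still a valid old-to-new pointer.
bool ShouldKeepEntry(const FakeSlot& slot) {
  const FakeObject* target = slot.target;
  if (target == nullptr || !target->is_heap_object || !target->in_new_space) {
    return false;  // no longer an old-to-new pointer
  }
  // A non-black target cannot be referenced from a live object, so the slot
  // must sit in a dead object; only black targets need the (more expensive)
  // slot-in-live-object check.
  return target->is_black && slot.inside_live_object;
}

// In-place compaction analogous to the old_start_/new_top walk in the patch:
// surviving entries are copied down over dropped ones and the top is lowered.
void FilterEntries(std::vector<FakeSlot>* entries) {
  std::size_t new_top = 0;
  for (const FakeSlot& entry : *entries) {
    if (ShouldKeepEntry(entry)) (*entries)[new_top++] = entry;
  }
  entries->resize(new_top);
}
```

FilterEntries mirrors the in-place pointer walk in the patch, so no extra buffer is needed; only the filtering condition itself is new relative to the old code, which checked IsSlotInLiveObject alone.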