Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <algorithm> | 5 #include <algorithm> |
| 6 | 6 |
| 7 #include "src/v8.h" | 7 #include "src/v8.h" |
| 8 | 8 |
| 9 #include "src/base/atomicops.h" | 9 #include "src/base/atomicops.h" |
| 10 #include "src/counters.h" | 10 #include "src/counters.h" |
| (...skipping 332 matching lines...) | |
| 343 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 343 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| 344 if (object->IsFixedArray()) { | 344 if (object->IsFixedArray()) { |
| 345 Address slot_address = object->address(); | 345 Address slot_address = object->address(); |
| 346 Address end = object->address() + object->Size(); | 346 Address end = object->address() + object->Size(); |
| 347 | 347 |
| 348 while (slot_address < end) { | 348 while (slot_address < end) { |
| 349 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); | 349 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); |
| 350 // When we are not in GC the Heap::InNewSpace() predicate | 350 // When we are not in GC the Heap::InNewSpace() predicate |
| 351 // checks that pointers which satisfy predicate point into | 351 // checks that pointers which satisfy predicate point into |
| 352 // the active semispace. | 352 // the active semispace. |
| 353 Object* object = reinterpret_cast<Object*>( | 353 Object* object = *slot; |
| 354 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); | |
| 355 heap_->InNewSpace(object); | 354 heap_->InNewSpace(object); |
| 356 slot_address += kPointerSize; | 355 slot_address += kPointerSize; |
| 357 } | 356 } |
| 358 } | 357 } |
| 359 } | 358 } |
| 360 } | 359 } |
| 361 #endif | 360 #endif |
| 362 | 361 |
| 363 | 362 |
| 364 void StoreBuffer::Verify() { | 363 void StoreBuffer::Verify() { |
| (...skipping 10 matching lines...) | |
| 375 Verify(); | 374 Verify(); |
| 376 } | 375 } |
| 377 #endif | 376 #endif |
| 378 } | 377 } |
| 379 | 378 |
| 380 | 379 |
| 381 void StoreBuffer::ProcessOldToNewSlot(Address slot_address, | 380 void StoreBuffer::ProcessOldToNewSlot(Address slot_address, |
| 382 ObjectSlotCallback slot_callback, | 381 ObjectSlotCallback slot_callback, |
| 383 bool clear_maps) { | 382 bool clear_maps) { |
| 384 Object** slot = reinterpret_cast<Object**>(slot_address); | 383 Object** slot = reinterpret_cast<Object**>(slot_address); |
| 385 Object* object = reinterpret_cast<Object*>( | 384 Object* object = *slot; |
| 386 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); | |
| 387 | 385 |
| 388 // If the object is not in from space, it must be a duplicate store buffer | 386 // If the object is not in from space, it must be a duplicate store buffer |
| 389 // entry and the slot was already updated. | 387 // entry and the slot was already updated. |
| 390 if (heap_->InFromSpace(object)) { | 388 if (heap_->InFromSpace(object)) { |
| 391 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 389 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
| 392 DCHECK(heap_object->IsHeapObject()); | 390 DCHECK(heap_object->IsHeapObject()); |
| 393 // The new space object was not promoted if it still contains a map | 391 // The new space object was not promoted if it still contains a map |
| 394 // pointer. Clear the map field now lazily (during full GC). | 392 // pointer. Clear the map field now lazily (during full GC). |
| 395 if (clear_maps) ClearDeadObject(heap_object); | 393 if (clear_maps) ClearDeadObject(heap_object); |
| 396 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); | 394 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); |
| 397 object = reinterpret_cast<Object*>( | 395 object = *slot; |
| 398 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); | |
| 399 // If the object was in from space before and is after executing the | 396 // If the object was in from space before and is after executing the |
| 400 // callback in to space, the object is still live. | 397 // callback in to space, the object is still live. |
| 401 // Unfortunately, we do not know about the slot. It could be in a | 398 // Unfortunately, we do not know about the slot. It could be in a |
| 402 // just freed free space object. | 399 // just freed free space object. |
| 403 if (heap_->InToSpace(object)) { | 400 if (heap_->InToSpace(object)) { |
| 404 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); | 401 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); |
| 405 } | 402 } |
| 406 } | 403 } |
| 407 } | 404 } |
| 408 | 405 |
| (...skipping 24 matching lines...) | |
| 433 } | 430 } |
| 434 } | 431 } |
| 435 | 432 |
| 436 | 433 |
| 437 void StoreBuffer::ClearInvalidStoreBufferEntries() { | 434 void StoreBuffer::ClearInvalidStoreBufferEntries() { |
| 438 Compact(); | 435 Compact(); |
| 439 Address* new_top = old_start_; | 436 Address* new_top = old_start_; |
| 440 for (Address* current = old_start_; current < old_top_; current++) { | 437 for (Address* current = old_start_; current < old_top_; current++) { |
| 441 Address addr = *current; | 438 Address addr = *current; |
| 442 Object** slot = reinterpret_cast<Object**>(*current); | 439 Object** slot = reinterpret_cast<Object**>(*current); |
| | 440 // Use a NoBarrier_Load here since the slot can be in a dead object |
| | 441 // which may be touched by the concurrent sweeper thread. |
Jarin
2015/03/10 13:32:25
Why didn't you update this one?

Hannes Payer (out of office)
2015/03/10 13:51:17
Because the stale pointers may be accessed concurrently.
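The exchange above is the crux of the CL: this one load keeps `base::NoBarrier_Load` because, per the reply, `ClearInvalidStoreBufferEntries` can read slots in dead objects that the concurrent sweeper thread may rewrite at the same time. As a minimal, self-contained sketch (not V8 code) of why such a racy read needs an atomic load, the following uses C++11 `std::atomic` with `memory_order_relaxed`, the standard analogue of `NoBarrier_Load`; the `Sweeper` and `ReadPossiblyDeadSlot` names are invented for illustration:

```cpp
// Minimal sketch (not V8 code): a word another thread may overwrite must be
// read with an atomic load; memory_order_relaxed is the C++11 analogue of
// base::NoBarrier_Load (no ordering guarantees, just race-free access).
#include <atomic>
#include <cstdint>
#include <thread>

// Hypothetical stand-in for a heap slot inside a dead object.
static std::atomic<std::intptr_t> slot{42};

// Sweeper-like thread: may rewrite the slot word at any time.
void Sweeper() { slot.store(0, std::memory_order_relaxed); }

// Reader that can race with the sweeper: a plain `*slot`-style read here
// would be a data race; the relaxed load keeps it well-defined.
std::intptr_t ReadPossiblyDeadSlot() {
  return slot.load(std::memory_order_relaxed);
}

int main() {
  std::thread sweeper(Sweeper);
  std::intptr_t value = ReadPossiblyDeadSlot();  // safe even mid-sweep
  sweeper.join();
  (void)value;
  return 0;
}
```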
| 443 Object* object = reinterpret_cast<Object*>( | 442 Object* object = reinterpret_cast<Object*>( |
| 444 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); | 443 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
| 445 if (heap_->InNewSpace(object)) { | 444 if (heap_->InNewSpace(object)) { |
| 446 if (heap_->mark_compact_collector()->IsSlotInLiveObject( | 445 if (heap_->mark_compact_collector()->IsSlotInLiveObject( |
| 447 reinterpret_cast<HeapObject**>(slot), | 446 reinterpret_cast<HeapObject**>(slot), |
| 448 reinterpret_cast<HeapObject*>(object))) { | 447 reinterpret_cast<HeapObject*>(object))) { |
| 449 *new_top++ = addr; | 448 *new_top++ = addr; |
| 450 } | 449 } |
| 451 } | 450 } |
| 452 } | 451 } |
| 453 old_top_ = new_top; | 452 old_top_ = new_top; |
| 454 ClearFilteringHashSets(); | 453 ClearFilteringHashSets(); |
| 455 | 454 |
| 456 // Don't scan on scavenge dead large objects. | 455 // Don't scan on scavenge dead large objects. |
| 457 LargeObjectIterator it(heap_->lo_space()); | 456 LargeObjectIterator it(heap_->lo_space()); |
| 458 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 457 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| 459 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | 458 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); |
| 460 if (chunk->scan_on_scavenge() && !Marking::MarkBitFrom(object).Get()) { | 459 if (chunk->scan_on_scavenge() && !Marking::MarkBitFrom(object).Get()) { |
| 461 chunk->set_scan_on_scavenge(false); | 460 chunk->set_scan_on_scavenge(false); |
| 462 } | 461 } |
| 463 } | 462 } |
| 464 } | 463 } |
| 465 | 464 |
| 466 | 465 |
| 467 void StoreBuffer::VerifyValidStoreBufferEntries() { | 466 void StoreBuffer::VerifyValidStoreBufferEntries() { |
| 468 for (Address* current = old_start_; current < old_top_; current++) { | 467 for (Address* current = old_start_; current < old_top_; current++) { |
| 469 Object** slot = reinterpret_cast<Object**>(*current); | 468 Object** slot = reinterpret_cast<Object**>(*current); |
| 470 Object* object = reinterpret_cast<Object*>( | 469 Object* object = *slot; |
| 471 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); | |
| 472 CHECK(heap_->InNewSpace(object)); | 470 CHECK(heap_->InNewSpace(object)); |
| 473 heap_->mark_compact_collector()->VerifyIsSlotInLiveObject( | 471 heap_->mark_compact_collector()->VerifyIsSlotInLiveObject( |
| 474 reinterpret_cast<HeapObject**>(slot), | 472 reinterpret_cast<HeapObject**>(slot), |
| 475 reinterpret_cast<HeapObject*>(object)); | 473 reinterpret_cast<HeapObject*>(object)); |
| 476 } | 474 } |
| 477 } | 475 } |
| 478 | 476 |
| 479 | 477 |
| 480 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) { | 478 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) { |
| 481 IteratePointersToNewSpace(slot_callback, false); | 479 IteratePointersToNewSpace(slot_callback, false); |
| (...skipping 173 matching lines...) | |
| 655 } | 653 } |
| 656 old_buffer_is_sorted_ = false; | 654 old_buffer_is_sorted_ = false; |
| 657 old_buffer_is_filtered_ = false; | 655 old_buffer_is_filtered_ = false; |
| 658 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 656 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| 659 DCHECK(old_top_ <= old_limit_); | 657 DCHECK(old_top_ <= old_limit_); |
| 660 } | 658 } |
| 661 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 659 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
| 662 } | 660 } |
| 663 } | 661 } |
| 664 } // namespace v8::internal | 662 } // namespace v8::internal |
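Overall, the CL applies one rule across the hunks above: slot reads that happen while the heap is quiescent (inside a GC pause or a verification pass, with no concurrent writers) drop the atomic load and become a plain `*slot`, while reads that can race with the sweeper keep `NoBarrier_Load`. Below is a hedged, self-contained sketch of that split; `HeapWord` and the `ReadSlotQuiescent`/`ReadSlotRacy` helpers are invented stand-ins for the two paths, not V8 API:

```cpp
// Sketch only, assuming the quiescent/racy split described above; HeapWord
// stands in for V8's AtomicWord and the helper names are hypothetical.
#include <atomic>
#include <cstdint>

using HeapWord = std::intptr_t;

// Quiescent path (e.g. VerifyValidStoreBufferEntries after this CL):
// no concurrent writers exist, so a plain dereference suffices.
HeapWord ReadSlotQuiescent(const HeapWord* slot) { return *slot; }

// Racy path (e.g. ClearInvalidStoreBufferEntries): the sweeper may touch
// the word concurrently, so keep the relaxed ("no barrier") atomic load.
HeapWord ReadSlotRacy(const std::atomic<HeapWord>* slot) {
  return slot->load(std::memory_order_relaxed);
}

int main() {
  HeapWord word = 7;
  std::atomic<HeapWord> atomic_word{7};
  return (ReadSlotQuiescent(&word) == ReadSlotRacy(&atomic_word)) ? 0 : 1;
}
```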