Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/store-buffer.h" | 5 #include "src/store-buffer.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 | 8 |
| 9 #include "src/v8.h" | 9 #include "src/v8.h" |
| 10 #include "src/counters.h" | 10 #include "src/counters.h" |
| (...skipping 327 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 338 if (object->IsFixedArray()) { | 338 if (object->IsFixedArray()) { |
| 339 Address slot_address = object->address(); | 339 Address slot_address = object->address(); |
| 340 Address end = object->address() + object->Size(); | 340 Address end = object->address() + object->Size(); |
| 341 | 341 |
| 342 while (slot_address < end) { | 342 while (slot_address < end) { |
| 343 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); | 343 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); |
| 344 // When we are not in GC the Heap::InNewSpace() predicate | 344 // When we are not in GC the Heap::InNewSpace() predicate |
| 345 // checks that pointers which satisfy predicate point into | 345 // checks that pointers which satisfy predicate point into |
| 346 // the active semispace. | 346 // the active semispace. |
| 347 Object* object = reinterpret_cast<Object*>( | 347 Object* object = reinterpret_cast<Object*>( |
| 348 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 348 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
| *Review comment — Jakob Kummerow (2014/06/05 11:49:06): "IWYU?"* | |
| 349 heap_->InNewSpace(object); | 349 heap_->InNewSpace(object); |
| 350 slot_address += kPointerSize; | 350 slot_address += kPointerSize; |
| 351 } | 351 } |
| 352 } | 352 } |
| 353 } | 353 } |
| 354 } | 354 } |
| 355 #endif | 355 #endif |
| 356 | 356 |
| 357 | 357 |
| 358 void StoreBuffer::Verify() { | 358 void StoreBuffer::Verify() { |
| (...skipping 16 matching lines...) Expand all Loading... | |
| 375 void StoreBuffer::FindPointersToNewSpaceInRegion( | 375 void StoreBuffer::FindPointersToNewSpaceInRegion( |
| 376 Address start, | 376 Address start, |
| 377 Address end, | 377 Address end, |
| 378 ObjectSlotCallback slot_callback, | 378 ObjectSlotCallback slot_callback, |
| 379 bool clear_maps) { | 379 bool clear_maps) { |
| 380 for (Address slot_address = start; | 380 for (Address slot_address = start; |
| 381 slot_address < end; | 381 slot_address < end; |
| 382 slot_address += kPointerSize) { | 382 slot_address += kPointerSize) { |
| 383 Object** slot = reinterpret_cast<Object**>(slot_address); | 383 Object** slot = reinterpret_cast<Object**>(slot_address); |
| 384 Object* object = reinterpret_cast<Object*>( | 384 Object* object = reinterpret_cast<Object*>( |
| 385 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 385 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
| 386 if (heap_->InNewSpace(object)) { | 386 if (heap_->InNewSpace(object)) { |
| 387 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 387 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
| 388 ASSERT(heap_object->IsHeapObject()); | 388 ASSERT(heap_object->IsHeapObject()); |
| 389 // The new space object was not promoted if it still contains a map | 389 // The new space object was not promoted if it still contains a map |
| 390 // pointer. Clear the map field now lazily. | 390 // pointer. Clear the map field now lazily. |
| 391 if (clear_maps) ClearDeadObject(heap_object); | 391 if (clear_maps) ClearDeadObject(heap_object); |
| 392 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); | 392 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); |
| 393 object = reinterpret_cast<Object*>( | 393 object = reinterpret_cast<Object*>( |
| 394 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 394 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
| 395 if (heap_->InNewSpace(object)) { | 395 if (heap_->InNewSpace(object)) { |
| 396 EnterDirectlyIntoStoreBuffer(slot_address); | 396 EnterDirectlyIntoStoreBuffer(slot_address); |
| 397 } | 397 } |
| 398 } | 398 } |
| 399 } | 399 } |
| 400 } | 400 } |
| 401 | 401 |
| 402 | 402 |
| 403 // Compute start address of the first map following given addr. | 403 // Compute start address of the first map following given addr. |
| 404 static inline Address MapStartAlign(Address addr) { | 404 static inline Address MapStartAlign(Address addr) { |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 463 Address* limit = old_top_; | 463 Address* limit = old_top_; |
| 464 old_top_ = old_start_; | 464 old_top_ = old_start_; |
| 465 { | 465 { |
| 466 DontMoveStoreBufferEntriesScope scope(this); | 466 DontMoveStoreBufferEntriesScope scope(this); |
| 467 for (Address* current = old_start_; current < limit; current++) { | 467 for (Address* current = old_start_; current < limit; current++) { |
| 468 #ifdef DEBUG | 468 #ifdef DEBUG |
| 469 Address* saved_top = old_top_; | 469 Address* saved_top = old_top_; |
| 470 #endif | 470 #endif |
| 471 Object** slot = reinterpret_cast<Object**>(*current); | 471 Object** slot = reinterpret_cast<Object**>(*current); |
| 472 Object* object = reinterpret_cast<Object*>( | 472 Object* object = reinterpret_cast<Object*>( |
| 473 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 473 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
| 474 if (heap_->InFromSpace(object)) { | 474 if (heap_->InFromSpace(object)) { |
| 475 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 475 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
| 476 // The new space object was not promoted if it still contains a map | 476 // The new space object was not promoted if it still contains a map |
| 477 // pointer. Clear the map field now lazily. | 477 // pointer. Clear the map field now lazily. |
| 478 if (clear_maps) ClearDeadObject(heap_object); | 478 if (clear_maps) ClearDeadObject(heap_object); |
| 479 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); | 479 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); |
| 480 object = reinterpret_cast<Object*>( | 480 object = reinterpret_cast<Object*>( |
| 481 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 481 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
| 482 if (heap_->InNewSpace(object)) { | 482 if (heap_->InNewSpace(object)) { |
| 483 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); | 483 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); |
| 484 } | 484 } |
| 485 } | 485 } |
| 486 ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top); | 486 ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top); |
| 487 } | 487 } |
| 488 } | 488 } |
| 489 } | 489 } |
| 490 | 490 |
| 491 | 491 |
| (...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 609 } | 609 } |
| 610 old_buffer_is_sorted_ = false; | 610 old_buffer_is_sorted_ = false; |
| 611 old_buffer_is_filtered_ = false; | 611 old_buffer_is_filtered_ = false; |
| 612 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 612 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| 613 ASSERT(old_top_ <= old_limit_); | 613 ASSERT(old_top_ <= old_limit_); |
| 614 } | 614 } |
| 615 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 615 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
| 616 } | 616 } |
| 617 | 617 |
| 618 } } // namespace v8::internal | 618 } } // namespace v8::internal |
| OLD | NEW |