OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/store-buffer.h" | 5 #include "src/store-buffer.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 | 8 |
9 #include "src/v8.h" | 9 #include "src/v8.h" |
| 10 |
| 11 #include "src/base/atomicops.h" |
10 #include "src/counters.h" | 12 #include "src/counters.h" |
11 #include "src/store-buffer-inl.h" | 13 #include "src/store-buffer-inl.h" |
12 | 14 |
13 namespace v8 { | 15 namespace v8 { |
14 namespace internal { | 16 namespace internal { |
15 | 17 |
16 StoreBuffer::StoreBuffer(Heap* heap) | 18 StoreBuffer::StoreBuffer(Heap* heap) |
17 : heap_(heap), | 19 : heap_(heap), |
18 start_(NULL), | 20 start_(NULL), |
19 limit_(NULL), | 21 limit_(NULL), |
(...skipping 318 matching lines...)
338 if (object->IsFixedArray()) { | 340 if (object->IsFixedArray()) { |
339 Address slot_address = object->address(); | 341 Address slot_address = object->address(); |
340 Address end = object->address() + object->Size(); | 342 Address end = object->address() + object->Size(); |
341 | 343 |
342 while (slot_address < end) { | 344 while (slot_address < end) { |
343 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); | 345 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); |
344 // When we are not in GC, the Heap::InNewSpace() predicate | 346 // When we are not in GC, the Heap::InNewSpace() predicate |
345 // checks that pointers which satisfy it point into | 347 // checks that pointers which satisfy it point into |
346 // the active semispace. | 348 // the active semispace. |
347 Object* object = reinterpret_cast<Object*>( | 349 Object* object = reinterpret_cast<Object*>( |
348 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 350 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
349 heap_->InNewSpace(object); | 351 heap_->InNewSpace(object); |
350 slot_address += kPointerSize; | 352 slot_address += kPointerSize; |
351 } | 353 } |
352 } | 354 } |
353 } | 355 } |
354 } | 356 } |
355 #endif | 357 #endif |
356 | 358 |
357 | 359 |
358 void StoreBuffer::Verify() { | 360 void StoreBuffer::Verify() { |
(...skipping 16 matching lines...)
375 void StoreBuffer::FindPointersToNewSpaceInRegion( | 377 void StoreBuffer::FindPointersToNewSpaceInRegion( |
376 Address start, | 378 Address start, |
377 Address end, | 379 Address end, |
378 ObjectSlotCallback slot_callback, | 380 ObjectSlotCallback slot_callback, |
379 bool clear_maps) { | 381 bool clear_maps) { |
380 for (Address slot_address = start; | 382 for (Address slot_address = start; |
381 slot_address < end; | 383 slot_address < end; |
382 slot_address += kPointerSize) { | 384 slot_address += kPointerSize) { |
383 Object** slot = reinterpret_cast<Object**>(slot_address); | 385 Object** slot = reinterpret_cast<Object**>(slot_address); |
384 Object* object = reinterpret_cast<Object*>( | 386 Object* object = reinterpret_cast<Object*>( |
385 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 387 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
386 if (heap_->InNewSpace(object)) { | 388 if (heap_->InNewSpace(object)) { |
387 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 389 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
388 ASSERT(heap_object->IsHeapObject()); | 390 ASSERT(heap_object->IsHeapObject()); |
389 // The new space object was not promoted if it still contains a map | 391 // The new space object was not promoted if it still contains a map |
390 // pointer. Clear the map field now lazily. | 392 // pointer. Clear the map field now lazily. |
391 if (clear_maps) ClearDeadObject(heap_object); | 393 if (clear_maps) ClearDeadObject(heap_object); |
392 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); | 394 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); |
393 object = reinterpret_cast<Object*>( | 395 object = reinterpret_cast<Object*>( |
394 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 396 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
395 if (heap_->InNewSpace(object)) { | 397 if (heap_->InNewSpace(object)) { |
396 EnterDirectlyIntoStoreBuffer(slot_address); | 398 EnterDirectlyIntoStoreBuffer(slot_address); |
397 } | 399 } |
398 } | 400 } |
399 } | 401 } |
400 } | 402 } |
401 | 403 |
402 | 404 |
403 // Compute start address of the first map following given addr. | 405 // Compute start address of the first map following given addr. |
404 static inline Address MapStartAlign(Address addr) { | 406 static inline Address MapStartAlign(Address addr) { |
(...skipping 58 matching lines...)
463 Address* limit = old_top_; | 465 Address* limit = old_top_; |
464 old_top_ = old_start_; | 466 old_top_ = old_start_; |
465 { | 467 { |
466 DontMoveStoreBufferEntriesScope scope(this); | 468 DontMoveStoreBufferEntriesScope scope(this); |
467 for (Address* current = old_start_; current < limit; current++) { | 469 for (Address* current = old_start_; current < limit; current++) { |
468 #ifdef DEBUG | 470 #ifdef DEBUG |
469 Address* saved_top = old_top_; | 471 Address* saved_top = old_top_; |
470 #endif | 472 #endif |
471 Object** slot = reinterpret_cast<Object**>(*current); | 473 Object** slot = reinterpret_cast<Object**>(*current); |
472 Object* object = reinterpret_cast<Object*>( | 474 Object* object = reinterpret_cast<Object*>( |
473 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 475 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
474 if (heap_->InFromSpace(object)) { | 476 if (heap_->InFromSpace(object)) { |
475 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 477 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
476 // The new space object was not promoted if it still contains a map | 478 // The new space object was not promoted if it still contains a map |
477 // pointer. Clear the map field now lazily. | 479 // pointer. Clear the map field now lazily. |
478 if (clear_maps) ClearDeadObject(heap_object); | 480 if (clear_maps) ClearDeadObject(heap_object); |
479 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); | 481 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); |
480 object = reinterpret_cast<Object*>( | 482 object = reinterpret_cast<Object*>( |
481 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 483 base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot))); |
482 if (heap_->InNewSpace(object)) { | 484 if (heap_->InNewSpace(object)) { |
483 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); | 485 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); |
484 } | 486 } |
485 } | 487 } |
486 ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top); | 488 ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top); |
487 } | 489 } |
488 } | 490 } |
489 } | 491 } |
490 | 492 |
491 | 493 |
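The two loops above (FindPointersToNewSpaceInRegion and the store-buffer iteration) share one pattern: each slot is read with a relaxed atomic load, the slot callback is allowed to rewrite the slot, and the slot is then re-read so that pointers still aimed at new space get re-recorded. Below is a minimal, self-contained sketch of that pattern, not V8 code: std::atomic with memory_order_relaxed stands in for base::NoBarrier_Load, and NewSpace, the sample addresses, and the inline "callback" are illustrative placeholders.

#include <atomic>
#include <cstdint>
#include <cstdio>
#include <vector>

using Address = uintptr_t;
using Slot = std::atomic<Address>;

// Placeholder for heap_->InNewSpace()/InFromSpace(): a flat address range.
struct NewSpace {
  Address start, end;
  bool Contains(Address a) const { return a >= start && a < end; }
};

int main() {
  NewSpace new_space{0x1000, 0x2000};
  Slot slots[3];
  slots[0] = 0x1100;  // points into new space; the "callback" will promote it
  slots[1] = 0x1200;  // points into new space and stays there
  slots[2] = 0x5000;  // old-space pointer, skipped entirely

  std::vector<Slot*> still_pointing_into_new_space;
  for (Slot& slot : slots) {
    // Relaxed load, mirroring base::NoBarrier_Load: an atomic word-sized
    // read with no ordering guarantees.
    Address value = slot.load(std::memory_order_relaxed);
    if (!new_space.Contains(value)) continue;
    // The slot callback may rewrite the slot, e.g. to the promoted object's
    // new address; here only the first object is "promoted".
    if (value == 0x1100) slot.store(0x6000, std::memory_order_relaxed);
    // Re-read the slot: only slots that still point into new space need to
    // be re-recorded (EnterDirectlyIntoStoreBuffer in the real code).
    value = slot.load(std::memory_order_relaxed);
    if (new_space.Contains(value)) {
      still_pointing_into_new_space.push_back(&slot);
    }
  }
  std::printf("slots still pointing into new space: %zu\n",
              still_pointing_into_new_space.size());
  return 0;
}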
(...skipping 117 matching lines...)
609 } | 611 } |
610 old_buffer_is_sorted_ = false; | 612 old_buffer_is_sorted_ = false; |
611 old_buffer_is_filtered_ = false; | 613 old_buffer_is_filtered_ = false; |
612 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 614 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
613 ASSERT(old_top_ <= old_limit_); | 615 ASSERT(old_top_ <= old_limit_); |
614 } | 616 } |
615 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 617 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
616 } | 618 } |
617 | 619 |
618 } } // namespace v8::internal | 620 } } // namespace v8::internal |
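One detail worth calling out from the tail of Compact() above: the value written back to the old buffer is int_addr << kPointerSizeLog2, which suggests the hash sets hold slot addresses shifted right by kPointerSizeLog2 (slot addresses are pointer-aligned, so no bits are lost) and the left shift restores the full address. A tiny sketch of that round trip follows; the 8-byte pointer size and the sample address are assumptions, not taken from the CL.

#include <cassert>
#include <cstdint>

int main() {
  const unsigned kPointerSizeLog2 = 3;        // assumes 8-byte pointers
  const uintptr_t slot_address = 0xdeadbee8;  // pointer-aligned sample address

  // Slot addresses are pointer-aligned, so the low kPointerSizeLog2 bits are
  // zero and can be dropped without losing information.
  assert((slot_address & ((uintptr_t{1} << kPointerSizeLog2) - 1)) == 0);

  uintptr_t int_addr = slot_address >> kPointerSizeLog2;  // compact form
  uintptr_t restored = int_addr << kPointerSizeLog2;      // what Compact() emits
  assert(restored == slot_address);
  return 0;
}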