| Index: src/store-buffer.cc |
| =================================================================== |
| --- src/store-buffer.cc (revision 7563) |
| +++ src/store-buffer.cc (working copy) |
| @@ -34,21 +34,25 @@ |
| namespace v8 { |
| namespace internal { |
| -Address* StoreBuffer::start_ = NULL; |
| -Address* StoreBuffer::limit_ = NULL; |
| -Address* StoreBuffer::old_start_ = NULL; |
| -Address* StoreBuffer::old_limit_ = NULL; |
| -Address* StoreBuffer::old_top_ = NULL; |
| -uintptr_t* StoreBuffer::hash_map_1_ = NULL; |
| -uintptr_t* StoreBuffer::hash_map_2_ = NULL; |
| -VirtualMemory* StoreBuffer::virtual_memory_ = NULL; |
| -bool StoreBuffer::old_buffer_is_sorted_ = false; |
| -bool StoreBuffer::old_buffer_is_filtered_ = false; |
| -bool StoreBuffer::during_gc_ = false; |
| -bool StoreBuffer::may_move_store_buffer_entries_ = true; |
| -bool StoreBuffer::store_buffer_rebuilding_enabled_ = false; |
| -StoreBufferCallback StoreBuffer::callback_ = NULL; |
| +StoreBuffer::StoreBuffer(Heap* heap) |
| + : heap_(heap), |
| + start_(NULL), |
| + limit_(NULL), |
| + old_start_(NULL), |
| + old_limit_(NULL), |
| + old_top_(NULL), |
| + old_buffer_is_sorted_(false), |
| + old_buffer_is_filtered_(false), |
| + during_gc_(false), |
| + store_buffer_rebuilding_enabled_(false), |
| + callback_(NULL), |
| + may_move_store_buffer_entries_(true), |
| + virtual_memory_(NULL), |
| + hash_map_1_(NULL), |
| + hash_map_2_(NULL) { |
| +} |
| + |
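The hunk above is the core of the change: the store buffer's state moves from process-wide statics into instance fields, initialized by a constructor that receives the owning Heap. A minimal sketch of the pattern, with simplified names and types rather than the real V8 declarations:

    #include <stddef.h>
    #include <stdint.h>

    typedef uint8_t* Address;  // stand-in for V8's Address

    class Heap;  // owner; a forward declaration is enough for the sketch

    // Before: one buffer per process, shared by every heap.
    //   class StoreBuffer { static Address* start_; ... };
    // After: each Heap owns a StoreBuffer, so multiple isolates can run
    // side by side without sharing write-barrier state.
    class StoreBufferSketch {
     public:
      explicit StoreBufferSketch(Heap* heap)
          : heap_(heap),
            start_(NULL),
            limit_(NULL) {}

     private:
      Heap* heap_;      // back pointer used where the statics used Heap::...
      Address* start_;  // bottom of the buffer of recorded slot addresses
      Address* limit_;  // one past the last usable slot
    };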
| void StoreBuffer::Setup() { |
| virtual_memory_ = new VirtualMemory(kStoreBufferSize * 3); |
| uintptr_t start_as_int = |
| @@ -75,13 +79,13 @@ |
| virtual_memory_->Commit(reinterpret_cast<Address>(start_), |
| kStoreBufferSize, |
| false); // Not executable. |
| - Heap::public_set_store_buffer_top(start_); |
| + heap_->public_set_store_buffer_top(start_); |
| hash_map_1_ = new uintptr_t[kHashMapLength]; |
| hash_map_2_ = new uintptr_t[kHashMapLength]; |
| - Heap::AddGCPrologueCallback(&GCPrologue, kGCTypeAll); |
| - Heap::AddGCEpilogueCallback(&GCEpilogue, kGCTypeAll); |
| + heap_->AddGCPrologueCallback(&GCPrologue, kGCTypeAll); |
| + heap_->AddGCEpilogueCallback(&GCEpilogue, kGCTypeAll); |
| ZapHashTables(); |
| } |
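Setup() reserves three times kStoreBufferSize before the hunk cuts off at start_as_int; the over-reservation is what makes realignment possible, since a buffer aligned to a kStoreBufferSize * 2 boundary (the alignment the elided lines presumably compute) always fits inside a 3x reservation with the required size left over. A compilable sketch of that arithmetic, with an illustrative constant:

    #include <assert.h>
    #include <stdint.h>

    const uintptr_t kStoreBufferSize = 1 << 16;  // illustrative value only

    // Rounds a value up to the next multiple of a power-of-two alignment.
    uintptr_t RoundUpSketch(uintptr_t value, uintptr_t alignment) {
      return (value + alignment - 1) & ~(alignment - 1);
    }

    int main() {
      // Worst case: the reservation starts just past an aligned boundary.
      // Rounding up skips at most 2 * kStoreBufferSize - 1 bytes, and the
      // 3x reservation still leaves kStoreBufferSize usable bytes.
      uintptr_t reservation = 2 * kStoreBufferSize + 1;
      uintptr_t aligned = RoundUpSketch(reservation, kStoreBufferSize * 2);
      assert(aligned - reservation < 2 * kStoreBufferSize);
      assert(reservation + 3 * kStoreBufferSize >= aligned + kStoreBufferSize);
      return 0;
    }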
| @@ -94,10 +98,15 @@ |
| delete[] old_start_; |
| old_start_ = old_top_ = old_limit_ = NULL; |
| start_ = limit_ = NULL; |
| - Heap::public_set_store_buffer_top(start_); |
| + heap_->public_set_store_buffer_top(start_); |
| } |
| +void StoreBuffer::StoreBufferOverflow(Isolate* isolate) { |
| + isolate->heap()->store_buffer()->Compact(); |
| +} |
| + |
| + |
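StoreBufferOverflow() is new in this patch: a static entry point that needs only an Isolate, presumably so callers that hold an isolate but no StoreBuffer pointer (runtime entry points, for instance) can still trigger a compaction. It simply re-derives the instance through the accessor chain; a compilable sketch with stand-in types:

    // Stand-ins; the real Isolate, Heap, and StoreBuffer come from V8.
    class StoreBufferSketch {
     public:
      void Compact() { /* drain the young buffer into the old buffer */ }
    };

    class HeapSketch {
     public:
      StoreBufferSketch* store_buffer() { return &store_buffer_; }
     private:
      StoreBufferSketch store_buffer_;
    };

    class IsolateSketch {
     public:
      HeapSketch* heap() { return &heap_; }
     private:
      HeapSketch heap_;
    };

    // Mirrors the new entry point: no statics, just isolate -> heap ->
    // store buffer, then Compact().
    void StoreBufferOverflowSketch(IsolateSketch* isolate) {
      isolate->heap()->store_buffer()->Compact();
    }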
| #if V8_TARGET_ARCH_X64 |
| static int CompareAddresses(const void* void_a, const void* void_b) { |
| intptr_t a = |
| @@ -134,7 +143,7 @@ |
| for (Address* read = old_start_; read < old_top_; read++) { |
| Address current = *read; |
| if (current != previous) { |
| - if (Heap::InNewSpace(*reinterpret_cast<Object**>(current))) { |
| + if (heap_->InNewSpace(*reinterpret_cast<Object**>(current))) { |
| *write++ = current; |
| } |
| } |
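This hunk sits inside the old-buffer filtering pass: after sorting, a read/write pointer pair compacts the array in place, dropping entries equal to their predecessor (duplicates are adjacent once sorted) and entries whose slot no longer points into new space. A self-contained sketch of that pattern, with an arbitrary predicate standing in for the InNewSpace check:

    #include <assert.h>
    #include <stddef.h>

    bool StillInterestingSketch(int value) { return value % 2 == 0; }

    // In-place filter over a sorted array: keeps one copy of each value
    // that passes the predicate and returns the new length, mirroring
    // the read/write loop in the hunk above.
    size_t FilterSortedInPlace(int* data, size_t length) {
      int* write = data;
      bool have_previous = false;
      int previous = 0;
      for (int* read = data; read < data + length; read++) {
        int current = *read;
        if (!have_previous || current != previous) {  // sorted: dups adjacent
          if (StillInterestingSketch(current)) {
            *write++ = current;
          }
        }
        previous = current;
        have_previous = true;
      }
      return write - data;
    }

    int main() {
      int data[] = { 2, 2, 3, 4, 4, 5, 6 };
      size_t kept = FilterSortedInPlace(data, 7);
      assert(kept == 3);  // 2, 4, 6 survive; duplicates and odd values drop
      assert(data[0] == 2 && data[1] == 4 && data[2] == 6);
      return 0;
    }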
| @@ -306,7 +315,7 @@ |
| *in_store_buffer_1_element_cache == cell_address) { |
| return true; |
| } |
| - Address* top = reinterpret_cast<Address*>(Heap::store_buffer_top()); |
| + Address* top = reinterpret_cast<Address*>(HEAP->store_buffer_top()); |
|
Erik Corry
2011/04/20 20:07:40
Use heap_ here and below
Vyacheslav Egorov (Chromium)
2011/04/24 11:24:08
here and below done where possible (GCPrologue/GCEpilogue are static).
|
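For context on the exchange above: HEAP in this era of V8 is a macro reaching the current isolate's heap through thread-local state (something like Isolate::Current()->heap(); the exact expansion is an assumption here), so inside a StoreBuffer member it is both costlier and less explicit than the heap_ field the constructor now stores. A stand-in sketch of the two access paths:

    class HeapSketch {
     public:
      HeapSketch() : top_(0) {}
      char* store_buffer_top() { return top_; }
     private:
      char* top_;
    };

    HeapSketch g_current_heap;  // stand-in for the thread's current heap
    #define HEAP_SKETCH (&g_current_heap)  // assumed shape of V8's HEAP macro

    class StoreBufferSketch {
     public:
      explicit StoreBufferSketch(HeapSketch* heap) : heap_(heap) {}
      bool TopsAgree() {
        // Both spellings reach the same heap; the field avoids the global
        // (thread-local in real V8) lookup that the macro performs.
        return HEAP_SKETCH->store_buffer_top() == heap_->store_buffer_top();
      }
     private:
      HeapSketch* heap_;
    };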
| for (Address* current = top - 1; current >= start_; current--) { |
| if (*current == cell_address) { |
| in_store_buffer_1_element_cache = current; |
| @@ -335,8 +344,8 @@ |
| void StoreBuffer::GCPrologue(GCType type, GCCallbackFlags flags) { |
| - ZapHashTables(); |
| - during_gc_ = true; |
| + HEAP->store_buffer()->ZapHashTables(); |
| + HEAP->store_buffer()->during_gc_ = true; |
| } |
| @@ -345,8 +354,8 @@ |
| void StoreBuffer::GCEpilogue(GCType type, GCCallbackFlags flags) { |
| - during_gc_ = false; |
| - Verify(); |
| + HEAP->store_buffer()->during_gc_ = false; |
| + HEAP->store_buffer()->Verify(); |
| } |
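GCPrologue and GCEpilogue are the "where possible" exception from the reply above: Setup() registers them as plain function pointers (AddGCPrologueCallback(&GCPrologue, kGCTypeAll)), so they must stay static, have no this, and therefore re-derive the store buffer via HEAP. A compilable sketch of that constraint:

    #include <assert.h>

    enum GCTypeSketch { kGCTypeAllSketch };
    enum GCCallbackFlagsSketch { kNoFlagsSketch };
    typedef void (*GCCallbackSketch)(GCTypeSketch, GCCallbackFlagsSketch);

    struct StoreBufferSketch {
      bool during_gc_;
      // Must be static to convert to GCCallbackSketch; no 'this' inside.
      static void GCPrologue(GCTypeSketch type, GCCallbackFlagsSketch flags);
    };

    // Stand-in for HEAP->store_buffer(): a static callback re-derives the
    // instance from global (thread-local in real V8) state.
    StoreBufferSketch* g_store_buffer;

    void StoreBufferSketch::GCPrologue(GCTypeSketch type,
                                       GCCallbackFlagsSketch flags) {
      g_store_buffer->during_gc_ = true;
    }

    int main() {
      StoreBufferSketch buffer = { false };
      g_store_buffer = &buffer;
      GCCallbackSketch callback = &StoreBufferSketch::GCPrologue;
      callback(kGCTypeAllSketch, kNoFlagsSketch);
      assert(buffer.during_gc_);
      return 0;
    }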
| @@ -359,7 +368,7 @@ |
| Address* limit = old_top_; |
| old_top_ = old_start_; |
| { |
| - DontMoveStoreBufferEntriesScope scope; |
| + DontMoveStoreBufferEntriesScope scope(this); |
| for (Address* current = old_start_; current < limit; current++) { |
| #ifdef DEBUG |
| Address* saved_top = old_top_; |
| @@ -368,7 +377,7 @@ |
| Object* object = *cell; |
| // May be invalid if object is not in new space. |
| HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
| - if (Heap::InFromSpace(object)) { |
| + if (heap_->InFromSpace(object)) { |
| callback(reinterpret_cast<HeapObject**>(cell), heap_object); |
| } |
| ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top); |
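The one-line change here, DontMoveStoreBufferEntriesScope scope(this), follows directly from de-staticizing: an RAII scope that used to flip a static flag now has to be told which store buffer it guards. The diff only shows the constructor call, so the body below is an assumed save-and-restore sketch:

    struct StoreBufferSketch {
      bool may_move_store_buffer_entries_;
    };

    class DontMoveStoreBufferEntriesScopeSketch {
     public:
      explicit DontMoveStoreBufferEntriesScopeSketch(StoreBufferSketch* buffer)
          : store_buffer_(buffer),
            stored_state_(buffer->may_move_store_buffer_entries_) {
        store_buffer_->may_move_store_buffer_entries_ = false;
      }
      ~DontMoveStoreBufferEntriesScopeSketch() {
        // Restore on scope exit, even on an early return.
        store_buffer_->may_move_store_buffer_entries_ = stored_state_;
      }
     private:
      StoreBufferSketch* store_buffer_;
      bool stored_state_;
    };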
| @@ -385,25 +394,25 @@ |
| // remove the flag from the page. |
| if (some_pages_to_scan) { |
| if (callback_ != NULL) { |
| - (*callback_)(NULL, kStoreBufferStartScanningPagesEvent); |
| + (*callback_)(heap_, NULL, kStoreBufferStartScanningPagesEvent); |
| } |
| PointerChunkIterator it; |
| MemoryChunk* chunk; |
| while ((chunk = it.next()) != NULL) { |
| if (chunk->scan_on_scavenge()) { |
| if (callback_ != NULL) { |
| - (*callback_)(chunk, kStoreBufferScanningPageEvent); |
| + (*callback_)(heap_, chunk, kStoreBufferScanningPageEvent); |
| } |
| - if (chunk->owner() == Heap::lo_space()) { |
| + if (chunk->owner() == heap_->lo_space()) { |
| LargePage* large_page = reinterpret_cast<LargePage*>(chunk); |
| HeapObject* array = large_page->GetObject(); |
| ASSERT(array->IsFixedArray()); |
| Address start = array->address(); |
| Address object_end = start + array->Size(); |
| - Heap::IteratePointersToNewSpace(start, object_end, callback); |
| + HEAP->IteratePointersToNewSpace(HEAP, start, object_end, callback); |
| } else { |
| Page* page = reinterpret_cast<Page*>(chunk); |
| - Heap::IteratePointersOnPage( |
| + HEAP->IteratePointersOnPage( |
| reinterpret_cast<PagedSpace*>(page->owner()), |
| &Heap::IteratePointersToNewSpace, |
| callback, |
| @@ -411,20 +420,20 @@ |
| } |
| } |
| } |
| - (*callback_)(NULL, kStoreBufferScanningPageEvent); |
| + (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); |
| } |
| } |
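Besides the switch to heap_, every invocation of callback_ in this hunk gains the heap as a leading argument, which implies the StoreBufferCallback typedef changed shape so the receiver knows which heap is scanning. Reconstructed from the call sites, so treat the exact signature as an assumption:

    class HeapSketch;         // stand-ins so the typedef compiles alone
    class MemoryChunkSketch;

    enum StoreBufferEventSketch {
      kStoreBufferStartScanningPagesEventSketch,
      kStoreBufferScanningPageEventSketch
    };

    // Old shape, implied by the '-' lines: (MemoryChunk*, StoreBufferEvent).
    // New shape, implied by the '+' lines: the heap threaded through first.
    typedef void (*StoreBufferCallbackSketch)(HeapSketch* heap,
                                              MemoryChunkSketch* chunk,
                                              StoreBufferEventSketch event);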
| void StoreBuffer::Compact() { |
| - Address* top = reinterpret_cast<Address*>(Heap::store_buffer_top()); |
| + Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top()); |
| if (top == start_) return; |
| // There's no check of the limit in the loop below so we check here for |
| // the worst case (compaction doesn't eliminate any pointers). |
| ASSERT(top <= limit_); |
| - Heap::public_set_store_buffer_top(start_); |
| + heap_->public_set_store_buffer_top(start_); |
| if (top - start_ > old_limit_ - old_top_) { |
| HandleFullness(); |
| } |
| @@ -434,9 +443,9 @@ |
| // duplicates will remain. We have two hash tables with different hash |
| // functions to reduce the number of unnecessary clashes. |
| for (Address* current = start_; current < top; current++) { |
| - ASSERT(!Heap::cell_space()->Contains(*current)); |
| - ASSERT(!Heap::code_space()->Contains(*current)); |
| - ASSERT(!Heap::old_data_space()->Contains(*current)); |
| + ASSERT(!heap_->cell_space()->Contains(*current)); |
| + ASSERT(!heap_->code_space()->Contains(*current)); |
| + ASSERT(!heap_->old_data_space()->Contains(*current)); |
| uintptr_t int_addr = reinterpret_cast<uintptr_t>(*current); |
| // Shift out the last bits including any tags. |
| int_addr >>= kPointerSizeLog2; |
| @@ -462,7 +471,7 @@ |
| *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| ASSERT(old_top_ <= old_limit_); |
| } |
| - Counters::store_buffer_compactions.Increment(); |
| + COUNTERS->store_buffer_compactions()->Increment(); |
| CheckForFullBuffer(); |
| } |
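The tail of Compact() (partly elided between the last two hunks) is the duplicate filter that the comment above describes: each surviving address is hashed into two direct-mapped tables with different hash functions, an exact match in either table marks a droppable duplicate, and everything else is appended at old_top_. A self-contained sketch of that scheme; the table size and hash mixing are illustrative, and only the two-table, exact-match shape follows the diff:

    #include <stdint.h>

    const int kHashMapLengthLog2 = 12;  // illustrative size
    const int kHashMapLength = 1 << kHashMapLengthLog2;
    const int kPointerSizeLog2 = sizeof(void*) == 8 ? 3 : 2;

    uintptr_t hash_map_1[kHashMapLength];  // zero-initialized globals
    uintptr_t hash_map_2[kHashMapLength];

    // Returns true if the address was seen recently, so the caller can
    // drop it. False positives are impossible: the tables store the full
    // shifted address, and only exact matches count as duplicates.
    bool IsRecentDuplicate(uintptr_t int_addr) {
      int_addr >>= kPointerSizeLog2;  // shift out tag and alignment bits
      uintptr_t hash1 = int_addr & (kHashMapLength - 1);
      uintptr_t hash2 =
          (int_addr >> kHashMapLengthLog2) & (kHashMapLength - 1);
      if (hash_map_1[hash1] == int_addr) return true;
      if (hash_map_2[hash2] == int_addr) return true;
      // Miss: remember the address. Direct-mapped slots mean a later clash
      // simply evicts this entry, which is why "duplicates will remain".
      hash_map_1[hash1] = int_addr;
      hash_map_2[hash2] = int_addr;
      return false;
    }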