Index: src/heap/store-buffer.cc
diff --git a/src/heap/store-buffer.cc b/src/heap/store-buffer.cc
index a982eb3c40280f96d883c870161363f1a2656ae9..ffd5d96c5c663ce9f98fdc723352da07d209679c 100644
--- a/src/heap/store-buffer.cc
+++ b/src/heap/store-buffer.cc
@@ -16,62 +16,137 @@ namespace v8 {
 namespace internal {
 StoreBuffer::StoreBuffer(Heap* heap)
-    : heap_(heap),
-      top_(nullptr),
-      start_(nullptr),
-      limit_(nullptr),
-      virtual_memory_(nullptr) {}
+    : heap_(heap), top_(nullptr), current_(0), virtual_memory_(nullptr) {
+  for (int i = 0; i < kStoreBuffers; i++) {
+    start_[i] = nullptr;
+    limit_[i] = nullptr;
+    lazy_top_[i] = nullptr;
+  }
+  task_running_ = false;
+}
 void StoreBuffer::SetUp() {
   // Allocate 3x the buffer size, so that both store buffers fit at a
   // kStoreBufferSize-aligned start address. This lets us use a bit test to
   // detect the end of each buffer.
-  virtual_memory_ = new base::VirtualMemory(kStoreBufferSize * 2);
+  virtual_memory_ = new base::VirtualMemory(kStoreBufferSize * 3);
   uintptr_t start_as_int =
       reinterpret_cast<uintptr_t>(virtual_memory_->address());
-  start_ = reinterpret_cast<Address*>(RoundUp(start_as_int, kStoreBufferSize));
-  limit_ = start_ + (kStoreBufferSize / kPointerSize);
+  start_[0] =
+      reinterpret_cast<Address*>(RoundUp(start_as_int, kStoreBufferSize));
+  limit_[0] = start_[0] + (kStoreBufferSize / kPointerSize);
+  start_[1] = limit_[0];
+  limit_[1] = start_[1] + (kStoreBufferSize / kPointerSize);
-  DCHECK(reinterpret_cast<Address>(start_) >= virtual_memory_->address());
-  DCHECK(reinterpret_cast<Address>(limit_) >= virtual_memory_->address());
   Address* vm_limit = reinterpret_cast<Address*>(
       reinterpret_cast<char*>(virtual_memory_->address()) +
       virtual_memory_->size());
-  DCHECK(start_ <= vm_limit);
-  DCHECK(limit_ <= vm_limit);
+
   USE(vm_limit);
-  DCHECK((reinterpret_cast<uintptr_t>(limit_) & kStoreBufferMask) == 0);
+  for (int i = 0; i < kStoreBuffers; i++) {
+    DCHECK(reinterpret_cast<Address>(start_[i]) >= virtual_memory_->address());
+    DCHECK(reinterpret_cast<Address>(limit_[i]) >= virtual_memory_->address());
+    DCHECK(start_[i] <= vm_limit);
+    DCHECK(limit_[i] <= vm_limit);
+    DCHECK((reinterpret_cast<uintptr_t>(limit_[i]) & kStoreBufferMask) == 0);
+  }
-  if (!virtual_memory_->Commit(reinterpret_cast<Address>(start_),
-                               kStoreBufferSize,
+  if (!virtual_memory_->Commit(reinterpret_cast<Address>(start_[0]),
+                               kStoreBufferSize * kStoreBuffers,
                                false)) {  // Not executable.
     V8::FatalProcessOutOfMemory("StoreBuffer::SetUp");
   }
-  top_ = start_;
+  current_ = 0;
+  top_ = start_[current_];
 }
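
The committed region now holds the two buffers back to back, each
kStoreBufferSize-aligned, so the end of either buffer can be detected with a
single bit test. A standalone sketch of that test, assuming (as in
store-buffer.h) that kStoreBufferSize is a power of two and kStoreBufferMask is
kStoreBufferSize - 1; the constant value here is illustrative only:

  #include <cstdint>

  constexpr uintptr_t kStoreBufferSize = uintptr_t{1} << 16;  // assumed value
  constexpr uintptr_t kStoreBufferMask = kStoreBufferSize - 1;

  // A pointer that has just walked off the end of a buffer lands on a
  // kStoreBufferSize boundary, so its low bits are all zero.
  bool AtBufferEnd(uintptr_t top) { return (top & kStoreBufferMask) == 0; }
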
 void StoreBuffer::TearDown() {
   delete virtual_memory_;
-  top_ = start_ = limit_ = nullptr;
+  top_ = nullptr;
+  for (int i = 0; i < kStoreBuffers; i++) {
+    start_[i] = nullptr;
+    limit_[i] = nullptr;
+    lazy_top_[i] = nullptr;
+  }
 }
 void StoreBuffer::StoreBufferOverflow(Isolate* isolate) {
-  isolate->heap()->store_buffer()->MoveEntriesToRememberedSet();
+  isolate->heap()->store_buffer()->FlipStoreBuffers();
   isolate->counters()->store_buffer_overflows()->Increment();
 }
-void StoreBuffer::MoveEntriesToRememberedSet() {
-  if (top_ == start_) return;
-  DCHECK(top_ <= limit_);
-  for (Address* current = start_; current < top_; current++) {
+void StoreBuffer::FlipStoreBuffers() {
+  base::LockGuard<base::Mutex> guard(&mutex_);
+  int other = (current_ + 1) % kStoreBuffers;
+  MoveEntriesToRememberedSet(other);
+  lazy_top_[current_] = top_;
+  current_ = other;
+  top_ = start_[current_];
+
+  if (!task_running_) {
+    task_running_ = true;
+    Task* task = new Task(heap_->isolate(), this);
+    V8::GetCurrentPlatform()->CallOnBackgroundThread(
+        task, v8::Platform::kShortRunningTask);
+  }
+}
+
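
FlipStoreBuffers is the core of the new scheme: the mutator always appends to
the current buffer, and a full buffer is handed off via lazy_top_ to a
background task that drains it into the remembered set. A simplified sketch of
that hand-off, with std::mutex standing in for base::Mutex and the task
dispatch elided; all names below are stand-ins, not the patch's API:

  #include <cstdint>
  #include <mutex>

  struct DoubleBuffer {
    static const int kStoreBuffers = 2;
    uintptr_t* start[kStoreBuffers];
    uintptr_t* lazy_top[kStoreBuffers];  // non-null: buffer awaits draining
    uintptr_t* top = nullptr;
    int current = 0;
    std::mutex mutex;

    // Stand-in for MoveEntriesToRememberedSet(index).
    void Drain(int index) { lazy_top[index] = nullptr; }

    void Flip() {
      std::lock_guard<std::mutex> guard(mutex);
      int other = (current + 1) % kStoreBuffers;
      Drain(other);             // make sure the other buffer is free again
      lazy_top[current] = top;  // publish the full buffer for the drainer
      current = other;
      top = start[current];
      // ...post a background task that drains the published buffer...
    }
  };
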
+void StoreBuffer::MoveEntriesToRememberedSet(int index) {
+  DCHECK_GE(index, 0);
+  DCHECK_LT(index, kStoreBuffers);
+  if (!lazy_top_[index]) return;
+  for (Address* current = start_[index]; current < lazy_top_[index];
+       current++) {
     DCHECK(!heap_->code_space()->Contains(*current));
     Address addr = *current;
     Page* page = Page::FromAnyPointerAddress(heap_, addr);
-    RememberedSet<OLD_TO_NEW>::Insert(page, addr);
+    if (IsDeletionAddress(addr)) {
+      current++;
+      Address end = *current;
+      DCHECK(!IsDeletionAddress(end));
+      addr = UnmarkDeletionAddress(addr);
+      if (end) {
+        RememberedSet<OLD_TO_NEW>::RemoveRange(page, addr, end,
+                                               SlotSet::PREFREE_EMPTY_BUCKETS);
+      } else {
+        RememberedSet<OLD_TO_NEW>::Remove(page, addr);
+      }
+    } else {
+      DCHECK(!IsDeletionAddress(addr));
+      RememberedSet<OLD_TO_NEW>::Insert(page, addr);
+    }
+  }
+  lazy_top_[index] = nullptr;
+}
+
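
IsDeletionAddress, MarkDeletionAddress, and UnmarkDeletionAddress are declared
in store-buffer.h. One plausible encoding, assuming slot addresses are always
pointer-aligned so the low bit is free to act as a tag; the tag choice below is
an illustration, not necessarily what the header does:

  #include <cstdint>

  using Address = uintptr_t;  // stand-in for v8's Address

  constexpr Address kDeletionTag = 1;  // assumed free low bit

  inline Address MarkDeletionAddress(Address addr) {
    return addr | kDeletionTag;
  }
  inline Address UnmarkDeletionAddress(Address addr) {
    return addr & ~kDeletionTag;
  }
  inline bool IsDeletionAddress(Address addr) {
    return (addr & kDeletionTag) != 0;
  }
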
+void StoreBuffer::MoveAllEntriesToRememberedSet() {
+  base::LockGuard<base::Mutex> guard(&mutex_);
+  int other = (current_ + 1) % kStoreBuffers;
+  MoveEntriesToRememberedSet(other);
+  lazy_top_[current_] = top_;
+  MoveEntriesToRememberedSet(current_);
+  top_ = start_[current_];
+}
+
+void StoreBuffer::ConcurrentlyProcessStoreBuffer() {
+  base::LockGuard<base::Mutex> guard(&mutex_);
+  int other = (current_ + 1) % kStoreBuffers;
+  MoveEntriesToRememberedSet(other);
+  task_running_ = false;
+}
+
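
The Task constructed in FlipStoreBuffers is declared in store-buffer.h and is
not part of this file's diff. A hedged sketch of its likely shape against the
public v8::Task interface (the real class may wrap a cancelable task instead):

  // Hypothetical reconstruction; only ConcurrentlyProcessStoreBuffer() is
  // taken from the patch.
  class StoreBuffer::Task : public v8::Task {
   public:
    Task(Isolate* isolate, StoreBuffer* store_buffer)
        : isolate_(isolate), store_buffer_(store_buffer) {}
    void Run() override {
      // Drains the non-current buffer, then clears task_running_ under the
      // store buffer's mutex so the next overflow can repost a task.
      store_buffer_->ConcurrentlyProcessStoreBuffer();
    }

   private:
    Isolate* isolate_;
    StoreBuffer* store_buffer_;
  };
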
+void StoreBuffer::DeleteEntry(Address start, Address end) {
+  // A deletion entry occupies two slots: the tagged start address and the
+  // end address. Note that top_ is an Address*, so the bound counts slots.
+  if (top_ + 2 > limit_[current_]) {
+    StoreBufferOverflow(heap_->isolate());
   }
-  top_ = start_;
+  *top_ = MarkDeletionAddress(start);
+  top_++;
+  *top_ = end;
+  top_++;
 }
 }  // namespace internal
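
DeleteEntry records a deletion as a two-slot entry: the tagged start address
followed by the end address, where a null end means a single slot. A
hypothetical call-site sketch, assuming the declaration in store-buffer.h
gives end a nullptr default (the Remove path above suggests it does);
store_buffer, slot_addr, range_start, and range_end are placeholders:

  store_buffer->DeleteEntry(slot_addr);              // one slot, end == nullptr
  store_buffer->DeleteEntry(range_start, range_end); // a whole range of slots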