| OLD | NEW |
| --- | --- |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/store-buffer.h" | 5 #include "src/heap/store-buffer.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 | 8 |
| 9 #include "src/counters.h" | 9 #include "src/counters.h" |
| 10 #include "src/heap/incremental-marking.h" | 10 #include "src/heap/incremental-marking.h" |
| 11 #include "src/heap/store-buffer-inl.h" | |
| 12 #include "src/isolate.h" | 11 #include "src/isolate.h" |
| 13 #include "src/objects-inl.h" | 12 #include "src/objects-inl.h" |
| 14 #include "src/v8.h" | 13 #include "src/v8.h" |
| 15 | 14 |
| 16 namespace v8 { | 15 namespace v8 { |
| 17 namespace internal { | 16 namespace internal { |
| 18 | 17 |
| 19 StoreBuffer::StoreBuffer(Heap* heap) | 18 StoreBuffer::StoreBuffer(Heap* heap) |
| 20 : heap_(heap), start_(nullptr), limit_(nullptr), virtual_memory_(nullptr) {} | 19 : heap_(heap), |
| 20 top_(nullptr), |
| 21 start_(nullptr), |
| 22 limit_(nullptr), |
| 23 virtual_memory_(nullptr) {} |
| 21 | 24 |
| 22 void StoreBuffer::SetUp() { | 25 void StoreBuffer::SetUp() { |
| 23 // Allocate 3x the buffer size, so that we can start the new store buffer | 26 // Allocate 3x the buffer size, so that we can start the new store buffer |
| 24 // aligned to 2x the size. This lets us use a bit test to detect the end of | 27 // aligned to 2x the size. This lets us use a bit test to detect the end of |
| 25 // the area. | 28 // the area. |
| 26 virtual_memory_ = new base::VirtualMemory(kStoreBufferSize * 3); | 29 virtual_memory_ = new base::VirtualMemory(kStoreBufferSize * 3); |
| 27 uintptr_t start_as_int = | 30 uintptr_t start_as_int = |
| 28 reinterpret_cast<uintptr_t>(virtual_memory_->address()); | 31 reinterpret_cast<uintptr_t>(virtual_memory_->address()); |
| 29 start_ = | 32 start_ = |
| 30 reinterpret_cast<Address*>(RoundUp(start_as_int, kStoreBufferSize * 2)); | 33 reinterpret_cast<Address*>(RoundUp(start_as_int, kStoreBufferSize * 2)); |
| 31 limit_ = start_ + (kStoreBufferSize / kPointerSize); | 34 limit_ = start_ + (kStoreBufferSize / kPointerSize); |
| 32 | 35 |
| 33 DCHECK(reinterpret_cast<Address>(start_) >= virtual_memory_->address()); | 36 DCHECK(reinterpret_cast<Address>(start_) >= virtual_memory_->address()); |
| 34 DCHECK(reinterpret_cast<Address>(limit_) >= virtual_memory_->address()); | 37 DCHECK(reinterpret_cast<Address>(limit_) >= virtual_memory_->address()); |
| 35 Address* vm_limit = reinterpret_cast<Address*>( | 38 Address* vm_limit = reinterpret_cast<Address*>( |
| 36 reinterpret_cast<char*>(virtual_memory_->address()) + | 39 reinterpret_cast<char*>(virtual_memory_->address()) + |
| 37 virtual_memory_->size()); | 40 virtual_memory_->size()); |
| 38 DCHECK(start_ <= vm_limit); | 41 DCHECK(start_ <= vm_limit); |
| 39 DCHECK(limit_ <= vm_limit); | 42 DCHECK(limit_ <= vm_limit); |
| 40 USE(vm_limit); | 43 USE(vm_limit); |
| 41 DCHECK((reinterpret_cast<uintptr_t>(limit_) & kStoreBufferOverflowBit) != 0); | 44 DCHECK((reinterpret_cast<uintptr_t>(limit_) & kStoreBufferOverflowBit) != 0); |
| 42 DCHECK((reinterpret_cast<uintptr_t>(limit_ - 1) & kStoreBufferOverflowBit) == | 45 DCHECK((reinterpret_cast<uintptr_t>(limit_ - 1) & kStoreBufferOverflowBit) == |
| 43 0); | 46 0); |
| 44 | 47 |
| 45 if (!virtual_memory_->Commit(reinterpret_cast<Address>(start_), | 48 if (!virtual_memory_->Commit(reinterpret_cast<Address>(start_), |
| 46 kStoreBufferSize, | 49 kStoreBufferSize, |
| 47 false)) { // Not executable. | 50 false)) { // Not executable. |
| 48 V8::FatalProcessOutOfMemory("StoreBuffer::SetUp"); | 51 V8::FatalProcessOutOfMemory("StoreBuffer::SetUp"); |
| 49 } | 52 } |
| 50 heap_->set_store_buffer_top(reinterpret_cast<Smi*>(start_)); | 53 top_ = start_; |
| 51 } | 54 } |
| 52 | 55 |
| 53 | 56 |
| 54 void StoreBuffer::TearDown() { | 57 void StoreBuffer::TearDown() { |
| 55 delete virtual_memory_; | 58 delete virtual_memory_; |
| 56 start_ = limit_ = NULL; | 59 top_ = start_ = limit_ = nullptr; |
| 57 heap_->set_store_buffer_top(reinterpret_cast<Smi*>(start_)); | |
| 58 } | 60 } |
| 59 | 61 |
| 60 | 62 |
| 61 void StoreBuffer::StoreBufferOverflow(Isolate* isolate) { | 63 void StoreBuffer::StoreBufferOverflow(Isolate* isolate) { |
| 62 isolate->heap()->store_buffer()->MoveEntriesToRememberedSet(); | 64 isolate->heap()->store_buffer()->MoveEntriesToRememberedSet(); |
| 63 isolate->counters()->store_buffer_overflows()->Increment(); | 65 isolate->counters()->store_buffer_overflows()->Increment(); |
| 64 } | 66 } |
| 65 | 67 |
| 66 void StoreBuffer::MoveEntriesToRememberedSet() { | 68 void StoreBuffer::MoveEntriesToRememberedSet() { |
| 67 Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top()); | 69 if (top_ == start_) return; |
| 68 if (top == start_) return; | 70 DCHECK(top_ <= limit_); |
| 69 DCHECK(top <= limit_); | 71 for (Address* current = start_; current < top_; current++) { |
| 70 heap_->set_store_buffer_top(reinterpret_cast<Smi*>(start_)); | |
| 71 for (Address* current = start_; current < top; current++) { | |
| 72 DCHECK(!heap_->code_space()->Contains(*current)); | 72 DCHECK(!heap_->code_space()->Contains(*current)); |
| 73 Address addr = *current; | 73 Address addr = *current; |
| 74 Page* page = Page::FromAnyPointerAddress(heap_, addr); | 74 Page* page = Page::FromAnyPointerAddress(heap_, addr); |
| 75 RememberedSet<OLD_TO_NEW>::Insert(page, addr); | 75 RememberedSet<OLD_TO_NEW>::Insert(page, addr); |
| 76 } | 76 } |
| 77 top_ = start_; |
| 77 } | 78 } |
| 78 | 79 |
| 79 } // namespace internal | 80 } // namespace internal |
| 80 } // namespace v8 | 81 } // namespace v8 |
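
The comment at the top of `SetUp()` relies on a power-of-two trick: reserving 3x the buffer size guarantees that a 1x-sized buffer aligned to 2x the size always fits inside the reservation, and that alignment is what allows the end of the buffer to be detected with a single bit test on the insertion pointer (the two `kStoreBufferOverflowBit` DCHECKs assert exactly this). Below is a minimal standalone sketch of that relationship; the concrete values of `kStoreBufferSize` and `kStoreBufferOverflowBit`, the `RoundUp` helper, and the use of `std::malloc` in place of `base::VirtualMemory` are illustrative assumptions, not V8's actual definitions.

```cpp
// Minimal standalone sketch of the alignment trick described in SetUp();
// constants and allocation are illustrative assumptions, not V8 code.
#include <cassert>
#include <cstdint>
#include <cstdlib>

namespace {

// Assumed buffer size: any power of two works for the trick.
constexpr uintptr_t kStoreBufferSize = uintptr_t{1} << 16;
// With start aligned to 2x the size, this single bit distinguishes
// "inside the buffer" from "one past the end".
constexpr uintptr_t kStoreBufferOverflowBit = kStoreBufferSize;

// Round x up to the next multiple of a power-of-two alignment.
uintptr_t RoundUp(uintptr_t x, uintptr_t alignment) {
  return (x + alignment - 1) & ~(alignment - 1);
}

}  // namespace

int main() {
  // Reserve 3x the size so that a 1x-sized buffer aligned to 2x the size
  // always fits inside the reservation, as the SetUp() comment explains.
  void* reservation = std::malloc(3 * kStoreBufferSize);
  uintptr_t start =
      RoundUp(reinterpret_cast<uintptr_t>(reservation), 2 * kStoreBufferSize);
  uintptr_t limit = start + kStoreBufferSize;

  // Every slot address in [start, limit) has the overflow bit clear; limit
  // is the first address with it set. These mirror the DCHECKs in SetUp().
  assert((start & kStoreBufferOverflowBit) == 0);
  assert((limit & kStoreBufferOverflowBit) != 0);
  assert(((limit - 1) & kStoreBufferOverflowBit) == 0);

  // Overflow detection on the insertion pointer is therefore a bit test,
  // not a comparison against limit.
  for (uintptr_t top = start; top < limit; top += sizeof(void*)) {
    assert((top & kStoreBufferOverflowBit) == 0);  // still inside the buffer
  }

  std::free(reservation);
  return 0;
}
```

The sketch only shows why the 2x alignment makes a single-bit overflow test possible; where that test actually happens (in generated code or in C++) is outside the scope of this diff.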