| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <algorithm> | 5 #include <algorithm> |
| 6 | 6 |
| 7 #include "src/v8.h" | 7 #include "src/v8.h" |
| 8 | 8 |
| 9 #include "src/counters.h" | 9 #include "src/counters.h" |
| 10 #include "src/heap/store-buffer-inl.h" | 10 #include "src/heap/store-buffer-inl.h" |
| (...skipping 179 matching lines...) |
| 190 int old_counter = containing_chunk->store_buffer_counter(); | 190 int old_counter = containing_chunk->store_buffer_counter(); |
| 191 if (old_counter >= threshold) { | 191 if (old_counter >= threshold) { |
| 192 containing_chunk->set_scan_on_scavenge(true); | 192 containing_chunk->set_scan_on_scavenge(true); |
| 193 created_new_scan_on_scavenge_pages = true; | 193 created_new_scan_on_scavenge_pages = true; |
| 194 } | 194 } |
| 195 containing_chunk->set_store_buffer_counter(old_counter + 1); | 195 containing_chunk->set_store_buffer_counter(old_counter + 1); |
| 196 previous_chunk = containing_chunk; | 196 previous_chunk = containing_chunk; |
| 197 } | 197 } |
| 198 if (created_new_scan_on_scavenge_pages) { | 198 if (created_new_scan_on_scavenge_pages) { |
| 199 Filter(MemoryChunk::SCAN_ON_SCAVENGE); | 199 Filter(MemoryChunk::SCAN_ON_SCAVENGE); |
|  | 200 heap_->isolate()->CountUsage( |
|  | 201 v8::Isolate::UseCounterFeature::kStoreBufferOverflow); |
| 200 } | 202 } |
| 201 old_buffer_is_filtered_ = true; | 203 old_buffer_is_filtered_ = true; |
| 202 } | 204 } |
| 203 | 205 |
| 204 | 206 |
| 205 void StoreBuffer::Filter(int flag) { | 207 void StoreBuffer::Filter(int flag) { |
| 206 Address* new_top = old_start_; | 208 Address* new_top = old_start_; |
| 207 MemoryChunk* previous_chunk = NULL; | 209 MemoryChunk* previous_chunk = NULL; |
| 208 for (Address* p = old_start_; p < old_top_; p++) { | 210 for (Address* p = old_start_; p < old_top_; p++) { |
| 209 Address addr = *p; | 211 Address addr = *p; |
| (...skipping 354 matching lines...) |
| 564 } | 566 } |
| 565 old_buffer_is_sorted_ = false; | 567 old_buffer_is_sorted_ = false; |
| 566 old_buffer_is_filtered_ = false; | 568 old_buffer_is_filtered_ = false; |
| 567 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 569 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| 568 DCHECK(old_top_ <= old_limit_); | 570 DCHECK(old_top_ <= old_limit_); |
| 569 } | 571 } |
| 570 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 572 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
| 571 } | 573 } |
| 572 } | 574 } |
| 573 } // namespace v8::internal | 575 } // namespace v8::internal |
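For reference, the lines added at 200-201 report the overflow through V8's public use-counter mechanism. Below is a minimal sketch of how an embedder could observe this from outside the engine via Isolate::SetUseCounterCallback in include/v8.h; the callback name and the logging are illustrative only, and the sketch assumes kStoreBufferOverflow is exposed in the UseCounterFeature enum, as this change implies.

#include <cstdio>

#include "include/v8.h"

// Illustrative callback: V8 invokes it each time it counts a feature use.
static void OnUseCounter(v8::Isolate*, v8::Isolate::UseCounterFeature feature) {
  if (feature == v8::Isolate::kStoreBufferOverflow) {
    std::fprintf(stderr, "V8 counted a store buffer overflow\n");
  }
}

// Registered by the embedder, typically right after isolate creation:
//   isolate->SetUseCounterCallback(OnUseCounter);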