| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <algorithm> | 5 #include <algorithm> |
| 6 | 6 |
| 7 #include "src/v8.h" | 7 #include "src/v8.h" |
| 8 | 8 |
| 9 #include "src/counters.h" | 9 #include "src/counters.h" |
| 10 #include "src/heap/store-buffer-inl.h" | 10 #include "src/heap/store-buffer-inl.h" |
| (...skipping 179 matching lines...) | |
| 190 int old_counter = containing_chunk->store_buffer_counter(); | 190 int old_counter = containing_chunk->store_buffer_counter(); |
| 191 if (old_counter >= threshold) { | 191 if (old_counter >= threshold) { |
| 192 containing_chunk->set_scan_on_scavenge(true); | 192 containing_chunk->set_scan_on_scavenge(true); |
| 193 created_new_scan_on_scavenge_pages = true; | 193 created_new_scan_on_scavenge_pages = true; |
| 194 } | 194 } |
| 195 containing_chunk->set_store_buffer_counter(old_counter + 1); | 195 containing_chunk->set_store_buffer_counter(old_counter + 1); |
| 196 previous_chunk = containing_chunk; | 196 previous_chunk = containing_chunk; |
| 197 } | 197 } |
| 198 if (created_new_scan_on_scavenge_pages) { | 198 if (created_new_scan_on_scavenge_pages) { |
| 199 Filter(MemoryChunk::SCAN_ON_SCAVENGE); | 199 Filter(MemoryChunk::SCAN_ON_SCAVENGE); |
| 200 heap_->isolate()->CountUsage( | |
| 201 v8::Isolate::UseCounterFeature::kStoreBufferOverflow); | |
| 202 } | 200 } |
| 203 old_buffer_is_filtered_ = true; | 201 old_buffer_is_filtered_ = true; |
| 204 } | 202 } |
| 205 | 203 |
| 206 | 204 |
| 207 void StoreBuffer::Filter(int flag) { | 205 void StoreBuffer::Filter(int flag) { |
| 208 Address* new_top = old_start_; | 206 Address* new_top = old_start_; |
| 209 MemoryChunk* previous_chunk = NULL; | 207 MemoryChunk* previous_chunk = NULL; |
| 210 for (Address* p = old_start_; p < old_top_; p++) { | 208 for (Address* p = old_start_; p < old_top_; p++) { |
| 211 Address addr = *p; | 209 Address addr = *p; |
| (...skipping 352 matching lines...) | |
| 564 } | 562 } |
| 565 old_buffer_is_sorted_ = false; | 563 old_buffer_is_sorted_ = false; |
| 566 old_buffer_is_filtered_ = false; | 564 old_buffer_is_filtered_ = false; |
| 567 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 565 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| 568 DCHECK(old_top_ <= old_limit_); | 566 DCHECK(old_top_ <= old_limit_); |
| 569 } | 567 } |
| 570 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 568 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
| 571 } | 569 } |
| 572 } | 570 } |
| 573 } // namespace v8::internal | 571 } // namespace v8::internal |
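
For context on the hunk around old lines 200-201: the call being removed reported the kStoreBufferOverflow use counter to the embedder. The sketch below shows how an embedder might observe that counter through the public v8::Isolate::SetUseCounterCallback() API. It is illustrative only and not part of this change; the counter variable and function names are assumptions made for the example.

```cpp
// Minimal sketch, assuming a standard V8 embedder setup. SetUseCounterCallback()
// and the UseCounterCallback signature are part of the public v8::Isolate API;
// everything else here is hypothetical.
#include <atomic>

#include "v8.h"  // V8 public API header

namespace {

// Hypothetical embedder-side tally of store buffer overflows.
std::atomic<int> g_store_buffer_overflows{0};

// Matches v8::Isolate::UseCounterCallback:
//   void (*)(Isolate* isolate, UseCounterFeature feature)
void CountV8Feature(v8::Isolate*, v8::Isolate::UseCounterFeature feature) {
  if (feature == v8::Isolate::kStoreBufferOverflow) {
    g_store_buffer_overflows.fetch_add(1, std::memory_order_relaxed);
  }
}

}  // namespace

// Register the callback once after the isolate has been created.
void InstallUseCounterCallback(v8::Isolate* isolate) {
  isolate->SetUseCounterCallback(CountV8Feature);
}
```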