OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <algorithm> | 5 #include <algorithm> |
6 | 6 |
7 #include "src/v8.h" | 7 #include "src/v8.h" |
8 | 8 |
9 #include "src/base/atomicops.h" | 9 #include "src/base/atomicops.h" |
10 #include "src/counters.h" | 10 #include "src/counters.h" |
(...skipping 229 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
240 } | 240 } |
241 } | 241 } |
242 old_top_ = new_top; | 242 old_top_ = new_top; |
243 | 243 |
244 // Filtering hash sets are inconsistent with the store buffer after this | 244 // Filtering hash sets are inconsistent with the store buffer after this |
245 // operation. | 245 // operation. |
246 ClearFilteringHashSets(); | 246 ClearFilteringHashSets(); |
247 } | 247 } |
248 | 248 |
249 | 249 |
250 void StoreBuffer::RemoveSlots(Address start_address, Address end_address) { | |
251 struct IsValueInRangePredicate { | |
252 Address start_address_; | |
253 Address end_address_; | |
254 | |
255 IsValueInRangePredicate(Address start_address, Address end_address) | |
256 : start_address_(start_address), end_address_(end_address) {} | |
257 | |
258 bool operator()(Address addr) { | |
259 return start_address_ <= addr && addr < end_address_; | |
260 } | |
261 }; | |
262 | |
263 IsValueInRangePredicate predicate(start_address, end_address); | |
264 // Some address in old space that does not move. | |
265 const Address kRemovedSlot = heap_->undefined_value()->address(); | |
266 DCHECK(Page::FromAddress(kRemovedSlot)->NeverEvacuate()); | |
267 | |
268 { | |
269 Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top()); | |
270 std::replace_if(start_, top, predicate, kRemovedSlot); | |
271 } | |
272 | |
273 if (old_buffer_is_sorted_) { | |
274 // Remove slots from an old buffer preserving the order. | |
275 Address* lower = std::lower_bound(old_start_, old_top_, start_address); | |
276 if (lower != old_top_) { | |
277       // [lower, old_top_) range contains elements that are >= |start_address|. | 
278 Address* upper = std::lower_bound(lower, old_top_, end_address); | |
279 // Remove [lower, upper) from the buffer. | |
280 if (upper == old_top_) { | |
281 // All elements in [lower, old_top_) range are < |end_address|. | |
282 old_top_ = lower; | |
283 } else if (lower != upper) { | |
284         // [upper, old_top_) range contains elements that are >= |end_address|, | 
285 // move [upper, old_top_) range to [lower, ...) and update old_top_. | |
286 Address* new_top = lower; | |
287 for (Address* p = upper; p < old_top_; p++) { | |
288 *new_top++ = *p; | |
289 } | |
290 old_top_ = new_top; | |
291 } | |
292 } | |
293 } else { | |
294 std::replace_if(old_start_, old_top_, predicate, kRemovedSlot); | |
295 } | |
296 } | |
297 | |
298 | |
299 void StoreBuffer::SortUniq() { | 250 void StoreBuffer::SortUniq() { |
300 Compact(); | 251 Compact(); |
301 if (old_buffer_is_sorted_) return; | 252 if (old_buffer_is_sorted_) return; |
302 std::sort(old_start_, old_top_); | 253 std::sort(old_start_, old_top_); |
303 Uniq(); | 254 Uniq(); |
304 | 255 |
305 old_buffer_is_sorted_ = true; | 256 old_buffer_is_sorted_ = true; |
306 | 257 |
307 // Filtering hash sets are inconsistent with the store buffer after this | 258 // Filtering hash sets are inconsistent with the store buffer after this |
308 // operation. | 259 // operation. |
(...skipping 30 matching lines...) Expand all Loading... |
339 ClearFilteringHashSets(); | 290 ClearFilteringHashSets(); |
340 Uniq(); // Also removes things that no longer point to new space. | 291 Uniq(); // Also removes things that no longer point to new space. |
341 EnsureSpace(kStoreBufferSize / 2); | 292 EnsureSpace(kStoreBufferSize / 2); |
342 } | 293 } |
343 | 294 |
344 | 295 |
345 static Address* in_store_buffer_1_element_cache = NULL; | 296 static Address* in_store_buffer_1_element_cache = NULL; |
346 | 297 |
347 | 298 |
348 bool StoreBuffer::CellIsInStoreBuffer(Address cell_address) { | 299 bool StoreBuffer::CellIsInStoreBuffer(Address cell_address) { |
349 DCHECK_NOT_NULL(cell_address); | 300 if (!FLAG_enable_slow_asserts) return true; |
350 Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top()); | |
351 if (in_store_buffer_1_element_cache != NULL && | 301 if (in_store_buffer_1_element_cache != NULL && |
352 *in_store_buffer_1_element_cache == cell_address) { | 302 *in_store_buffer_1_element_cache == cell_address) { |
353 // Check if the cache still points into the active part of the buffer. | 303 return true; |
354 if ((start_ <= in_store_buffer_1_element_cache && | |
355 in_store_buffer_1_element_cache < top) || | |
356 (old_start_ <= in_store_buffer_1_element_cache && | |
357 in_store_buffer_1_element_cache < old_top_)) { | |
358 return true; | |
359 } | |
360 } | 304 } |
| 305 Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top()); |
361 for (Address* current = top - 1; current >= start_; current--) { | 306 for (Address* current = top - 1; current >= start_; current--) { |
362 if (*current == cell_address) { | 307 if (*current == cell_address) { |
363 in_store_buffer_1_element_cache = current; | 308 in_store_buffer_1_element_cache = current; |
364 return true; | 309 return true; |
365 } | 310 } |
366 } | 311 } |
367 for (Address* current = old_top_ - 1; current >= old_start_; current--) { | 312 for (Address* current = old_top_ - 1; current >= old_start_; current--) { |
368 if (*current == cell_address) { | 313 if (*current == cell_address) { |
369 in_store_buffer_1_element_cache = current; | 314 in_store_buffer_1_element_cache = current; |
370 return true; | 315 return true; |
(...skipping 319 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
690 } | 635 } |
691 old_buffer_is_sorted_ = false; | 636 old_buffer_is_sorted_ = false; |
692 old_buffer_is_filtered_ = false; | 637 old_buffer_is_filtered_ = false; |
693 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 638 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
694 DCHECK(old_top_ <= old_limit_); | 639 DCHECK(old_top_ <= old_limit_); |
695 } | 640 } |
696 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 641 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
697 } | 642 } |
698 } | 643 } |
699 } // namespace v8::internal | 644 } // namespace v8::internal |
OLD | NEW |