OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/store-buffer.h" | 5 #include "src/store-buffer.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 | 8 |
9 #include "src/v8.h" | 9 #include "src/v8.h" |
10 | 10 |
(...skipping 497 matching lines...) | |
508 if (page->parallel_sweeping() > MemoryChunk::SWEEPING_FINALIZE) { | 508 if (page->parallel_sweeping() > MemoryChunk::SWEEPING_FINALIZE) { |
509 heap_->mark_compact_collector()->SweepInParallel(page, owner); | 509 heap_->mark_compact_collector()->SweepInParallel(page, owner); |
510 if (page->parallel_sweeping() > MemoryChunk::SWEEPING_FINALIZE) { | 510 if (page->parallel_sweeping() > MemoryChunk::SWEEPING_FINALIZE) { |
511 // We were not able to sweep that page, i.e., a concurrent | 511 // We were not able to sweep that page, i.e., a concurrent |
512 // sweeper thread currently owns this page. | 512 // sweeper thread currently owns this page. |
513 // TODO(hpayer): This may introduce a huge pause here. We | 513 // TODO(hpayer): This may introduce a huge pause here. We |
514 // just care about finish sweeping of the scan on scavenge page. | 514 // just care about finish sweeping of the scan on scavenge page. |
515 heap_->mark_compact_collector()->EnsureSweepingCompleted(); | 515 heap_->mark_compact_collector()->EnsureSweepingCompleted(); |
516 } | 516 } |
517 } | 517 } |
518 FindPointersToNewSpaceInRegion( | 518 // TODO(hpayer): remove the special casing and merge map and pointer |
519 start, end, slot_callback, clear_maps); | 519 // space handling as soon as we removed conservative sweeping. |
520 CHECK(page->owner() == heap_->old_pointer_space()); | |
521 if (heap_->old_pointer_space()->swept_precisely()) { | |
522 HeapObjectIterator iterator(page, NULL); | |
523 for (HeapObject* heap_object = iterator.Next(); | |
524 heap_object != NULL; heap_object = iterator.Next()) { | |
525 // We iterate over objects that contain pointers only. | |
rmcilroy (2014/07/24 10:24:48): nit - new space pointers only.
Hannes Payer (out of office) (2014/07/24 10:40:08): Done.
526 if (heap_object->MayContainNewSpacePointers()) { | |
527 FindPointersToNewSpaceInRegion( | |
528 heap_object->address() + HeapObject::kHeaderSize, | |
529 heap_object->address() + heap_object->Size(), | |
530 slot_callback, clear_maps); | |
531 } | |
532 } | |
533 } else { | |
534 FindPointersToNewSpaceInRegion(start, end, slot_callback, | |
535 clear_maps); | |
536 } | |
520 } | 537 } |
521 } | 538 } |
522 } | 539 } |
523 } | 540 } |
524 if (callback_ != NULL) { | 541 if (callback_ != NULL) { |
525 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); | 542 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); |
526 } | 543 } |
527 } | 544 } |
528 } | 545 } |
529 | 546 |
(...skipping 45 matching lines...) | |
575 } | 592 } |
576 old_buffer_is_sorted_ = false; | 593 old_buffer_is_sorted_ = false; |
577 old_buffer_is_filtered_ = false; | 594 old_buffer_is_filtered_ = false; |
578 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 595 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
579 ASSERT(old_top_ <= old_limit_); | 596 ASSERT(old_top_ <= old_limit_); |
580 } | 597 } |
581 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 598 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
582 } | 599 } |
583 | 600 |
584 } } // namespace v8::internal | 601 } } // namespace v8::internal |