OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <algorithm> | 5 #include <algorithm> |
6 | 6 |
7 #include "src/v8.h" | 7 #include "src/v8.h" |
8 | 8 |
9 #include "src/base/atomicops.h" | 9 #include "src/base/atomicops.h" |
10 #include "src/counters.h" | 10 #include "src/counters.h" |
(...skipping 459 matching lines...)
470 if (chunk->owner() == heap_->lo_space()) { | 470 if (chunk->owner() == heap_->lo_space()) { |
471 LargePage* large_page = reinterpret_cast<LargePage*>(chunk); | 471 LargePage* large_page = reinterpret_cast<LargePage*>(chunk); |
472 HeapObject* array = large_page->GetObject(); | 472 HeapObject* array = large_page->GetObject(); |
473 DCHECK(array->IsFixedArray()); | 473 DCHECK(array->IsFixedArray()); |
474 Address start = array->address(); | 474 Address start = array->address(); |
475 Address end = start + array->Size(); | 475 Address end = start + array->Size(); |
476 FindPointersToNewSpaceInRegion(start, end, slot_callback, clear_maps); | 476 FindPointersToNewSpaceInRegion(start, end, slot_callback, clear_maps); |
477 } else { | 477 } else { |
478 Page* page = reinterpret_cast<Page*>(chunk); | 478 Page* page = reinterpret_cast<Page*>(chunk); |
479 PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner()); | 479 PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner()); |
480 Address start = page->area_start(); | |
481 Address end = page->area_end(); | |
482 if (owner == heap_->map_space()) { | 480 if (owner == heap_->map_space()) { |
483 DCHECK(page->WasSweptPrecisely()); | 481 DCHECK(page->WasSwept()); |
484 HeapObjectIterator iterator(page, NULL); | 482 HeapObjectIterator iterator(page, NULL); |
485 for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; | 483 for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; |
486 heap_object = iterator.Next()) { | 484 heap_object = iterator.Next()) { |
487 // We skip free space objects. | 485 // We skip free space objects. |
488 if (!heap_object->IsFiller()) { | 486 if (!heap_object->IsFiller()) { |
489 DCHECK(heap_object->IsMap()); | 487 DCHECK(heap_object->IsMap()); |
490 FindPointersToNewSpaceInRegion( | 488 FindPointersToNewSpaceInRegion( |
491 heap_object->address() + Map::kPointerFieldsBeginOffset, | 489 heap_object->address() + Map::kPointerFieldsBeginOffset, |
492 heap_object->address() + Map::kPointerFieldsEndOffset, | 490 heap_object->address() + Map::kPointerFieldsEndOffset, |
493 slot_callback, clear_maps); | 491 slot_callback, clear_maps); |
494 } | 492 } |
495 } | 493 } |
496 } else { | 494 } else { |
497 if (!page->SweepingCompleted()) { | 495 if (!page->SweepingCompleted()) { |
498 heap_->mark_compact_collector()->SweepInParallel(page, owner); | 496 heap_->mark_compact_collector()->SweepInParallel(page, owner); |
499 if (!page->SweepingCompleted()) { | 497 if (!page->SweepingCompleted()) { |
500 // We were not able to sweep that page, i.e., a concurrent | 498 // We were not able to sweep that page, i.e., a concurrent |
501 // sweeper thread currently owns this page. | 499 // sweeper thread currently owns this page. |
502 // TODO(hpayer): This may introduce a huge pause here. We | 500 // TODO(hpayer): This may introduce a huge pause here. We |
503 // just care about finishing the sweeping of the scan-on-scavenge page. | 501 // just care about finishing the sweeping of the scan-on-scavenge page. |
504 heap_->mark_compact_collector()->EnsureSweepingCompleted(); | 502 heap_->mark_compact_collector()->EnsureSweepingCompleted(); |
505 } | 503 } |
506 } | 504 } |
507 // TODO(hpayer): remove the special casing and merge map and pointer | |
508 // space handling as soon as we removed conservative sweeping. | |
509 CHECK(page->owner() == heap_->old_pointer_space()); | 505 CHECK(page->owner() == heap_->old_pointer_space()); |
510 if (heap_->old_pointer_space()->swept_precisely()) { | 506 HeapObjectIterator iterator(page, NULL); |
511 HeapObjectIterator iterator(page, NULL); | 507 for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; |
512 for (HeapObject* heap_object = iterator.Next(); | 508 heap_object = iterator.Next()) { |
513 heap_object != NULL; heap_object = iterator.Next()) { | 509 // We iterate over objects that contain new space pointers only. |
514 // We iterate over objects that contain new space pointers only. | 510 if (heap_object->MayContainNewSpacePointers()) { |
515 if (heap_object->MayContainNewSpacePointers()) { | 511 FindPointersToNewSpaceInRegion( |
516 FindPointersToNewSpaceInRegion( | 512 heap_object->address() + HeapObject::kHeaderSize, |
517 heap_object->address() + HeapObject::kHeaderSize, | 513 heap_object->address() + heap_object->Size(), slot_callback, |
518 heap_object->address() + heap_object->Size(), | 514 clear_maps); |
519 slot_callback, clear_maps); | |
520 } | |
521 } | 515 } |
522 } else { | |
523 FindPointersToNewSpaceInRegion(start, end, slot_callback, | |
524 clear_maps); | |
525 } | 516 } |
526 } | 517 } |
527 } | 518 } |
528 } | 519 } |
529 } | 520 } |
530 if (callback_ != NULL) { | 521 if (callback_ != NULL) { |
531 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); | 522 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); |
532 } | 523 } |
533 } | 524 } |
534 } | 525 } |
(...skipping 46 matching lines...)
581 } | 572 } |
582 old_buffer_is_sorted_ = false; | 573 old_buffer_is_sorted_ = false; |
583 old_buffer_is_filtered_ = false; | 574 old_buffer_is_filtered_ = false; |
584 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 575 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
585 DCHECK(old_top_ <= old_limit_); | 576 DCHECK(old_top_ <= old_limit_); |
586 } | 577 } |
587 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 578 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
588 } | 579 } |
589 } | 580 } |
590 } // namespace v8::internal | 581 } // namespace v8::internal |