Chromium Code Reviews

Unified Diff: src/store-buffer.cc

Issue 18998004: Implemented lazy sweeping of new space. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 5 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
 (...skipping 346 matching lines...)
 void StoreBuffer::VerifyPointers(PagedSpace* space,
                                  RegionCallback region_callback) {
   PageIterator it(space);

   while (it.has_next()) {
     Page* page = it.next();
     FindPointersToNewSpaceOnPage(
         reinterpret_cast<PagedSpace*>(page->owner()),
         page,
         region_callback,
-        &DummyScavengePointer);
+        &DummyScavengePointer,
+        false);
   }
 }


 void StoreBuffer::VerifyPointers(LargeObjectSpace* space) {
   LargeObjectIterator it(space);
   for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
     if (object->IsFixedArray()) {
       Address slot_address = object->address();
       Address end = object->address() + object->Size();
 (...skipping 27 matching lines...)
   during_gc_ = false;
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
   }
 #endif
 }


 void StoreBuffer::FindPointersToNewSpaceInRegion(
-    Address start, Address end, ObjectSlotCallback slot_callback) {
+    Address start,
+    Address end,
+    ObjectSlotCallback slot_callback,
+    bool clear_maps) {
   for (Address slot_address = start;
        slot_address < end;
        slot_address += kPointerSize) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
     if (heap_->InNewSpace(*slot)) {
       HeapObject* object = reinterpret_cast<HeapObject*>(*slot);
       ASSERT(object->IsHeapObject());
+      // The new space object was not promoted if it still contains a map
+      // pointer. Clear the map field now lazily.
+      if (clear_maps) ClearDeadObject(object);
       slot_callback(reinterpret_cast<HeapObject**>(slot), object);
       if (heap_->InNewSpace(*slot)) {
         EnterDirectlyIntoStoreBuffer(slot_address);
       }
     }
   }
 }


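The ClearDeadObject helper called above is introduced elsewhere in this patch (src/store-buffer.h and src/store-buffer-inl.h are in the file list) and is not shown in this diff. The invariant it relies on is the one stated in the added comment: the scavenger overwrites the map word of every object it promotes, so an object that still carries a map pointer after the scavenge is dead, and its map field can be zapped lazily. A toy, self-contained rendering of that invariant — the struct, names, and address ranges below are illustrative stand-ins, not V8's actual code:

#include <cassert>
#include <cstdint>

// Toy model: an object's first word is its map pointer while it is dead,
// or a forwarding address (outside the map space) once it was promoted.
struct ToyObject {
  std::uintptr_t map_word;
};

// Illustrative map-space bounds; real V8 asks the map space itself.
const std::uintptr_t kMapSpaceLo = 0x10000;
const std::uintptr_t kMapSpaceHi = 0x20000;

static bool InMapSpace(std::uintptr_t word) {
  return word >= kMapSpaceLo && word < kMapSpaceHi;
}

// Lazily "sweep" a dead object by clearing its map field.
static void ClearDeadObject(ToyObject* object) {
  if (InMapSpace(object->map_word)) object->map_word = 0;
}

int main() {
  ToyObject dead = {0x10040};      // map pointer intact: never promoted
  ToyObject promoted = {0x90008};  // map word is a forwarding address
  ClearDeadObject(&dead);
  ClearDeadObject(&promoted);
  assert(dead.map_word == 0);            // dead object swept lazily
  assert(promoted.map_word == 0x90008);  // survivor left untouched
  return 0;
}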
 // Compute start address of the first map following given addr.
 static inline Address MapStartAlign(Address addr) {
   Address page = Page::FromAddress(addr)->area_start();
   return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize);
 }


 // Compute end address of the first map preceding given addr.
 static inline Address MapEndAlign(Address addr) {
   Address page = Page::FromAllocationTop(addr)->area_start();
   return page + ((addr - page) / Map::kSize * Map::kSize);
 }


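MapStartAlign rounds up and MapEndAlign rounds down to a multiple of Map::kSize, measured from the page's area start, so a span of map space can be walked in whole-map steps. The arithmetic in isolation, with made-up constants (the Map::kSize stand-in and page address below are placeholders, not V8's values):

#include <cassert>
#include <cstdint>

int main() {
  const std::uintptr_t kMapSize = 88;        // stand-in for Map::kSize
  const std::uintptr_t page_start = 0x1000;  // stand-in for area_start()
  std::uintptr_t addr = page_start + 100;    // inside the second map slot

  // MapStartAlign: round up to the first map boundary at or after addr.
  std::uintptr_t up = page_start +
      (((addr - page_start) + (kMapSize - 1)) / kMapSize * kMapSize);
  // MapEndAlign: round down to the map boundary at or before addr.
  std::uintptr_t down =
      page_start + ((addr - page_start) / kMapSize * kMapSize);

  assert(up == page_start + 2 * kMapSize);    // 176 bytes into the page
  assert(down == page_start + 1 * kMapSize);  // 88 bytes into the page
  return 0;
}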
 void StoreBuffer::FindPointersToNewSpaceInMaps(
     Address start,
     Address end,
-    ObjectSlotCallback slot_callback) {
+    ObjectSlotCallback slot_callback,
+    bool clear_maps) {
   ASSERT(MapStartAlign(start) == start);
   ASSERT(MapEndAlign(end) == end);

   Address map_address = start;
   while (map_address < end) {
     ASSERT(!heap_->InNewSpace(Memory::Object_at(map_address)));
     ASSERT(Memory::Object_at(map_address)->IsMap());

     Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset;
     Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset;

     FindPointersToNewSpaceInRegion(pointer_fields_start,
                                    pointer_fields_end,
-                                   slot_callback);
+                                   slot_callback,
+                                   clear_maps);
     map_address += Map::kSize;
   }
 }


 void StoreBuffer::FindPointersToNewSpaceInMapsRegion(
     Address start,
     Address end,
-    ObjectSlotCallback slot_callback) {
+    ObjectSlotCallback slot_callback,
+    bool clear_maps) {
   Address map_aligned_start = MapStartAlign(start);
   Address map_aligned_end = MapEndAlign(end);

   ASSERT(map_aligned_start == start);
   ASSERT(map_aligned_end == end);

   FindPointersToNewSpaceInMaps(map_aligned_start,
                                map_aligned_end,
-                               slot_callback);
+                               slot_callback,
+                               clear_maps);
 }


 // This function iterates over all the pointers in a paged space in the heap,
 // looking for pointers into new space. Within the pages there may be dead
 // objects that have not been overwritten by free spaces or fillers because of
 // lazy sweeping. These dead objects may not contain pointers to new space.
 // The garbage areas that have been swept properly (these will normally be the
 // large ones) will be marked with free space and filler map words. In
 // addition any area that has never been used at all for object allocation must
 // be marked with a free space or filler. Because the free space and filler
 // maps do not move we can always recognize these even after a compaction.
 // Normal objects like FixedArrays and JSObjects should not contain references
 // to these maps. The special garbage section (see comment in spaces.h) is
 // skipped since it can contain absolutely anything. Any objects that are
 // allocated during iteration may or may not be visited by the iteration, but
 // they will not be partially visited.
 void StoreBuffer::FindPointersToNewSpaceOnPage(
     PagedSpace* space,
     Page* page,
     RegionCallback region_callback,
-    ObjectSlotCallback slot_callback) {
+    ObjectSlotCallback slot_callback,
+    bool clear_maps) {
   Address visitable_start = page->area_start();
   Address end_of_page = page->area_end();

   Address visitable_end = visitable_start;

   Object* free_space_map = heap_->free_space_map();
   Object* two_pointer_filler_map = heap_->two_pointer_filler_map();

   while (visitable_end < end_of_page) {
     Object* o = *reinterpret_cast<Object**>(visitable_end);
     // Skip fillers but not things that look like fillers in the special
     // garbage section which can contain anything.
     if (o == free_space_map ||
         o == two_pointer_filler_map ||
         (visitable_end == space->top() && visitable_end != space->limit())) {
       if (visitable_start != visitable_end) {
         // After calling this the special garbage section may have moved.
         (this->*region_callback)(visitable_start,
                                  visitable_end,
-                                 slot_callback);
+                                 slot_callback,
+                                 clear_maps);
         if (visitable_end >= space->top() && visitable_end < space->limit()) {
           visitable_end = space->limit();
           visitable_start = visitable_end;
           continue;
         }
       }
       if (visitable_end == space->top() && visitable_end != space->limit()) {
         visitable_start = visitable_end = space->limit();
       } else {
         // At this point we are either at the start of a filler or we are at
 (...skipping 10 matching lines...)
       ASSERT(o != free_space_map);
       ASSERT(o != two_pointer_filler_map);
       ASSERT(visitable_end < space->top() || visitable_end >= space->limit());
       visitable_end += kPointerSize;
     }
   }
   ASSERT(visitable_end == end_of_page);
   if (visitable_start != visitable_end) {
     (this->*region_callback)(visitable_start,
                              visitable_end,
-                             slot_callback);
+                             slot_callback,
+                             clear_maps);
   }
 }


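The page walk above flushes the region callback each time it reaches a free-space or filler map word (or the allocation top), so lazily-swept garbage is never scanned as if it held slots, and each callback sees one maximal run of potentially live words. A stripped-down, self-contained model of that region-flushing loop — the word-array "page" and kFillerMap sentinel are stand-ins for the real map-word checks, and the real walk advances by the filler object's full size rather than one word:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <utility>
#include <vector>

// A "page" is an array of words; kFillerMap marks swept garbage.
const std::uintptr_t kFillerMap = ~static_cast<std::uintptr_t>(0);

template <typename Callback>
void WalkPage(const std::vector<std::uintptr_t>& page, Callback visit_region) {
  std::size_t start = 0, end = 0;
  while (end < page.size()) {
    if (page[end] == kFillerMap) {
      if (start != end) visit_region(start, end);  // flush pending region
      ++end;  // skip the filler word
      start = end;
    } else {
      ++end;  // extend the current visitable region
    }
  }
  if (start != end) visit_region(start, end);  // trailing region
}

int main() {
  std::vector<std::uintptr_t> page = {1, 2, kFillerMap, 3, 4, 5, kFillerMap, 6};
  std::vector<std::pair<std::size_t, std::size_t> > regions;
  WalkPage(page, [&](std::size_t s, std::size_t e) {
    regions.push_back(std::make_pair(s, e));
  });
  // Three maximal regions, none straddling a filler.
  assert((regions == std::vector<std::pair<std::size_t, std::size_t> >{
              {0, 2}, {3, 6}, {7, 8}}));
  return 0;
}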
 void StoreBuffer::IteratePointersInStoreBuffer(
-    ObjectSlotCallback slot_callback) {
+    ObjectSlotCallback slot_callback,
+    bool clear_maps) {
   Address* limit = old_top_;
   old_top_ = old_start_;
   {
     DontMoveStoreBufferEntriesScope scope(this);
     for (Address* current = old_start_; current < limit; current++) {
 #ifdef DEBUG
       Address* saved_top = old_top_;
 #endif
       Object** slot = reinterpret_cast<Object**>(*current);
       Object* object = *slot;
       if (heap_->InFromSpace(object)) {
         HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
+        // The new space object was not promoted if it still contains a map
+        // pointer. Clear the map field now lazily.
+        if (clear_maps) ClearDeadObject(heap_object);
         slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object);
         if (heap_->InNewSpace(*slot)) {
           EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot));
         }
       }
       ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top);
     }
   }
 }


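Note the rebuild-in-place pattern in this loop: old_top_ is rewound to old_start_ before the scan, and only slots whose targets remain in new space are re-entered (via EnterDirectlyIntoStoreBuffer), so the buffer compacts itself as it is processed. The same pattern on a plain array, purely as illustration:

#include <cassert>
#include <cstddef>
#include <vector>

int main() {
  // Recorded "slots"; a negative value stands for a slot whose target
  // has left new space and should therefore be dropped.
  std::vector<int> buffer = {5, -1, 7, -2, 9};
  std::size_t new_top = 0;  // old_top_ rewound to old_start_
  for (std::size_t i = 0; i < buffer.size(); ++i) {
    if (buffer[i] >= 0) buffer[new_top++] = buffer[i];  // re-enter survivor
  }
  buffer.resize(new_top);
  assert((buffer == std::vector<int>{5, 7, 9}));  // compacted in place
  return 0;
}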
 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) {
+  IteratePointersToNewSpace(slot_callback, false);
+}
+
+
+void StoreBuffer::IteratePointersToNewSpaceAndClearMaps(
+    ObjectSlotCallback slot_callback) {
+  IteratePointersToNewSpace(slot_callback, true);
+}
+
+
+void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback,
+                                            bool clear_maps) {
   // We do not sort or remove duplicated entries from the store buffer because
   // we expect that callback will rebuild the store buffer thus removing
   // all duplicates and pointers to old space.
   bool some_pages_to_scan = PrepareForIteration();

   // TODO(gc): we want to skip slots on evacuation candidates
   // but we can't simply figure that out from slot address
   // because slot can belong to a large object.
-  IteratePointersInStoreBuffer(slot_callback);
+  IteratePointersInStoreBuffer(slot_callback, clear_maps);

   // We are done scanning all the pointers that were in the store buffer, but
   // there may be some pages marked scan_on_scavenge that have pointers to new
   // space that are not in the store buffer. We must scan them now. As we
   // scan, the surviving pointers to new space will be added to the store
   // buffer. If there are still a lot of pointers to new space then we will
   // keep the scan_on_scavenge flag on the page and discard the pointers that
   // were added to the store buffer. If there are not many pointers to new
   // space left on the page we will keep the pointers in the store buffer and
   // remove the flag from the page.
   if (some_pages_to_scan) {
     if (callback_ != NULL) {
       (*callback_)(heap_, NULL, kStoreBufferStartScanningPagesEvent);
     }
     PointerChunkIterator it(heap_);
     MemoryChunk* chunk;
     while ((chunk = it.next()) != NULL) {
       if (chunk->scan_on_scavenge()) {
         chunk->set_scan_on_scavenge(false);
         if (callback_ != NULL) {
           (*callback_)(heap_, chunk, kStoreBufferScanningPageEvent);
         }
         if (chunk->owner() == heap_->lo_space()) {
           LargePage* large_page = reinterpret_cast<LargePage*>(chunk);
           HeapObject* array = large_page->GetObject();
           ASSERT(array->IsFixedArray());
           Address start = array->address();
           Address end = start + array->Size();
-          FindPointersToNewSpaceInRegion(start, end, slot_callback);
+          FindPointersToNewSpaceInRegion(start, end, slot_callback, clear_maps);
         } else {
           Page* page = reinterpret_cast<Page*>(chunk);
           PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner());
           FindPointersToNewSpaceOnPage(
               owner,
               page,
               (owner == heap_->map_space() ?
                   &StoreBuffer::FindPointersToNewSpaceInMapsRegion :
                   &StoreBuffer::FindPointersToNewSpaceInRegion),
-              slot_callback);
+              slot_callback,
+              clear_maps);
         }
       }
     }
     if (callback_ != NULL) {
       (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent);
     }
   }
 }


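The patch surfaces the new flag through two named entry points, IteratePointersToNewSpace and IteratePointersToNewSpaceAndClearMaps, both forwarding to the bool-taking overload. Existing callers keep their behavior unchanged, and the map-clearing variant is explicit at the call site instead of a bare true/false argument. A minimal standalone sketch of that wrapper pattern (class and method names here are hypothetical, not V8's):

#include <cassert>

class Walker {
 public:
  // Named entry points forward to one parameterized implementation,
  // mirroring the pair of functions added in this patch.
  int Walk() { return WalkImpl(false); }
  int WalkAndClearMaps() { return WalkImpl(true); }

 private:
  int WalkImpl(bool clear_maps) { return clear_maps ? 1 : 0; }
};

int main() {
  Walker w;
  assert(w.Walk() == 0);            // old behavior preserved
  assert(w.WalkAndClearMaps() == 1);  // opt-in map clearing
  return 0;
}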
 (...skipping 44 matching lines...)
     }
     old_buffer_is_sorted_ = false;
     old_buffer_is_filtered_ = false;
     *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2);
     ASSERT(old_top_ <= old_limit_);
   }
   heap_->isolate()->counters()->store_buffer_compactions()->Increment();
 }

 } }  // namespace v8::internal