OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 482 matching lines...)
493 ASSERT(map_aligned_end == end); | 493 ASSERT(map_aligned_end == end); |
494 | 494 |
495 FindPointersToNewSpaceInMaps(map_aligned_start, | 495 FindPointersToNewSpaceInMaps(map_aligned_start, |
496 map_aligned_end, | 496 map_aligned_end, |
497 slot_callback, | 497 slot_callback, |
498 clear_maps); | 498 clear_maps); |
499 } | 499 } |
500 | 500 |
501 | 501 |
502 // This function iterates over all the pointers in a paged space in the heap, | 502 // This function iterates over all the pointers in a paged space in the heap, |
503 // looking for pointers into new space. Within the pages there may be dead | 503 // looking for pointers into new space. Within the pages there may be dead |
Hannes Payer (out of office), 2014/03/03 15:18:37:
Please update this comment as well.

rmcilroy, 2014/03/03 15:51:16:
Done.
504 // objects that have not been overwritten by free spaces or fillers because of | 504 // objects that have not been overwritten by free spaces or fillers because of |
505 // lazy sweeping. These dead objects may not contain pointers to new space. | 505 // lazy sweeping. These dead objects may not contain pointers to new space. |
506 // The garbage areas that have been swept properly (these will normally be the | 506 // The garbage areas that have been swept properly (these will normally be the |
507 // large ones) will be marked with free space and filler map words. In | 507 // large ones) will be marked with free space and filler map words. In |
508 // addition any area that has never been used at all for object allocation must | 508 // addition any area that has never been used at all for object allocation must |
509 // be marked with a free space or filler. Because the free space and filler | 509 // be marked with a free space or filler. Because the free space and filler |
510 // maps do not move we can always recognize these even after a compaction. | 510 // maps do not move we can always recognize these even after a compaction. |
511 // Normal objects like FixedArrays and JSObjects should not contain references | 511 // Normal objects like FixedArrays and JSObjects should not contain references |
512 // to these maps. The special garbage section (see comment in spaces.h) is | 512 // to these maps. The special garbage section (see comment in spaces.h) is |
513 // skipped since it can contain absolutely anything. Any objects that are | 513 // skipped since it can contain absolutely anything. Any objects that are |
514 // allocated during iteration may or may not be visited by the iteration, but | 514 // allocated during iteration may or may not be visited by the iteration, but |
515 // they will not be partially visited. | 515 // they will not be partially visited. |
516 void StoreBuffer::FindPointersToNewSpaceOnPage( | 516 void StoreBuffer::FindPointersToNewSpaceOnPage( |
517 PagedSpace* space, | 517 PagedSpace* space, |
518 Page* page, | 518 Page* page, |
519 RegionCallback region_callback, | 519 RegionCallback region_callback, |
520 ObjectSlotCallback slot_callback, | 520 ObjectSlotCallback slot_callback, |
521 bool clear_maps) { | 521 bool clear_maps) { |
522 Address visitable_start = page->area_start(); | 522 Address visitable_start = page->area_start(); |
523 Address end_of_page = page->area_end(); | 523 Address end_of_page = page->area_end(); |
524 | 524 |
525 Address visitable_end = visitable_start; | 525 Address visitable_end = visitable_start; |
526 | 526 |
527 Object* free_space_map = heap_->free_space_map(); | 527 Object* free_space_map = heap_->free_space_map(); |
528 Object* two_pointer_filler_map = heap_->two_pointer_filler_map(); | 528 Object* two_pointer_filler_map = heap_->two_pointer_filler_map(); |
529 Object* constant_pool_array_map = heap_->constant_pool_array_map(); | |
529 | 530 |
530 while (visitable_end < end_of_page) { | 531 while (visitable_end < end_of_page) { |
531 Object* o = *reinterpret_cast<Object**>(visitable_end); | 532 Object* o = *reinterpret_cast<Object**>(visitable_end); |
532 // Skip fillers but not things that look like fillers in the special | 533 // Skip fillers or constant pool arrays (which never contain new-space |
533 // garbage section which can contain anything. | 534 // pointers but can contain pointers which can be confused for fillers) |
535 // but not things that look like fillers in the special garbage section | |
536 // which can contain anything. | |
534 if (o == free_space_map || | 537 if (o == free_space_map || |
535 o == two_pointer_filler_map || | 538 o == two_pointer_filler_map || |
539 o == constant_pool_array_map || | |
536 (visitable_end == space->top() && visitable_end != space->limit())) { | 540 (visitable_end == space->top() && visitable_end != space->limit())) { |
537 if (visitable_start != visitable_end) { | 541 if (visitable_start != visitable_end) { |
538 // After calling this the special garbage section may have moved. | 542 // After calling this the special garbage section may have moved. |
539 (this->*region_callback)(visitable_start, | 543 (this->*region_callback)(visitable_start, |
540 visitable_end, | 544 visitable_end, |
541 slot_callback, | 545 slot_callback, |
542 clear_maps); | 546 clear_maps); |
543 if (visitable_end >= space->top() && visitable_end < space->limit()) { | 547 if (visitable_end >= space->top() && visitable_end < space->limit()) { |
544 visitable_end = space->limit(); | 548 visitable_end = space->limit(); |
545 visitable_start = visitable_end; | 549 visitable_start = visitable_end; |
546 continue; | 550 continue; |
547 } | 551 } |
548 } | 552 } |
549 if (visitable_end == space->top() && visitable_end != space->limit()) { | 553 if (visitable_end == space->top() && visitable_end != space->limit()) { |
550 visitable_start = visitable_end = space->limit(); | 554 visitable_start = visitable_end = space->limit(); |
551 } else { | 555 } else { |
552 // At this point we are either at the start of a filler or we are at | 556 // At this point we are either at the start of a filler, a |
553 // the point where the space->top() used to be before the | 557 // constant pool array, or we are at the point where the space->top() |
554 // visit_pointer_region call above. Either way we can skip the | 558 // used to be before the visit_pointer_region call above. Either way we |
555 // object at the current spot: We don't promise to visit objects | 559 // can skip the object at the current spot: We don't promise to visit |
556 // allocated during heap traversal, and if space->top() moved then it | 560 // objects allocated during heap traversal, and if space->top() moved |
557 // must be because an object was allocated at this point. | 561 // then it must be because an object was allocated at this point. |
558 visitable_start = | 562 visitable_start = |
559 visitable_end + HeapObject::FromAddress(visitable_end)->Size(); | 563 visitable_end + HeapObject::FromAddress(visitable_end)->Size(); |
560 visitable_end = visitable_start; | 564 visitable_end = visitable_start; |
561 } | 565 } |
562 } else { | 566 } else { |
563 ASSERT(o != free_space_map); | 567 ASSERT(o != free_space_map); |
564 ASSERT(o != two_pointer_filler_map); | 568 ASSERT(o != two_pointer_filler_map); |
569 ASSERT(o != constant_pool_array_map); | |
565 ASSERT(visitable_end < space->top() || visitable_end >= space->limit()); | 570 ASSERT(visitable_end < space->top() || visitable_end >= space->limit()); |
566 visitable_end += kPointerSize; | 571 visitable_end += kPointerSize; |
567 } | 572 } |
568 } | 573 } |
569 ASSERT(visitable_end == end_of_page); | 574 ASSERT(visitable_end == end_of_page); |
570 if (visitable_start != visitable_end) { | 575 if (visitable_start != visitable_end) { |
571 (this->*region_callback)(visitable_start, | 576 (this->*region_callback)(visitable_start, |
572 visitable_end, | 577 visitable_end, |
573 slot_callback, | 578 slot_callback, |
574 clear_maps); | 579 clear_maps); |
(...skipping 149 matching lines...)
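As an aside for reviewers, the rationale in the new comment (a raw word inside a constant pool array can look exactly like a filler or free-space map word) can be shown with a small stand-alone sketch. None of the names below are V8's; this is a hypothetical model of the word-by-word scan in the hunk above, not the real implementation:

    // sketch.cc -- illustrative only; compiles on its own.
    #include <cstdint>
    #include <cstdio>

    // Stand-in for the free-space/filler map: the scanner just compares
    // raw words against this address.
    static const char kFreeSpaceMapSentinel = 0;
    static const void* const kFreeSpaceMap = &kFreeSpaceMapSentinel;

    int main() {
      // A "constant pool array" body: raw payload words with no type info.
      uintptr_t pool[3];
      pool[0] = 0x1234;                                      // an integer constant
      pool[1] = reinterpret_cast<uintptr_t>(kFreeSpaceMap);  // a stored heap pointer
      pool[2] = 42;

      // A word-by-word scan (like the loop above, before this CL) that lands
      // inside the pool would read pool[1] and conclude it found a free-space
      // map word, i.e. a dead region, even though it is data in a live object.
      for (uintptr_t word : pool) {
        if (word == reinterpret_cast<uintptr_t>(kFreeSpaceMap)) {
          std::printf("word %p looks like a filler/free-space map\n",
                      reinterpret_cast<void*>(word));
        }
      }
      return 0;
    }

The CL avoids this confusion by also recognizing the constant pool array's own map word and skipping the whole object, the same way fillers are skipped.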
724 } | 729 } |
725 old_buffer_is_sorted_ = false; | 730 old_buffer_is_sorted_ = false; |
726 old_buffer_is_filtered_ = false; | 731 old_buffer_is_filtered_ = false; |
727 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 732 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
728 ASSERT(old_top_ <= old_limit_); | 733 ASSERT(old_top_ <= old_limit_); |
729 } | 734 } |
730 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 735 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
731 } | 736 } |
732 | 737 |
733 } } // namespace v8::internal | 738 } } // namespace v8::internal |
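One more note on the compaction hunk above: the reconstruction *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2) implies that int_addr holds a slot address with its low kPointerSizeLog2 bits shifted away, which is lossless because store-buffer slots are pointer aligned. A minimal round-trip sketch with illustrative values (assuming 8-byte pointers, so kPointerSizeLog2 == 3):

    #include <cassert>
    #include <cstdint>

    int main() {
      const unsigned kPointerSizeLog2 = 3;   // 8-byte aligned slots assumed
      uintptr_t slot = 0x10008;              // some pointer-aligned slot address
      assert((slot & ((uintptr_t{1} << kPointerSizeLog2) - 1)) == 0);

      // Pack: the low bits are always zero for an aligned slot, so shifting
      // them away loses nothing and leaves a more compact value to dedupe.
      uintptr_t packed = slot >> kPointerSizeLog2;

      // Unpack, mirroring the int_addr << kPointerSizeLog2 in the code above.
      uintptr_t restored = packed << kPointerSizeLog2;
      assert(restored == slot);
      return 0;
    }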