Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 337 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 348 } | 348 } |
| 349 | 349 |
| 350 | 350 |
| 351 void StoreBuffer::GCPrologue() { | 351 void StoreBuffer::GCPrologue() { |
| 352 ClearFilteringHashSets(); | 352 ClearFilteringHashSets(); |
| 353 during_gc_ = true; | 353 during_gc_ = true; |
| 354 } | 354 } |
| 355 | 355 |
| 356 | 356 |
| 357 #ifdef VERIFY_HEAP | 357 #ifdef VERIFY_HEAP |
| 358 static void DummyScavengePointer(HeapObject** p, HeapObject* o) { | 358 static void DummyScavengePointer(HeapObject** p, HeapObject* o, Object* old) { |
| 359 // Do nothing. | 359 // Do nothing. |
| 360 } | 360 } |
| 361 | 361 |
| 362 | 362 |
| 363 void StoreBuffer::VerifyPointers(PagedSpace* space, | 363 void StoreBuffer::VerifyPointers(PagedSpace* space, |
| 364 RegionCallback region_callback) { | 364 RegionCallback region_callback) { |
| 365 PageIterator it(space); | 365 PageIterator it(space); |
| 366 | 366 |
| 367 while (it.has_next()) { | 367 while (it.has_next()) { |
| 368 Page* page = it.next(); | 368 Page* page = it.next(); |
| (...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 420 | 420 |
| 421 void StoreBuffer::FindPointersToNewSpaceInRegion( | 421 void StoreBuffer::FindPointersToNewSpaceInRegion( |
| 422 Address start, | 422 Address start, |
| 423 Address end, | 423 Address end, |
| 424 ObjectSlotCallback slot_callback, | 424 ObjectSlotCallback slot_callback, |
| 425 bool clear_maps) { | 425 bool clear_maps) { |
| 426 for (Address slot_address = start; | 426 for (Address slot_address = start; |
| 427 slot_address < end; | 427 slot_address < end; |
| 428 slot_address += kPointerSize) { | 428 slot_address += kPointerSize) { |
| 429 Object** slot = reinterpret_cast<Object**>(slot_address); | 429 Object** slot = reinterpret_cast<Object**>(slot_address); |
| 430 if (heap_->InNewSpace(*slot)) { | 430 Object* old_value = *slot; |
| 431 if (heap_->InNewSpace(old_value)) { | |
| 431 HeapObject* object = reinterpret_cast<HeapObject*>(*slot); | 432 HeapObject* object = reinterpret_cast<HeapObject*>(*slot); |
|
Jarin
2014/04/08 13:00:54
Why couldn't we get rid of the read of *slot here… [comment truncated in export]
Hannes Payer (out of office)
2014/04/08 14:04:35
Yes, this one should not be there. I did not upload… [comment truncated in export]
| |
| 432 ASSERT(object->IsHeapObject()); | 433 ASSERT(object->IsHeapObject()); |
| 433 // The new space object was not promoted if it still contains a map | 434 // The new space object was not promoted if it still contains a map |
| 434 // pointer. Clear the map field now lazily. | 435 // pointer. Clear the map field now lazily. |
| 435 if (clear_maps) ClearDeadObject(object); | 436 if (clear_maps) ClearDeadObject(object); |
| 436 slot_callback(reinterpret_cast<HeapObject**>(slot), object); | 437 slot_callback(reinterpret_cast<HeapObject**>(slot), object, old_value); |
|
Jarin
2014/04/08 13:00:54
This extra argument should not be necessary — we c… [comment truncated in export]
Hannes Payer (out of office)
2014/04/08 14:04:35
Done.
Good point, I was too conservative here.
| |
| 437 if (heap_->InNewSpace(*slot)) { | 438 if (heap_->InNewSpace(*slot)) { |
| 438 EnterDirectlyIntoStoreBuffer(slot_address); | 439 EnterDirectlyIntoStoreBuffer(slot_address); |
| 439 } | 440 } |
| 440 } | 441 } |
| 441 } | 442 } |
| 442 } | 443 } |
| 443 | 444 |
| 444 | 445 |
| 445 // Compute start address of the first map following given addr. | 446 // Compute start address of the first map following given addr. |
| 446 static inline Address MapStartAlign(Address addr) { | 447 static inline Address MapStartAlign(Address addr) { |
| (...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 524 Address visitable_start = page->area_start(); | 525 Address visitable_start = page->area_start(); |
| 525 Address end_of_page = page->area_end(); | 526 Address end_of_page = page->area_end(); |
| 526 | 527 |
| 527 Address visitable_end = visitable_start; | 528 Address visitable_end = visitable_start; |
| 528 | 529 |
| 529 Object* free_space_map = heap_->free_space_map(); | 530 Object* free_space_map = heap_->free_space_map(); |
| 530 Object* two_pointer_filler_map = heap_->two_pointer_filler_map(); | 531 Object* two_pointer_filler_map = heap_->two_pointer_filler_map(); |
| 531 Object* constant_pool_array_map = heap_->constant_pool_array_map(); | 532 Object* constant_pool_array_map = heap_->constant_pool_array_map(); |
| 532 | 533 |
| 533 while (visitable_end < end_of_page) { | 534 while (visitable_end < end_of_page) { |
| 534 Object* o = *reinterpret_cast<Object**>(visitable_end); | 535 // The sweeper thread concurrently may write free space maps and size to |
| 536 // this page. We need acquire load here to make sure that we get a | |
| 537 // consistent view of maps and their sizes. | |
| 538 Object* o = reinterpret_cast<Object*>( | |
| 539 Acquire_Load(reinterpret_cast<AtomicWord*>(visitable_end))); | |
| 535 // Skip fillers or constant pool arrays (which never contain new-space | 540 // Skip fillers or constant pool arrays (which never contain new-space |
| 536 // pointers but can contain pointers which can be confused for fillers) | 541 // pointers but can contain pointers which can be confused for fillers) |
| 537 // but not things that look like fillers in the special garbage section | 542 // but not things that look like fillers in the special garbage section |
| 538 // which can contain anything. | 543 // which can contain anything. |
| 539 if (o == free_space_map || | 544 if (o == free_space_map || |
| 540 o == two_pointer_filler_map || | 545 o == two_pointer_filler_map || |
| 541 o == constant_pool_array_map || | 546 o == constant_pool_array_map || |
| 542 (visitable_end == space->top() && visitable_end != space->limit())) { | 547 (visitable_end == space->top() && visitable_end != space->limit())) { |
| 543 if (visitable_start != visitable_end) { | 548 if (visitable_start != visitable_end) { |
| 544 // After calling this the special garbage section may have moved. | 549 // After calling this the special garbage section may have moved. |
| (...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 588 bool clear_maps) { | 593 bool clear_maps) { |
| 589 Address* limit = old_top_; | 594 Address* limit = old_top_; |
| 590 old_top_ = old_start_; | 595 old_top_ = old_start_; |
| 591 { | 596 { |
| 592 DontMoveStoreBufferEntriesScope scope(this); | 597 DontMoveStoreBufferEntriesScope scope(this); |
| 593 for (Address* current = old_start_; current < limit; current++) { | 598 for (Address* current = old_start_; current < limit; current++) { |
| 594 #ifdef DEBUG | 599 #ifdef DEBUG |
| 595 Address* saved_top = old_top_; | 600 Address* saved_top = old_top_; |
| 596 #endif | 601 #endif |
| 597 Object** slot = reinterpret_cast<Object**>(*current); | 602 Object** slot = reinterpret_cast<Object**>(*current); |
| 598 Object* object = *slot; | 603 Object* object = reinterpret_cast<Object*>( |
| 604 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | |
| 599 if (heap_->InFromSpace(object)) { | 605 if (heap_->InFromSpace(object)) { |
| 600 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 606 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
| 601 // The new space object was not promoted if it still contains a map | 607 // The new space object was not promoted if it still contains a map |
| 602 // pointer. Clear the map field now lazily. | 608 // pointer. Clear the map field now lazily. |
| 603 if (clear_maps) ClearDeadObject(heap_object); | 609 if (clear_maps) ClearDeadObject(heap_object); |
| 604 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); | 610 slot_callback(reinterpret_cast<HeapObject**>(slot), |
| 611 heap_object, | |
| 612 object); | |
| 605 if (heap_->InNewSpace(*slot)) { | 613 if (heap_->InNewSpace(*slot)) { |
| 606 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); | 614 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); |
| 607 } | 615 } |
| 608 } | 616 } |
| 609 ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top); | 617 ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top); |
| 610 } | 618 } |
| 611 } | 619 } |
| 612 } | 620 } |
| 613 | 621 |
| 614 | 622 |
| (...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 731 } | 739 } |
| 732 old_buffer_is_sorted_ = false; | 740 old_buffer_is_sorted_ = false; |
| 733 old_buffer_is_filtered_ = false; | 741 old_buffer_is_filtered_ = false; |
| 734 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 742 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| 735 ASSERT(old_top_ <= old_limit_); | 743 ASSERT(old_top_ <= old_limit_); |
| 736 } | 744 } |
| 737 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 745 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
| 738 } | 746 } |
| 739 | 747 |
| 740 } } // namespace v8::internal | 748 } } // namespace v8::internal |
| OLD | NEW |