Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 346 matching lines...) | (...skipping 346 matching lines...) |
| 357 void StoreBuffer::VerifyPointers(PagedSpace* space, | 357 void StoreBuffer::VerifyPointers(PagedSpace* space, |
| 358 RegionCallback region_callback) { | 358 RegionCallback region_callback) { |
| 359 PageIterator it(space); | 359 PageIterator it(space); |
| 360 | 360 |
| 361 while (it.has_next()) { | 361 while (it.has_next()) { |
| 362 Page* page = it.next(); | 362 Page* page = it.next(); |
| 363 FindPointersToNewSpaceOnPage( | 363 FindPointersToNewSpaceOnPage( |
| 364 reinterpret_cast<PagedSpace*>(page->owner()), | 364 reinterpret_cast<PagedSpace*>(page->owner()), |
| 365 page, | 365 page, |
| 366 region_callback, | 366 region_callback, |
| 367 &DummyScavengePointer); | 367 &DummyScavengePointer, |
| | 368 false); |
| 368 } | 369 } |
| 369 } | 370 } |
| 370 | 371 |
| 371 | 372 |
| 372 void StoreBuffer::VerifyPointers(LargeObjectSpace* space) { | 373 void StoreBuffer::VerifyPointers(LargeObjectSpace* space) { |
| 373 LargeObjectIterator it(space); | 374 LargeObjectIterator it(space); |
| 374 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 375 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| 375 if (object->IsFixedArray()) { | 376 if (object->IsFixedArray()) { |
| 376 Address slot_address = object->address(); | 377 Address slot_address = object->address(); |
| 377 Address end = object->address() + object->Size(); | 378 Address end = object->address() + object->Size(); |
| (...skipping 27 matching lines...) | (...skipping 27 matching lines...) |
| 405 during_gc_ = false; | 406 during_gc_ = false; |
| 406 #ifdef VERIFY_HEAP | 407 #ifdef VERIFY_HEAP |
| 407 if (FLAG_verify_heap) { | 408 if (FLAG_verify_heap) { |
| 408 Verify(); | 409 Verify(); |
| 409 } | 410 } |
| 410 #endif | 411 #endif |
| 411 } | 412 } |
| 412 | 413 |
| 413 | 414 |
| 414 void StoreBuffer::FindPointersToNewSpaceInRegion( | 415 void StoreBuffer::FindPointersToNewSpaceInRegion( |
| 415 Address start, Address end, ObjectSlotCallback slot_callback) { | 416 Address start, |
| | 417 Address end, |
| | 418 ObjectSlotCallback slot_callback, |
| | 419 bool clear_maps) { |
| 416 for (Address slot_address = start; | 420 for (Address slot_address = start; |
| 417 slot_address < end; | 421 slot_address < end; |
| 418 slot_address += kPointerSize) { | 422 slot_address += kPointerSize) { |
| 419 Object** slot = reinterpret_cast<Object**>(slot_address); | 423 Object** slot = reinterpret_cast<Object**>(slot_address); |
| 420 if (heap_->InNewSpace(*slot)) { | 424 if (heap_->InNewSpace(*slot)) { |
| 421 HeapObject* object = reinterpret_cast<HeapObject*>(*slot); | 425 HeapObject* object = reinterpret_cast<HeapObject*>(*slot); |
| | 426 Address& map_field = Memory::Address_at(object->address()); |
> **titzer** (2013/07/10 17:06:35):
> I'm a little dedup, short and stout. Rip me over a
>
> **Hannes Payer (out of office)** (2013/07/11 07:30:40):
> Done.
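titzer's comment above asks for this repeated snippet to be deduplicated: the same lazy map-clearing check appears both here and in `IteratePointersInStoreBuffer` further down. A minimal sketch of how it might be factored into a shared helper, assuming the V8 declarations already used in this file; the helper name is an assumption, not necessarily what landed:

```cpp
// Sketch only: one possible shared helper for the map-clearing check this
// patch adds in two places. An unpromoted (dead) new-space object still
// holds a map-space pointer in its first word; a promoted object has had
// that word replaced by a forwarding pointer, which never points into the
// map space.
void StoreBuffer::ClearDeadObject(HeapObject* object) {
  Address& map_field = Memory::Address_at(object->address());
  if (heap_->map_space()->Contains(map_field)) {
    map_field = NULL;
  }
}
```

Both call sites could then shrink to `if (clear_maps) ClearDeadObject(object);`.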
| | 427 // The new space object was not promoted if it still contains a map |
| | 428 // pointer. Clear the map field now lazily. |
| | 429 if (clear_maps && heap_->map_space()->Contains(map_field)) { |
| | 430 map_field = NULL; |
| | 431 } |
| 422 ASSERT(object->IsHeapObject()); | 432 ASSERT(object->IsHeapObject()); |
| 423 slot_callback(reinterpret_cast<HeapObject**>(slot), object); | 433 slot_callback(reinterpret_cast<HeapObject**>(slot), object); |
| 424 if (heap_->InNewSpace(*slot)) { | 434 if (heap_->InNewSpace(*slot)) { |
| 425 EnterDirectlyIntoStoreBuffer(slot_address); | 435 EnterDirectlyIntoStoreBuffer(slot_address); |
| 426 } | 436 } |
| 427 } | 437 } |
| 428 } | 438 } |
| 429 } | 439 } |
| 430 | 440 |
| 431 | 441 |
| 432 // Compute start address of the first map following given addr. | 442 // Compute start address of the first map following given addr. |
| 433 static inline Address MapStartAlign(Address addr) { | 443 static inline Address MapStartAlign(Address addr) { |
| 434 Address page = Page::FromAddress(addr)->area_start(); | 444 Address page = Page::FromAddress(addr)->area_start(); |
| 435 return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize); | 445 return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize); |
| 436 } | 446 } |
| 437 | 447 |
| 438 | 448 |
| 439 // Compute end address of the first map preceding given addr. | 449 // Compute end address of the first map preceding given addr. |
| 440 static inline Address MapEndAlign(Address addr) { | 450 static inline Address MapEndAlign(Address addr) { |
| 441 Address page = Page::FromAllocationTop(addr)->area_start(); | 451 Address page = Page::FromAllocationTop(addr)->area_start(); |
| 442 return page + ((addr - page) / Map::kSize * Map::kSize); | 452 return page + ((addr - page) / Map::kSize * Map::kSize); |
| 443 } | 453 } |
| 444 | 454 |
| 445 | 455 |
| 446 void StoreBuffer::FindPointersToNewSpaceInMaps( | 456 void StoreBuffer::FindPointersToNewSpaceInMaps( |
| 447 Address start, | 457 Address start, |
| 448 Address end, | 458 Address end, |
| 449 ObjectSlotCallback slot_callback) { | 459 ObjectSlotCallback slot_callback, |
| | 460 bool clear_maps) { |
| 450 ASSERT(MapStartAlign(start) == start); | 461 ASSERT(MapStartAlign(start) == start); |
| 451 ASSERT(MapEndAlign(end) == end); | 462 ASSERT(MapEndAlign(end) == end); |
| 452 | 463 |
| 453 Address map_address = start; | 464 Address map_address = start; |
| 454 while (map_address < end) { | 465 while (map_address < end) { |
| 455 ASSERT(!heap_->InNewSpace(Memory::Object_at(map_address))); | 466 ASSERT(!heap_->InNewSpace(Memory::Object_at(map_address))); |
| 456 ASSERT(Memory::Object_at(map_address)->IsMap()); | 467 ASSERT(Memory::Object_at(map_address)->IsMap()); |
| 457 | 468 |
| 458 Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset; | 469 Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset; |
| 459 Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset; | 470 Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset; |
| 460 | 471 |
| 461 FindPointersToNewSpaceInRegion(pointer_fields_start, | 472 FindPointersToNewSpaceInRegion(pointer_fields_start, |
| 462 pointer_fields_end, | 473 pointer_fields_end, |
| 463 slot_callback); | 474 slot_callback, |
| | 475 clear_maps); |
| 464 map_address += Map::kSize; | 476 map_address += Map::kSize; |
| 465 } | 477 } |
| 466 } | 478 } |
| 467 | 479 |
| 468 | 480 |
| 469 void StoreBuffer::FindPointersToNewSpaceInMapsRegion( | 481 void StoreBuffer::FindPointersToNewSpaceInMapsRegion( |
| 470 Address start, | 482 Address start, |
| 471 Address end, | 483 Address end, |
| 472 ObjectSlotCallback slot_callback) { | 484 ObjectSlotCallback slot_callback, |
| | 485 bool clear_maps) { |
| 473 Address map_aligned_start = MapStartAlign(start); | 486 Address map_aligned_start = MapStartAlign(start); |
| 474 Address map_aligned_end = MapEndAlign(end); | 487 Address map_aligned_end = MapEndAlign(end); |
| 475 | 488 |
| 476 ASSERT(map_aligned_start == start); | 489 ASSERT(map_aligned_start == start); |
| 477 ASSERT(map_aligned_end == end); | 490 ASSERT(map_aligned_end == end); |
| 478 | 491 |
| 479 FindPointersToNewSpaceInMaps(map_aligned_start, | 492 FindPointersToNewSpaceInMaps(map_aligned_start, |
| 480 map_aligned_end, | 493 map_aligned_end, |
| 481 slot_callback); | 494 slot_callback, |
| | 495 clear_maps); |
| 482 } | 496 } |
| 483 | 497 |
| 484 | 498 |
| 485 // This function iterates over all the pointers in a paged space in the heap, | 499 // This function iterates over all the pointers in a paged space in the heap, |
| 486 // looking for pointers into new space. Within the pages there may be dead | 500 // looking for pointers into new space. Within the pages there may be dead |
| 487 // objects that have not been overwritten by free spaces or fillers because of | 501 // objects that have not been overwritten by free spaces or fillers because of |
| 488 // lazy sweeping. These dead objects may not contain pointers to new space. | 502 // lazy sweeping. These dead objects may not contain pointers to new space. |
| 489 // The garbage areas that have been swept properly (these will normally be the | 503 // The garbage areas that have been swept properly (these will normally be the |
| 490 // large ones) will be marked with free space and filler map words. In | 504 // large ones) will be marked with free space and filler map words. In |
| 491 // addition any area that has never been used at all for object allocation must | 505 // addition any area that has never been used at all for object allocation must |
| 492 // be marked with a free space or filler. Because the free space and filler | 506 // be marked with a free space or filler. Because the free space and filler |
| 493 // maps do not move we can always recognize these even after a compaction. | 507 // maps do not move we can always recognize these even after a compaction. |
| 494 // Normal objects like FixedArrays and JSObjects should not contain references | 508 // Normal objects like FixedArrays and JSObjects should not contain references |
| 495 // to these maps. The special garbage section (see comment in spaces.h) is | 509 // to these maps. The special garbage section (see comment in spaces.h) is |
| 496 // skipped since it can contain absolutely anything. Any objects that are | 510 // skipped since it can contain absolutely anything. Any objects that are |
| 497 // allocated during iteration may or may not be visited by the iteration, but | 511 // allocated during iteration may or may not be visited by the iteration, but |
| 498 // they will not be partially visited. | 512 // they will not be partially visited. |
| 499 void StoreBuffer::FindPointersToNewSpaceOnPage( | 513 void StoreBuffer::FindPointersToNewSpaceOnPage( |
| 500 PagedSpace* space, | 514 PagedSpace* space, |
| 501 Page* page, | 515 Page* page, |
| 502 RegionCallback region_callback, | 516 RegionCallback region_callback, |
| 503 ObjectSlotCallback slot_callback) { | 517 ObjectSlotCallback slot_callback, |
| | 518 bool clear_maps) { |
| 504 Address visitable_start = page->area_start(); | 519 Address visitable_start = page->area_start(); |
| 505 Address end_of_page = page->area_end(); | 520 Address end_of_page = page->area_end(); |
| 506 | 521 |
| 507 Address visitable_end = visitable_start; | 522 Address visitable_end = visitable_start; |
| 508 | 523 |
| 509 Object* free_space_map = heap_->free_space_map(); | 524 Object* free_space_map = heap_->free_space_map(); |
| 510 Object* two_pointer_filler_map = heap_->two_pointer_filler_map(); | 525 Object* two_pointer_filler_map = heap_->two_pointer_filler_map(); |
| 511 | 526 |
| 512 while (visitable_end < end_of_page) { | 527 while (visitable_end < end_of_page) { |
| 513 Object* o = *reinterpret_cast<Object**>(visitable_end); | 528 Object* o = *reinterpret_cast<Object**>(visitable_end); |
| 514 // Skip fillers but not things that look like fillers in the special | 529 // Skip fillers but not things that look like fillers in the special |
| 515 // garbage section which can contain anything. | 530 // garbage section which can contain anything. |
| 516 if (o == free_space_map || | 531 if (o == free_space_map || |
| 517 o == two_pointer_filler_map || | 532 o == two_pointer_filler_map || |
| 518 (visitable_end == space->top() && visitable_end != space->limit())) { | 533 (visitable_end == space->top() && visitable_end != space->limit())) { |
| 519 if (visitable_start != visitable_end) { | 534 if (visitable_start != visitable_end) { |
| 520 // After calling this the special garbage section may have moved. | 535 // After calling this the special garbage section may have moved. |
| 521 (this->*region_callback)(visitable_start, | 536 (this->*region_callback)(visitable_start, |
| 522 visitable_end, | 537 visitable_end, |
| 523 slot_callback); | 538 slot_callback, |
| | 539 clear_maps); |
| 524 if (visitable_end >= space->top() && visitable_end < space->limit()) { | 540 if (visitable_end >= space->top() && visitable_end < space->limit()) { |
| 525 visitable_end = space->limit(); | 541 visitable_end = space->limit(); |
| 526 visitable_start = visitable_end; | 542 visitable_start = visitable_end; |
| 527 continue; | 543 continue; |
| 528 } | 544 } |
| 529 } | 545 } |
| 530 if (visitable_end == space->top() && visitable_end != space->limit()) { | 546 if (visitable_end == space->top() && visitable_end != space->limit()) { |
| 531 visitable_start = visitable_end = space->limit(); | 547 visitable_start = visitable_end = space->limit(); |
| 532 } else { | 548 } else { |
| 533 // At this point we are either at the start of a filler or we are at | 549 // At this point we are either at the start of a filler or we are at |
| (...skipping 10 matching lines...) | (...skipping 10 matching lines...) |
| 544 ASSERT(o != free_space_map); | 560 ASSERT(o != free_space_map); |
| 545 ASSERT(o != two_pointer_filler_map); | 561 ASSERT(o != two_pointer_filler_map); |
| 546 ASSERT(visitable_end < space->top() || visitable_end >= space->limit()); | 562 ASSERT(visitable_end < space->top() || visitable_end >= space->limit()); |
| 547 visitable_end += kPointerSize; | 563 visitable_end += kPointerSize; |
| 548 } | 564 } |
| 549 } | 565 } |
| 550 ASSERT(visitable_end == end_of_page); | 566 ASSERT(visitable_end == end_of_page); |
| 551 if (visitable_start != visitable_end) { | 567 if (visitable_start != visitable_end) { |
| 552 (this->*region_callback)(visitable_start, | 568 (this->*region_callback)(visitable_start, |
| 553 visitable_end, | 569 visitable_end, |
| 554 slot_callback); | 570 slot_callback, |
| | 571 clear_maps); |
| 555 } | 572 } |
| 556 } | 573 } |
| 557 | 574 |
| 558 | 575 |
| 559 void StoreBuffer::IteratePointersInStoreBuffer( | 576 void StoreBuffer::IteratePointersInStoreBuffer( |
| 560 ObjectSlotCallback slot_callback) { | 577 ObjectSlotCallback slot_callback, |
| | 578 bool clear_maps) { |
| 561 Address* limit = old_top_; | 579 Address* limit = old_top_; |
| 562 old_top_ = old_start_; | 580 old_top_ = old_start_; |
| 563 { | 581 { |
| 564 DontMoveStoreBufferEntriesScope scope(this); | 582 DontMoveStoreBufferEntriesScope scope(this); |
| 565 for (Address* current = old_start_; current < limit; current++) { | 583 for (Address* current = old_start_; current < limit; current++) { |
| 566 #ifdef DEBUG | 584 #ifdef DEBUG |
| 567 Address* saved_top = old_top_; | 585 Address* saved_top = old_top_; |
| 568 #endif | 586 #endif |
| 569 Object** slot = reinterpret_cast<Object**>(*current); | 587 Object** slot = reinterpret_cast<Object**>(*current); |
| 570 Object* object = *slot; | 588 Object* object = *slot; |
| 571 if (heap_->InFromSpace(object)) { | 589 if (heap_->InFromSpace(object)) { |
| 572 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 590 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
| | 591 Address& map_field = Memory::Address_at(heap_object->address()); |
| | 592 // The new space object was not promoted if it still contains a map |
| | 593 // pointer. Clear the map field now lazily. |
| | 594 if (clear_maps && heap_->map_space()->Contains(map_field)) { |
| | 595 map_field = NULL; |
| | 596 } |
> **titzer** (2013/07/10 17:06:35):
> BTW, do you still want to call the slot callback i
>
> **Hannes Payer (out of office)** (2013/07/11 07:30:40):
> As soon as we clear the todo in the callback, we c
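titzer's question (truncated above) appears to ask whether the slot callback should still run for an object whose map word was just cleared; Hannes's reply suggests it could eventually be skipped. A hypothetical sketch of that possible follow-up inside this loop, explicitly not part of the patch under review:

```cpp
// Hypothetical follow-up, not in this CL: once the scavenge callback no
// longer needs to observe dead objects, the cleared map word could double
// as an early-out that skips slot_callback entirely.
if (clear_maps && heap_->map_space()->Contains(map_field)) {
  map_field = NULL;
  continue;  // Dead object: nothing to scavenge for this slot.
}
```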
| 573 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); | 597 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); |
| 574 if (heap_->InNewSpace(*slot)) { | 598 if (heap_->InNewSpace(*slot)) { |
| 575 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); | 599 EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot)); |
| 576 } | 600 } |
| 577 } | 601 } |
| 578 ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top); | 602 ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top); |
| 579 } | 603 } |
| 580 } | 604 } |
| 581 } | 605 } |
| 582 | 606 |
| 583 | 607 |
| 584 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) { | 608 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback, |
| | 609 bool clear_maps) { |
| 585 // We do not sort or remove duplicated entries from the store buffer because | 610 // We do not sort or remove duplicated entries from the store buffer because |
| 586 // we expect that callback will rebuild the store buffer thus removing | 611 // we expect that callback will rebuild the store buffer thus removing |
| 587 // all duplicates and pointers to old space. | 612 // all duplicates and pointers to old space. |
| 588 bool some_pages_to_scan = PrepareForIteration(); | 613 bool some_pages_to_scan = PrepareForIteration(); |
| 589 | 614 |
| 590 // TODO(gc): we want to skip slots on evacuation candidates | 615 // TODO(gc): we want to skip slots on evacuation candidates |
| 591 // but we can't simply figure that out from slot address | 616 // but we can't simply figure that out from slot address |
| 592 // because slot can belong to a large object. | 617 // because slot can belong to a large object. |
| 593 IteratePointersInStoreBuffer(slot_callback); | 618 IteratePointersInStoreBuffer(slot_callback, clear_maps); |
| 594 | 619 |
| 595 // We are done scanning all the pointers that were in the store buffer, but | 620 // We are done scanning all the pointers that were in the store buffer, but |
| 596 // there may be some pages marked scan_on_scavenge that have pointers to new | 621 // there may be some pages marked scan_on_scavenge that have pointers to new |
| 597 // space that are not in the store buffer. We must scan them now. As we | 622 // space that are not in the store buffer. We must scan them now. As we |
| 598 // scan, the surviving pointers to new space will be added to the store | 623 // scan, the surviving pointers to new space will be added to the store |
| 599 // buffer. If there are still a lot of pointers to new space then we will | 624 // buffer. If there are still a lot of pointers to new space then we will |
| 600 // keep the scan_on_scavenge flag on the page and discard the pointers that | 625 // keep the scan_on_scavenge flag on the page and discard the pointers that |
| 601 // were added to the store buffer. If there are not many pointers to new | 626 // were added to the store buffer. If there are not many pointers to new |
| 602 // space left on the page we will keep the pointers in the store buffer and | 627 // space left on the page we will keep the pointers in the store buffer and |
| 603 // remove the flag from the page. | 628 // remove the flag from the page. |
| 604 if (some_pages_to_scan) { | 629 if (some_pages_to_scan) { |
| 605 if (callback_ != NULL) { | 630 if (callback_ != NULL) { |
| 606 (*callback_)(heap_, NULL, kStoreBufferStartScanningPagesEvent); | 631 (*callback_)(heap_, NULL, kStoreBufferStartScanningPagesEvent); |
| 607 } | 632 } |
| 608 PointerChunkIterator it(heap_); | 633 PointerChunkIterator it(heap_); |
| 609 MemoryChunk* chunk; | 634 MemoryChunk* chunk; |
| 610 while ((chunk = it.next()) != NULL) { | 635 while ((chunk = it.next()) != NULL) { |
| 611 if (chunk->scan_on_scavenge()) { | 636 if (chunk->scan_on_scavenge()) { |
| 612 chunk->set_scan_on_scavenge(false); | 637 chunk->set_scan_on_scavenge(false); |
| 613 if (callback_ != NULL) { | 638 if (callback_ != NULL) { |
| 614 (*callback_)(heap_, chunk, kStoreBufferScanningPageEvent); | 639 (*callback_)(heap_, chunk, kStoreBufferScanningPageEvent); |
| 615 } | 640 } |
| 616 if (chunk->owner() == heap_->lo_space()) { | 641 if (chunk->owner() == heap_->lo_space()) { |
| 617 LargePage* large_page = reinterpret_cast<LargePage*>(chunk); | 642 LargePage* large_page = reinterpret_cast<LargePage*>(chunk); |
| 618 HeapObject* array = large_page->GetObject(); | 643 HeapObject* array = large_page->GetObject(); |
| 619 ASSERT(array->IsFixedArray()); | 644 ASSERT(array->IsFixedArray()); |
| 620 Address start = array->address(); | 645 Address start = array->address(); |
| 621 Address end = start + array->Size(); | 646 Address end = start + array->Size(); |
| 622 FindPointersToNewSpaceInRegion(start, end, slot_callback); | 647 FindPointersToNewSpaceInRegion(start, end, slot_callback, clear_maps); |
| 623 } else { | 648 } else { |
| 624 Page* page = reinterpret_cast<Page*>(chunk); | 649 Page* page = reinterpret_cast<Page*>(chunk); |
| 625 PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner()); | 650 PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner()); |
| 626 FindPointersToNewSpaceOnPage( | 651 FindPointersToNewSpaceOnPage( |
| 627 owner, | 652 owner, |
| 628 page, | 653 page, |
| 629 (owner == heap_->map_space() ? | 654 (owner == heap_->map_space() ? |
| 630 &StoreBuffer::FindPointersToNewSpaceInMapsRegion : | 655 &StoreBuffer::FindPointersToNewSpaceInMapsRegion : |
| 631 &StoreBuffer::FindPointersToNewSpaceInRegion), | 656 &StoreBuffer::FindPointersToNewSpaceInRegion), |
| 632 slot_callback); | 657 slot_callback, |
| | 658 clear_maps); |
| 633 } | 659 } |
| 634 } | 660 } |
| 635 } | 661 } |
| 636 if (callback_ != NULL) { | 662 if (callback_ != NULL) { |
| 637 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); | 663 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); |
| 638 } | 664 } |
| 639 } | 665 } |
| 640 } | 666 } |
| 641 | 667 |
| 642 | 668 |
| (...skipping 44 matching lines...) | (...skipping 44 matching lines...) |
| 687 } | 713 } |
| 688 old_buffer_is_sorted_ = false; | 714 old_buffer_is_sorted_ = false; |
| 689 old_buffer_is_filtered_ = false; | 715 old_buffer_is_filtered_ = false; |
| 690 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 716 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| 691 ASSERT(old_top_ <= old_limit_); | 717 ASSERT(old_top_ <= old_limit_); |
| 692 } | 718 } |
| 693 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 719 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
| 694 } | 720 } |
| 695 | 721 |
| 696 } } // namespace v8::internal | 722 } } // namespace v8::internal |
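For orientation, a hedged sketch of how a caller might thread the new flag through the public entry point: a scavenge pass would request map clearing, while read-only passes (such as the verification path above, which internally passes `false`) must leave maps intact. `ScavengeObject` here is a stand-in for whatever `ObjectSlotCallback` the collector actually registers; these call sites are illustrative assumptions, not part of this CL:

```cpp
// During a scavenge: fix up old-to-new pointers and lazily clear the map
// words of new-space objects that did not survive.
heap->store_buffer()->IteratePointersToNewSpace(&ScavengeObject,
                                                true);   // clear_maps

// Read-only traversal (e.g. verification): must not mutate objects.
heap->store_buffer()->IteratePointersToNewSpace(&ScavengeObject,
                                                false);  // clear_maps
```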