Chromium Code Reviews

Unified diff: src/store-buffer.cc

Issue 227533006: Synchronize store buffer processing and concurrent sweeping. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 8 months ago
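At a glance, the change replaces plain dereferences of old-space slots with loads through V8's atomicops layer (NoBarrier_Load, and Acquire_Load where ordering matters), so that store-buffer processing and the concurrent sweeper touching the same words no longer constitute a data race. As a rough, standalone sketch of that reader/writer pattern (not V8 code: std::atomic with relaxed ordering stands in for NoBarrier_Load, and Slot, SweepRegion and the values are illustrative):

#include <atomic>
#include <cstdint>
#include <thread>
#include <vector>

// A heap slot modelled as an atomic word, mirroring the CL's
// reinterpret_cast<AtomicWord*>(slot) reads.
using Slot = std::atomic<std::uintptr_t>;

// Reader side (store buffer): a relaxed load gives no ordering guarantees,
// but unlike a plain dereference it is well-defined under concurrent writes.
std::uintptr_t LoadSlotNoBarrier(const Slot* slot) {
  return slot->load(std::memory_order_relaxed);
}

// Writer side (sweeper): overwrites the swept region concurrently.
void SweepRegion(std::vector<Slot>* region) {
  for (Slot& s : *region) s.store(0, std::memory_order_relaxed);
}

int main() {
  std::vector<Slot> region(1024);
  for (Slot& s : region) s.store(0xdeadbeef, std::memory_order_relaxed);

  std::thread sweeper(SweepRegion, &region);   // concurrent "sweeping"
  std::uintptr_t observed = 0;
  for (const Slot& s : region) observed ^= LoadSlotNoBarrier(&s);
  sweeper.join();
  // The observed values are unspecified, but the scan itself is race-free.
  (void)observed;
  return 0;
}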
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 370 matching lines...)
   for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
     if (object->IsFixedArray()) {
       Address slot_address = object->address();
       Address end = object->address() + object->Size();

       while (slot_address < end) {
         HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address);
         // When we are not in GC the Heap::InNewSpace() predicate
         // checks that pointers which satisfy predicate point into
         // the active semispace.
-        heap_->InNewSpace(*slot);
+        Object* object = reinterpret_cast<Object*>(
+            NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot)));
+        heap_->InNewSpace(object);
         slot_address += kPointerSize;
       }
     }
   }
 }
 #endif


 void StoreBuffer::Verify() {
 #ifdef VERIFY_HEAP
(...skipping 18 matching lines...)

 void StoreBuffer::FindPointersToNewSpaceInRegion(
     Address start,
     Address end,
     ObjectSlotCallback slot_callback,
     bool clear_maps) {
   for (Address slot_address = start;
        slot_address < end;
        slot_address += kPointerSize) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    if (heap_->InNewSpace(*slot)) {
-      HeapObject* object = reinterpret_cast<HeapObject*>(*slot);
-      ASSERT(object->IsHeapObject());
+    Object* object = reinterpret_cast<Object*>(
+        NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot)));
+    if (heap_->InNewSpace(object)) {
+      HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
+      ASSERT(heap_object->IsHeapObject());
       // The new space object was not promoted if it still contains a map
       // pointer. Clear the map field now lazily.
-      if (clear_maps) ClearDeadObject(object);
-      slot_callback(reinterpret_cast<HeapObject**>(slot), object);
-      if (heap_->InNewSpace(*slot)) {
+      if (clear_maps) ClearDeadObject(heap_object);
+      slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object);
+      object = reinterpret_cast<Object*>(
+          NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot)));
+      if (heap_->InNewSpace(object)) {
         EnterDirectlyIntoStoreBuffer(slot_address);
       }
     }
   }
 }


 // Compute start address of the first map following given addr.
 static inline Address MapStartAlign(Address addr) {
   Address page = Page::FromAddress(addr)->area_start();
(...skipping 76 matching lines...)
   Address visitable_start = page->area_start();
   Address end_of_page = page->area_end();

   Address visitable_end = visitable_start;

   Object* free_space_map = heap_->free_space_map();
   Object* two_pointer_filler_map = heap_->two_pointer_filler_map();
   Object* constant_pool_array_map = heap_->constant_pool_array_map();

   while (visitable_end < end_of_page) {
-    Object* o = *reinterpret_cast<Object**>(visitable_end);
+    // The sweeper thread concurrently may write free space maps and size to
+    // this page. We need acquire load here to make sure that we get a
+    // consistent view of maps and their sizes.
+    Object* o = reinterpret_cast<Object*>(
+        Acquire_Load(reinterpret_cast<AtomicWord*>(visitable_end)));
     // Skip fillers or constant pool arrays (which never contain new-space
     // pointers but can contain pointers which can be confused for fillers)
     // but not things that look like fillers in the special garbage section
     // which can contain anything.
     if (o == free_space_map ||
         o == two_pointer_filler_map ||
         o == constant_pool_array_map ||
         (visitable_end == space->top() && visitable_end != space->limit())) {
       if (visitable_start != visitable_end) {
         // After calling this the special garbage section may have moved.
(...skipping 43 matching lines...)
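The Acquire_Load above only gives the "consistent view of maps and their sizes" that the new comment promises if the sweeper publishes the free-space map word with a matching release store, which the sweeper side of this CL (src/spaces.cc) would have to provide. A minimal sketch of that acquire/release pairing, assuming illustrative FreeRegion and kFreeSpaceMap names and std::atomic in place of V8's atomicops:

#include <atomic>
#include <cstddef>
#include <cstdint>
#include <thread>

// Illustrative stand-ins; not V8's types or values.
constexpr std::uintptr_t kFreeSpaceMap = 0x1;

struct FreeRegion {
  std::atomic<std::uintptr_t> map_word{0};  // first word of the filler object
  std::size_t size = 0;                     // written before the map word
};

// Sweeper thread: write the size first, then publish the map word with
// release semantics so a reader that sees the map also sees the size.
void PublishFreeSpace(FreeRegion* r, std::size_t size) {
  r->size = size;
  r->map_word.store(kFreeSpaceMap, std::memory_order_release);
}

// Store-buffer thread: acquire-load the map word; if it is the free-space
// map, the following read of size observes the value written before the
// release store, so the scan can skip the whole free region safely.
std::size_t VisitableStep(const FreeRegion* r, std::size_t object_step) {
  if (r->map_word.load(std::memory_order_acquire) == kFreeSpaceMap) {
    return r->size;
  }
  return object_step;
}

int main() {
  FreeRegion region;
  std::thread sweeper(PublishFreeSpace, &region, std::size_t{128});
  while (VisitableStep(&region, /*object_step=*/8) != 128) {
    // Spin until the published free region becomes visible.
  }
  sweeper.join();
  return 0;
}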
     bool clear_maps) {
   Address* limit = old_top_;
   old_top_ = old_start_;
   {
     DontMoveStoreBufferEntriesScope scope(this);
     for (Address* current = old_start_; current < limit; current++) {
 #ifdef DEBUG
       Address* saved_top = old_top_;
 #endif
       Object** slot = reinterpret_cast<Object**>(*current);
-      Object* object = *slot;
+      Object* object = reinterpret_cast<Object*>(
+          NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot)));
       if (heap_->InFromSpace(object)) {
         HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
         // The new space object was not promoted if it still contains a map
         // pointer. Clear the map field now lazily.
         if (clear_maps) ClearDeadObject(heap_object);
         slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object);
-        if (heap_->InNewSpace(*slot)) {
+        object = reinterpret_cast<Object*>(
+            NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot)));
+        if (heap_->InNewSpace(object)) {
           EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot));
         }
       }
       ASSERT(old_top_ == saved_top + 1 || old_top_ == saved_top);
     }
   }
 }


 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) {
(...skipping 115 matching lines...)
     }
     old_buffer_is_sorted_ = false;
     old_buffer_is_filtered_ = false;
     *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2);
     ASSERT(old_top_ <= old_limit_);
   }
   heap_->isolate()->counters()->store_buffer_compactions()->Increment();
 }

 } }  // namespace v8::internal
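One more pattern worth calling out from FindPointersToNewSpaceInRegion and the store-buffer iteration above: slot_callback may rewrite the slot in place (for instance to a promoted object's new address), so the code loads the slot a second time before deciding whether it still points into new space and must be re-entered into the store buffer. A compilable sketch of that re-check, with InNewSpace, EnterDirectlyIntoStoreBuffer and the scavenge callback reduced to illustrative stubs:

#include <atomic>
#include <cstdint>

using Slot = std::atomic<std::uintptr_t>;

// Illustrative stand-ins for V8 internals.
constexpr std::uintptr_t kNewSpaceStart = 0x1000;
constexpr std::uintptr_t kNewSpaceEnd = 0x2000;

bool InNewSpace(std::uintptr_t value) {
  return value >= kNewSpaceStart && value < kNewSpaceEnd;
}

void EnterDirectlyIntoStoreBuffer(Slot* /*slot*/) {
  // Would record the slot address in the store buffer.
}

// Models the scavenger callback: it may rewrite the slot to the object's new
// address; here it "promotes" the object out of new space.
void ScavengeSlot(Slot* slot, std::uintptr_t /*old_value*/) {
  slot->store(0x8000, std::memory_order_relaxed);
}

void ProcessSlot(Slot* slot, void (*callback)(Slot*, std::uintptr_t)) {
  std::uintptr_t value = slot->load(std::memory_order_relaxed);  // NoBarrier_Load
  if (!InNewSpace(value)) return;
  callback(slot, value);  // may update *slot in place
  // Re-load the slot: only if it still points into new space after the
  // callback does it have to be re-entered into the store buffer.
  value = slot->load(std::memory_order_relaxed);
  if (InNewSpace(value)) EnterDirectlyIntoStoreBuffer(slot);
}

int main() {
  Slot slot(0x1800);  // initially points into "new space"
  ProcessSlot(&slot, ScavengeSlot);
  return 0;
}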
