| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 388 matching lines...) |
| 399 return compacting_; | 399 return compacting_; |
| 400 } | 400 } |
| 401 | 401 |
| 402 | 402 |
| 403 void MarkCompactCollector::CollectGarbage() { | 403 void MarkCompactCollector::CollectGarbage() { |
| 404 // Make sure that Prepare() has been called. The individual steps below will | 404 // Make sure that Prepare() has been called. The individual steps below will |
| 405 // update the state as they proceed. | 405 // update the state as they proceed. |
| 406 ASSERT(state_ == PREPARE_GC); | 406 ASSERT(state_ == PREPARE_GC); |
| 407 ASSERT(encountered_weak_collections_ == Smi::FromInt(0)); | 407 ASSERT(encountered_weak_collections_ == Smi::FromInt(0)); |
| 408 | 408 |
| 409 heap()->allocation_mementos_found_ = 0; | |
| 410 | |
| 411 MarkLiveObjects(); | 409 MarkLiveObjects(); |
| 412 ASSERT(heap_->incremental_marking()->IsStopped()); | 410 ASSERT(heap_->incremental_marking()->IsStopped()); |
| 413 | 411 |
| 414 if (FLAG_collect_maps) ClearNonLiveReferences(); | 412 if (FLAG_collect_maps) ClearNonLiveReferences(); |
| 415 | 413 |
| 416 ClearWeakCollections(); | 414 ClearWeakCollections(); |
| 417 | 415 |
| 418 #ifdef VERIFY_HEAP | 416 #ifdef VERIFY_HEAP |
| 419 if (FLAG_verify_heap) { | 417 if (FLAG_verify_heap) { |
| 420 VerifyMarking(heap_); | 418 VerifyMarking(heap_); |
| (...skipping 21 matching lines...) |
| 442 | 440 |
| 443 Finish(); | 441 Finish(); |
| 444 | 442 |
| 445 if (marking_parity_ == EVEN_MARKING_PARITY) { | 443 if (marking_parity_ == EVEN_MARKING_PARITY) { |
| 446 marking_parity_ = ODD_MARKING_PARITY; | 444 marking_parity_ = ODD_MARKING_PARITY; |
| 447 } else { | 445 } else { |
| 448 ASSERT(marking_parity_ == ODD_MARKING_PARITY); | 446 ASSERT(marking_parity_ == ODD_MARKING_PARITY); |
| 449 marking_parity_ = EVEN_MARKING_PARITY; | 447 marking_parity_ = EVEN_MARKING_PARITY; |
| 450 } | 448 } |
| 451 | 449 |
| 452 if (FLAG_trace_track_allocation_sites && | |
| 453 heap()->allocation_mementos_found_ > 0) { | |
| 454 PrintF("AllocationMementos found during mark-sweep = %d\n", | |
| 455 heap()->allocation_mementos_found_); | |
| 456 } | |
| 457 tracer_ = NULL; | 450 tracer_ = NULL; |
| 458 } | 451 } |
| 459 | 452 |
| 460 | 453 |
| 461 #ifdef VERIFY_HEAP | 454 #ifdef VERIFY_HEAP |
| 462 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { | 455 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { |
| 463 PageIterator it(space); | 456 PageIterator it(space); |
| 464 | 457 |
| 465 while (it.has_next()) { | 458 while (it.has_next()) { |
| 466 Page* p = it.next(); | 459 Page* p = it.next(); |
| (...skipping 1415 matching lines...) |
| 1882 }; | 1875 }; |
| 1883 | 1876 |
| 1884 | 1877 |
| 1885 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects | 1878 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects |
| 1886 // are retained. | 1879 // are retained. |
| 1887 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { | 1880 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { |
| 1888 public: | 1881 public: |
| 1889 virtual Object* RetainAs(Object* object) { | 1882 virtual Object* RetainAs(Object* object) { |
| 1890 if (Marking::MarkBitFrom(HeapObject::cast(object)).Get()) { | 1883 if (Marking::MarkBitFrom(HeapObject::cast(object)).Get()) { |
| 1891 return object; | 1884 return object; |
| 1885 } else if (object->IsAllocationSite() && |
| 1886 !(AllocationSite::cast(object)->IsZombie())) { |
| 1887 // "dead" AllocationSites need to live long enough for a traversal of new |
| 1888 // space. These sites get a one-time reprieve. |
| 1889 AllocationSite* site = AllocationSite::cast(object); |
| 1890 site->MarkZombie(); |
| 1891 site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site); |
| 1892 return object; |
| 1892 } else { | 1893 } else { |
| 1893 return NULL; | 1894 return NULL; |
| 1894 } | 1895 } |
| 1895 } | 1896 } |
| 1896 }; | 1897 }; |
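The new RetainAs branch above gives an unmarked AllocationSite one extra collection cycle: the site is flagged as a zombie, marked via MarkAllocationSite, and retained so a following traversal of new space can still reach it; once it is already a zombie, the next cycle drops it. A minimal self-contained sketch of that one-time-reprieve pattern, using a made-up Site struct and RetainAs function rather than V8's internal AllocationSite/MarkBit types:

    #include <cstdio>

    // Hypothetical stand-in for an AllocationSite: tracks mark and zombie state.
    struct Site {
      bool marked = false;   // set by the marker when the site is reachable
      bool zombie = false;   // set once the site has used its reprieve
    };

    // Decide whether a weak reference to `site` survives this collection.
    // Mirrors the retainer logic above: live sites survive, dead sites survive
    // exactly once (becoming zombies), and dead zombies are dropped.
    Site* RetainAs(Site* site) {
      if (site->marked) return site;   // reachable: keep as-is
      if (!site->zombie) {             // dead, but first time: one-time reprieve
        site->zombie = true;           // next cycle it will be dropped
        site->marked = true;           // keep it alive through this cycle
        return site;
      }
      return nullptr;                  // dead zombie: clear the weak reference
    }

    int main() {
      Site dead_site;
      std::printf("first GC keeps it: %d\n", RetainAs(&dead_site) != nullptr);
      dead_site.marked = false;        // still unreachable in the next cycle
      std::printf("second GC drops it: %d\n", RetainAs(&dead_site) == nullptr);
    }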
| 1897 | 1898 |
| 1898 | 1899 |
| 1899 // Fill the marking stack with overflowed objects returned by the given | 1900 // Fill the marking stack with overflowed objects returned by the given |
| 1900 // iterator. Stop when the marking stack is filled or the end of the space | 1901 // iterator. Stop when the marking stack is filled or the end of the space |
| 1901 // is reached, whichever comes first. | 1902 // is reached, whichever comes first. |
| (...skipping 91 matching lines...) |
| 1993 while (current_cell != 0) { | 1994 while (current_cell != 0) { |
| 1994 int trailing_zeros = CompilerIntrinsics::CountTrailingZeros(current_cell); | 1995 int trailing_zeros = CompilerIntrinsics::CountTrailingZeros(current_cell); |
| 1995 current_cell >>= trailing_zeros; | 1996 current_cell >>= trailing_zeros; |
| 1996 offset += trailing_zeros; | 1997 offset += trailing_zeros; |
| 1997 Address address = cell_base + offset * kPointerSize; | 1998 Address address = cell_base + offset * kPointerSize; |
| 1998 HeapObject* object = HeapObject::FromAddress(address); | 1999 HeapObject* object = HeapObject::FromAddress(address); |
| 1999 | 2000 |
| 2000 int size = object->Size(); | 2001 int size = object->Size(); |
| 2001 survivors_size += size; | 2002 survivors_size += size; |
| 2002 | 2003 |
| 2003 if (FLAG_trace_track_allocation_sites && object->IsJSObject()) { | 2004 Heap::UpdateAllocationSiteFeedback(object); |
| 2004 if (AllocationMemento::FindForJSObject(JSObject::cast(object), true) | |
| 2005 != NULL) { | |
| 2006 heap()->allocation_mementos_found_++; | |
| 2007 } | |
| 2008 } | |
| 2009 | 2005 |
| 2010 offset++; | 2006 offset++; |
| 2011 current_cell >>= 1; | 2007 current_cell >>= 1; |
| 2012 // Aggressively promote young survivors to the old space. | 2008 // Aggressively promote young survivors to the old space. |
| 2013 if (TryPromoteObject(object, size)) { | 2009 if (TryPromoteObject(object, size)) { |
| 2014 continue; | 2010 continue; |
| 2015 } | 2011 } |
| 2016 | 2012 |
| 2017 // Promotion failed. Just migrate object to another semispace. | 2013 // Promotion failed. Just migrate object to another semispace. |
| 2018 MaybeObject* allocation = new_space->AllocateRaw(size); | 2014 MaybeObject* allocation = new_space->AllocateRaw(size); |
| (...skipping 72 matching lines...) |
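The evacuation loop above walks the live-object bitmap one cell at a time: CountTrailingZeros skips runs of unmarked words so only set bits (live objects) are visited, and each object's address is recovered as cell_base + offset * kPointerSize. A small stand-alone sketch of the same bit-scanning pattern, with hypothetical names (kWordSize, VisitMarkedOffsets) and the GCC/Clang __builtin_ctz intrinsic standing in for V8's CompilerIntrinsics helper:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical word size: each bit in a cell stands for one word of memory.
    const int kWordSize = 8;

    // Visit every set bit in `cell`, reporting the offset and address it encodes.
    // Same pattern as the loop above: skip zero runs with a count-trailing-zeros
    // intrinsic, then consume one bit per iteration.
    void VisitMarkedOffsets(uint32_t cell, uintptr_t cell_base) {
      int offset = 0;
      while (cell != 0) {
        int trailing_zeros = __builtin_ctz(cell);  // GCC/Clang intrinsic
        cell >>= trailing_zeros;                   // drop the skipped zero bits
        offset += trailing_zeros;
        uintptr_t address = cell_base + offset * kWordSize;
        std::printf("marked object at offset %d (address %#lx)\n",
                    offset, (unsigned long)address);
        offset++;                                  // move past this set bit
        cell >>= 1;
      }
    }

    int main() {
      // Bits 0, 3 and 17 set: three "marked objects" in this cell.
      VisitMarkedOffsets((1u << 0) | (1u << 3) | (1u << 17), 0x1000);
    }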
| 2091 StringTable* string_table = heap()->string_table(); | 2087 StringTable* string_table = heap()->string_table(); |
| 2092 // Mark the string table itself. | 2088 // Mark the string table itself. |
| 2093 MarkBit string_table_mark = Marking::MarkBitFrom(string_table); | 2089 MarkBit string_table_mark = Marking::MarkBitFrom(string_table); |
| 2094 SetMark(string_table, string_table_mark); | 2090 SetMark(string_table, string_table_mark); |
| 2095 // Explicitly mark the prefix. | 2091 // Explicitly mark the prefix. |
| 2096 string_table->IteratePrefix(visitor); | 2092 string_table->IteratePrefix(visitor); |
| 2097 ProcessMarkingDeque(); | 2093 ProcessMarkingDeque(); |
| 2098 } | 2094 } |
| 2099 | 2095 |
| 2100 | 2096 |
| 2097 void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) { |
| 2098 MarkBit mark_bit = Marking::MarkBitFrom(site); |
| 2099 SetMark(site, mark_bit); |
| 2100 } |
| 2101 |
| 2102 |
| 2101 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 2103 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { |
| 2102 // Mark the heap roots including global variables, stack variables, | 2104 // Mark the heap roots including global variables, stack variables, |
| 2103 // etc., and all objects reachable from them. | 2105 // etc., and all objects reachable from them. |
| 2104 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 2106 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
| 2105 | 2107 |
| 2106 // Handle the string table specially. | 2108 // Handle the string table specially. |
| 2107 MarkStringTable(visitor); | 2109 MarkStringTable(visitor); |
| 2108 | 2110 |
| 2109 MarkWeakObjectToCodeTable(); | 2111 MarkWeakObjectToCodeTable(); |
| 2110 | 2112 |
| (...skipping 2254 matching lines...) |
| 4365 while (buffer != NULL) { | 4367 while (buffer != NULL) { |
| 4366 SlotsBuffer* next_buffer = buffer->next(); | 4368 SlotsBuffer* next_buffer = buffer->next(); |
| 4367 DeallocateBuffer(buffer); | 4369 DeallocateBuffer(buffer); |
| 4368 buffer = next_buffer; | 4370 buffer = next_buffer; |
| 4369 } | 4371 } |
| 4370 *buffer_address = NULL; | 4372 *buffer_address = NULL; |
| 4371 } | 4373 } |
| 4372 | 4374 |
| 4373 | 4375 |
| 4374 } } // namespace v8::internal | 4376 } } // namespace v8::internal |