Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 1323993004: [heap] Separate scavenger functionality into own file. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Rebased. Created 5 years, 3 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/heap.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
 #include "src/base/utils/random-number-generator.h"
 #include "src/bootstrapper.h"
 #include "src/codegen.h"
 #include "src/compilation-cache.h"
 #include "src/conversions.h"
 #include "src/cpu-profiler.h"
 #include "src/debug/debug.h"
 #include "src/deoptimizer.h"
 #include "src/global-handles.h"
 #include "src/heap/array-buffer-tracker.h"
 #include "src/heap/gc-idle-time-handler.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact-inl.h"
 #include "src/heap/mark-compact.h"
 #include "src/heap/memory-reducer.h"
 #include "src/heap/object-stats.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/objects-visiting.h"
+#include "src/heap/scavenger-inl.h"
 #include "src/heap/store-buffer.h"
 #include "src/heap-profiler.h"
 #include "src/interpreter/interpreter.h"
 #include "src/runtime-profiler.h"
 #include "src/scopeinfo.h"
 #include "src/snapshot/natives.h"
 #include "src/snapshot/serialize.h"
 #include "src/snapshot/snapshot.h"
 #include "src/type-feedback-vector.h"
 #include "src/utils.h"
(...skipping 73 matching lines...)
       nodes_promoted_(0),
       maximum_size_scavenges_(0),
       max_gc_pause_(0.0),
       total_gc_time_ms_(0.0),
       max_alive_after_gc_(0),
       min_in_mutator_(kMaxInt),
       marking_time_(0.0),
       sweeping_time_(0.0),
       last_idle_notification_time_(0.0),
       last_gc_time_(0.0),
+      scavenge_collector_(nullptr),
       mark_compact_collector_(this),
       store_buffer_(this),
       incremental_marking_(this),
       memory_reducer_(nullptr),
       object_stats_(nullptr),
       full_codegen_bytes_generated_(0),
       crankshaft_codegen_bytes_generated_(0),
       new_space_allocation_counter_(0),
       old_generation_allocation_counter_(0),
       old_generation_size_at_last_gc_(0),
(...skipping 1253 matching lines...)

   FlushNumberStringCache();
   if (FLAG_cleanup_code_caches_at_gc) {
     polymorphic_code_cache()->set_cache(undefined_value());
   }

   ClearNormalizedMapCaches();
 }


-// Helper class for copying HeapObjects
-class ScavengeVisitor : public ObjectVisitor {
- public:
-  explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
-
-  void VisitPointer(Object** p) { ScavengePointer(p); }
-
-  void VisitPointers(Object** start, Object** end) {
-    // Copy all HeapObject pointers in [start, end)
-    for (Object** p = start; p < end; p++) ScavengePointer(p);
-  }
-
- private:
-  void ScavengePointer(Object** p) {
-    Object* object = *p;
-    if (!heap_->InNewSpace(object)) return;
-    Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
-                         reinterpret_cast<HeapObject*>(object));
-  }
-
-  Heap* heap_;
-};
-
-
 #ifdef VERIFY_HEAP
 // Visitor class to verify pointers in code or data space do not point into
 // new space.
 class VerifyNonPointerSpacePointersVisitor : public ObjectVisitor {
  public:
   explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {}
   void VisitPointers(Object** start, Object** end) {
     for (Object** current = start; current < end; current++) {
       if ((*current)->IsHeapObject()) {
         CHECK(!heap_->InNewSpace(HeapObject::cast(*current)));
(...skipping 124 matching lines...)

   // Implements Cheney's copying algorithm
   LOG(isolate_, ResourceEvent("scavenge", "begin"));

   // Clear descriptor cache.
   isolate_->descriptor_lookup_cache()->Clear();

   // Used for updating survived_since_last_expansion_ at function end.
   intptr_t survived_watermark = PromotedSpaceSizeOfObjects();

-  SelectScavengingVisitorsTable();
+  scavenge_collector_->SelectScavengingVisitorsTable();

   array_buffer_tracker()->PrepareDiscoveryInNewSpace();

   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
   new_space_.ResetAllocationInfo();

   // We need to sweep newly copied objects which can be either in the
   // to space or promoted to the old generation. For to-space
(...skipping 21 matching lines...)
     GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_ROOTS);
     IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
   }

   {
     // Copy objects reachable from the old generation.
     GCTracer::Scope gc_scope(tracer(),
                              GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS);
     StoreBufferRebuildScope scope(this, store_buffer(),
                                   &ScavengeStoreBufferCallback);
-    store_buffer()->IteratePointersToNewSpace(&ScavengeObject);
+    store_buffer()->IteratePointersToNewSpace(&Scavenger::ScavengeObject);
   }

   {
     GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_WEAK);
     // Copy objects reachable from the encountered weak collections list.
     scavenge_visitor.VisitPointer(&encountered_weak_collections_);
     // Copy objects reachable from the encountered weak cells.
     scavenge_visitor.VisitPointer(&encountered_weak_cells_);
   }

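Note: the hunk above starts at the comment "Implements Cheney's copying algorithm". As a minimal, self-contained sketch of that algorithm with toy types (nothing below is V8 code, and the fixed arity of fields[] is an assumption for brevity): the scan pointer chases the allocation pointer through to-space, and the region between them is exactly the queue of copied-but-unscanned objects that DoScavenge later walks with new_space_front and new_space_.top().

    // Cheney's algorithm in miniature; Obj and friends are toy stand-ins.
    #include <cstddef>
    #include <cstring>

    struct Obj {
      Obj* forward;       // nullptr until evacuated, then the to-space copy
      size_t num_fields;  // number of live entries in fields[]
      Obj* fields[4];     // outgoing pointers (fixed arity keeps the toy simple)
    };

    static char* g_top;  // allocation pointer into to-space

    static Obj* Evacuate(Obj* o) {
      if (o == nullptr) return nullptr;
      if (o->forward != nullptr) return o->forward;  // already copied
      Obj* target = reinterpret_cast<Obj*>(g_top);
      g_top += sizeof(Obj);
      std::memcpy(target, o, sizeof(Obj));
      o->forward = target;  // leave a forwarding pointer in from-space
      return target;
    }

    void Scavenge(Obj** roots, size_t num_roots, char* to_space) {
      g_top = to_space;
      char* scan = to_space;  // [scan, g_top) is the grey-object queue
      for (size_t i = 0; i < num_roots; i++) roots[i] = Evacuate(roots[i]);
      while (scan < g_top) {  // process the queue until it drains
        Obj* grey = reinterpret_cast<Obj*>(scan);
        for (size_t i = 0; i < grey->num_fields; i++)
          grey->fields[i] = Evacuate(grey->fields[i]);
        scan += sizeof(Obj);
      }
    }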
(...skipping 208 matching lines...)
     }

    private:
     v8::ExternalResourceVisitor* visitor_;
   } external_string_table_visitor(visitor);

   external_string_table_.Iterate(&external_string_table_visitor);
 }


-class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
- public:
-  static inline void VisitPointer(Heap* heap, Object** p) {
-    Object* object = *p;
-    if (!heap->InNewSpace(object)) return;
-    Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
-                         reinterpret_cast<HeapObject*>(object));
-  }
-};
-
-
 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
                          Address new_space_front) {
   do {
     SemiSpace::AssertValidRange(new_space_front, new_space_.top());
     // The addresses new_space_front and new_space_.top() define a
     // queue of unprocessed copied objects. Process them until the
     // queue is empty.
     while (new_space_front != new_space_.top()) {
       if (!NewSpacePage::IsAtEnd(new_space_front)) {
         HeapObject* object = HeapObject::FromAddress(new_space_front);
         new_space_front +=
-            NewSpaceScavenger::IterateBody(object->map(), object);
+            StaticScavengeVisitor::IterateBody(object->map(), object);
       } else {
         new_space_front =
             NewSpacePage::FromLimit(new_space_front)->next_page()->area_start();
       }
     }

     // Promote and process all the to-be-promoted objects.
     {
       StoreBufferRebuildScope scope(this, store_buffer(),
                                     &ScavengeStoreBufferCallback);
(...skipping 24 matching lines...)
         LayoutDescriptorHelper helper(target->map());
         bool has_only_tagged_fields = helper.all_fields_tagged();

         if (!has_only_tagged_fields) {
           for (int offset = 0; offset < size;) {
             int end_of_region_offset;
             if (helper.IsTagged(offset, size, &end_of_region_offset)) {
               IterateAndMarkPointersToFromSpace(
                   target, obj_address + offset,
                   obj_address + end_of_region_offset, record_slots,
-                  &ScavengeObject);
+                  &Scavenger::ScavengeObject);
             }
             offset = end_of_region_offset;
           }
         } else {
 #endif
           IterateAndMarkPointersToFromSpace(target, obj_address,
                                             obj_address + size, record_slots,
-                                            &ScavengeObject);
+                                            &Scavenger::ScavengeObject);
 #if V8_DOUBLE_FIELDS_UNBOXING
         }
 #endif
       }
     }

     // Take another spin if there are now unswept objects in new space
     // (there are currently no more unswept promoted objects).
   } while (new_space_front != new_space_.top());

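Note: both call sites in this hunk now pass &Scavenger::ScavengeObject where the old code passed Heap's &ScavengeObject. These iterators take a raw function pointer, so the entry point has to stay a static member function after the move. A sketch of the plumbing (the callback typedef mirrors V8's ObjectSlotCallback; treat the exact signature as an assumption):

    // Stand-in types; only the function-pointer mechanics matter here.
    struct HeapObject {};

    // Mirrors V8's ObjectSlotCallback typedef (assumed signature).
    typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);

    struct Scavenger {
      // A *static* member decays to a plain function pointer; a non-static
      // member (with its hidden 'this' argument) would not compile below.
      static void ScavengeObject(HeapObject** p, HeapObject* object) {
        // ... evacuate 'object' and rewrite the slot '*p' ...
      }
    };

    void IteratePointersToNewSpace(ObjectSlotCallback callback) {
      HeapObject obj;
      HeapObject* ptr = &obj;
      callback(&ptr, ptr);  // toy invocation of the callback
    }

    void Example() { IteratePointersToNewSpace(&Scavenger::ScavengeObject); }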
(...skipping 71 matching lines...)
 void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
   return array_buffer_tracker()->RegisterNew(buffer);
 }


 void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
   return array_buffer_tracker()->Unregister(buffer);
 }


-enum LoggingAndProfiling {
-  LOGGING_AND_PROFILING_ENABLED,
-  LOGGING_AND_PROFILING_DISABLED
-};
-
-
-enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };
-
-
-template <MarksHandling marks_handling,
-          LoggingAndProfiling logging_and_profiling_mode>
-class ScavengingVisitor : public StaticVisitorBase {
- public:
-  static void Initialize() {
-    table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
-    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
-    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
-    table_.Register(kVisitByteArray, &EvacuateByteArray);
-    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
-    table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
-    table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
-    table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
-    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);
-
-    table_.Register(
-        kVisitNativeContext,
-        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-            Context::kSize>);
-
-    table_.Register(
-        kVisitConsString,
-        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-            ConsString::kSize>);
-
-    table_.Register(
-        kVisitSlicedString,
-        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-            SlicedString::kSize>);
-
-    table_.Register(
-        kVisitSymbol,
-        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-            Symbol::kSize>);
-
-    table_.Register(
-        kVisitSharedFunctionInfo,
-        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-            SharedFunctionInfo::kSize>);
-
-    table_.Register(kVisitJSWeakCollection,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
-    table_.Register(kVisitJSTypedArray,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
-    table_.Register(kVisitJSDataView,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
-    table_.Register(kVisitJSRegExp,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
-    if (marks_handling == IGNORE_MARKS) {
-      table_.Register(
-          kVisitJSFunction,
-          &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-              JSFunction::kSize>);
-    } else {
-      table_.Register(kVisitJSFunction, &EvacuateJSFunction);
-    }
-
-    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
-                                   kVisitDataObject, kVisitDataObjectGeneric>();
-
-    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
-                                   kVisitJSObject, kVisitJSObjectGeneric>();
-
-    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
-                                   kVisitStruct, kVisitStructGeneric>();
-  }
-
-  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
-    return &table_;
-  }
-
- private:
-  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
-
-  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
-    bool should_record = false;
-#ifdef DEBUG
-    should_record = FLAG_heap_stats;
-#endif
-    should_record = should_record || FLAG_log_gc;
-    if (should_record) {
-      if (heap->new_space()->Contains(obj)) {
-        heap->new_space()->RecordAllocation(obj);
-      } else {
-        heap->new_space()->RecordPromotion(obj);
-      }
-    }
-  }
-
-  // Helper function used by CopyObject to copy a source object to an
-  // allocated target object and update the forwarding pointer in the source
-  // object. Returns the target object.
-  INLINE(static void MigrateObject(Heap* heap, HeapObject* source,
-                                   HeapObject* target, int size)) {
-    // If we migrate into to-space, then the to-space top pointer should be
-    // right after the target object. Incorporate double alignment
-    // over-allocation.
-    DCHECK(!heap->InToSpace(target) ||
-           target->address() + size == heap->new_space()->top() ||
-           target->address() + size + kPointerSize == heap->new_space()->top());
-
-    // Make sure that we do not overwrite the promotion queue which is at
-    // the end of to-space.
-    DCHECK(!heap->InToSpace(target) ||
-           heap->promotion_queue()->IsBelowPromotionQueue(
-               heap->new_space()->top()));
-
-    // Copy the content of source to target.
-    heap->CopyBlock(target->address(), source->address(), size);
-
-    // Set the forwarding address.
-    source->set_map_word(MapWord::FromForwardingAddress(target));
-
-    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
-      // Update NewSpace stats if necessary.
-      RecordCopiedObject(heap, target);
-      heap->OnMoveEvent(target, source, size);
-    }
-
-    if (marks_handling == TRANSFER_MARKS) {
-      if (Marking::TransferColor(source, target)) {
-        MemoryChunk::IncrementLiveBytesFromGC(target, size);
-      }
-    }
-  }
-
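Note: the set_map_word(MapWord::FromForwardingAddress(target)) call in MigrateObject above is the heart of the scheme: the first word of the from-space object is recycled to record where the object moved, and IsForwardingAddress() later distinguishes a real map from a forwarding pointer. A simplified model (the low-bit tagging below is illustrative only; V8's actual encoding differs):

    #include <cstdint>

    // Toy model of V8's MapWord: an object's first word is either a map
    // pointer or, once evacuated, a forwarding address. Here the low bit
    // marks forwarding addresses; V8's real encoding is different.
    class MapWord {
     public:
      static MapWord FromForwardingAddress(void* target) {
        return MapWord(reinterpret_cast<uintptr_t>(target) | 1u);
      }
      bool IsForwardingAddress() const { return (value_ & 1u) != 0; }
      void* ToForwardingAddress() const {
        return reinterpret_cast<void*>(value_ & ~uintptr_t{1});
      }

     private:
      explicit MapWord(uintptr_t value) : value_(value) {}
      uintptr_t value_;
    };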
-  template <AllocationAlignment alignment>
-  static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
-                                         HeapObject* object, int object_size) {
-    Heap* heap = map->GetHeap();
-
-    DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
-    AllocationResult allocation =
-        heap->new_space()->AllocateRaw(object_size, alignment);
-
-    HeapObject* target = NULL;  // Initialization to please compiler.
-    if (allocation.To(&target)) {
-      // Order is important here: Set the promotion limit before storing a
-      // filler for double alignment or migrating the object. Otherwise we
-      // may end up overwriting promotion queue entries when we migrate the
-      // object.
-      heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
-
-      MigrateObject(heap, object, target, object_size);
-
-      // Update slot to new target.
-      *slot = target;
-
-      heap->IncrementSemiSpaceCopiedObjectSize(object_size);
-      return true;
-    }
-    return false;
-  }
-
-
-  template <ObjectContents object_contents, AllocationAlignment alignment>
-  static inline bool PromoteObject(Map* map, HeapObject** slot,
-                                   HeapObject* object, int object_size) {
-    Heap* heap = map->GetHeap();
-
-    AllocationResult allocation =
-        heap->old_space()->AllocateRaw(object_size, alignment);
-
-    HeapObject* target = NULL;  // Initialization to please compiler.
-    if (allocation.To(&target)) {
-      MigrateObject(heap, object, target, object_size);
-
-      // Update slot to new target.
-      *slot = target;
-
-      if (object_contents == POINTER_OBJECT) {
-        if (map->instance_type() == JS_FUNCTION_TYPE) {
-          heap->promotion_queue()->insert(target,
-                                          JSFunction::kNonWeakFieldsEndOffset);
-        } else {
-          heap->promotion_queue()->insert(target, object_size);
-        }
-      }
-      heap->IncrementPromotedObjectsSize(object_size);
-      return true;
-    }
-    return false;
-  }
-
-
-  template <ObjectContents object_contents, AllocationAlignment alignment>
-  static inline void EvacuateObject(Map* map, HeapObject** slot,
-                                    HeapObject* object, int object_size) {
-    SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
-    SLOW_DCHECK(object->Size() == object_size);
-    Heap* heap = map->GetHeap();
-
-    if (!heap->ShouldBePromoted(object->address(), object_size)) {
-      // A semi-space copy may fail due to fragmentation. In that case, we
-      // try to promote the object.
-      if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
-        return;
-      }
-    }
-
-    if (PromoteObject<object_contents, alignment>(map, slot, object,
-                                                  object_size)) {
-      return;
-    }
-
-    // If promotion failed, we try to copy the object to the other semi-space
-    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;
-
-    UNREACHABLE();
-  }
-
-
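Note: EvacuateObject above is a three-step fallback ladder. Restated compactly with stubbed predicates (the stand-in bodies are assumptions for illustration, not V8 behavior):

    #include <cassert>

    // Stand-ins for the three attempts in EvacuateObject above.
    static bool ShouldBePromoted() { return false; }    // survived a scavenge?
    static bool SemiSpaceCopyObject() { return true; }  // to-space alloc ok?
    static bool PromoteObject() { return true; }        // old-space alloc ok?

    void EvacuateObjectLadder() {
      // 1. Young objects first try a cheap semispace copy.
      if (!ShouldBePromoted() && SemiSpaceCopyObject()) return;
      // 2. Old-enough objects, or copies that failed on fragmentation,
      //    are promoted into old space.
      if (PromoteObject()) return;
      // 3. If promotion fails too, retry the semispace copy; to-space was
      //    just flipped empty, so one of the two paths must succeed.
      bool copied = SemiSpaceCopyObject();
      assert(copied && "mirrors UNREACHABLE() in the real code");
      (void)copied;
    }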
-  static inline void EvacuateJSFunction(Map* map, HeapObject** slot,
-                                        HeapObject* object) {
-    ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-        JSFunction::kSize>(map, slot, object);
-
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    HeapObject* target = map_word.ToForwardingAddress();
-
-    MarkBit mark_bit = Marking::MarkBitFrom(target);
-    if (Marking::IsBlack(mark_bit)) {
-      // This object is black and it might not be rescanned by marker.
-      // We should explicitly record code entry slot for compaction because
-      // promotion queue processing (IterateAndMarkPointersToFromSpace) will
-      // miss it as it is not HeapObject-tagged.
-      Address code_entry_slot =
-          target->address() + JSFunction::kCodeEntryOffset;
-      Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
-      map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
-          target, code_entry_slot, code);
-    }
-  }
-
-
-  static inline void EvacuateFixedArray(Map* map, HeapObject** slot,
-                                        HeapObject* object) {
-    int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
-    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
-                                                 object_size);
-  }
-
-
-  static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot,
-                                              HeapObject* object) {
-    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
-    int object_size = FixedDoubleArray::SizeFor(length);
-    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
-  }
-
-
-  static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot,
-                                             HeapObject* object) {
-    int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size();
-    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
-
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    FixedTypedArrayBase* target =
-        reinterpret_cast<FixedTypedArrayBase*>(map_word.ToForwardingAddress());
-    if (target->base_pointer() != Smi::FromInt(0))
-      target->set_base_pointer(target, SKIP_WRITE_BARRIER);
-  }
-
-
-  static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
-                                               HeapObject* object) {
-    int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
-    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
-
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    FixedTypedArrayBase* target =
-        reinterpret_cast<FixedTypedArrayBase*>(map_word.ToForwardingAddress());
-    if (target->base_pointer() != Smi::FromInt(0))
-      target->set_base_pointer(target, SKIP_WRITE_BARRIER);
-  }
-
-
-  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
-                                           HeapObject* object) {
-    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
-
-    Heap* heap = map->GetHeap();
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    HeapObject* target = map_word.ToForwardingAddress();
-    if (!heap->InNewSpace(target)) {
-      heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
-    }
-  }
-
-
-  static inline void EvacuateByteArray(Map* map, HeapObject** slot,
-                                       HeapObject* object) {
-    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
-    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
-  }
-
-
-  static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
-                                              HeapObject* object) {
-    int object_size = SeqOneByteString::cast(object)
-                          ->SeqOneByteStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
-  }
-
-
-  static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot,
-                                              HeapObject* object) {
-    int object_size = SeqTwoByteString::cast(object)
-                          ->SeqTwoByteStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
-  }
-
-
-  static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
-                                               HeapObject* object) {
-    DCHECK(IsShortcutCandidate(map->instance_type()));
-
-    Heap* heap = map->GetHeap();
-
-    if (marks_handling == IGNORE_MARKS &&
-        ConsString::cast(object)->unchecked_second() == heap->empty_string()) {
-      HeapObject* first =
-          HeapObject::cast(ConsString::cast(object)->unchecked_first());
-
-      *slot = first;
-
-      if (!heap->InNewSpace(first)) {
-        object->set_map_word(MapWord::FromForwardingAddress(first));
-        return;
-      }
-
-      MapWord first_word = first->map_word();
-      if (first_word.IsForwardingAddress()) {
-        HeapObject* target = first_word.ToForwardingAddress();
-
-        *slot = target;
-        object->set_map_word(MapWord::FromForwardingAddress(target));
-        return;
-      }
-
-      Heap::ScavengeObjectSlow(slot, first);
-      object->set_map_word(MapWord::FromForwardingAddress(*slot));
-      return;
-    }
-
-    int object_size = ConsString::kSize;
-    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
-                                                 object_size);
-  }
-
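Note: the "shortcut" in EvacuateShortcutCandidate is the cons-string optimization: a ConsString whose second half is the empty string is a transparent wrapper, so the slot is forwarded straight to first rather than evacuating the wrapper. In miniature (toy types, not V8's):

    // Toy version of cons-string shortcutting: skip a trivial wrapper node.
    struct Str {
      bool is_cons = false;
      Str* first = nullptr;   // meaningful only when is_cons is true
      Str* second = nullptr;  // empty-string sentinel marks a shortcut
    };

    static Str g_empty;  // stand-in for heap->empty_string()

    Str* Shortcut(Str* s) {
      // Mirrors the IGNORE_MARKS fast path in EvacuateShortcutCandidate:
      // forward through the wrapper instead of copying it.
      if (s->is_cons && s->second == &g_empty) return s->first;
      return s;
    }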
-  template <ObjectContents object_contents>
-  class ObjectEvacuationStrategy {
-   public:
-    template <int object_size>
-    static inline void VisitSpecialized(Map* map, HeapObject** slot,
-                                        HeapObject* object) {
-      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
-                                                    object_size);
-    }
-
-    static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) {
-      int object_size = map->instance_size();
-      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
-                                                    object_size);
-    }
-  };
-
-  static VisitorDispatchTable<ScavengingCallback> table_;
-};
-
-
-template <MarksHandling marks_handling,
-          LoggingAndProfiling logging_and_profiling_mode>
-VisitorDispatchTable<ScavengingCallback>
-    ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;
-
-
-static void InitializeScavengingVisitorsTables() {
-  ScavengingVisitor<TRANSFER_MARKS,
-                    LOGGING_AND_PROFILING_DISABLED>::Initialize();
-  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
-  ScavengingVisitor<TRANSFER_MARKS,
-                    LOGGING_AND_PROFILING_ENABLED>::Initialize();
-  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
-}
-
-
-void Heap::SelectScavengingVisitorsTable() {
-  bool logging_and_profiling =
-      FLAG_verify_predictable || isolate()->logger()->is_logging() ||
-      isolate()->cpu_profiler()->is_profiling() ||
-      (isolate()->heap_profiler() != NULL &&
-       isolate()->heap_profiler()->is_tracking_object_moves());
-
-  if (!incremental_marking()->IsMarking()) {
-    if (!logging_and_profiling) {
-      scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
-          IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::GetTable());
-    } else {
-      scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
-          IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::GetTable());
-    }
-  } else {
-    if (!logging_and_profiling) {
-      scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
-          TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>::GetTable());
-    } else {
-      scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
-          TRANSFER_MARKS, LOGGING_AND_PROFILING_ENABLED>::GetTable());
-    }
-
-    if (incremental_marking()->IsCompacting()) {
-      // When compacting forbid short-circuiting of cons-strings.
-      // Scavenging code relies on the fact that new space object
-      // can't be evacuated into evacuation candidate but
-      // short-circuiting violates this assumption.
-      scavenging_visitors_table_.Register(
-          StaticVisitorBase::kVisitShortcutCandidate,
-          scavenging_visitors_table_.GetVisitorById(
-              StaticVisitorBase::kVisitConsString));
-    }
-  }
-}
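Note: SelectScavengingVisitorsTable above chooses among the four statically initialized tables along two independent axes (is incremental marking running, and is any logger or profiler active), then disables the cons-string shortcut while compacting, since shortcutting could leave a new-space slot pointing into an evacuation candidate. The choice itself is a 2x2 lookup; schematically (illustrative code, not the V8 API):

    // Illustrative 2x2 selection mirroring SelectScavengingVisitorsTable.
    enum class Marks { kIgnore, kTransfer };     // incremental marking off/on
    enum class Logging { kDisabled, kEnabled };  // any logger/profiler active

    struct Table {};  // stand-in for VisitorDispatchTable<ScavengingCallback>

    static Table tables[2][2];  // one table per (Marks, Logging) combination

    Table* Select(bool is_marking, bool logging_and_profiling) {
      int m = is_marking ? 1 : 0;             // Marks axis
      int l = logging_and_profiling ? 1 : 0;  // Logging axis
      return &tables[m][l];
    }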
-
-
-void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
-  MapWord first_word = object->map_word();
-  SLOW_DCHECK(!first_word.IsForwardingAddress());
-  Map* map = first_word.ToMap();
-  map->GetHeap()->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
-}
-
-
 void Heap::ConfigureInitialOldGenerationSize() {
   if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
     old_generation_allocation_limit_ =
         Max(kMinimumOldGenerationAllocationLimit,
             static_cast<intptr_t>(
                 static_cast<double>(old_generation_allocation_limit_) *
                 (tracer()->AverageSurvivalRatio() / 100)));
   }
 }

(...skipping 3008 matching lines...)
   for (PagedSpace* space = spaces.next(); space != NULL;
        space = spaces.next()) {
     space->EmptyAllocationInfo();
   }
 }


 V8_DECLARE_ONCE(initialize_gc_once);

 static void InitializeGCOnce() {
-  InitializeScavengingVisitorsTables();
-  NewSpaceScavenger::Initialize();
+  Scavenger::Initialize();
+  StaticScavengeVisitor::Initialize();
   MarkCompactCollector::Initialize();
 }

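Note: InitializeGCOnce mutates process-wide static dispatch tables, so it must run exactly once; V8 gates it with V8_DECLARE_ONCE plus base::CallOnce (the CallOnce call site is outside this hunk). The standard-library equivalent of the pattern, as a sketch:

    #include <mutex>

    static std::once_flag initialize_gc_once;

    static void InitializeGCOnce() {
      // One-time, process-wide setup of static visitor dispatch tables.
    }

    void EnsureGCInitialized() {
      // Safe to call from any thread, any number of times.
      std::call_once(initialize_gc_once, InitializeGCOnce);
    }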

 bool Heap::SetUp() {
 #ifdef DEBUG
   allocation_timeout_ = FLAG_gc_interval;
 #endif

   // Initialize heap spaces and initial maps and objects. Whenever something
(...skipping 58 matching lines...)
     }
   }

   for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
        i++) {
     deferred_counters_[i] = 0;
   }

   tracer_ = new GCTracer(this);

+  scavenge_collector_ = new Scavenger(this);
+
   memory_reducer_ = new MemoryReducer(this);

   object_stats_ = new ObjectStats(this);
   object_stats_->ClearObjectStats(true);

   array_buffer_tracker_ = new ArrayBufferTracker(this);

   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
   LOG(isolate_, IntPtrTEvent("heap-available", Available()));

(...skipping 91 matching lines...)
            map_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_lo_space=%" V8_PTR_PREFIX "d ",
            lo_space_->MaximumCommittedMemory());
     PrintF("\n\n");
   }

   if (FLAG_verify_predictable) {
     PrintAlloctionsHash();
   }

+  delete scavenge_collector_;
+  scavenge_collector_ = nullptr;
+
   if (memory_reducer_ != nullptr) {
     memory_reducer_->TearDown();
     delete memory_reducer_;
     memory_reducer_ = nullptr;
   }

   delete object_stats_;
   object_stats_ = nullptr;

   WaitUntilUnmappingOfFreeChunksCompleted();
(...skipping 896 matching lines...)
     *object_sub_type = "CODE_AGE/" #name; \
     return true;
       CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME)
 #undef COMPARE_AND_RETURN_NAME
   }
   return false;
 }

 }  // namespace internal
 }  // namespace v8
