Chromium Code Reviews

Diff: src/heap/heap.h

Issue 1053203007: Revert of Reland "Remove the weak list of views from array buffers" (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 8 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_HEAP_HEAP_H_
 #define V8_HEAP_HEAP_H_

 #include <cmath>

 #include "src/allocation.h"
(...skipping 865 matching lines...)
   void set_array_buffers_list(Object* object) { array_buffers_list_ = object; }
   Object* array_buffers_list() const { return array_buffers_list_; }

   void set_last_array_buffer_in_list(Object* object) {
     last_array_buffer_in_list_ = object;
   }
   Object* last_array_buffer_in_list() const {
     return last_array_buffer_in_list_;
   }

+  void set_new_array_buffer_views_list(Object* object) {
+    new_array_buffer_views_list_ = object;
+  }
+  Object* new_array_buffer_views_list() const {
+    return new_array_buffer_views_list_;
+  }
+
   void set_allocation_sites_list(Object* object) {
     allocation_sites_list_ = object;
   }
   Object* allocation_sites_list() { return allocation_sites_list_; }

   // Used in CreateAllocationSiteStub and the (de)serializer.
   Object** allocation_sites_list_address() { return &allocation_sites_list_; }

   void set_encountered_weak_collections(Object* weak_collection) {
     encountered_weak_collections_ = weak_collection;
(...skipping 586 matching lines...)
   // trigger the event. In order to track ALL allocations one must turn off
   // FLAG_inline_new and FLAG_use_allocation_folding.
   inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);

   // This event is triggered after an object is moved to a new place.
   inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                           int size_in_bytes);

   bool deserialization_complete() const { return deserialization_complete_; }

+  bool migration_failure() const { return migration_failure_; }
+  void set_migration_failure(bool migration_failure) {
+    migration_failure_ = migration_failure;
+  }
+
+  bool previous_migration_failure() const {
+    return previous_migration_failure_;
+  }
+  void set_previous_migration_failure(bool previous_migration_failure) {
+    previous_migration_failure_ = previous_migration_failure;
+  }
+
  protected:
   // Methods made available to tests.

   // Allocates a JS Map in the heap.
   MUST_USE_RESULT AllocationResult
       AllocateMap(InstanceType instance_type, int instance_size,
                   ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);

   // Allocates and initializes a new JavaScript object based on a
   // constructor.
(...skipping 154 matching lines...)
   // for all spaces. This is used to disable allocations in generated code.
   bool inline_allocation_disabled_;

   // Weak list heads, threaded through the objects.
   // List heads are initialized lazily and contain the undefined_value at start.
   Object* native_contexts_list_;
   Object* array_buffers_list_;
   Object* last_array_buffer_in_list_;
   Object* allocation_sites_list_;

+  // This is a global list of array buffer views in new space. When the views
+  // get promoted, they are removed from the list and added to the corresponding
+  // array buffer.
+  Object* new_array_buffer_views_list_;
+
   // List of encountered weak collections (JSWeakMap and JSWeakSet) during
   // marking. It is initialized during marking, destroyed after marking and
   // contains Smi(0) while marking is not active.
   Object* encountered_weak_collections_;

   Object* encountered_weak_cells_;

   StoreBufferRebuilder store_buffer_rebuilder_;

   struct StringTypeTable {
(...skipping 310 matching lines...)
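For readers unfamiliar with the weak-list idiom used in the chunk above, the following standalone C++ sketch illustrates the idea behind new_array_buffer_views_list_: views allocated in new space are threaded onto one global list, and when the scavenger promotes a view it is unlinked from that list and moved onto the list owned by its backing array buffer. All names here (View, ArrayBuffer, RegisterNewSpaceView, ProcessNewSpaceViews) are invented stand-ins for the example; V8 threads the links through the heap objects themselves rather than through separate node structs.

#include <cassert>

// Hypothetical stand-ins for heap objects.
struct ArrayBuffer;
struct View {
  ArrayBuffer* buffer = nullptr;  // the buffer this view refers to
  View* next_view = nullptr;      // link used by whichever list holds the view
  bool promoted = false;          // set by the (simulated) scavenger
};
struct ArrayBuffer {
  View* views = nullptr;  // views that survived promotion, owned per buffer
};

// Global list head for views that still live in new space.
static View* new_space_views = nullptr;

// Called when a view is allocated in new space: prepend it to the global list.
void RegisterNewSpaceView(View* v) {
  v->next_view = new_space_views;
  new_space_views = v;
}

// Called after a scavenge: walk the global list once, keep unpromoted views,
// and move promoted views onto their buffer's own list.
void ProcessNewSpaceViews() {
  View* remaining = nullptr;
  for (View* v = new_space_views; v != nullptr;) {
    View* next = v->next_view;
    if (v->promoted) {
      v->next_view = v->buffer->views;  // splice onto the buffer's list
      v->buffer->views = v;
    } else {
      v->next_view = remaining;  // keep in the new-space list
      remaining = v;
    }
    v = next;
  }
  new_space_views = remaining;
}

int main() {
  ArrayBuffer ab;
  View a{&ab}, b{&ab};
  RegisterNewSpaceView(&a);
  RegisterNewSpaceView(&b);
  a.promoted = true;  // pretend the scavenger promoted 'a'
  ProcessNewSpaceViews();
  assert(ab.views == &a && new_space_views == &b);
}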

   // Performs a major collection in the whole heap.
   void MarkCompact();

   // Code to be run before and after mark-compact.
   void MarkCompactPrologue();
   void MarkCompactEpilogue();

   void ProcessNativeContexts(WeakObjectRetainer* retainer);
   void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young);
+  void ProcessNewArrayBufferViews(WeakObjectRetainer* retainer);
   void ProcessAllocationSites(WeakObjectRetainer* retainer);

   // Deopts all code that contains allocation instructions which are tenured or
   // not tenured. Moreover, it clears the pretenuring allocation site statistics.
   void ResetAllAllocationSitesDependentCode(PretenureFlag flag);

   // Evaluates local pretenuring for the old space and calls
   // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
   // the old space.
   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
(...skipping 139 matching lines...)
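The cluster of Process*(WeakObjectRetainer*) declarations above shares one shape: after a GC pass, a weak list is walked and a retainer callback reports, for each element, either the object to keep (possibly at its new address) or null if it died. The sketch below only illustrates that shape with invented names (Retainer, Node, WalkWeakList); it is not V8's actual implementation.

#include <cassert>

struct Node {
  Node* next = nullptr;
  bool alive = true;          // stand-in for "still reachable after GC"
  Node* forwarded = nullptr;  // stand-in for "moved to this new address"
};

// Callback asked about every weak-list element: return the object to keep
// (possibly its new location) or nullptr if the element should be dropped.
struct Retainer {
  virtual ~Retainer() = default;
  virtual Node* RetainAs(Node* object) = 0;
};

// Generic weak-list walk: rebuild the list, skipping dropped elements and
// following forwarding pointers reported by the retainer.
Node* WalkWeakList(Node* head, Retainer* retainer) {
  Node* new_head = nullptr;
  Node** tail = &new_head;
  for (Node* n = head; n != nullptr; n = n->next) {
    Node* kept = retainer->RetainAs(n);
    if (kept == nullptr) continue;  // element died; unlink it
    *tail = kept;
    tail = &kept->next;
  }
  *tail = nullptr;
  return new_head;
}

struct SimpleRetainer : Retainer {
  Node* RetainAs(Node* object) override {
    if (!object->alive) return nullptr;
    return object->forwarded ? object->forwarded : object;
  }
};

int main() {
  Node a, b, c;
  a.next = &b;
  b.next = &c;
  b.alive = false;  // pretend 'b' died during GC
  SimpleRetainer r;
  Node* head = WalkWeakList(&a, &r);
  assert(head == &a && a.next == &c && c.next == nullptr);
}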
   MemoryChunk* chunks_queued_for_free_;

   base::Mutex relocation_mutex_;

   int gc_callbacks_depth_;

   bool deserialization_complete_;

   bool concurrent_sweeping_enabled_;

+  // A migration failure indicates that a semi-space copy of an object during
+  // a scavenge failed and the object got promoted instead.
+  bool migration_failure_;
+
+  // A migration failure happened in the previous scavenge.
+  bool previous_migration_failure_;
+
   friend class AlwaysAllocateScope;
   friend class Deserializer;
   friend class Factory;
   friend class GCCallbacksScope;
   friend class GCTracer;
   friend class HeapIterator;
   friend class Isolate;
   friend class MarkCompactCollector;
   friend class MarkCompactMarkingVisitor;
   friend class MapCompact;
(...skipping 441 matching lines...)
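The two flags added above are plain bookkeeping: migration_failure_ records whether the current scavenge had to promote an object because its semi-space copy failed, and previous_migration_failure_ remembers whether that happened in the scavenge before. How V8 consumes them is outside this diff; the snippet below only illustrates one plausible way such flags roll over between scavenges (MiniHeap, UpdateMigrationFlags, and ReportMigrationFailure are invented for the example).

#include <cstdio>

// Invented miniature heap: only the two flags from the patch.
struct MiniHeap {
  bool migration_failure = false;
  bool previous_migration_failure = false;
};

// One plausible roll-over scheme: before each scavenge, remember whether the
// last one saw a failed semi-space copy, then start the new scavenge clean.
void UpdateMigrationFlags(MiniHeap* heap) {
  heap->previous_migration_failure = heap->migration_failure;
  heap->migration_failure = false;
}

// Called from the copy path when a semi-space copy fails and the object is
// promoted to old space instead.
void ReportMigrationFailure(MiniHeap* heap) { heap->migration_failure = true; }

int main() {
  MiniHeap heap;
  UpdateMigrationFlags(&heap);    // scavenge #1 starts clean
  ReportMigrationFailure(&heap);  // a copy failed during scavenge #1
  UpdateMigrationFlags(&heap);    // scavenge #2: previous flag is now set
  std::printf("previous failure: %d, current failure: %d\n",
              heap.previous_migration_failure, heap.migration_failure);
}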
   DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.

  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
 };
 #endif  // DEBUG
 }
 }  // namespace v8::internal

 #endif  // V8_HEAP_HEAP_H_