| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
| 6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
| 7 | 7 |
| 8 #include <cmath> | 8 #include <cmath> |
| 9 | 9 |
| 10 #include "src/allocation.h" | 10 #include "src/allocation.h" |
| (...skipping 863 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 874 void set_array_buffers_list(Object* object) { array_buffers_list_ = object; } | 874 void set_array_buffers_list(Object* object) { array_buffers_list_ = object; } |
| 875 Object* array_buffers_list() const { return array_buffers_list_; } | 875 Object* array_buffers_list() const { return array_buffers_list_; } |
| 876 | 876 |
| 877 void set_last_array_buffer_in_list(Object* object) { | 877 void set_last_array_buffer_in_list(Object* object) { |
| 878 last_array_buffer_in_list_ = object; | 878 last_array_buffer_in_list_ = object; |
| 879 } | 879 } |
| 880 Object* last_array_buffer_in_list() const { | 880 Object* last_array_buffer_in_list() const { |
| 881 return last_array_buffer_in_list_; | 881 return last_array_buffer_in_list_; |
| 882 } | 882 } |
| 883 | 883 |
| 884 void set_new_array_buffer_views_list(Object* object) { | |
| 885 new_array_buffer_views_list_ = object; | |
| 886 } | |
| 887 Object* new_array_buffer_views_list() const { | |
| 888 return new_array_buffer_views_list_; | |
| 889 } | |
| 890 | |
| 891 void set_allocation_sites_list(Object* object) { | 884 void set_allocation_sites_list(Object* object) { |
| 892 allocation_sites_list_ = object; | 885 allocation_sites_list_ = object; |
| 893 } | 886 } |
| 894 Object* allocation_sites_list() { return allocation_sites_list_; } | 887 Object* allocation_sites_list() { return allocation_sites_list_; } |
| 895 | 888 |
| 896 // Used in CreateAllocationSiteStub and the (de)serializer. | 889 // Used in CreateAllocationSiteStub and the (de)serializer. |
| 897 Object** allocation_sites_list_address() { return &allocation_sites_list_; } | 890 Object** allocation_sites_list_address() { return &allocation_sites_list_; } |
| 898 | 891 |
| 899 void set_encountered_weak_collections(Object* weak_collection) { | 892 void set_encountered_weak_collections(Object* weak_collection) { |
| 900 encountered_weak_collections_ = weak_collection; | 893 encountered_weak_collections_ = weak_collection; |
| (...skipping 586 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1487 // trigger the event. In order to track ALL allocations one must turn off | 1480 // trigger the event. In order to track ALL allocations one must turn off |
| 1488 // FLAG_inline_new and FLAG_use_allocation_folding. | 1481 // FLAG_inline_new and FLAG_use_allocation_folding. |
| 1489 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes); | 1482 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes); |
| 1490 | 1483 |
| 1491 // This event is triggered after object is moved to a new place. | 1484 // This event is triggered after object is moved to a new place. |
| 1492 inline void OnMoveEvent(HeapObject* target, HeapObject* source, | 1485 inline void OnMoveEvent(HeapObject* target, HeapObject* source, |
| 1493 int size_in_bytes); | 1486 int size_in_bytes); |
| 1494 | 1487 |
| 1495 bool deserialization_complete() const { return deserialization_complete_; } | 1488 bool deserialization_complete() const { return deserialization_complete_; } |
| 1496 | 1489 |
| 1497 bool migration_failure() const { return migration_failure_; } | |
| 1498 void set_migration_failure(bool migration_failure) { | |
| 1499 migration_failure_ = migration_failure; | |
| 1500 } | |
| 1501 | |
| 1502 bool previous_migration_failure() const { | |
| 1503 return previous_migration_failure_; | |
| 1504 } | |
| 1505 void set_previous_migration_failure(bool previous_migration_failure) { | |
| 1506 previous_migration_failure_ = previous_migration_failure; | |
| 1507 } | |
| 1508 | |
| 1509 protected: | 1490 protected: |
| 1510 // Methods made available to tests. | 1491 // Methods made available to tests. |
| 1511 | 1492 |
| 1512 // Allocates a JS Map in the heap. | 1493 // Allocates a JS Map in the heap. |
| 1513 MUST_USE_RESULT AllocationResult | 1494 MUST_USE_RESULT AllocationResult |
| 1514 AllocateMap(InstanceType instance_type, int instance_size, | 1495 AllocateMap(InstanceType instance_type, int instance_size, |
| 1515 ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND); | 1496 ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND); |
| 1516 | 1497 |
| 1517 // Allocates and initializes a new JavaScript object based on a | 1498 // Allocates and initializes a new JavaScript object based on a |
| 1518 // constructor. | 1499 // constructor. |
| (...skipping 154 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1673 // for all spaces. This is used to disable allocations in generated code. | 1654 // for all spaces. This is used to disable allocations in generated code. |
| 1674 bool inline_allocation_disabled_; | 1655 bool inline_allocation_disabled_; |
| 1675 | 1656 |
| 1676 // Weak list heads, threaded through the objects. | 1657 // Weak list heads, threaded through the objects. |
| 1677 // List heads are initialized lazily and contain the undefined_value at start. | 1658 // List heads are initialized lazily and contain the undefined_value at start. |
| 1678 Object* native_contexts_list_; | 1659 Object* native_contexts_list_; |
| 1679 Object* array_buffers_list_; | 1660 Object* array_buffers_list_; |
| 1680 Object* last_array_buffer_in_list_; | 1661 Object* last_array_buffer_in_list_; |
| 1681 Object* allocation_sites_list_; | 1662 Object* allocation_sites_list_; |
| 1682 | 1663 |
| 1683 // This is a global list of array buffer views in new space. When the views | |
| 1684 // get promoted, they are removed from the list and added to the corresponding | |
| 1685 // array buffer. | |
| 1686 Object* new_array_buffer_views_list_; | |
| 1687 | |
| 1688 // List of encountered weak collections (JSWeakMap and JSWeakSet) during | 1664 // List of encountered weak collections (JSWeakMap and JSWeakSet) during |
| 1689 // marking. It is initialized during marking, destroyed after marking and | 1665 // marking. It is initialized during marking, destroyed after marking and |
| 1690 // contains Smi(0) while marking is not active. | 1666 // contains Smi(0) while marking is not active. |
| 1691 Object* encountered_weak_collections_; | 1667 Object* encountered_weak_collections_; |
| 1692 | 1668 |
| 1693 Object* encountered_weak_cells_; | 1669 Object* encountered_weak_cells_; |
| 1694 | 1670 |
| 1695 StoreBufferRebuilder store_buffer_rebuilder_; | 1671 StoreBufferRebuilder store_buffer_rebuilder_; |
| 1696 | 1672 |
| 1697 struct StringTypeTable { | 1673 struct StringTypeTable { |
| (...skipping 310 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2008 | 1984 |
| 2009 // Performs a major collection in the whole heap. | 1985 // Performs a major collection in the whole heap. |
| 2010 void MarkCompact(); | 1986 void MarkCompact(); |
| 2011 | 1987 |
| 2012 // Code to be run before and after mark-compact. | 1988 // Code to be run before and after mark-compact. |
| 2013 void MarkCompactPrologue(); | 1989 void MarkCompactPrologue(); |
| 2014 void MarkCompactEpilogue(); | 1990 void MarkCompactEpilogue(); |
| 2015 | 1991 |
| 2016 void ProcessNativeContexts(WeakObjectRetainer* retainer); | 1992 void ProcessNativeContexts(WeakObjectRetainer* retainer); |
| 2017 void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young); | 1993 void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young); |
| 2018 void ProcessNewArrayBufferViews(WeakObjectRetainer* retainer); | |
| 2019 void ProcessAllocationSites(WeakObjectRetainer* retainer); | 1994 void ProcessAllocationSites(WeakObjectRetainer* retainer); |
| 2020 | 1995 |
| 2021 // Deopts all code that contains allocation instructions which are tenured or | 1996 // Deopts all code that contains allocation instructions which are tenured or |
| 2022 // not tenured. Moreover it clears the pretenuring allocation site statistics. | 1997 // not tenured. Moreover it clears the pretenuring allocation site statistics. |
| 2023 void ResetAllAllocationSitesDependentCode(PretenureFlag flag); | 1998 void ResetAllAllocationSitesDependentCode(PretenureFlag flag); |
| 2024 | 1999 |
| 2025 // Evaluates local pretenuring for the old space and calls | 2000 // Evaluates local pretenuring for the old space and calls |
| 2026 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in | 2001 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in |
| 2027 // the old space. | 2002 // the old space. |
| 2028 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc); | 2003 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc); |
| (...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2168 MemoryChunk* chunks_queued_for_free_; | 2143 MemoryChunk* chunks_queued_for_free_; |
| 2169 | 2144 |
| 2170 base::Mutex relocation_mutex_; | 2145 base::Mutex relocation_mutex_; |
| 2171 | 2146 |
| 2172 int gc_callbacks_depth_; | 2147 int gc_callbacks_depth_; |
| 2173 | 2148 |
| 2174 bool deserialization_complete_; | 2149 bool deserialization_complete_; |
| 2175 | 2150 |
| 2176 bool concurrent_sweeping_enabled_; | 2151 bool concurrent_sweeping_enabled_; |
| 2177 | 2152 |
| 2178 // A migration failure indicates that a semi-space copy of an object during | |
| 2179 // a scavenge failed and the object got promoted instead. | |
| 2180 bool migration_failure_; | |
| 2181 | |
| 2182 // A migration failure happened in the previous scavenge. | |
| 2183 bool previous_migration_failure_; | |
| 2184 | |
| 2185 friend class AlwaysAllocateScope; | 2153 friend class AlwaysAllocateScope; |
| 2186 friend class Deserializer; | 2154 friend class Deserializer; |
| 2187 friend class Factory; | 2155 friend class Factory; |
| 2188 friend class GCCallbacksScope; | 2156 friend class GCCallbacksScope; |
| 2189 friend class GCTracer; | 2157 friend class GCTracer; |
| 2190 friend class HeapIterator; | 2158 friend class HeapIterator; |
| 2191 friend class Isolate; | 2159 friend class Isolate; |
| 2192 friend class MarkCompactCollector; | 2160 friend class MarkCompactCollector; |
| 2193 friend class MarkCompactMarkingVisitor; | 2161 friend class MarkCompactMarkingVisitor; |
| 2194 friend class MapCompact; | 2162 friend class MapCompact; |
| (...skipping 441 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2636 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2604 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
| 2637 | 2605 |
| 2638 private: | 2606 private: |
| 2639 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2607 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 2640 }; | 2608 }; |
| 2641 #endif // DEBUG | 2609 #endif // DEBUG |
| 2642 } | 2610 } |
| 2643 } // namespace v8::internal | 2611 } // namespace v8::internal |
| 2644 | 2612 |
| 2645 #endif // V8_HEAP_HEAP_H_ | 2613 #endif // V8_HEAP_HEAP_H_ |
| OLD | NEW |