| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
| 6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
| 7 | 7 |
| 8 #include <cmath> | 8 #include <cmath> |
| 9 | 9 |
| 10 #include "src/allocation.h" | 10 #include "src/allocation.h" |
| (...skipping 814 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 825 void set_allocation_sites_list(Object* object) { | 825 void set_allocation_sites_list(Object* object) { |
| 826 allocation_sites_list_ = object; | 826 allocation_sites_list_ = object; |
| 827 } | 827 } |
| 828 Object* allocation_sites_list() { return allocation_sites_list_; } | 828 Object* allocation_sites_list() { return allocation_sites_list_; } |
| 829 | 829 |
| 830 // Used in CreateAllocationSiteStub and the (de)serializer. | 830 // Used in CreateAllocationSiteStub and the (de)serializer. |
| 831 Object** allocation_sites_list_address() { return &allocation_sites_list_; } | 831 Object** allocation_sites_list_address() { return &allocation_sites_list_; } |
| 832 | 832 |
| 833 Object* weak_object_to_code_table() { return weak_object_to_code_table_; } | 833 Object* weak_object_to_code_table() { return weak_object_to_code_table_; } |
| 834 | 834 |
| 835 Object* embedded_map_cache() { return embedded_map_cache_; } |
| 836 |
| 835 void set_encountered_weak_collections(Object* weak_collection) { | 837 void set_encountered_weak_collections(Object* weak_collection) { |
| 836 encountered_weak_collections_ = weak_collection; | 838 encountered_weak_collections_ = weak_collection; |
| 837 } | 839 } |
| 838 Object* encountered_weak_collections() const { | 840 Object* encountered_weak_collections() const { |
| 839 return encountered_weak_collections_; | 841 return encountered_weak_collections_; |
| 840 } | 842 } |
| 841 | 843 |
| 842 void set_encountered_weak_cells(Object* weak_cell) { | 844 void set_encountered_weak_cells(Object* weak_cell) { |
| 843 encountered_weak_cells_ = weak_cell; | 845 encountered_weak_cells_ = weak_cell; |
| 844 } | 846 } |
| 845 Object* encountered_weak_cells() const { return encountered_weak_cells_; } | 847 Object* encountered_weak_cells() const { return encountered_weak_cells_; } |
| 846 | 848 |
| 847 // Number of mark-sweeps. | 849 // Number of mark-sweeps. |
| 848 unsigned int ms_count() { return ms_count_; } | 850 unsigned int ms_count() { return ms_count_; } |
| 849 | 851 |
| 850 // Iterates over all roots in the heap. | 852 // Iterates over all roots in the heap. |
| 851 void IterateRoots(ObjectVisitor* v, VisitMode mode); | 853 void IterateRoots(ObjectVisitor* v, VisitMode mode); |
| 852 // Iterates over all strong roots in the heap. | 854 // Iterates over all strong roots in the heap. |
| 853 void IterateStrongRoots(ObjectVisitor* v, VisitMode mode); | 855 void IterateStrongRoots(ObjectVisitor* v, VisitMode mode); |
| 854 // Iterates over entries in the smi roots list. Only interesting to the | 856 // Iterates over entries in the smi roots list. Only interesting to the |
| 855 // serializer/deserializer, since GC does not care about smis. | 857 // serializer/deserializer, since GC does not care about smis. |
| 856 void IterateSmiRoots(ObjectVisitor* v); | 858 void IterateSmiRoots(ObjectVisitor* v); |
| 857 // Iterates over all the other roots in the heap. | 859 // Iterates over all the other roots in the heap. |
| 858 void IterateWeakRoots(ObjectVisitor* v, VisitMode mode); | 860 void IterateWeakRoots(ObjectVisitor* v, VisitMode mode); |
| 859 | 861 |
| 860 // Iterate pointers to from semispace of new space found in memory interval | 862 // Iterate pointers to from semispace of new space found in memory interval |
| 861 // from start to end. | 863 // from start to end. |
| 862 void IterateAndMarkPointersToFromSpace(Address start, Address end, | 864 void IterateAndMarkPointersToFromSpace(Address start, Address end, |
| 863 ObjectSlotCallback callback); | 865 ObjectSlotCallback callback); |
| 866 void IterateEmbeddedMapCache(ObjectVisitor* v); |
| 864 | 867 |
| 865 // Returns whether the object resides in new space. | 868 // Returns whether the object resides in new space. |
| 866 inline bool InNewSpace(Object* object); | 869 inline bool InNewSpace(Object* object); |
| 867 inline bool InNewSpace(Address address); | 870 inline bool InNewSpace(Address address); |
| 868 inline bool InNewSpacePage(Address address); | 871 inline bool InNewSpacePage(Address address); |
| 869 inline bool InFromSpace(Object* object); | 872 inline bool InFromSpace(Object* object); |
| 870 inline bool InToSpace(Object* object); | 873 inline bool InToSpace(Object* object); |
| 871 | 874 |
| 872 // Returns whether the object resides in old pointer space. | 875 // Returns whether the object resides in old pointer space. |
| 873 inline bool InOldPointerSpace(Address address); | 876 inline bool InOldPointerSpace(Address address); |
| (...skipping 517 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1391 | 1394 |
| 1392 void AddWeakObjectToCodeDependency(Handle<Object> obj, | 1395 void AddWeakObjectToCodeDependency(Handle<Object> obj, |
| 1393 Handle<DependentCode> dep); | 1396 Handle<DependentCode> dep); |
| 1394 | 1397 |
| 1395 DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj); | 1398 DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj); |
| 1396 | 1399 |
| 1397 void InitializeWeakObjectToCodeTable() { | 1400 void InitializeWeakObjectToCodeTable() { |
| 1398 set_weak_object_to_code_table(undefined_value()); | 1401 set_weak_object_to_code_table(undefined_value()); |
| 1399 } | 1402 } |
| 1400 | 1403 |
| 1404 void InitializeEmbeddedMapCache() { embedded_map_cache_ = undefined_value(); } |
| 1405 |
| 1401 void EnsureWeakObjectToCodeTable(); | 1406 void EnsureWeakObjectToCodeTable(); |
| 1402 | 1407 |
| 1408 void CacheEmbeddedMap(Handle<Map> map); |
| 1409 |
| 1403 static void FatalProcessOutOfMemory(const char* location, | 1410 static void FatalProcessOutOfMemory(const char* location, |
| 1404 bool take_snapshot = false); | 1411 bool take_snapshot = false); |
| 1405 | 1412 |
| 1406 // This event is triggered after successful allocation of a new object made | 1413 // This event is triggered after successful allocation of a new object made |
| 1407 // by runtime. Allocations of target space for object evacuation do not | 1414 // by runtime. Allocations of target space for object evacuation do not |
| 1408 // trigger the event. In order to track ALL allocations one must turn off | 1415 // trigger the event. In order to track ALL allocations one must turn off |
| 1409 // FLAG_inline_new and FLAG_use_allocation_folding. | 1416 // FLAG_inline_new and FLAG_use_allocation_folding. |
| 1410 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes); | 1417 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes); |
| 1411 | 1418 |
| 1412 // This event is triggered after object is moved to a new place. | 1419 // This event is triggered after object is moved to a new place. |
| (...skipping 163 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1576 // List heads are initialized lazily and contain the undefined_value at start. | 1583 // List heads are initialized lazily and contain the undefined_value at start. |
| 1577 Object* native_contexts_list_; | 1584 Object* native_contexts_list_; |
| 1578 Object* array_buffers_list_; | 1585 Object* array_buffers_list_; |
| 1579 Object* allocation_sites_list_; | 1586 Object* allocation_sites_list_; |
| 1580 | 1587 |
| 1581 // WeakHashTable that maps objects embedded in optimized code to dependent | 1588 // WeakHashTable that maps objects embedded in optimized code to dependent |
| 1582 // code list. It is initialized lazily and contains the undefined_value at | 1589 // code list. It is initialized lazily and contains the undefined_value at |
| 1583 // start. | 1590 // start. |
| 1584 Object* weak_object_to_code_table_; | 1591 Object* weak_object_to_code_table_; |
| 1585 | 1592 |
| 1593 Object* embedded_map_cache_; |
| 1594 |
| 1586 // List of encountered weak collections (JSWeakMap and JSWeakSet) during | 1595 // List of encountered weak collections (JSWeakMap and JSWeakSet) during |
| 1587 // marking. It is initialized during marking, destroyed after marking and | 1596 // marking. It is initialized during marking, destroyed after marking and |
| 1588 // contains Smi(0) while marking is not active. | 1597 // contains Smi(0) while marking is not active. |
| 1589 Object* encountered_weak_collections_; | 1598 Object* encountered_weak_collections_; |
| 1590 | 1599 |
| 1591 Object* encountered_weak_cells_; | 1600 Object* encountered_weak_cells_; |
| 1592 | 1601 |
| 1593 StoreBufferRebuilder store_buffer_rebuilder_; | 1602 StoreBufferRebuilder store_buffer_rebuilder_; |
| 1594 | 1603 |
| 1595 struct StringTypeTable { | 1604 struct StringTypeTable { |
| (...skipping 398 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1994 | 2003 |
| 1995 void set_weak_object_to_code_table(Object* value) { | 2004 void set_weak_object_to_code_table(Object* value) { |
| 1996 DCHECK(!InNewSpace(value)); | 2005 DCHECK(!InNewSpace(value)); |
| 1997 weak_object_to_code_table_ = value; | 2006 weak_object_to_code_table_ = value; |
| 1998 } | 2007 } |
| 1999 | 2008 |
| 2000 Object** weak_object_to_code_table_address() { | 2009 Object** weak_object_to_code_table_address() { |
| 2001 return &weak_object_to_code_table_; | 2010 return &weak_object_to_code_table_; |
| 2002 } | 2011 } |
| 2003 | 2012 |
| 2013 Object** embedded_map_cache_address() { return &embedded_map_cache_; } |
| 2014 |
| 2004 inline void UpdateAllocationsHash(HeapObject* object); | 2015 inline void UpdateAllocationsHash(HeapObject* object); |
| 2005 inline void UpdateAllocationsHash(uint32_t value); | 2016 inline void UpdateAllocationsHash(uint32_t value); |
| 2006 inline void PrintAlloctionsHash(); | 2017 inline void PrintAlloctionsHash(); |
| 2007 | 2018 |
| 2008 static const int kInitialStringTableSize = 2048; | 2019 static const int kInitialStringTableSize = 2048; |
| 2009 static const int kInitialEvalCacheSize = 64; | 2020 static const int kInitialEvalCacheSize = 64; |
| 2010 static const int kInitialNumberStringCacheSize = 256; | 2021 static const int kInitialNumberStringCacheSize = 256; |
| 2011 | 2022 |
| 2012 // Object counts and used memory by InstanceType | 2023 // Object counts and used memory by InstanceType |
| 2013 size_t object_counts_[OBJECT_STATS_COUNT]; | 2024 size_t object_counts_[OBJECT_STATS_COUNT]; |
| (...skipping 541 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2555 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2566 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
| 2556 | 2567 |
| 2557 private: | 2568 private: |
| 2558 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2569 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 2559 }; | 2570 }; |
| 2560 #endif // DEBUG | 2571 #endif // DEBUG |
| 2561 } | 2572 } |
| 2562 } // namespace v8::internal | 2573 } // namespace v8::internal |
| 2563 | 2574 |
| 2564 #endif // V8_HEAP_HEAP_H_ | 2575 #endif // V8_HEAP_HEAP_H_ |
| OLD | NEW |