Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(107)

Side by Side Diff: src/heap/heap.h

Issue 871253005: Use weak cells in dependent code. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Address comments Created 5 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/compiler.cc ('k') | src/heap/heap.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_HEAP_HEAP_H_ 5 #ifndef V8_HEAP_HEAP_H_
6 #define V8_HEAP_HEAP_H_ 6 #define V8_HEAP_HEAP_H_
7 7
8 #include <cmath> 8 #include <cmath>
9 9
10 #include "src/allocation.h" 10 #include "src/allocation.h"
(...skipping 164 matching lines...) Expand 10 before | Expand all | Expand 10 after
175 V(Cell, undefined_cell, UndefineCell) \ 175 V(Cell, undefined_cell, UndefineCell) \
176 V(JSObject, observation_state, ObservationState) \ 176 V(JSObject, observation_state, ObservationState) \
177 V(Map, external_map, ExternalMap) \ 177 V(Map, external_map, ExternalMap) \
178 V(Object, symbol_registry, SymbolRegistry) \ 178 V(Object, symbol_registry, SymbolRegistry) \
179 V(SeededNumberDictionary, empty_slow_element_dictionary, \ 179 V(SeededNumberDictionary, empty_slow_element_dictionary, \
180 EmptySlowElementDictionary) \ 180 EmptySlowElementDictionary) \
181 V(FixedArray, materialized_objects, MaterializedObjects) \ 181 V(FixedArray, materialized_objects, MaterializedObjects) \
182 V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \ 182 V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
183 V(FixedArray, microtask_queue, MicrotaskQueue) \ 183 V(FixedArray, microtask_queue, MicrotaskQueue) \
184 V(FixedArray, keyed_load_dummy_vector, KeyedLoadDummyVector) \ 184 V(FixedArray, keyed_load_dummy_vector, KeyedLoadDummyVector) \
185 V(FixedArray, detached_contexts, DetachedContexts) 185 V(FixedArray, detached_contexts, DetachedContexts) \
186 V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable)
186 187
187 // Entries in this list are limited to Smis and are not visited during GC. 188 // Entries in this list are limited to Smis and are not visited during GC.
188 #define SMI_ROOT_LIST(V) \ 189 #define SMI_ROOT_LIST(V) \
189 V(Smi, stack_limit, StackLimit) \ 190 V(Smi, stack_limit, StackLimit) \
190 V(Smi, real_stack_limit, RealStackLimit) \ 191 V(Smi, real_stack_limit, RealStackLimit) \
191 V(Smi, last_script_id, LastScriptId) \ 192 V(Smi, last_script_id, LastScriptId) \
192 V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \ 193 V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
193 V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \ 194 V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
194 V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \ 195 V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
195 V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset) 196 V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)
(...skipping 670 matching lines...) Expand 10 before | Expand all | Expand 10 after
866 } 867 }
867 868
868 void set_allocation_sites_list(Object* object) { 869 void set_allocation_sites_list(Object* object) {
869 allocation_sites_list_ = object; 870 allocation_sites_list_ = object;
870 } 871 }
871 Object* allocation_sites_list() { return allocation_sites_list_; } 872 Object* allocation_sites_list() { return allocation_sites_list_; }
872 873
873 // Used in CreateAllocationSiteStub and the (de)serializer. 874 // Used in CreateAllocationSiteStub and the (de)serializer.
874 Object** allocation_sites_list_address() { return &allocation_sites_list_; } 875 Object** allocation_sites_list_address() { return &allocation_sites_list_; }
875 876
876 Object* weak_object_to_code_table() { return weak_object_to_code_table_; }
877
878 void set_encountered_weak_collections(Object* weak_collection) { 877 void set_encountered_weak_collections(Object* weak_collection) {
879 encountered_weak_collections_ = weak_collection; 878 encountered_weak_collections_ = weak_collection;
880 } 879 }
881 Object* encountered_weak_collections() const { 880 Object* encountered_weak_collections() const {
882 return encountered_weak_collections_; 881 return encountered_weak_collections_;
883 } 882 }
884 883
885 void set_encountered_weak_cells(Object* weak_cell) { 884 void set_encountered_weak_cells(Object* weak_cell) {
886 encountered_weak_cells_ = weak_cell; 885 encountered_weak_cells_ = weak_cell;
887 } 886 }
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
972 971
973 Address* store_buffer_top_address() { 972 Address* store_buffer_top_address() {
974 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]); 973 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
975 } 974 }
976 975
977 static bool RootIsImmortalImmovable(int root_index); 976 static bool RootIsImmortalImmovable(int root_index);
978 977
979 #ifdef VERIFY_HEAP 978 #ifdef VERIFY_HEAP
980 // Verify the heap is in its normal state before or after a GC. 979 // Verify the heap is in its normal state before or after a GC.
981 void Verify(); 980 void Verify();
982
983
984 bool weak_embedded_objects_verification_enabled() {
985 return no_weak_object_verification_scope_depth_ == 0;
986 }
987 #endif 981 #endif
988 982
989 #ifdef DEBUG 983 #ifdef DEBUG
990 void Print(); 984 void Print();
991 void PrintHandles(); 985 void PrintHandles();
992 986
993 void OldPointerSpaceCheckStoreBuffer(); 987 void OldPointerSpaceCheckStoreBuffer();
994 void MapSpaceCheckStoreBuffer(); 988 void MapSpaceCheckStoreBuffer();
995 void LargeObjectSpaceCheckStoreBuffer(); 989 void LargeObjectSpaceCheckStoreBuffer();
996 990
(...skipping 435 matching lines...) Expand 10 before | Expand all | Expand 10 after
1432 heap_->relocation_mutex_.Lock(); 1426 heap_->relocation_mutex_.Lock();
1433 } 1427 }
1434 1428
1435 1429
1436 ~RelocationLock() { heap_->relocation_mutex_.Unlock(); } 1430 ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }
1437 1431
1438 private: 1432 private:
1439 Heap* heap_; 1433 Heap* heap_;
1440 }; 1434 };
1441 1435
1442 void AddWeakObjectToCodeDependency(Handle<Object> obj, 1436 void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
1443 Handle<DependentCode> dep); 1437 Handle<DependentCode> dep);
1444 1438
1445 DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj); 1439 DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);
1446
1447 void InitializeWeakObjectToCodeTable() {
1448 set_weak_object_to_code_table(undefined_value());
1449 }
1450
1451 void EnsureWeakObjectToCodeTable();
1452 1440
1453 static void FatalProcessOutOfMemory(const char* location, 1441 static void FatalProcessOutOfMemory(const char* location,
1454 bool take_snapshot = false); 1442 bool take_snapshot = false);
1455 1443
1456 // This event is triggered after successful allocation of a new object made 1444 // This event is triggered after successful allocation of a new object made
1457 // by runtime. Allocations of target space for object evacuation do not 1445 // by runtime. Allocations of target space for object evacuation do not
1458 // trigger the event. In order to track ALL allocations one must turn off 1446 // trigger the event. In order to track ALL allocations one must turn off
1459 // FLAG_inline_new and FLAG_use_allocation_folding. 1447 // FLAG_inline_new and FLAG_use_allocation_folding.
1460 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes); 1448 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);
1461 1449
(...skipping 176 matching lines...) Expand 10 before | Expand all | Expand 10 after
1638 // List heads are initialized lazily and contain the undefined_value at start. 1626 // List heads are initialized lazily and contain the undefined_value at start.
1639 Object* native_contexts_list_; 1627 Object* native_contexts_list_;
1640 Object* array_buffers_list_; 1628 Object* array_buffers_list_;
1641 Object* allocation_sites_list_; 1629 Object* allocation_sites_list_;
1642 1630
1643 // This is a global list of array buffer views in new space. When the views 1631 // This is a global list of array buffer views in new space. When the views
1644 // get promoted, they are removed from the list and added to the corresponding 1632 // get promoted, they are removed from the list and added to the corresponding
1645 // array buffer. 1633 // array buffer.
1646 Object* new_array_buffer_views_list_; 1634 Object* new_array_buffer_views_list_;
1647 1635
1648 // WeakHashTable that maps objects embedded in optimized code to dependent
1649 // code list. It is initialized lazily and contains the undefined_value at
1650 // start.
1651 Object* weak_object_to_code_table_;
1652
1653 // List of encountered weak collections (JSWeakMap and JSWeakSet) during 1636 // List of encountered weak collections (JSWeakMap and JSWeakSet) during
1654 // marking. It is initialized during marking, destroyed after marking and 1637 // marking. It is initialized during marking, destroyed after marking and
1655 // contains Smi(0) while marking is not active. 1638 // contains Smi(0) while marking is not active.
1656 Object* encountered_weak_collections_; 1639 Object* encountered_weak_collections_;
1657 1640
1658 Object* encountered_weak_cells_; 1641 Object* encountered_weak_cells_;
1659 1642
1660 StoreBufferRebuilder store_buffer_rebuilder_; 1643 StoreBufferRebuilder store_buffer_rebuilder_;
1661 1644
1662 struct StringTypeTable { 1645 struct StringTypeTable {
(...skipping 397 matching lines...) Expand 10 before | Expand all | Expand 10 after
2060 void IdleMarkCompact(const char* message); 2043 void IdleMarkCompact(const char* message);
2061 2044
2062 bool TryFinalizeIdleIncrementalMarking( 2045 bool TryFinalizeIdleIncrementalMarking(
2063 double idle_time_in_ms, size_t size_of_objects, 2046 double idle_time_in_ms, size_t size_of_objects,
2064 size_t mark_compact_speed_in_bytes_per_ms); 2047 size_t mark_compact_speed_in_bytes_per_ms);
2065 2048
2066 bool WorthActivatingIncrementalMarking(); 2049 bool WorthActivatingIncrementalMarking();
2067 2050
2068 void ClearObjectStats(bool clear_last_time_stats = false); 2051 void ClearObjectStats(bool clear_last_time_stats = false);
2069 2052
2070 void set_weak_object_to_code_table(Object* value) {
2071 DCHECK(!InNewSpace(value));
2072 weak_object_to_code_table_ = value;
2073 }
2074
2075 Object** weak_object_to_code_table_address() {
2076 return &weak_object_to_code_table_;
2077 }
2078
2079 inline void UpdateAllocationsHash(HeapObject* object); 2053 inline void UpdateAllocationsHash(HeapObject* object);
2080 inline void UpdateAllocationsHash(uint32_t value); 2054 inline void UpdateAllocationsHash(uint32_t value);
2081 inline void PrintAlloctionsHash(); 2055 inline void PrintAlloctionsHash();
2082 2056
2083 // Object counts and used memory by InstanceType 2057 // Object counts and used memory by InstanceType
2084 size_t object_counts_[OBJECT_STATS_COUNT]; 2058 size_t object_counts_[OBJECT_STATS_COUNT];
2085 size_t object_counts_last_time_[OBJECT_STATS_COUNT]; 2059 size_t object_counts_last_time_[OBJECT_STATS_COUNT];
2086 size_t object_sizes_[OBJECT_STATS_COUNT]; 2060 size_t object_sizes_[OBJECT_STATS_COUNT];
2087 size_t object_sizes_last_time_[OBJECT_STATS_COUNT]; 2061 size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
2088 2062
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
2120 2094
2121 // These two counters are monotonically increasing and never reset. 2095 // These two counters are monotonically increasing and never reset.
2122 size_t full_codegen_bytes_generated_; 2096 size_t full_codegen_bytes_generated_;
2123 size_t crankshaft_codegen_bytes_generated_; 2097 size_t crankshaft_codegen_bytes_generated_;
2124 2098
2125 // If the --deopt_every_n_garbage_collections flag is set to a positive value, 2099 // If the --deopt_every_n_garbage_collections flag is set to a positive value,
2126 // this variable holds the number of garbage collections since the last 2100 // this variable holds the number of garbage collections since the last
2127 // deoptimization triggered by garbage collection. 2101 // deoptimization triggered by garbage collection.
2128 int gcs_since_last_deopt_; 2102 int gcs_since_last_deopt_;
2129 2103
2130 #ifdef VERIFY_HEAP
2131 int no_weak_object_verification_scope_depth_;
2132 #endif
2133
2134 static const int kAllocationSiteScratchpadSize = 256; 2104 static const int kAllocationSiteScratchpadSize = 256;
2135 int allocation_sites_scratchpad_length_; 2105 int allocation_sites_scratchpad_length_;
2136 2106
2137 static const int kMaxMarkCompactsInIdleRound = 7; 2107 static const int kMaxMarkCompactsInIdleRound = 7;
2138 static const int kIdleScavengeThreshold = 5; 2108 static const int kIdleScavengeThreshold = 5;
2139 2109
2140 // Shared state read by the scavenge collector and set by ScavengeObject. 2110 // Shared state read by the scavenge collector and set by ScavengeObject.
2141 PromotionQueue promotion_queue_; 2111 PromotionQueue promotion_queue_;
2142 2112
2143 // Flag is set when the heap has been configured. The heap can be repeatedly 2113 // Flag is set when the heap has been configured. The heap can be repeatedly
(...skipping 478 matching lines...) Expand 10 before | Expand all | Expand 10 after
2622 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. 2592 DisallowHeapAllocation no_allocation; // i.e. no gc allowed.
2623 2593
2624 private: 2594 private:
2625 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); 2595 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2626 }; 2596 };
2627 #endif // DEBUG 2597 #endif // DEBUG
2628 } 2598 }
2629 } // namespace v8::internal 2599 } // namespace v8::internal
2630 2600
2631 #endif // V8_HEAP_HEAP_H_ 2601 #endif // V8_HEAP_HEAP_H_
OLDNEW
« no previous file with comments | « src/compiler.cc ('k') | src/heap/heap.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698