OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 18 matching lines...) | |
29 #define V8_WRITE_BARRIER_H_ | 29 #define V8_WRITE_BARRIER_H_ |
30 | 30 |
31 #include "allocation.h" | 31 #include "allocation.h" |
32 #include "checks.h" | 32 #include "checks.h" |
33 #include "globals.h" | 33 #include "globals.h" |
34 #include "platform.h" | 34 #include "platform.h" |
35 | 35 |
36 namespace v8 { | 36 namespace v8 { |
37 namespace internal { | 37 namespace internal { |
38 | 38 |
39 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to); | |
39 | 40 |
40 // Used to implement the write barrier by collecting addresses of pointers | 41 // Used to implement the write barrier by collecting addresses of pointers |
41 // between spaces. | 42 // between spaces. |
42 class StoreBuffer : public AllStatic { | 43 class StoreBuffer : public AllStatic { |
43 public: | 44 public: |
44 static inline Address TopAddress(); | 45 static inline Address TopAddress(); |
45 | 46 |
46 static void Setup(); | 47 static void Setup(); |
47 static void TearDown(); | 48 static void TearDown(); |
48 | 49 |
50 // This is used by the mutator to enter addresses into the store buffer. | |
49 static inline void Mark(Address addr); | 51 static inline void Mark(Address addr); |
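
As a rough illustration of the comment above (not code from this CL), a recording write barrier on the mutator side might funnel old-to-new stores into `Mark()` along these lines; `Heap::InNewSpace` is assumed from the surrounding V8 code base of the period:

```cpp
// Hedged sketch: record a slot only when an old-space object starts
// pointing into new space.
inline void RecordWriteSketch(HeapObject* host, Object** slot, Object* value) {
  if (Heap::InNewSpace(value) && !Heap::InNewSpace(host)) {
    StoreBuffer::Mark(reinterpret_cast<Address>(slot));
  }
}
```
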
50 | 52 |
53 // This is used by the heap traversal to enter the addresses into the store | |
54 // buffer that should still be in the store buffer after GC. It enters | |
55 // addresses directly into the old buffer because the GC starts by wiping the | |
56 // old buffer and thereafter only visits each cell once so there is no need | |
57 // to attempt to remove any dupes. During the first part of a scavenge we | |
58 // are using the store buffer to access the old spaces and at the same time | |
59 // we are rebuilding the store buffer using this function. There is, however, | |
60 // no issue of overwriting the buffer we are iterating over, because this | |
61 // stage of the scavenge can only reduce the number of addresses in the store | |
62 // buffer (some objects are promoted so pointers to them do not need to be in | |
63 // the store buffer). The later parts of the scavenge process the promotion | |
64 // queue and they can overflow this buffer, which we must check for. | |
65 static inline void EnterDirectlyIntoStoreBuffer(Address addr); | |
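
To make the rebuild-while-iterating contract above concrete, here is a hypothetical scavenge callback matching the `ObjectSlotCallback` typedef; the copy/promotion logic is elided and only the re-entry step is shown:

```cpp
// Hedged sketch: after the scavenger has updated *slot, re-enter the slot
// into the (already wiped) old buffer iff it still points into new space.
static void ScavengeSlotSketch(HeapObject** slot, HeapObject* target) {
  // ... copy or promote 'target', updating *slot (elided) ...
  if (Heap::InNewSpace(*slot)) {
    StoreBuffer::EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot));
  }
}
```
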
66 | |
67 enum RebuildStoreBufferMode { | |
68 kRebuildStoreBufferWhileIterating, | |
69 kPreserveStoreBufferWhileIterating}; | |
Vyacheslav Egorov (Chromium) 2011/02/02 13:15:47:
new line + indent closing }
Erik Corry 2011/02/03 13:21:17:
Forgot this one :-(
70 | |
71 // Iterates over all pointers that go from old space to new space. It will | |
72 // delete the store buffer as it starts so the callback should reenter | |
73 // surviving old-to-new pointers into the store buffer to rebuild it. | |
74 static void IteratePointersToNewSpace(ObjectSlotCallback callback); | |
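
A call site would then look roughly like the following (hypothetical; the real caller lives in the heap's scavenge code, not in this header):

```cpp
// During a scavenge: visit every recorded old-to-new slot; the callback
// rebuilds the store buffer as it goes, per the comment above.
StoreBuffer::IteratePointersToNewSpace(&ScavengeSlotSketch);
```
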
75 | |
51 static const int kStoreBufferOverflowBit = 1 << 16; | 76 static const int kStoreBufferOverflowBit = 1 << 16; |
52 static const int kStoreBufferSize = kStoreBufferOverflowBit; | 77 static const int kStoreBufferSize = kStoreBufferOverflowBit; |
53 static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address); | 78 static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address); |
54 static const int kOldStoreBufferLength = kStoreBufferLength * 16; | 79 static const int kOldStoreBufferLength = kStoreBufferLength * 4; |
55 static const int kHashMapLengthLog2 = 12; | 80 static const int kHashMapLengthLog2 = 12; |
56 static const int kHashMapLength = 1 << kHashMapLengthLog2; | 81 static const int kHashMapLength = 1 << kHashMapLengthLog2; |
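
For scale: `kStoreBufferSize` is 2^16 = 64 KB, so `kStoreBufferLength` is 16384 entries with 4-byte pointers or 8192 with 8-byte pointers, and the NEW column's `kOldStoreBufferLength` (4x rather than the OLD column's 16x) caps the old buffer at 65536 or 32768 entries respectively.
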
57 | 82 |
58 static void Compact(); | 83 static void Compact(); |
59 static void GCPrologue(GCType type, GCCallbackFlags flags); | 84 static void GCPrologue(GCType type, GCCallbackFlags flags); |
60 static void GCEpilogue(GCType type, GCCallbackFlags flags); | 85 static void GCEpilogue(GCType type, GCCallbackFlags flags); |
61 | 86 |
62 static Object*** Start() { return reinterpret_cast<Object***>(old_start_); } | 87 static Object*** Start() { return reinterpret_cast<Object***>(old_start_); } |
63 static Object*** Top() { return reinterpret_cast<Object***>(old_top_); } | 88 static Object*** Top() { return reinterpret_cast<Object***>(old_top_); } |
64 | 89 |
65 static bool must_scan_entire_memory() { return must_scan_entire_memory_; } | 90 enum StoreBufferMode { |
91 kStoreBufferFunctional, | |
92 kStoreBufferDisabled, | |
93 kStoreBufferBeingRebuilt | |
94 }; | |
95 | |
96 static StoreBufferMode store_buffer_mode() { return store_buffer_mode_; } | |
66 static bool old_buffer_is_sorted() { return old_buffer_is_sorted_; } | 97 static bool old_buffer_is_sorted() { return old_buffer_is_sorted_; } |
67 | 98 |
68 // Goes through the store buffer removing pointers to things that have | 99 // Goes through the store buffer removing pointers to things that have |
69 // been promoted. Rebuilds the store buffer completely if it overflowed. | 100 // been promoted. Rebuilds the store buffer completely if it overflowed. |
70 static void SortUniq(); | 101 static void SortUniq(); |
71 static void Verify(); | 102 static void Verify(); |
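
Conceptually (an illustration, not the CL's implementation), the compaction half of `SortUniq()` amounts to a sort plus adjacent-duplicate elimination over the old buffer:

```cpp
#include <algorithm>

// Hedged sketch: 'old_top' is passed by reference so the dedupe can
// shrink the live region [old_start, old_top).
void SortUniqSketch(Address* old_start, Address*& old_top) {
  std::sort(old_start, old_top);              // group equal slot addresses
  old_top = std::unique(old_start, old_top);  // drop adjacent duplicates
}
```
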
72 | 103 |
73 #ifdef DEBUG | 104 #ifdef DEBUG |
74 static void Clean(); | 105 static void Clean(); |
75 #endif | 106 #endif |
76 | 107 |
77 private: | 108 private: |
78 // The store buffer is divided up into a new buffer that is constantly being | 109 // The store buffer is divided up into a new buffer that is constantly being |
79 // filled by mutator activity and an old buffer that is filled with the data | 110 // filled by mutator activity and an old buffer that is filled with the data |
80 // from the new buffer after compression. | 111 // from the new buffer after compression. |
81 static Address* start_; | 112 static Address* start_; |
82 static Address* limit_; | 113 static Address* limit_; |
83 | 114 |
84 static Address* old_start_; | 115 static Address* old_start_; |
85 static Address* old_limit_; | 116 static Address* old_limit_; |
86 static Address* old_top_; | 117 static Address* old_top_; |
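
A rough picture of the two-buffer arrangement described above (field names from this header; the data flow is an inference from the comments, not stated in the CL):

```
mutator stores --> Mark() -->  [start_ ............ limit_)        new buffer
                                     |
                                     | Compact(): dedupe, append survivors
                                     v
                               [old_start_ .. old_top_ .. old_limit_)  old buffer
```
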
87 | 118 |
88 static bool old_buffer_is_sorted_; | 119 static bool old_buffer_is_sorted_; |
89 static bool must_scan_entire_memory_; | 120 static StoreBufferMode store_buffer_mode_; |
90 static bool during_gc_; | 121 static bool during_gc_; |
122 static bool store_buffer_rebuilding_enabled_; | |
123 static bool may_move_store_buffer_entries_; | |
91 | 124 |
92 static VirtualMemory* virtual_memory_; | 125 static VirtualMemory* virtual_memory_; |
93 static uintptr_t* hash_map_1_; | 126 static uintptr_t* hash_map_1_; |
94 static uintptr_t* hash_map_2_; | 127 static uintptr_t* hash_map_2_; |
95 | 128 |
96 static void CheckForFullBuffer(); | 129 static void CheckForFullBuffer(); |
97 static void Uniq(); | 130 static void Uniq(); |
98 static void ZapHashTables(); | 131 static void ZapHashTables(); |
99 static bool HashTablesAreZapped(); | 132 static bool HashTablesAreZapped(); |
133 | |
134 friend class StoreBufferRebuildScope; | |
135 friend class DontMoveStoreBufferEntriesScope; | |
136 }; | |
137 | |
138 | |
139 class StoreBufferRebuildScope { | |
140 public: | |
141 StoreBufferRebuildScope() : | |
142 stored_state_(StoreBuffer::store_buffer_rebuilding_enabled_) { | |
143 StoreBuffer::store_buffer_rebuilding_enabled_ = true; | |
144 } | |
145 | |
146 ~StoreBufferRebuildScope() { | |
147 StoreBuffer::store_buffer_rebuilding_enabled_ = stored_state_; | |
148 StoreBuffer::CheckForFullBuffer(); | |
149 } | |
150 | |
151 private: | |
152 bool stored_state_; | |
153 }; | |
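
Putting the pieces together, a hypothetical GC-side caller might wrap the iteration in the scope so that `EnterDirectlyIntoStoreBuffer` is permitted and the full-buffer check runs on exit:

```cpp
{
  StoreBufferRebuildScope rebuild_scope;  // enables rebuilding for this span
  StoreBuffer::IteratePointersToNewSpace(&ScavengeSlotSketch);
}  // destructor restores the flag and calls CheckForFullBuffer()
```
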
154 | |
155 | |
156 class DontMoveStoreBufferEntriesScope { | |
157 public: | |
158 DontMoveStoreBufferEntriesScope() : | |
159 stored_state_(StoreBuffer::may_move_store_buffer_entries_) { | |
160 StoreBuffer::may_move_store_buffer_entries_ = false; | |
161 } | |
162 | |
163 ~DontMoveStoreBufferEntriesScope() { | |
164 StoreBuffer::may_move_store_buffer_entries_ = stored_state_; | |
165 } | |
166 | |
167 private: | |
168 bool stored_state_; | |
100 }; | 169 }; |
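
Similarly, code that walks the old buffer in place and relies on entries not being compacted away underneath it might use the second scope (hypothetical usage):

```cpp
{
  DontMoveStoreBufferEntriesScope no_move_scope;
  for (Object*** current = StoreBuffer::Start();
       current < StoreBuffer::Top();
       current++) {
    // ... examine *current without triggering entry movement ...
  }
}
```
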
101 | 170 |
102 } } // namespace v8::internal | 171 } } // namespace v8::internal |
103 | 172 |
104 #endif // V8_WRITE_BARRIER_H_ | 173 #endif // V8_WRITE_BARRIER_H_ |