| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 142 matching lines...) |
| 153 | 153 |
| 154 // Two hash sets used for filtering. | 154 // Two hash sets used for filtering. |
| 155 // If address is in the hash set then it is guaranteed to be in the | 155 // If address is in the hash set then it is guaranteed to be in the |
| 156 // old part of the store buffer. | 156 // old part of the store buffer. |
| 157 uintptr_t* hash_set_1_; | 157 uintptr_t* hash_set_1_; |
| 158 uintptr_t* hash_set_2_; | 158 uintptr_t* hash_set_2_; |
| 159 bool hash_sets_are_empty_; | 159 bool hash_sets_are_empty_; |
| 160 | 160 |
| 161 void ClearFilteringHashSets(); | 161 void ClearFilteringHashSets(); |
| 162 | 162 |
| 163 void CheckForFullBuffer(); | 163 bool SpaceAvailable(intptr_t space_needed); |
| 164 void Uniq(); | 164 void Uniq(); |
| 165 void ExemptPopularPages(int prime_sample_step, int threshold); | 165 void ExemptPopularPages(int prime_sample_step, int threshold); |
| 166 | 166 |
| 167 void FindPointersToNewSpaceInRegion(Address start, | 167 void FindPointersToNewSpaceInRegion(Address start, |
| 168 Address end, | 168 Address end, |
| 169 ObjectSlotCallback slot_callback); | 169 ObjectSlotCallback slot_callback); |
| 170 | 170 |
| 171 // For each region of pointers on a page in use from an old space call | 171 // For each region of pointers on a page in use from an old space call |
| 172 // visit_pointer_region callback. | 172 // visit_pointer_region callback. |
| 173 // If either visit_pointer_region or callback can cause an allocation | 173 // If either visit_pointer_region or callback can cause an allocation |
| (...skipping 42 matching lines...) |
| 216 stored_state_(store_buffer->store_buffer_rebuilding_enabled_), | 216 stored_state_(store_buffer->store_buffer_rebuilding_enabled_), |
| 217 stored_callback_(store_buffer->callback_) { | 217 stored_callback_(store_buffer->callback_) { |
| 218 store_buffer_->store_buffer_rebuilding_enabled_ = true; | 218 store_buffer_->store_buffer_rebuilding_enabled_ = true; |
| 219 store_buffer_->callback_ = callback; | 219 store_buffer_->callback_ = callback; |
| 220 (*callback)(heap, NULL, kStoreBufferStartScanningPagesEvent); | 220 (*callback)(heap, NULL, kStoreBufferStartScanningPagesEvent); |
| 221 } | 221 } |
| 222 | 222 |
| 223 ~StoreBufferRebuildScope() { | 223 ~StoreBufferRebuildScope() { |
| 224 store_buffer_->callback_ = stored_callback_; | 224 store_buffer_->callback_ = stored_callback_; |
| 225 store_buffer_->store_buffer_rebuilding_enabled_ = stored_state_; | 225 store_buffer_->store_buffer_rebuilding_enabled_ = stored_state_; |
| 226 store_buffer_->CheckForFullBuffer(); | |
| 227 } | 226 } |
| 228 | 227 |
| 229 private: | 228 private: |
| 230 StoreBuffer* store_buffer_; | 229 StoreBuffer* store_buffer_; |
| 231 bool stored_state_; | 230 bool stored_state_; |
| 232 StoreBufferCallback stored_callback_; | 231 StoreBufferCallback stored_callback_; |
| 233 }; | 232 }; |
| 234 | 233 |
| 235 | 234 |
| 236 class DontMoveStoreBufferEntriesScope { | 235 class DontMoveStoreBufferEntriesScope { |
| 237 public: | 236 public: |
| 238 explicit DontMoveStoreBufferEntriesScope(StoreBuffer* store_buffer) | 237 explicit DontMoveStoreBufferEntriesScope(StoreBuffer* store_buffer) |
| 239 : store_buffer_(store_buffer), | 238 : store_buffer_(store_buffer), |
| 240 stored_state_(store_buffer->may_move_store_buffer_entries_) { | 239 stored_state_(store_buffer->may_move_store_buffer_entries_) { |
| 241 store_buffer_->may_move_store_buffer_entries_ = false; | 240 store_buffer_->may_move_store_buffer_entries_ = false; |
| 242 } | 241 } |
| 243 | 242 |
| 244 ~DontMoveStoreBufferEntriesScope() { | 243 ~DontMoveStoreBufferEntriesScope() { |
| 245 store_buffer_->may_move_store_buffer_entries_ = stored_state_; | 244 store_buffer_->may_move_store_buffer_entries_ = stored_state_; |
| 246 } | 245 } |
| 247 | 246 |
| 248 private: | 247 private: |
| 249 StoreBuffer* store_buffer_; | 248 StoreBuffer* store_buffer_; |
| 250 bool stored_state_; | 249 bool stored_state_; |
| 251 }; | 250 }; |
| 252 | 251 |
| 253 } } // namespace v8::internal | 252 } } // namespace v8::internal |
| 254 | 253 |
| 255 #endif // V8_STORE_BUFFER_H_ | 254 #endif // V8_STORE_BUFFER_H_ |
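
The functional change in this hunk: the reactive CheckForFullBuffer() hook is dropped, both from the private interface (old line 163) and from the ~StoreBufferRebuildScope() destructor (old line 226), and callers instead query SpaceAvailable(intptr_t space_needed) before appending to the old part of the buffer. The .cc side is not part of this hunk, so the following is only a minimal sketch of what such a predicate amounts to, assuming the usual old_top_/old_limit_ bump-pointer members; those members sit in the elided lines above, so the names and slot type here are assumptions, not something the diff shows.

    #include <cstdint>

    // Toy stand-in for the relevant StoreBuffer state; slots are uintptr_t,
    // matching the hash-set element type visible in this hunk. The member
    // names mirror a typical bump-pointer buffer and are assumed.
    struct StoreBufferSketch {
      uintptr_t* old_top_;    // next free slot in the old part of the buffer
      uintptr_t* old_limit_;  // one past the last usable slot

      // Replacement for the removed CheckForFullBuffer(): rather than reacting
      // once the buffer is already full, callers ask up front whether
      // |space_needed| more slots still fit.
      bool SpaceAvailable(intptr_t space_needed) const {
        return old_limit_ - old_top_ >= space_needed;
      }
    };

With that shape, dropping the call from the scope destructor reads consistently: the append path presumably checks for room itself, so the destructor only restores the saved callback and flag.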
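
For context on the two classes at the bottom of the hunk: both are RAII scopes. StoreBufferRebuildScope saves the current callback and the store_buffer_rebuilding_enabled_ flag, installs the new callback, and fires kStoreBufferStartScanningPagesEvent; DontMoveStoreBufferEntriesScope pins may_move_store_buffer_entries_ to false for its lifetime. Below is a hypothetical call site, assuming this header and the usual V8 heap types are in scope. The function names are placeholders, the constructor parameter order is inferred from the initializer list shown above, and the callback's second parameter type is not visible in this hunk (only NULL is passed), so MemoryChunk* is an assumption.

    // Hypothetical callback; a real one would react to
    // kStoreBufferStartScanningPagesEvent and the other events.
    void ScanPageCallback(Heap* heap, MemoryChunk* page, StoreBufferEvent event) {
      // ... record or process the page ...
    }

    // Hypothetical caller illustrating the RAII pattern.
    void IterateAndRebuild(Heap* heap, StoreBuffer* store_buffer) {
      // Installs ScanPageCallback and sets store_buffer_rebuilding_enabled_.
      StoreBufferRebuildScope rebuild_scope(heap, store_buffer, &ScanPageCallback);
      // Entries must not be moved or compacted while they are being walked.
      DontMoveStoreBufferEntriesScope no_move_scope(store_buffer);

      // ... scan old-space pages and re-enter new-space pointers ...

    }  // Destructors run in reverse order and restore the saved state.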