| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 152 matching lines...) |
| 163 if (old_buffer_is_filtered_) return; | 163 if (old_buffer_is_filtered_) return; |
| 164 ASSERT(may_move_store_buffer_entries_); | 164 ASSERT(may_move_store_buffer_entries_); |
| 165 Compact(); | 165 Compact(); |
| 166 | 166 |
| 167 old_buffer_is_filtered_ = true; | 167 old_buffer_is_filtered_ = true; |
| 168 bool page_has_scan_on_scavenge_flag = false; | 168 bool page_has_scan_on_scavenge_flag = false; |
| 169 | 169 |
| 170 PointerChunkIterator it(heap_); | 170 PointerChunkIterator it(heap_); |
| 171 MemoryChunk* chunk; | 171 MemoryChunk* chunk; |
| 172 while ((chunk = it.next()) != NULL) { | 172 while ((chunk = it.next()) != NULL) { |
| 173 if (chunk->scan_on_scavenge()) page_has_scan_on_scavenge_flag = true; | 173 if (chunk->scan_on_scavenge()) { |
| | 174 page_has_scan_on_scavenge_flag = true; |
| | 175 break; |
| | 176 } |
| 174 } | 177 } |
| 175 | 178 |
| 176 if (page_has_scan_on_scavenge_flag) { | 179 if (page_has_scan_on_scavenge_flag) { |
| 177 Filter(MemoryChunk::SCAN_ON_SCAVENGE); | 180 Filter(MemoryChunk::SCAN_ON_SCAVENGE); |
| 178 } | 181 } |
| 179 | 182 |
| 180 if (SpaceAvailable(space_needed)) return; | 183 if (SpaceAvailable(space_needed)) return; |
| 181 | 184 |
| 182 // Sample 1 entry in 97 and filter out the pages where we estimate that more | 185 // Sample 1 entry in 97 and filter out the pages where we estimate that more |
| 183 // than 1 in 8 pointers are to new space. | 186 // than 1 in 8 pointers are to new space. |
| (...skipping 88 matching lines...) |
| 272 ClearFilteringHashSets(); | 275 ClearFilteringHashSets(); |
| 273 } | 276 } |
| 274 | 277 |
| 275 | 278 |
| 276 bool StoreBuffer::PrepareForIteration() { | 279 bool StoreBuffer::PrepareForIteration() { |
| 277 Compact(); | 280 Compact(); |
| 278 PointerChunkIterator it(heap_); | 281 PointerChunkIterator it(heap_); |
| 279 MemoryChunk* chunk; | 282 MemoryChunk* chunk; |
| 280 bool page_has_scan_on_scavenge_flag = false; | 283 bool page_has_scan_on_scavenge_flag = false; |
| 281 while ((chunk = it.next()) != NULL) { | 284 while ((chunk = it.next()) != NULL) { |
| 282 if (chunk->scan_on_scavenge()) page_has_scan_on_scavenge_flag = true; | 285 if (chunk->scan_on_scavenge()) { |
| | 286 page_has_scan_on_scavenge_flag = true; |
| | 287 break; |
| | 288 } |
| 283 } | 289 } |
| 284 | 290 |
| 285 if (page_has_scan_on_scavenge_flag) { | 291 if (page_has_scan_on_scavenge_flag) { |
| 286 Filter(MemoryChunk::SCAN_ON_SCAVENGE); | 292 Filter(MemoryChunk::SCAN_ON_SCAVENGE); |
| 287 } | 293 } |
| 288 | 294 |
| 289 // Filtering hash sets are inconsistent with the store buffer after | 295 // Filtering hash sets are inconsistent with the store buffer after |
| 290 // iteration. | 296 // iteration. |
| 291 ClearFilteringHashSets(); | 297 ClearFilteringHashSets(); |
| 292 | 298 |
| (...skipping 425 matching lines...) |
| 718 } | 724 } |
| 719 old_buffer_is_sorted_ = false; | 725 old_buffer_is_sorted_ = false; |
| 720 old_buffer_is_filtered_ = false; | 726 old_buffer_is_filtered_ = false; |
| 721 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 727 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| 722 ASSERT(old_top_ <= old_limit_); | 728 ASSERT(old_top_ <= old_limit_); |
| 723 } | 729 } |
| 724 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 730 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
| 725 } | 731 } |
| 726 | 732 |
| 727 } } // namespace v8::internal | 733 } } // namespace v8::internal |
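
Both hunks make the same change: the scan-on-scavenge check used to keep walking the remaining chunks even after page_has_scan_on_scavenge_flag had already been set, and the new code breaks out of the loop as soon as one qualifying chunk is found. The sketch below illustrates that early-exit pattern in isolation; Chunk, ChunkIterator, and AnyChunkIsScanOnScavenge are simplified stand-ins for illustration only, not the real MemoryChunk/PointerChunkIterator API.

#include <cstddef>
#include <vector>

// Simplified stand-in for a heap chunk carrying a scan-on-scavenge flag.
struct Chunk {
  bool scan_on_scavenge;
};

// Simplified stand-in for PointerChunkIterator: next() returns NULL once
// all chunks have been visited, mirroring the loop condition in the patch.
class ChunkIterator {
 public:
  explicit ChunkIterator(std::vector<Chunk>* chunks)
      : chunks_(chunks), index_(0) {}
  Chunk* next() {
    if (index_ >= chunks_->size()) return NULL;
    return &(*chunks_)[index_++];
  }
 private:
  std::vector<Chunk>* chunks_;
  size_t index_;
};

// The flag can only go from false to true, so once a scan-on-scavenge
// chunk is seen the remaining chunks cannot change the result; breaking
// out early just saves the rest of the walk.
bool AnyChunkIsScanOnScavenge(std::vector<Chunk>* chunks) {
  ChunkIterator it(chunks);
  Chunk* chunk;
  bool page_has_scan_on_scavenge_flag = false;
  while ((chunk = it.next()) != NULL) {
    if (chunk->scan_on_scavenge) {
      page_has_scan_on_scavenge_flag = true;
      break;
    }
  }
  return page_has_scan_on_scavenge_flag;
}

In the patched functions the resulting flag only decides whether Filter(MemoryChunk::SCAN_ON_SCAVENGE) runs at all, so the early exit shortens the chunk walk without changing which pages end up filtered.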