Chromium Code Reviews

Unified Diff: src/heap/store-buffer.cc

Issue 463523002: Move store-buffer to heap and remove some unnecessary includes. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 4 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

-#include "src/store-buffer.h"
-
 #include <algorithm>

 #include "src/v8.h"

 #include "src/base/atomicops.h"
 #include "src/counters.h"
-#include "src/store-buffer-inl.h"
+#include "src/heap/store-buffer-inl.h"

 namespace v8 {
 namespace internal {

 StoreBuffer::StoreBuffer(Heap* heap)
     : heap_(heap),
       start_(NULL),
       limit_(NULL),
       old_start_(NULL),
       old_limit_(NULL),
       old_top_(NULL),
       old_reserved_limit_(NULL),
       old_buffer_is_sorted_(false),
       old_buffer_is_filtered_(false),
       during_gc_(false),
       store_buffer_rebuilding_enabled_(false),
       callback_(NULL),
       may_move_store_buffer_entries_(true),
       virtual_memory_(NULL),
       hash_set_1_(NULL),
       hash_set_2_(NULL),
-      hash_sets_are_empty_(true) {
-}
+      hash_sets_are_empty_(true) {}


 void StoreBuffer::SetUp() {
   virtual_memory_ = new base::VirtualMemory(kStoreBufferSize * 3);
   uintptr_t start_as_int =
       reinterpret_cast<uintptr_t>(virtual_memory_->address());
   start_ =
       reinterpret_cast<Address*>(RoundUp(start_as_int, kStoreBufferSize * 2));
   limit_ = start_ + (kStoreBufferSize / kPointerSize);

   old_virtual_memory_ =
       new base::VirtualMemory(kOldStoreBufferLength * kPointerSize);
   old_top_ = old_start_ =
       reinterpret_cast<Address*>(old_virtual_memory_->address());
   // Don't know the alignment requirements of the OS, but it is certainly not
   // less than 0xfff.
   DCHECK((reinterpret_cast<uintptr_t>(old_start_) & 0xfff) == 0);
   int initial_length =
       static_cast<int>(base::OS::CommitPageSize() / kPointerSize);
   DCHECK(initial_length > 0);
   DCHECK(initial_length <= kOldStoreBufferLength);
   old_limit_ = old_start_ + initial_length;
   old_reserved_limit_ = old_start_ + kOldStoreBufferLength;

-  CHECK(old_virtual_memory_->Commit(
-      reinterpret_cast<void*>(old_start_),
-      (old_limit_ - old_start_) * kPointerSize,
-      false));
+  CHECK(old_virtual_memory_->Commit(reinterpret_cast<void*>(old_start_),
+                                    (old_limit_ - old_start_) * kPointerSize,
+                                    false));

   DCHECK(reinterpret_cast<Address>(start_) >= virtual_memory_->address());
   DCHECK(reinterpret_cast<Address>(limit_) >= virtual_memory_->address());
   Address* vm_limit = reinterpret_cast<Address*>(
       reinterpret_cast<char*>(virtual_memory_->address()) +
       virtual_memory_->size());
   DCHECK(start_ <= vm_limit);
   DCHECK(limit_ <= vm_limit);
   USE(vm_limit);
   DCHECK((reinterpret_cast<uintptr_t>(limit_) & kStoreBufferOverflowBit) != 0);
   DCHECK((reinterpret_cast<uintptr_t>(limit_ - 1) & kStoreBufferOverflowBit) ==
          0);

   CHECK(virtual_memory_->Commit(reinterpret_cast<Address>(start_),
                                 kStoreBufferSize,
                                 false));  // Not executable.
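The SetUp() hunk above over-reserves by a factor of three so that a start address aligned to twice the buffer size always exists inside the reservation; that alignment is also what makes the two overflow-bit DCHECKs hold. A minimal standalone sketch of the arithmetic, with an illustrative buffer size and a stand-in reservation address, and assuming (as in the store-buffer.h of this patch) that kStoreBufferOverflowBit equals kStoreBufferSize:

// Sketch only: illustrative constants, not the real V8 values.
#include <cassert>
#include <cstdint>
#include <cstdio>

int main() {
  const uintptr_t kStoreBufferSize = 1 << 16;       // hypothetical size
  const uintptr_t kOverflowBit = kStoreBufferSize;  // assumed equal, as above
  uintptr_t reservation = 0x12345000;               // stand-in OS address

  // Rounding the reservation base up to a 2x-size boundary always lands
  // inside a 3x-size reservation, leaving at least one full buffer beyond.
  uintptr_t start = (reservation + 2 * kStoreBufferSize - 1) &
                    ~(2 * kStoreBufferSize - 1);
  assert(start >= reservation);
  assert(start + kStoreBufferSize <= reservation + 3 * kStoreBufferSize);

  // With start aligned to 2x the size, limit = start + size has the
  // overflow bit set while limit - 1 (the last usable slot) does not,
  // matching the two DCHECKs above.
  uintptr_t limit = start + kStoreBufferSize;
  assert((limit & kOverflowBit) != 0);
  assert(((limit - 1) & kOverflowBit) == 0);
  printf("start=%#zx limit=%#zx\n", (size_t)start, (size_t)limit);
  return 0;
}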
(...skipping 45 matching lines...)
 bool StoreBuffer::SpaceAvailable(intptr_t space_needed) {
   return old_limit_ - old_top_ >= space_needed;
 }


 void StoreBuffer::EnsureSpace(intptr_t space_needed) {
   while (old_limit_ - old_top_ < space_needed &&
          old_limit_ < old_reserved_limit_) {
     size_t grow = old_limit_ - old_start_;  // Double size.
     CHECK(old_virtual_memory_->Commit(reinterpret_cast<void*>(old_limit_),
-                                      grow * kPointerSize,
-                                      false));
+                                      grow * kPointerSize, false));
     old_limit_ += grow;
   }

   if (SpaceAvailable(space_needed)) return;

   if (old_buffer_is_filtered_) return;
   DCHECK(may_move_store_buffer_entries_);
   Compact();

   old_buffer_is_filtered_ = true;
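The loop above grows the old buffer geometrically: each pass commits as many bytes as are already committed, doubling capacity until the request fits or the reservation is exhausted. A toy sketch of that shape, with a Commit() stand-in that always succeeds and hypothetical sizes:

// Sketch only: byte arithmetic on fake addresses; Commit() always succeeds.
#include <cstdint>
#include <cstdio>

static bool Commit(uintptr_t /*addr*/, size_t /*bytes*/) { return true; }

int main() {
  const size_t kPointerSize = 8;                       // hypothetical
  uintptr_t old_start = 0x10000000;
  uintptr_t old_top = old_start + 100 * kPointerSize;  // 100 live entries
  uintptr_t old_limit = old_start + 512 * kPointerSize;
  uintptr_t old_reserved_limit = old_start + (1 << 20) * kPointerSize;
  size_t space_needed = 2000 * kPointerSize;

  // Double the committed region until the request fits or the whole
  // reservation is committed -- the same loop shape as EnsureSpace().
  while (old_limit - old_top < space_needed &&
         old_limit < old_reserved_limit) {
    size_t grow = old_limit - old_start;  // bytes here; slots in the original
    if (!Commit(old_limit, grow)) break;
    old_limit += grow;
    printf("committed %zu slots\n", (old_limit - old_start) / kPointerSize);
  }
  return 0;
}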
(...skipping 13 matching lines...)
   }

   if (SpaceAvailable(space_needed)) return;

   // Sample 1 entry in 97 and filter out the pages where we estimate that more
   // than 1 in 8 pointers are to new space.
   static const int kSampleFinenesses = 5;
   static const struct Samples {
     int prime_sample_step;
     int threshold;
   } samples[kSampleFinenesses] = {
-    { 97, ((Page::kPageSize / kPointerSize) / 97) / 8 },
-    { 23, ((Page::kPageSize / kPointerSize) / 23) / 16 },
-    { 7, ((Page::kPageSize / kPointerSize) / 7) / 32 },
-    { 3, ((Page::kPageSize / kPointerSize) / 3) / 256 },
-    { 1, 0}
-  };
+      {97, ((Page::kPageSize / kPointerSize) / 97) / 8},
+      {23, ((Page::kPageSize / kPointerSize) / 23) / 16},
+      {7, ((Page::kPageSize / kPointerSize) / 7) / 32},
+      {3, ((Page::kPageSize / kPointerSize) / 3) / 256},
+      {1, 0}};
   for (int i = 0; i < kSampleFinenesses; i++) {
     ExemptPopularPages(samples[i].prime_sample_step, samples[i].threshold);
     // As a last resort we mark all pages as being exempt from the store buffer.
     DCHECK(i != (kSampleFinenesses - 1) || old_top_ == old_start_);
     if (SpaceAvailable(space_needed)) return;
   }
   UNREACHABLE();
 }

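The sample table trades scan cost against filtering precision: a larger prime step visits fewer slots per page, so the exemption threshold shrinks in proportion. A small sketch that evaluates the same threshold arithmetic under assumed values for Page::kPageSize and kPointerSize:

// Sketch only: assumed 1 MB pages and 8-byte pointers.
#include <cstdio>

int main() {
  const int kPageSize = 1 << 20;  // hypothetical Page::kPageSize
  const int kPointerSize = 8;     // hypothetical 64-bit build
  const int slots_per_page = kPageSize / kPointerSize;
  const struct { int step, divisor; } samples[] = {
      {97, 8}, {23, 16}, {7, 32}, {3, 256}};

  // Visiting every step-th slot sees slots_per_page / step samples, so a
  // threshold of (slots_per_page / step) / divisor exempts pages where
  // roughly 1 in divisor sampled pointers hit new space.
  for (const auto& s : samples) {
    printf("step %2d -> threshold %d\n", s.step,
           (slots_per_page / s.step) / s.divisor);
  }
  // The final {1, 0} row in the real table exempts every remaining page.
  return 0;
}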
(...skipping 122 matching lines...)
       return true;
     }
   }
   return false;
 }
 #endif


 void StoreBuffer::ClearFilteringHashSets() {
   if (!hash_sets_are_empty_) {
-    memset(reinterpret_cast<void*>(hash_set_1_),
-           0,
+    memset(reinterpret_cast<void*>(hash_set_1_), 0,
            sizeof(uintptr_t) * kHashSetLength);
-    memset(reinterpret_cast<void*>(hash_set_2_),
-           0,
+    memset(reinterpret_cast<void*>(hash_set_2_), 0,
            sizeof(uintptr_t) * kHashSetLength);
     hash_sets_are_empty_ = true;
   }
 }


 void StoreBuffer::GCPrologue() {
   ClearFilteringHashSets();
   during_gc_ = true;
 }
(...skipping 34 matching lines...)
   during_gc_ = false;
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
   }
 #endif
 }


 void StoreBuffer::FindPointersToNewSpaceInRegion(
-    Address start,
-    Address end,
-    ObjectSlotCallback slot_callback,
+    Address start, Address end, ObjectSlotCallback slot_callback,
     bool clear_maps) {
-  for (Address slot_address = start;
-       slot_address < end;
+  for (Address slot_address = start; slot_address < end;
        slot_address += kPointerSize) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
     Object* object = reinterpret_cast<Object*>(
         base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot)));
     if (heap_->InNewSpace(object)) {
       HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
       DCHECK(heap_object->IsHeapObject());
       // The new space object was not promoted if it still contains a map
       // pointer. Clear the map field now lazily.
       if (clear_maps) ClearDeadObject(heap_object);
       slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object);
       object = reinterpret_cast<Object*>(
           base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot)));
       if (heap_->InNewSpace(object)) {
         EnterDirectlyIntoStoreBuffer(slot_address);
       }
     }
   }
 }

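FindPointersToNewSpaceInRegion() reloads each slot after the callback runs because the callback may rewrite it to a promoted object's new address; only slots that still point into new space are re-recorded. A toy model of that reload-and-recheck pattern, where InNewSpace() is a hypothetical stand-in for the heap query:

// Sketch only: a toy heap where "new space" is addresses below 0x1000.
#include <cstdint>
#include <cstdio>
#include <vector>

static bool InNewSpace(uintptr_t addr) { return addr < 0x1000; }

int main() {
  // Two slots: the first referent gets "promoted" by the callback, the
  // second stays in new space and must be re-recorded.
  uintptr_t slots[] = {0x0800, 0x0900};
  std::vector<uintptr_t*> store_buffer;

  for (uintptr_t& slot : slots) {
    if (!InNewSpace(slot)) continue;
    // The callback may rewrite the slot to the object's new location.
    if (slot == 0x0800) slot = 0x2000;  // simulate promotion to old space
    // Reload and test again, as FindPointersToNewSpaceInRegion() does:
    // only a slot that still points into new space stays recorded.
    if (InNewSpace(slot)) store_buffer.push_back(&slot);
  }
  printf("re-recorded %zu of 2 slots\n", store_buffer.size());  // prints 1
  return 0;
}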
-void StoreBuffer::IteratePointersInStoreBuffer(
-    ObjectSlotCallback slot_callback,
-    bool clear_maps) {
+void StoreBuffer::IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback,
+                                               bool clear_maps) {
   Address* limit = old_top_;
   old_top_ = old_start_;
   {
     DontMoveStoreBufferEntriesScope scope(this);
     for (Address* current = old_start_; current < limit; current++) {
 #ifdef DEBUG
       Address* saved_top = old_top_;
 #endif
       Object** slot = reinterpret_cast<Object**>(*current);
       Object* object = reinterpret_cast<Object*>(
(...skipping 171 matching lines...)
       hash_set_1_[hash1] = int_addr;
       hash_set_2_[hash2] = 0;
     }
     old_buffer_is_sorted_ = false;
     old_buffer_is_filtered_ = false;
     *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2);
     DCHECK(old_top_ <= old_limit_);
   }
   heap_->isolate()->counters()->store_buffer_compactions()->Increment();
 }
-
-} }  // namespace v8::internal
+}
+}  // namespace v8::internal
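The Compact() fragment above de-duplicates entries through two small direct-mapped hash tables before appending to the old buffer. A rough sketch of the idea with a hypothetical table size and second hash function; the real code derives both hashes from bit ranges of the shifted address and uses a more involved eviction policy:

// Sketch only: hypothetical table size and second hash; not the real scheme.
#include <cstdint>
#include <cstdio>

int main() {
  const int kHashSetLength = 1 << 8;  // hypothetical; the real table is larger
  const int kPointerSizeLog2 = 3;
  uintptr_t set1[kHashSetLength] = {0};
  uintptr_t set2[kHashSetLength] = {0};
  const uintptr_t addresses[] = {0x1000, 0x1008, 0x1000, 0x2000, 0x1008};

  int kept = 0;
  for (uintptr_t addr : addresses) {
    uintptr_t int_addr = addr >> kPointerSizeLog2;
    size_t hash1 = int_addr & (kHashSetLength - 1);
    size_t hash2 = ((int_addr >> 8) ^ int_addr) & (kHashSetLength - 1);
    // A hit in either direct-mapped table means this slot was seen
    // recently and is already in the compacted buffer; drop the duplicate.
    if (set1[hash1] == int_addr || set2[hash2] == int_addr) continue;
    set1[hash1] = int_addr;
    set2[hash2] = 0;  // mirrors the overwrite visible in the fragment above
    ++kept;
  }
  printf("kept %d of 5 entries\n", kept);  // prints 3
  return 0;
}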