| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_SPACES_INL_H_ | 5 #ifndef V8_HEAP_SPACES_INL_H_ |
| 6 #define V8_HEAP_SPACES_INL_H_ | 6 #define V8_HEAP_SPACES_INL_H_ |
| 7 | 7 |
| 8 #include "src/heap/incremental-marking.h" | 8 #include "src/heap/incremental-marking.h" |
| 9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
| 10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
| (...skipping 16 matching lines...) |
| 27 operator++(); | 27 operator++(); |
| 28 return tmp; | 28 return tmp; |
| 29 } | 29 } |
| 30 | 30 |
| 31 NewSpacePageRange::NewSpacePageRange(Address start, Address limit) | 31 NewSpacePageRange::NewSpacePageRange(Address start, Address limit) |
| 32 : range_(Page::FromAddress(start), | 32 : range_(Page::FromAddress(start), |
| 33 Page::FromAllocationAreaAddress(limit)->next_page()) { | 33 Page::FromAllocationAreaAddress(limit)->next_page()) { |
| 34 SemiSpace::AssertValidRange(start, limit); | 34 SemiSpace::AssertValidRange(start, limit); |
| 35 } | 35 } |
| 36 | 36 |
| | 37 |
| 37 // ----------------------------------------------------------------------------- | 38 // ----------------------------------------------------------------------------- |
| 38 // SemiSpaceIterator | 39 // SemiSpaceIterator |
| 39 | 40 |
| 40 HeapObject* SemiSpaceIterator::Next() { | 41 HeapObject* SemiSpaceIterator::Next() { |
| 41 while (current_ != limit_) { | 42 while (current_ != limit_) { |
| 42 if (Page::IsAlignedToPageSize(current_)) { | 43 if (Page::IsAlignedToPageSize(current_)) { |
| 43 Page* page = Page::FromAllocationAreaAddress(current_); | 44 Page* page = Page::FromAllocationAreaAddress(current_); |
| 44 page = page->next_page(); | 45 page = page->next_page(); |
| 45 DCHECK(!page->is_anchor()); | 46 DCHECK(!page->is_anchor()); |
| 46 current_ = page->area_start(); | 47 current_ = page->area_start(); |
| (...skipping 187 matching lines...) |
| 234 | 235 |
| 235 void MemoryChunk::ResetLiveBytes() { | 236 void MemoryChunk::ResetLiveBytes() { |
| 236 if (FLAG_trace_live_bytes) { | 237 if (FLAG_trace_live_bytes) { |
| 237 PrintIsolate(heap()->isolate(), "live-bytes: reset page=%p %d->0\n", | 238 PrintIsolate(heap()->isolate(), "live-bytes: reset page=%p %d->0\n", |
| 238 static_cast<void*>(this), live_byte_count_); | 239 static_cast<void*>(this), live_byte_count_); |
| 239 } | 240 } |
| 240 live_byte_count_ = 0; | 241 live_byte_count_ = 0; |
| 241 } | 242 } |
| 242 | 243 |
| 243 void MemoryChunk::IncrementLiveBytes(int by) { | 244 void MemoryChunk::IncrementLiveBytes(int by) { |
| | 245 if (IsFlagSet(BLACK_PAGE)) return; |
| 244 if (FLAG_trace_live_bytes) { | 246 if (FLAG_trace_live_bytes) { |
| 245 PrintIsolate( | 247 PrintIsolate( |
| 246 heap()->isolate(), "live-bytes: update page=%p delta=%d %d->%d\n", | 248 heap()->isolate(), "live-bytes: update page=%p delta=%d %d->%d\n", |
| 247 static_cast<void*>(this), by, live_byte_count_, live_byte_count_ + by); | 249 static_cast<void*>(this), by, live_byte_count_, live_byte_count_ + by); |
| 248 } | 250 } |
| 249 live_byte_count_ += by; | 251 live_byte_count_ += by; |
| 250 DCHECK_GE(live_byte_count_, 0); | 252 DCHECK_GE(live_byte_count_, 0); |
| 251 DCHECK_LE(static_cast<size_t>(live_byte_count_), size_); | 253 DCHECK_LE(static_cast<size_t>(live_byte_count_), size_); |
| 252 } | 254 } |
| 253 | 255 |
| (...skipping 181 matching lines...) |
| 435 // Raw allocation. | 437 // Raw allocation. |
| 436 AllocationResult PagedSpace::AllocateRawUnaligned( | 438 AllocationResult PagedSpace::AllocateRawUnaligned( |
| 437 int size_in_bytes, UpdateSkipList update_skip_list) { | 439 int size_in_bytes, UpdateSkipList update_skip_list) { |
| 438 HeapObject* object = AllocateLinearly(size_in_bytes); | 440 HeapObject* object = AllocateLinearly(size_in_bytes); |
| 439 | 441 |
| 440 if (object == NULL) { | 442 if (object == NULL) { |
| 441 object = free_list_.Allocate(size_in_bytes); | 443 object = free_list_.Allocate(size_in_bytes); |
| 442 if (object == NULL) { | 444 if (object == NULL) { |
| 443 object = SlowAllocateRaw(size_in_bytes); | 445 object = SlowAllocateRaw(size_in_bytes); |
| 444 } | 446 } |
| 445 if (object != NULL) { | |
| 446 if (heap()->incremental_marking()->black_allocation()) { | |
| 447 Marking::MarkBlack(ObjectMarking::MarkBitFrom(object)); | |
| 448 MemoryChunk::IncrementLiveBytesFromGC(object, size_in_bytes); | |
| 449 } | |
| 450 } | |
| 451 } | 447 } |
| 452 | 448 |
| 453 if (object != NULL) { | 449 if (object != NULL) { |
| 454 if (update_skip_list == UPDATE_SKIP_LIST && identity() == CODE_SPACE) { | 450 if (update_skip_list == UPDATE_SKIP_LIST && identity() == CODE_SPACE) { |
| 455 SkipList::Update(object->address(), size_in_bytes); | 451 SkipList::Update(object->address(), size_in_bytes); |
| 456 } | 452 } |
| 457 MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes); | 453 MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes); |
| 458 return object; | 454 return object; |
| 459 } | 455 } |
| 460 | 456 |
| (...skipping 174 matching lines...) |
| 635 other->allocation_info_.Reset(nullptr, nullptr); | 631 other->allocation_info_.Reset(nullptr, nullptr); |
| 636 return true; | 632 return true; |
| 637 } | 633 } |
| 638 return false; | 634 return false; |
| 639 } | 635 } |
| 640 | 636 |
| 641 } // namespace internal | 637 } // namespace internal |
| 642 } // namespace v8 | 638 } // namespace v8 |
| 643 | 639 |
| 644 #endif // V8_HEAP_SPACES_INL_H_ | 640 #endif // V8_HEAP_SPACES_INL_H_ |
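A minimal standalone sketch (not V8 code) of the live-byte accounting pattern the two hunks above converge on: IncrementLiveBytes bails out early for pages flagged as black, so the allocation fast path no longer carries its own black-allocation branch. PageLike and its members are hypothetical stand-ins for MemoryChunk, the BLACK_PAGE flag, and live_byte_count_.

    // Hypothetical stand-in for MemoryChunk's live-byte bookkeeping; not V8 API.
    #include <cassert>
    #include <cstdio>

    struct PageLike {
      bool black_page = false;   // models the BLACK_PAGE chunk flag
      int live_byte_count = 0;   // models live_byte_count_

      void IncrementLiveBytes(int by) {
        // Mirrors the guard added at new line 245: a black page is treated as
        // fully live, so per-object live-byte updates are skipped entirely.
        if (black_page) return;
        live_byte_count += by;
        assert(live_byte_count >= 0);
      }
    };

    int main() {
      PageLike normal, black;
      black.black_page = true;
      normal.IncrementLiveBytes(64);  // counted
      black.IncrementLiveBytes(64);   // ignored by the early return
      std::printf("normal=%d black=%d\n", normal.live_byte_count,
                  black.live_byte_count);
      return 0;
    }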