| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_SPACES_INL_H_ | 5 #ifndef V8_HEAP_SPACES_INL_H_ |
| 6 #define V8_HEAP_SPACES_INL_H_ | 6 #define V8_HEAP_SPACES_INL_H_ |
| 7 | 7 |
| 8 #include "src/heap/spaces.h" | 8 #include "src/heap/spaces.h" |
| 9 #include "src/heap-profiler.h" | 9 #include "src/heap-profiler.h" |
| 10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
| (...skipping 10 matching lines...) |
| 21 void Bitmap::Clear(MemoryChunk* chunk) { | 21 void Bitmap::Clear(MemoryChunk* chunk) { |
| 22 Bitmap* bitmap = chunk->markbits(); | 22 Bitmap* bitmap = chunk->markbits(); |
| 23 for (int i = 0; i < bitmap->CellsCount(); i++) bitmap->cells()[i] = 0; | 23 for (int i = 0; i < bitmap->CellsCount(); i++) bitmap->cells()[i] = 0; |
| 24 chunk->ResetLiveBytes(); | 24 chunk->ResetLiveBytes(); |
| 25 } | 25 } |
| 26 | 26 |
| 27 | 27 |
| 28 // ----------------------------------------------------------------------------- | 28 // ----------------------------------------------------------------------------- |
| 29 // PageIterator | 29 // PageIterator |
| 30 | 30 |
| 31 | |
| 32 PageIterator::PageIterator(PagedSpace* space) | 31 PageIterator::PageIterator(PagedSpace* space) |
| 33 : space_(space), | 32 : space_(space), |
| 34 prev_page_(&space->anchor_), | 33 prev_page_(&space->anchor_), |
| 35 next_page_(prev_page_->next_page()) {} | 34 next_page_(prev_page_->next_page()) {} |
| 36 | 35 |
| 37 | 36 |
| 38 bool PageIterator::has_next() { return next_page_ != &space_->anchor_; } | 37 bool PageIterator::has_next() { return next_page_ != &space_->anchor_; } |
| 39 | 38 |
| 40 | 39 |
| 41 Page* PageIterator::next() { | 40 Page* PageIterator::next() { |
| 42 DCHECK(has_next()); | 41 DCHECK(has_next()); |
| 43 prev_page_ = next_page_; | 42 prev_page_ = next_page_; |
| 44 next_page_ = next_page_->next_page(); | 43 next_page_ = next_page_->next_page(); |
| 45 return prev_page_; | 44 return prev_page_; |
| 46 } | 45 } |
| 47 | 46 |
| 48 | 47 |
| 49 // ----------------------------------------------------------------------------- | 48 // ----------------------------------------------------------------------------- |
| 49 // SemiSpaceIterator |
| 50 |
| 51 HeapObject* SemiSpaceIterator::Next() { |
| 52 if (current_ == limit_) return NULL; |
| 53 if (NewSpacePage::IsAtEnd(current_)) { |
| 54 NewSpacePage* page = NewSpacePage::FromLimit(current_); |
| 55 page = page->next_page(); |
| 56 DCHECK(!page->is_anchor()); |
| 57 current_ = page->area_start(); |
| 58 if (current_ == limit_) return NULL; |
| 59 } |
| 60 |
| 61 HeapObject* object = HeapObject::FromAddress(current_); |
| 62 int size = object->Size(); |
| 63 |
| 64 current_ += size; |
| 65 return object; |
| 66 } |
| 67 |
| 68 |
| 69 HeapObject* SemiSpaceIterator::next_object() { return Next(); } |
| 70 |
| 71 |
| 72 // ----------------------------------------------------------------------------- |
| 50 // NewSpacePageIterator | 73 // NewSpacePageIterator |
| 51 | 74 |
| 52 | |
| 53 NewSpacePageIterator::NewSpacePageIterator(NewSpace* space) | 75 NewSpacePageIterator::NewSpacePageIterator(NewSpace* space) |
| 54 : prev_page_(NewSpacePage::FromAddress(space->ToSpaceStart())->prev_page()), | 76 : prev_page_(NewSpacePage::FromAddress(space->ToSpaceStart())->prev_page()), |
| 55 next_page_(NewSpacePage::FromAddress(space->ToSpaceStart())), | 77 next_page_(NewSpacePage::FromAddress(space->ToSpaceStart())), |
| 56 last_page_(NewSpacePage::FromLimit(space->ToSpaceEnd())) {} | 78 last_page_(NewSpacePage::FromLimit(space->ToSpaceEnd())) {} |
| 57 | 79 |
| 58 NewSpacePageIterator::NewSpacePageIterator(SemiSpace* space) | 80 NewSpacePageIterator::NewSpacePageIterator(SemiSpace* space) |
| 59 : prev_page_(space->anchor()), | 81 : prev_page_(space->anchor()), |
| 60 next_page_(prev_page_->next_page()), | 82 next_page_(prev_page_->next_page()), |
| 61 last_page_(prev_page_->prev_page()) {} | 83 last_page_(prev_page_->prev_page()) {} |
| 62 | 84 |
| (...skipping 11 matching lines...) |
| 74 NewSpacePage* NewSpacePageIterator::next() { | 96 NewSpacePage* NewSpacePageIterator::next() { |
| 75 DCHECK(has_next()); | 97 DCHECK(has_next()); |
| 76 prev_page_ = next_page_; | 98 prev_page_ = next_page_; |
| 77 next_page_ = next_page_->next_page(); | 99 next_page_ = next_page_->next_page(); |
| 78 return prev_page_; | 100 return prev_page_; |
| 79 } | 101 } |
| 80 | 102 |
| 81 | 103 |
| 82 // ----------------------------------------------------------------------------- | 104 // ----------------------------------------------------------------------------- |
| 83 // HeapObjectIterator | 105 // HeapObjectIterator |
| 106 |
| 107 HeapObject* HeapObjectIterator::Next() { |
| 108 do { |
| 109 HeapObject* next_obj = FromCurrentPage(); |
| 110 if (next_obj != NULL) return next_obj; |
| 111 } while (AdvanceToNextPage()); |
| 112 return NULL; |
| 113 } |
| 114 |
| 115 |
| 116 HeapObject* HeapObjectIterator::next_object() { return Next(); } |
| 117 |
| 118 |
| 84 HeapObject* HeapObjectIterator::FromCurrentPage() { | 119 HeapObject* HeapObjectIterator::FromCurrentPage() { |
| 85 while (cur_addr_ != cur_end_) { | 120 while (cur_addr_ != cur_end_) { |
| 86 if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) { | 121 if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) { |
| 87 cur_addr_ = space_->limit(); | 122 cur_addr_ = space_->limit(); |
| 88 continue; | 123 continue; |
| 89 } | 124 } |
| 90 HeapObject* obj = HeapObject::FromAddress(cur_addr_); | 125 HeapObject* obj = HeapObject::FromAddress(cur_addr_); |
| 91 int obj_size = obj->Size(); | 126 int obj_size = obj->Size(); |
| 92 cur_addr_ += obj_size; | 127 cur_addr_ += obj_size; |
| 93 DCHECK(cur_addr_ <= cur_end_); | 128 DCHECK(cur_addr_ <= cur_end_); |
| (...skipping 37 matching lines...) |
| 131 void MemoryAllocator::UnprotectChunkFromPage(Page* page) { | 166 void MemoryAllocator::UnprotectChunkFromPage(Page* page) { |
| 132 int id = GetChunkId(page); | 167 int id = GetChunkId(page); |
| 133 base::OS::Unprotect(chunks_[id].address(), chunks_[id].size(), | 168 base::OS::Unprotect(chunks_[id].address(), chunks_[id].size(), |
| 134 chunks_[id].owner()->executable() == EXECUTABLE); | 169 chunks_[id].owner()->executable() == EXECUTABLE); |
| 135 } | 170 } |
| 136 | 171 |
| 137 #endif | 172 #endif |
| 138 | 173 |
| 139 | 174 |
| 140 // -------------------------------------------------------------------------- | 175 // -------------------------------------------------------------------------- |
| 176 // AllocationResult |
| 177 |
| 178 AllocationSpace AllocationResult::RetrySpace() { |
| 179 DCHECK(IsRetry()); |
| 180 return static_cast<AllocationSpace>(Smi::cast(object_)->value()); |
| 181 } |
| 182 |
| 183 |
| 184 // -------------------------------------------------------------------------- |
| 141 // PagedSpace | 185 // PagedSpace |
| 186 |
| 142 Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable, | 187 Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable, |
| 143 PagedSpace* owner) { | 188 PagedSpace* owner) { |
| 144 Page* page = reinterpret_cast<Page*>(chunk); | 189 Page* page = reinterpret_cast<Page*>(chunk); |
| 145 page->mutex_ = new base::Mutex(); | 190 page->mutex_ = new base::Mutex(); |
| 146 DCHECK(page->area_size() <= kMaxRegularHeapObjectSize); | 191 DCHECK(page->area_size() <= kMaxRegularHeapObjectSize); |
| 147 DCHECK(chunk->owner() == owner); | 192 DCHECK(chunk->owner() == owner); |
| 148 owner->IncreaseCapacity(page->area_size()); | 193 owner->IncreaseCapacity(page->area_size()); |
| 149 owner->Free(page->area_start(), page->area_size()); | 194 owner->Free(page->area_start(), page->area_size()); |
| 150 | 195 |
| 151 heap->incremental_marking()->SetOldSpacePageFlags(chunk); | 196 heap->incremental_marking()->SetOldSpacePageFlags(chunk); |
| 152 | 197 |
| 153 return page; | 198 return page; |
| 154 } | 199 } |
| 155 | 200 |
| 156 | 201 |
| 157 bool PagedSpace::Contains(Address addr) { | 202 bool PagedSpace::Contains(Address addr) { |
| 158 Page* p = Page::FromAddress(addr); | 203 Page* p = Page::FromAddress(addr); |
| 159 if (!p->is_valid()) return false; | 204 if (!p->is_valid()) return false; |
| 160 return p->owner() == this; | 205 return p->owner() == this; |
| 161 } | 206 } |
| 162 | 207 |
| 163 | 208 |
| 209 bool PagedSpace::Contains(HeapObject* o) { return Contains(o->address()); } |
| 210 |
| 211 |
| 164 void MemoryChunk::set_scan_on_scavenge(bool scan) { | 212 void MemoryChunk::set_scan_on_scavenge(bool scan) { |
| 165 if (scan) { | 213 if (scan) { |
| 166 if (!scan_on_scavenge()) heap_->increment_scan_on_scavenge_pages(); | 214 if (!scan_on_scavenge()) heap_->increment_scan_on_scavenge_pages(); |
| 167 SetFlag(SCAN_ON_SCAVENGE); | 215 SetFlag(SCAN_ON_SCAVENGE); |
| 168 } else { | 216 } else { |
| 169 if (scan_on_scavenge()) heap_->decrement_scan_on_scavenge_pages(); | 217 if (scan_on_scavenge()) heap_->decrement_scan_on_scavenge_pages(); |
| 170 ClearFlag(SCAN_ON_SCAVENGE); | 218 ClearFlag(SCAN_ON_SCAVENGE); |
| 171 } | 219 } |
| 172 heap_->incremental_marking()->SetOldSpacePageFlags(this); | 220 heap_->incremental_marking()->SetOldSpacePageFlags(this); |
| 173 } | 221 } |
| (...skipping 12 matching lines...) |
| 186 if (chunk->Contains(addr)) { | 234 if (chunk->Contains(addr)) { |
| 187 return chunk; | 235 return chunk; |
| 188 } | 236 } |
| 189 } | 237 } |
| 190 } | 238 } |
| 191 UNREACHABLE(); | 239 UNREACHABLE(); |
| 192 return NULL; | 240 return NULL; |
| 193 } | 241 } |
| 194 | 242 |
| 195 | 243 |
| 196 void MemoryChunk::UpdateHighWaterMark(Address mark) { | |
| 197 if (mark == NULL) return; | |
| 198 // Need to subtract one from the mark because when a chunk is full the | |
| 199 // top points to the next address after the chunk, which effectively belongs | |
| 200 // to another chunk. See the comment to Page::FromAllocationTop. | |
| 201 MemoryChunk* chunk = MemoryChunk::FromAddress(mark - 1); | |
| 202 int new_mark = static_cast<int>(mark - chunk->address()); | |
| 203 if (new_mark > chunk->high_water_mark_) { | |
| 204 chunk->high_water_mark_ = new_mark; | |
| 205 } | |
| 206 } | |
| 207 | |
| 208 | |
| 209 PointerChunkIterator::PointerChunkIterator(Heap* heap) | 244 PointerChunkIterator::PointerChunkIterator(Heap* heap) |
| 210 : state_(kOldSpaceState), | 245 : state_(kOldSpaceState), |
| 211 old_iterator_(heap->old_space()), | 246 old_iterator_(heap->old_space()), |
| 212 map_iterator_(heap->map_space()), | 247 map_iterator_(heap->map_space()), |
| 213 lo_iterator_(heap->lo_space()) {} | 248 lo_iterator_(heap->lo_space()) {} |
| 214 | 249 |
| 215 | 250 |
| 216 Page* Page::next_page() { | 251 MemoryChunk* PointerChunkIterator::next() { |
| 217 DCHECK(next_chunk()->owner() == owner()); | 252 switch (state_) { |
| 218 return static_cast<Page*>(next_chunk()); | 253 case kOldSpaceState: { |
| 254 if (old_iterator_.has_next()) { |
| 255 return old_iterator_.next(); |
| 256 } |
| 257 state_ = kMapState; |
| 258 // Fall through. |
| 259 } |
| 260 case kMapState: { |
| 261 if (map_iterator_.has_next()) { |
| 262 return map_iterator_.next(); |
| 263 } |
| 264 state_ = kLargeObjectState; |
| 265 // Fall through. |
| 266 } |
| 267 case kLargeObjectState: { |
| 268 HeapObject* heap_object; |
| 269 do { |
| 270 heap_object = lo_iterator_.Next(); |
| 271 if (heap_object == NULL) { |
| 272 state_ = kFinishedState; |
| 273 return NULL; |
| 274 } |
| 275 // Fixed arrays are the only pointer-containing objects in large |
| 276 // object space. |
| 277 } while (!heap_object->IsFixedArray()); |
| 278 MemoryChunk* answer = MemoryChunk::FromAddress(heap_object->address()); |
| 279 return answer; |
| 280 } |
| 281 case kFinishedState: |
| 282 return NULL; |
| 283 default: |
| 284 break; |
| 285 } |
| 286 UNREACHABLE(); |
| 287 return NULL; |
| 219 } | 288 } |
| 220 | 289 |
| 221 | 290 |
| 222 Page* Page::prev_page() { | |
| 223 DCHECK(prev_chunk()->owner() == owner()); | |
| 224 return static_cast<Page*>(prev_chunk()); | |
| 225 } | |
| 226 | |
| 227 | |
| 228 void Page::set_next_page(Page* page) { | 291 void Page::set_next_page(Page* page) { |
| 229 DCHECK(page->owner() == owner()); | 292 DCHECK(page->owner() == owner()); |
| 230 set_next_chunk(page); | 293 set_next_chunk(page); |
| 231 } | 294 } |
| 232 | 295 |
| 233 | 296 |
| 234 void Page::set_prev_page(Page* page) { | 297 void Page::set_prev_page(Page* page) { |
| 235 DCHECK(page->owner() == owner()); | 298 DCHECK(page->owner() == owner()); |
| 236 set_prev_chunk(page); | 299 set_prev_chunk(page); |
| 237 } | 300 } |
| (...skipping 176 matching lines...) |
| 414 | 477 |
| 415 | 478 |
| 416 intptr_t LargeObjectSpace::Available() { | 479 intptr_t LargeObjectSpace::Available() { |
| 417 return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available()); | 480 return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available()); |
| 418 } | 481 } |
| 419 | 482 |
| 420 } | 483 } |
| 421 } // namespace v8::internal | 484 } // namespace v8::internal |
| 422 | 485 |
| 423 #endif // V8_HEAP_SPACES_INL_H_ | 486 #endif // V8_HEAP_SPACES_INL_H_ |