OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_SPACES_INL_H_ | 5 #ifndef V8_HEAP_SPACES_INL_H_ |
6 #define V8_HEAP_SPACES_INL_H_ | 6 #define V8_HEAP_SPACES_INL_H_ |
7 | 7 |
8 #include "src/heap/incremental-marking.h" | 8 #include "src/heap/incremental-marking.h" |
9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
140 DCHECK_CODEOBJECT_SIZE(obj_size, space_); | 140 DCHECK_CODEOBJECT_SIZE(obj_size, space_); |
141 } else { | 141 } else { |
142 DCHECK_OBJECT_SIZE(obj_size); | 142 DCHECK_OBJECT_SIZE(obj_size); |
143 } | 143 } |
144 return obj; | 144 return obj; |
145 } | 145 } |
146 } | 146 } |
147 return NULL; | 147 return NULL; |
148 } | 148 } |
149 | 149 |
| 150 // ----------------------------------------------------------------------------- |
| 151 // LargePageIterator |
| 152 |
| 153 LargePageIterator::LargePageIterator(LargeObjectSpace* space) |
| 154 : next_page_(space->first_page()) {} |
| 155 |
| 156 LargePage* LargePageIterator::next() { |
| 157 LargePage* result = next_page_; |
| 158 if (next_page_ != nullptr) { |
| 159 next_page_ = next_page_->next_page(); |
| 160 } |
| 161 return result; |
| 162 } |
150 | 163 |
151 // ----------------------------------------------------------------------------- | 164 // ----------------------------------------------------------------------------- |
152 // MemoryAllocator | 165 // MemoryAllocator |
153 | 166 |
154 #ifdef ENABLE_HEAP_PROTECTION | 167 #ifdef ENABLE_HEAP_PROTECTION |
155 | 168 |
156 void MemoryAllocator::Protect(Address start, size_t size) { | 169 void MemoryAllocator::Protect(Address start, size_t size) { |
157 base::OS::Protect(start, size); | 170 base::OS::Protect(start, size); |
158 } | 171 } |
159 | 172 |
(...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
301 if (offset < MemoryChunk::kHeaderSize || !chunk->HasPageHeader()) { | 314 if (offset < MemoryChunk::kHeaderSize || !chunk->HasPageHeader()) { |
302 chunk = heap->lo_space()->FindPage(addr); | 315 chunk = heap->lo_space()->FindPage(addr); |
303 } | 316 } |
304 return chunk; | 317 return chunk; |
305 } | 318 } |
306 | 319 |
307 Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) { | 320 Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) { |
308 return static_cast<Page*>(MemoryChunk::FromAnyPointerAddress(heap, addr)); | 321 return static_cast<Page*>(MemoryChunk::FromAnyPointerAddress(heap, addr)); |
309 } | 322 } |
310 | 323 |
311 | 324 MemoryChunkIterator::MemoryChunkIterator(Heap* heap, Mode mode) |
312 PointerChunkIterator::PointerChunkIterator(Heap* heap) | |
313 : state_(kOldSpaceState), | 325 : state_(kOldSpaceState), |
| 326 mode_(mode), |
314 old_iterator_(heap->old_space()), | 327 old_iterator_(heap->old_space()), |
| 328 code_iterator_(heap->code_space()), |
315 map_iterator_(heap->map_space()), | 329 map_iterator_(heap->map_space()), |
316 lo_iterator_(heap->lo_space()) {} | 330 lo_iterator_(heap->lo_space()) {} |
317 | 331 |
318 | 332 MemoryChunk* MemoryChunkIterator::next() { |
319 MemoryChunk* PointerChunkIterator::next() { | |
320 switch (state_) { | 333 switch (state_) { |
321 case kOldSpaceState: { | 334 case kOldSpaceState: { |
322 if (old_iterator_.has_next()) { | 335 if (old_iterator_.has_next()) { |
323 return old_iterator_.next(); | 336 return old_iterator_.next(); |
324 } | 337 } |
325 state_ = kMapState; | 338 state_ = kMapState; |
326 // Fall through. | 339 // Fall through. |
327 } | 340 } |
328 case kMapState: { | 341 case kMapState: { |
329 if (map_iterator_.has_next()) { | 342 if (mode_ != ALL_BUT_MAP_SPACE && map_iterator_.has_next()) { |
330 return map_iterator_.next(); | 343 return map_iterator_.next(); |
331 } | 344 } |
| 345 state_ = kCodeState; |
| 346 // Fall through. |
| 347 } |
| 348 case kCodeState: { |
| 349 if (mode_ != ALL_BUT_CODE_SPACE && code_iterator_.has_next()) { |
| 350 return code_iterator_.next(); |
| 351 } |
332 state_ = kLargeObjectState; | 352 state_ = kLargeObjectState; |
333 // Fall through. | 353 // Fall through. |
334 } | 354 } |
335 case kLargeObjectState: { | 355 case kLargeObjectState: { |
336 HeapObject* heap_object; | 356 MemoryChunk* answer = lo_iterator_.next(); |
337 do { | 357 if (answer != nullptr) { |
338 heap_object = lo_iterator_.Next(); | 358 return answer; |
339 if (heap_object == NULL) { | 359 } |
340 state_ = kFinishedState; | 360 state_ = kFinishedState; |
341 return NULL; | 361 // Fall through. |
342 } | |
343 // Fixed arrays are the only pointer-containing objects in large | |
344 // object space. | |
345 } while (!heap_object->IsFixedArray()); | |
346 MemoryChunk* answer = MemoryChunk::FromAddress(heap_object->address()); | |
347 return answer; | |
348 } | 362 } |
349 case kFinishedState: | 363 case kFinishedState: |
350 return NULL; | 364 return nullptr; |
351 default: | 365 default: |
352 break; | 366 break; |
353 } | 367 } |
354 UNREACHABLE(); | 368 UNREACHABLE(); |
355 return NULL; | 369 return nullptr; |
356 } | 370 } |
357 | 371 |
358 | 372 |
359 void Page::set_next_page(Page* page) { | 373 void Page::set_next_page(Page* page) { |
360 DCHECK(page->owner() == owner()); | 374 DCHECK(page->owner() == owner()); |
361 set_next_chunk(page); | 375 set_next_chunk(page); |
362 } | 376 } |
363 | 377 |
364 | 378 |
365 void Page::set_prev_page(Page* page) { | 379 void Page::set_prev_page(Page* page) { |
(...skipping 245 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
611 other->allocation_info_.Reset(nullptr, nullptr); | 625 other->allocation_info_.Reset(nullptr, nullptr); |
612 return true; | 626 return true; |
613 } | 627 } |
614 return false; | 628 return false; |
615 } | 629 } |
616 | 630 |
617 } // namespace internal | 631 } // namespace internal |
618 } // namespace v8 | 632 } // namespace v8 |
619 | 633 |
620 #endif // V8_HEAP_SPACES_INL_H_ | 634 #endif // V8_HEAP_SPACES_INL_H_ |
OLD | NEW |