OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_SPACES_INL_H_ | 5 #ifndef V8_HEAP_SPACES_INL_H_ |
6 #define V8_HEAP_SPACES_INL_H_ | 6 #define V8_HEAP_SPACES_INL_H_ |
7 | 7 |
8 #include "src/heap/incremental-marking.h" | 8 #include "src/heap/incremental-marking.h" |
9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
(...skipping 129 matching lines...)
140 DCHECK_CODEOBJECT_SIZE(obj_size, space_); | 140 DCHECK_CODEOBJECT_SIZE(obj_size, space_); |
141 } else { | 141 } else { |
142 DCHECK_OBJECT_SIZE(obj_size); | 142 DCHECK_OBJECT_SIZE(obj_size); |
143 } | 143 } |
144 return obj; | 144 return obj; |
145 } | 145 } |
146 } | 146 } |
147 return NULL; | 147 return NULL; |
148 } | 148 } |
149 | 149 |
150 // ----------------------------------------------------------------------------- | |
151 // LargePageIterator | |
152 | |
153 LargePageIterator::LargePageIterator(LargeObjectSpace* space) | |
154 : next_page_(space->first_page()) {} | |
155 | |
156 LargePage* LargePageIterator::next() { | |
157 LargePage* result = next_page_; | |
158 if (next_page_ != nullptr) { | |
159 next_page_ = next_page_->next_page(); | |
160 } | |
161 return result; | |
162 } | |
163 | 150 |
164 // ----------------------------------------------------------------------------- | 151 // ----------------------------------------------------------------------------- |
165 // MemoryAllocator | 152 // MemoryAllocator |
166 | 153 |
167 #ifdef ENABLE_HEAP_PROTECTION | 154 #ifdef ENABLE_HEAP_PROTECTION |
168 | 155 |
169 void MemoryAllocator::Protect(Address start, size_t size) { | 156 void MemoryAllocator::Protect(Address start, size_t size) { |
170 base::OS::Protect(start, size); | 157 base::OS::Protect(start, size); |
171 } | 158 } |
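[Note: for context on the Protect call above, here is a minimal standalone sketch of what a heap-protection toggle conceptually does on POSIX systems, assuming it maps to mprotect over a page-aligned region. This is an illustration only, not the actual base::OS implementation.]

// Hypothetical illustration: revoke and restore access to a page-aligned
// region, similar in spirit to a Protect/Unprotect pair. Not V8 code.
#include <sys/mman.h>
#include <cassert>
#include <cstddef>

void ProtectRegion(void* start, size_t size) {
  // Make the region inaccessible; any later read or write faults.
  int result = mprotect(start, size, PROT_NONE);
  assert(result == 0);
}

void UnprotectRegion(void* start, size_t size) {
  // Restore read/write access to the same region.
  int result = mprotect(start, size, PROT_READ | PROT_WRITE);
  assert(result == 0);
}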
172 | 159 |
(...skipping 141 matching lines...)
314 if (offset < MemoryChunk::kHeaderSize || !chunk->HasPageHeader()) { | 301 if (offset < MemoryChunk::kHeaderSize || !chunk->HasPageHeader()) { |
315 chunk = heap->lo_space()->FindPage(addr); | 302 chunk = heap->lo_space()->FindPage(addr); |
316 } | 303 } |
317 return chunk; | 304 return chunk; |
318 } | 305 } |
319 | 306 |
320 Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) { | 307 Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) { |
321 return static_cast<Page*>(MemoryChunk::FromAnyPointerAddress(heap, addr)); | 308 return static_cast<Page*>(MemoryChunk::FromAnyPointerAddress(heap, addr)); |
322 } | 309 } |
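[Note: the lookup above relies on memory chunks being allocated on an alignment boundary, so an interior pointer can be mapped back to its chunk by masking the low address bits; only when the offset lands inside the header area, or the chunk has no page header, does it fall back to scanning the large-object space. A standalone sketch of that masking idea follows, using a made-up 512 KB alignment and stand-in types rather than the real V8 constants.]

// Hypothetical illustration of mapping an interior pointer back to the
// start of its aligned chunk. kChunkAlignment is an assumed value.
#include <cstdint>
#include <cstdio>

constexpr uintptr_t kChunkAlignment = 512 * 1024;  // assumption, not V8's constant
constexpr uintptr_t kAlignmentMask = kChunkAlignment - 1;

uintptr_t ChunkFromAddress(uintptr_t addr) {
  // Clearing the low bits yields the chunk base, because every chunk
  // starts on a kChunkAlignment boundary.
  return addr & ~kAlignmentMask;
}

int main() {
  uintptr_t chunk_base = 8 * kChunkAlignment;  // some aligned chunk start
  uintptr_t interior = chunk_base + 0x1234;    // pointer into that chunk
  std::printf("%d\n", ChunkFromAddress(interior) == chunk_base);  // prints 1
  return 0;
}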
323 | 310 |
324 MemoryChunkIterator::MemoryChunkIterator(Heap* heap, Mode mode) | 311 |
| 312 PointerChunkIterator::PointerChunkIterator(Heap* heap) |
325 : state_(kOldSpaceState), | 313 : state_(kOldSpaceState), |
326 mode_(mode), | |
327 old_iterator_(heap->old_space()), | 314 old_iterator_(heap->old_space()), |
328 code_iterator_(heap->code_space()), | |
329 map_iterator_(heap->map_space()), | 315 map_iterator_(heap->map_space()), |
330 lo_iterator_(heap->lo_space()) {} | 316 lo_iterator_(heap->lo_space()) {} |
331 | 317 |
332 MemoryChunk* MemoryChunkIterator::next() { | 318 |
| 319 MemoryChunk* PointerChunkIterator::next() { |
333 switch (state_) { | 320 switch (state_) { |
334 case kOldSpaceState: { | 321 case kOldSpaceState: { |
335 if (old_iterator_.has_next()) { | 322 if (old_iterator_.has_next()) { |
336 return old_iterator_.next(); | 323 return old_iterator_.next(); |
337 } | 324 } |
338 state_ = kMapState; | 325 state_ = kMapState; |
339 // Fall through. | 326 // Fall through. |
340 } | 327 } |
341 case kMapState: { | 328 case kMapState: { |
342 if (mode_ != ALL_BUT_MAP_SPACE && map_iterator_.has_next()) { | 329 if (map_iterator_.has_next()) { |
343 return map_iterator_.next(); | 330 return map_iterator_.next(); |
344 } | 331 } |
345 state_ = kCodeState; | |
346 // Fall through. | |
347 } | |
348 case kCodeState: { | |
349 if (mode_ != ALL_BUT_CODE_SPACE && code_iterator_.has_next()) { | |
350 return code_iterator_.next(); | |
351 } | |
352 state_ = kLargeObjectState; | 332 state_ = kLargeObjectState; |
353 // Fall through. | 333 // Fall through. |
354 } | 334 } |
355 case kLargeObjectState: { | 335 case kLargeObjectState: { |
356 MemoryChunk* answer = lo_iterator_.next(); | 336 HeapObject* heap_object; |
357 if (answer != nullptr) { | 337 do { |
358 return answer; | 338 heap_object = lo_iterator_.Next(); |
359 } | 339 if (heap_object == NULL) { |
360 state_ = kFinishedState; | 340 state_ = kFinishedState; |
361 // Fall through; | 341 return NULL; |
| 342 } |
| 343 // Fixed arrays are the only pointer-containing objects in large |
| 344 // object space. |
| 345 } while (!heap_object->IsFixedArray()); |
| 346 MemoryChunk* answer = MemoryChunk::FromAddress(heap_object->address()); |
| 347 return answer; |
362 } | 348 } |
363 case kFinishedState: | 349 case kFinishedState: |
364 return nullptr; | 350 return NULL; |
365 default: | 351 default: |
366 break; | 352 break; |
367 } | 353 } |
368 UNREACHABLE(); | 354 UNREACHABLE(); |
369 return nullptr; | 355 return NULL; |
370 } | 356 } |
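[Note: both versions of next() above walk several spaces in sequence using a state machine whose switch cases deliberately fall through once the current space is exhausted; the right-hand version additionally filters large-object space down to FixedArrays, the only pointer-containing objects there. A compact standalone model of the fall-through pattern follows, with plain vectors standing in for the spaces; the types and names are hypothetical, not the V8 ones.]

// Hypothetical model of a multi-space iterator: drain one "space" after
// another, falling through to the next state when the current one is empty.
#include <cstdio>
#include <vector>

class MultiSpaceIterator {
 public:
  MultiSpaceIterator(std::vector<int> old_space, std::vector<int> map_space)
      : state_(kOldSpace),
        old_space_(std::move(old_space)),
        map_space_(std::move(map_space)) {}

  // Returns the next element, or -1 as the "finished" sentinel.
  int next() {
    switch (state_) {
      case kOldSpace:
        if (old_pos_ < old_space_.size()) return old_space_[old_pos_++];
        state_ = kMapSpace;
        // Fall through.
      case kMapSpace:
        if (map_pos_ < map_space_.size()) return map_space_[map_pos_++];
        state_ = kFinished;
        // Fall through.
      case kFinished:
        return -1;
    }
    return -1;
  }

 private:
  enum State { kOldSpace, kMapSpace, kFinished };
  State state_;
  std::vector<int> old_space_, map_space_;
  size_t old_pos_ = 0, map_pos_ = 0;
};

int main() {
  MultiSpaceIterator it({1, 2}, {3});
  for (int v = it.next(); v != -1; v = it.next()) std::printf("%d ", v);  // 1 2 3
  return 0;
}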
371 | 357 |
372 | 358 |
373 void Page::set_next_page(Page* page) { | 359 void Page::set_next_page(Page* page) { |
374 DCHECK(page->owner() == owner()); | 360 DCHECK(page->owner() == owner()); |
375 set_next_chunk(page); | 361 set_next_chunk(page); |
376 } | 362 } |
377 | 363 |
378 | 364 |
379 void Page::set_prev_page(Page* page) { | 365 void Page::set_prev_page(Page* page) { |
(...skipping 245 matching lines...)
625 other->allocation_info_.Reset(nullptr, nullptr); | 611 other->allocation_info_.Reset(nullptr, nullptr); |
626 return true; | 612 return true; |
627 } | 613 } |
628 return false; | 614 return false; |
629 } | 615 } |
630 | 616 |
631 } // namespace internal | 617 } // namespace internal |
632 } // namespace v8 | 618 } // namespace v8 |
633 | 619 |
634 #endif // V8_HEAP_SPACES_INL_H_ | 620 #endif // V8_HEAP_SPACES_INL_H_ |