| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_SPACES_INL_H_ | 5 #ifndef V8_HEAP_SPACES_INL_H_ |
| 6 #define V8_HEAP_SPACES_INL_H_ | 6 #define V8_HEAP_SPACES_INL_H_ |
| 7 | 7 |
| 8 #include "src/heap/incremental-marking.h" | 8 #include "src/heap/incremental-marking.h" |
| 9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
| 10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
| (...skipping 362 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 373 bool FreeListCategory::is_linked() { | 373 bool FreeListCategory::is_linked() { |
| 374 return prev_ != nullptr || next_ != nullptr || owner()->top(type_) == this; | 374 return prev_ != nullptr || next_ != nullptr || owner()->top(type_) == this; |
| 375 } | 375 } |
| 376 | 376 |
| 377 // Try linear allocation in the page of alloc_info's allocation top. Does | 377 // Try linear allocation in the page of alloc_info's allocation top. Does |
| 378 // not contain slow case logic (e.g. move to the next page or try free list | 378 // not contain slow case logic (e.g. move to the next page or try free list |
| 379 // allocation) so it can be used by all the allocation functions and for all | 379 // allocation) so it can be used by all the allocation functions and for all |
| 380 // the paged spaces. | 380 // the paged spaces. |
| 381 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { | 381 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { |
| 382 Address current_top = allocation_info_.top(); | 382 Address current_top = allocation_info_.top(); |
| 383 if (allocation_info_.top() > allocation_info_.limit()) { |
| 384 PrintF("top > limit: %p %p\n", |
| 385 reinterpret_cast<void*>(allocation_info_.top()), |
| 386 reinterpret_cast<void*>(allocation_info_.limit())); |
| 387 PrintStackFramesAndDie(); |
| 388 } |
| 383 Address new_top = current_top + size_in_bytes; | 389 Address new_top = current_top + size_in_bytes; |
| 384 if (new_top > allocation_info_.limit()) return NULL; | 390 if (new_top > allocation_info_.limit()) return NULL; |
| 385 | 391 |
| 386 allocation_info_.set_top(new_top); | 392 allocation_info_.set_top(new_top); |
| 387 return HeapObject::FromAddress(current_top); | 393 return HeapObject::FromAddress(current_top); |
| 388 } | 394 } |
| 389 | 395 |
| 390 | 396 |
| 391 AllocationResult LocalAllocationBuffer::AllocateRawAligned( | 397 AllocationResult LocalAllocationBuffer::AllocateRawAligned( |
| 392 int size_in_bytes, AllocationAlignment alignment) { | 398 int size_in_bytes, AllocationAlignment alignment) { |
| (...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 470 HeapObject* object = AllocateLinearlyAligned(&allocation_size, alignment); | 476 HeapObject* object = AllocateLinearlyAligned(&allocation_size, alignment); |
| 471 | 477 |
| 472 if (object == NULL) { | 478 if (object == NULL) { |
| 473 // We don't know exactly how much filler we need to align until space is | 479 // We don't know exactly how much filler we need to align until space is |
| 474 // allocated, so assume the worst case. | 480 // allocated, so assume the worst case. |
| 475 int filler_size = Heap::GetMaximumFillToAlign(alignment); | 481 int filler_size = Heap::GetMaximumFillToAlign(alignment); |
| 476 allocation_size += filler_size; | 482 allocation_size += filler_size; |
| 477 object = free_list_.Allocate(allocation_size); | 483 object = free_list_.Allocate(allocation_size); |
| 478 if (object == NULL) { | 484 if (object == NULL) { |
| 479 object = SlowAllocateRaw(allocation_size); | 485 object = SlowAllocateRaw(allocation_size); |
| 486 } else { |
| 487 DCHECK(allocation_info_.limit() != nullptr); |
| 480 } | 488 } |
| 481 if (object != NULL && filler_size != 0) { | 489 if (object != NULL && filler_size != 0) { |
| 490 DCHECK(allocation_info_.limit() != nullptr); |
| 482 object = heap()->AlignWithFiller(object, size_in_bytes, allocation_size, | 491 object = heap()->AlignWithFiller(object, size_in_bytes, allocation_size, |
| 483 alignment); | 492 alignment); |
| 493 DCHECK(allocation_info_.limit() != nullptr); |
| 484 // Filler objects are initialized, so mark only the aligned object memory | 494 // Filler objects are initialized, so mark only the aligned object memory |
| 485 // as uninitialized. | 495 // as uninitialized. |
| 486 allocation_size = size_in_bytes; | 496 allocation_size = size_in_bytes; |
| 487 } | 497 } |
| 498 } else { |
| 499 DCHECK(allocation_info_.limit() != nullptr); |
| 488 } | 500 } |
| 489 | 501 |
| 490 if (object != NULL) { | 502 if (object != NULL) { |
| 491 MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), allocation_size); | 503 MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), allocation_size); |
| 492 return object; | 504 return object; |
| 493 } | 505 } |
| 494 | 506 |
| 495 return AllocationResult::Retry(identity()); | 507 return AllocationResult::Retry(identity()); |
| 496 } | 508 } |
| 497 | 509 |
| 498 | 510 |
| 499 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes, | 511 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes, |
| 500 AllocationAlignment alignment) { | 512 AllocationAlignment alignment) { |
| 501 #ifdef V8_HOST_ARCH_32_BIT | 513 #ifdef V8_HOST_ARCH_32_BIT |
| 502 AllocationResult result = | 514 AllocationResult result = |
| 503 alignment == kDoubleAligned | 515 alignment == kDoubleAligned |
| 504 ? AllocateRawAligned(size_in_bytes, kDoubleAligned) | 516 ? AllocateRawAligned(size_in_bytes, kDoubleAligned) |
| 505 : AllocateRawUnaligned(size_in_bytes); | 517 : AllocateRawUnaligned(size_in_bytes); |
| 506 #else | 518 #else |
| 507 AllocationResult result = AllocateRawUnaligned(size_in_bytes); | 519 AllocationResult result = AllocateRawUnaligned(size_in_bytes); |
| 508 #endif | 520 #endif |
| 509 HeapObject* heap_obj = nullptr; | 521 HeapObject* heap_obj = nullptr; |
| 510 if (!result.IsRetry() && result.To(&heap_obj)) { | 522 if (!result.IsRetry() && result.To(&heap_obj)) { |
| 523 DCHECK(allocation_info_.limit() != nullptr); |
| 511 AllocationStep(heap_obj->address(), size_in_bytes); | 524 AllocationStep(heap_obj->address(), size_in_bytes); |
| 525 DCHECK(allocation_info_.limit() != nullptr); |
| 512 } | 526 } |
| 513 return result; | 527 return result; |
| 514 } | 528 } |
| 515 | 529 |
| 516 | 530 |
| 517 // ----------------------------------------------------------------------------- | 531 // ----------------------------------------------------------------------------- |
| 518 // NewSpace | 532 // NewSpace |
| 519 | 533 |
| 520 | 534 |
| 521 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes, | 535 AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes, |
| (...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 628 other->allocation_info_.Reset(nullptr, nullptr); | 642 other->allocation_info_.Reset(nullptr, nullptr); |
| 629 return true; | 643 return true; |
| 630 } | 644 } |
| 631 return false; | 645 return false; |
| 632 } | 646 } |
| 633 | 647 |
| 634 } // namespace internal | 648 } // namespace internal |
| 635 } // namespace v8 | 649 } // namespace v8 |
| 636 | 650 |
| 637 #endif // V8_HEAP_SPACES_INL_H_ | 651 #endif // V8_HEAP_SPACES_INL_H_ |
| OLD | NEW |