Index: src/heap/spaces-inl.h
diff --git a/src/heap/spaces-inl.h b/src/heap/spaces-inl.h
index 76011768faf3d8e7f5337f4241f1742b5c0bc9e5..3023fbf51ead410fba6a14aaab99dff05e5acf61 100644
--- a/src/heap/spaces-inl.h
+++ b/src/heap/spaces-inl.h
@@ -5,6 +5,7 @@
 #ifndef V8_HEAP_SPACES_INL_H_
 #define V8_HEAP_SPACES_INL_H_
 
+#include "src/heap/incremental-marking.h"
 #include "src/heap/spaces.h"
 #include "src/isolate.h"
 #include "src/msan.h"
@@ -49,20 +50,21 @@ Page* PageIterator::next() {
 
 // SemiSpaceIterator
 HeapObject* SemiSpaceIterator::Next() {
-  if (current_ == limit_) return NULL;
-  if (NewSpacePage::IsAtEnd(current_)) {
-    NewSpacePage* page = NewSpacePage::FromLimit(current_);
-    page = page->next_page();
-    DCHECK(!page->is_anchor());
-    current_ = page->area_start();
-    if (current_ == limit_) return NULL;
+  while (current_ != limit_) {
+    if (NewSpacePage::IsAtEnd(current_)) {
+      NewSpacePage* page = NewSpacePage::FromLimit(current_);
+      page = page->next_page();
+      DCHECK(!page->is_anchor());
+      current_ = page->area_start();
+      if (current_ == limit_) return nullptr;
+    }
+    HeapObject* object = HeapObject::FromAddress(current_);
+    current_ += object->Size();
+    if (!object->IsFiller()) {
+      return object;
+    }
   }
-
-  HeapObject* object = HeapObject::FromAddress(current_);
-  int size = object->Size();
-
-  current_ += size;
-  return object;
+  return nullptr;
 }
 
 
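With the loop form, Next() now transparently skips filler objects (the placeholders the heap leaves behind for alignment padding and free space), so callers no longer have to filter them out. A minimal sketch of a caller, assuming the iterator is constructed over new space; Visit() is a hypothetical callback:

    SemiSpaceIterator it(heap->new_space());
    for (HeapObject* object = it.Next(); object != nullptr;
         object = it.Next()) {
      Visit(object);  // Sees every live, non-filler object in order.
    }
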
@@ -319,6 +321,24 @@ HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
 }
 
 
+AllocationResult LocalAllocationBuffer::AllocateRawAligned(
+    int size_in_bytes, AllocationAlignment alignment) {
+  Address current_top = allocation_info_.top();
+  int filler_size = Heap::GetFillToAlign(current_top, alignment);
+
+  Address new_top = current_top + filler_size + size_in_bytes;
+  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();
+
+  allocation_info_.set_top(new_top);
+  if (filler_size > 0) {
+    return heap_->PrecedeWithFiller(HeapObject::FromAddress(current_top),
+                                    filler_size);
+  }
+
+  return AllocationResult(HeapObject::FromAddress(current_top));
+}
+
+
 HeapObject* PagedSpace::AllocateLinearlyAligned(int* size_in_bytes,
                                                 AllocationAlignment alignment) {
   Address current_top = allocation_info_.top();
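AllocateRawAligned is a pure bump-pointer fast path: it reserves filler_size + size_in_bytes in one step, and when padding is needed, PrecedeWithFiller places a filler object at current_top and yields the result object shifted past it. A usage sketch, assuming an already initialized LocalAllocationBuffer named lab:

    HeapObject* object = nullptr;
    AllocationResult result = lab.AllocateRawAligned(32, kDoubleAligned);
    if (!result.To(&object)) {
      // Retry() means the buffer is exhausted; fall back to a slower path.
    }
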
@@ -482,6 +502,13 @@ AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
 }
 
 
+MUST_USE_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
+    int size_in_bytes, AllocationAlignment alignment) {
+  base::LockGuard<base::Mutex> guard(&mutex_);
+  return AllocateRaw(size_in_bytes, alignment);
+}
+
+
 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk) {
   heap->incremental_marking()->SetOldSpacePageFlags(chunk);
   return static_cast<LargePage*>(chunk);
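AllocateRawSynchronized only adds mutual exclusion on the space's mutex_; the allocation itself is the unsynchronized AllocateRaw path. The intended caller is a background thread refilling its local buffer, sketched below with an assumed buffer size constant:

    const int kLabSize = 4 * KB;  // Hypothetical refill granularity.
    AllocationResult result =
        heap->new_space()->AllocateRawSynchronized(kLabSize, kWordAligned);
    LocalAllocationBuffer lab =
        LocalAllocationBuffer::FromResult(heap, result, kLabSize);
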
@@ -492,6 +519,34 @@ intptr_t LargeObjectSpace::Available() {
   return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
 }
 
+
+LocalAllocationBuffer LocalAllocationBuffer::InvalidBuffer() {
+  return LocalAllocationBuffer(nullptr, AllocationInfo(nullptr, nullptr));
+}
+
+
+LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
+                                                        AllocationResult result,
+                                                        intptr_t size) {
+  if (result.IsRetry()) return InvalidBuffer();
+  HeapObject* obj = nullptr;
+  bool ok = result.To(&obj);
+  USE(ok);
+  DCHECK(ok);
+  Address top = HeapObject::cast(obj)->address();
+  return LocalAllocationBuffer(heap, AllocationInfo(top, top + size));
+}
+
+
+bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
+  if (allocation_info_.top() == other->allocation_info_.limit()) {
+    allocation_info_.set_top(other->allocation_info_.top());
+    other->allocation_info_.Reset(nullptr, nullptr);
+    return true;
+  }
+  return false;
+}
+
 
 }  // namespace internal
 }  // namespace v8
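Taken together, these helpers give LocalAllocationBuffer value semantics: FromResult turns a raw allocation into a buffer (InvalidBuffer() on retry), and TryMerge lets a freshly allocated buffer absorb the unused range of a previous one, but only when this buffer's top equals the other's limit, i.e. when the two are contiguous in memory with the other buffer directly below. A sketch of the merge pattern, reusing the assumed names from the previous examples (IsValid() is assumed to test for a non-null top):

    AllocationResult result =
        heap->new_space()->AllocateRawSynchronized(kLabSize, kWordAligned);
    LocalAllocationBuffer next =
        LocalAllocationBuffer::FromResult(heap, result, kLabSize);
    if (next.IsValid() && next.TryMerge(&lab)) {
      // next now also owns lab's unallocated range; lab was reset to
      // (nullptr, nullptr) and must not be used for allocation afterwards.
    }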