Index: src/heap-inl.h
===================================================================
--- src/heap-inl.h	(revision 9327)
+++ src/heap-inl.h	(working copy)
@@ -33,15 +33,26 @@
 #include "list-inl.h"
 #include "objects.h"
 #include "v8-counters.h"
+#include "store-buffer.h"
+#include "store-buffer-inl.h"
 
 namespace v8 {
 namespace internal {
 
 void PromotionQueue::insert(HeapObject* target, int size) {
+  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
+    NewSpacePage* rear_page =
+        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
+    ASSERT(!rear_page->prev_page()->is_anchor());
+    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->body_limit());
+  }
   *(--rear_) = reinterpret_cast<intptr_t>(target);
   *(--rear_) = size;
   // Assert no overflow into live objects.
-  ASSERT(reinterpret_cast<Address>(rear_) >= HEAP->new_space()->top());
+#ifdef DEBUG
+  SemiSpace::AssertValidRange(HEAP->new_space()->top(),
+                              reinterpret_cast<Address>(rear_));
+#endif
 }
 
 
@@ -84,7 +95,7 @@
   // Allocate string.
   Object* result;
   { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
-        ? lo_space_->AllocateRaw(size)
+        ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
         : old_data_space_->AllocateRaw(size);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
@@ -117,7 +128,7 @@
   // Allocate string.
   Object* result;
   { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
-        ? lo_space_->AllocateRaw(size)
+        ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
         : old_data_space_->AllocateRaw(size);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
@@ -181,7 +192,7 @@
   } else if (CODE_SPACE == space) {
     result = code_space_->AllocateRaw(size_in_bytes);
   } else if (LO_SPACE == space) {
-    result = lo_space_->AllocateRaw(size_in_bytes);
+    result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
   } else if (CELL_SPACE == space) {
     result = cell_space_->AllocateRaw(size_in_bytes);
   } else {
@@ -265,6 +276,11 @@
 }
 
 
+bool Heap::InNewSpace(Address addr) {
+  return new_space_.Contains(addr);
+}
+
+
 bool Heap::InFromSpace(Object* object) {
   return new_space_.FromSpaceContains(object);
 }
@@ -275,29 +291,36 @@
 }
 
 
+bool Heap::OldGenerationAllocationLimitReached() {
+  if (!incremental_marking()->IsStopped()) return false;
+  return OldGenerationSpaceAvailable() < 0;
+}
+
+
 bool Heap::ShouldBePromoted(Address old_address, int object_size) {
   // An object should be promoted if:
   // - the object has survived a scavenge operation or
   // - to space is already 25% full.
-  return old_address < new_space_.age_mark()
-      || (new_space_.Size() + object_size) >= (new_space_.Capacity() >> 2);
+  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
+  Address age_mark = new_space_.age_mark();
+  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
+      (!page->ContainsLimit(age_mark) || old_address < age_mark);
+  return below_mark || (new_space_.Size() + object_size) >=
+      (new_space_.EffectiveCapacity() >> 2);
 }
 
 
 void Heap::RecordWrite(Address address, int offset) {
-  if (new_space_.Contains(address)) return;
-  ASSERT(!new_space_.FromSpaceContains(address));
-  SLOW_ASSERT(Contains(address + offset));
-  Page::FromAddress(address)->MarkRegionDirty(address + offset);
+  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
 }
 
 
 void Heap::RecordWrites(Address address, int start, int len) {
-  if (new_space_.Contains(address)) return;
-  ASSERT(!new_space_.FromSpaceContains(address));
-  Page* page = Page::FromAddress(address);
-  page->SetRegionMarks(page->GetRegionMarks() |
-      page->GetRegionMaskForSpan(address + start, len * kPointerSize));
+  if (!InNewSpace(address)) {
+    for (int i = 0; i < len; i++) {
+      store_buffer_.Mark(address + start + i * kPointerSize);
+    }
+  }
 }
 
 
@@ -343,31 +366,6 @@
 }
 
 
-void Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
-                                                   Address src,
-                                                   int byte_size) {
-  ASSERT(IsAligned(byte_size, kPointerSize));
-
-  Page* page = Page::FromAddress(dst);
-  uint32_t marks = page->GetRegionMarks();
-
-  for (int remaining = byte_size / kPointerSize;
-       remaining > 0;
-       remaining--) {
-    Memory::Object_at(dst) = Memory::Object_at(src);
-
-    if (InNewSpace(Memory::Object_at(dst))) {
-      marks |= page->GetRegionMaskForAddress(dst);
-    }
-
-    dst += kPointerSize;
-    src += kPointerSize;
-  }
-
-  page->SetRegionMarks(marks);
-}
-
-
 void Heap::MoveBlock(Address dst, Address src, int byte_size) {
   ASSERT(IsAligned(byte_size, kPointerSize));
 
@@ -387,16 +385,6 @@
 }
 
 
-void Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
-                                                   Address src,
-                                                   int byte_size) {
-  ASSERT(IsAligned(byte_size, kPointerSize));
-  ASSERT((dst < src) || (dst >= (src + byte_size)));
-
-  CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, byte_size);
-}
-
-
 void Heap::ScavengePointer(HeapObject** p) {
   ScavengeObject(p, *p);
 }
@@ -414,7 +402,9 @@
   // If the first word is a forwarding address, the object has already been
   // copied.
   if (first_word.IsForwardingAddress()) {
-    *p = first_word.ToForwardingAddress();
+    HeapObject* dest = first_word.ToForwardingAddress();
+    ASSERT(HEAP->InFromSpace(*p));
+    *p = dest;
     return;
   }
 
@@ -459,7 +449,7 @@
         amount_of_external_allocated_memory_ -
         amount_of_external_allocated_memory_at_last_global_gc_;
     if (amount_since_last_global_gc > external_allocation_limit_) {
-      CollectAllGarbage(false);
+      CollectAllGarbage(kNoGCFlags);
     }
   } else {
     // Avoid underflow.
@@ -476,6 +466,7 @@
   roots_[kLastScriptIdRootIndex] = last_script_id;
 }
 
+
 Isolate* Heap::isolate() {
   return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
       reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
@@ -688,15 +679,6 @@
 }
 
 
-void MarkCompactCollector::SetMark(HeapObject* obj) {
-  tracer_->increment_marked_count();
-#ifdef DEBUG
-  UpdateLiveObjectCount(obj);
-#endif
-  obj->SetMark();
-}
-
-
 } }  // namespace v8::internal
 
 #endif  // V8_HEAP_INL_H_