Index: src/spaces.cc
===================================================================
--- src/spaces.cc (revision 5696)
+++ src/spaces.cc (working copy)
@@ -873,7 +873,7 @@
 }
 
 
-Object* PagedSpace::FindObject(Address addr) {
+MaybeObject* PagedSpace::FindObject(Address addr) {
   // Note: this function can only be called before or after mark-compact GC
   // because it accesses map pointers.
   ASSERT(!MarkCompactCollector::in_use());
@@ -1804,7 +1804,7 @@
 }
 
 
-Object* OldSpaceFreeList::Allocate(int size_in_bytes, int* wasted_bytes) {
+MaybeObject* OldSpaceFreeList::Allocate(int size_in_bytes, int* wasted_bytes) {
   ASSERT(0 < size_in_bytes);
   ASSERT(size_in_bytes <= kMaxBlockSize);
   ASSERT(IsAligned(size_in_bytes, kPointerSize));
@@ -1924,7 +1924,7 @@
 }
 
 
-Object* FixedSizeFreeList::Allocate() {
+MaybeObject* FixedSizeFreeList::Allocate() {
   if (head_ == NULL) {
     return Failure::RetryAfterGC(owner_);
   }
@@ -2187,9 +2187,10 @@
   // is currently forbidden.
   if (!Heap::linear_allocation()) {
     int wasted_bytes;
-    Object* result = free_list_.Allocate(size_in_bytes, &wasted_bytes);
+    Object* result;
+    MaybeObject* maybe = free_list_.Allocate(size_in_bytes, &wasted_bytes);
     accounting_stats_.WasteBytes(wasted_bytes);
-    if (!result->IsFailure()) {
+    if (maybe->ToObject(&result)) {
       accounting_stats_.AllocateBytes(size_in_bytes);
       HeapObject* obj = HeapObject::cast(result);
@@ -2495,8 +2496,9 @@
   // that is currently forbidden. The fixed space free list implicitly assumes
   // that all free blocks are of the fixed size.
   if (!Heap::linear_allocation()) {
-    Object* result = free_list_.Allocate();
-    if (!result->IsFailure()) {
+    Object* result;
+    MaybeObject* maybe = free_list_.Allocate();
+    if (maybe->ToObject(&result)) {
       accounting_stats_.AllocateBytes(size_in_bytes);
       HeapObject* obj = HeapObject::cast(result);
       Page* p = Page::FromAddress(obj->address());
@@ -2745,9 +2747,9 @@ |
#endif |
-Object* LargeObjectSpace::AllocateRawInternal(int requested_size, |
- int object_size, |
- Executability executable) { |
+MaybeObject* LargeObjectSpace::AllocateRawInternal(int requested_size, |
+ int object_size, |
+ Executability executable) { |
ASSERT(0 < object_size && object_size <= requested_size); |
// Check if we want to force a GC before growing the old space further. |
@@ -2783,7 +2785,7 @@ |
} |
-Object* LargeObjectSpace::AllocateRawCode(int size_in_bytes) { |
+MaybeObject* LargeObjectSpace::AllocateRawCode(int size_in_bytes) { |
ASSERT(0 < size_in_bytes); |
return AllocateRawInternal(size_in_bytes, |
size_in_bytes, |
@@ -2791,7 +2793,7 @@
 }
 
 
-Object* LargeObjectSpace::AllocateRawFixedArray(int size_in_bytes) {
+MaybeObject* LargeObjectSpace::AllocateRawFixedArray(int size_in_bytes) {
   ASSERT(0 < size_in_bytes);
   return AllocateRawInternal(size_in_bytes,
                              size_in_bytes,
@@ -2799,7 +2801,7 @@
 }
 
 
-Object* LargeObjectSpace::AllocateRaw(int size_in_bytes) {
+MaybeObject* LargeObjectSpace::AllocateRaw(int size_in_bytes) {
   ASSERT(0 < size_in_bytes);
   return AllocateRawInternal(size_in_bytes,
                              size_in_bytes,
@@ -2808,7 +2810,7 @@
 
 
 // GC support
-Object* LargeObjectSpace::FindObject(Address a) {
+MaybeObject* LargeObjectSpace::FindObject(Address a) {
   for (LargeObjectChunk* chunk = first_chunk_;
        chunk != NULL;
        chunk = chunk->next()) {
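
A note on the calling convention this patch installs: allocation paths now
return MaybeObject* instead of Object*, so a Failure sentinel such as
Failure::RetryAfterGC can no longer be used as a live object by accident.
Every caller unwraps the result through ToObject(), which refuses to produce
an Object* on failure. The sketch below models that convention with
simplified stand-in types (the real V8 classes encode Failure in pointer tag
bits rather than through virtual dispatch, and RetryAfterGC carries the
owning space); it is illustrative only, not the actual heap code.

// maybe_object_sketch.cc: minimal model of the MaybeObject/ToObject idiom.
#include <cstdio>

class Object;

// Common result type: either a real Object or a Failure sentinel.
class MaybeObject {
 public:
  virtual ~MaybeObject() {}
  virtual bool IsFailure() const { return false; }
  // The idiom used at every patched call site: on success, store the Object
  // through |out| and return true; on failure, return false so the caller
  // can back off and retry after GC.
  bool ToObject(Object** out);
};

class Object : public MaybeObject {};

class Failure : public MaybeObject {
 public:
  bool IsFailure() const override { return true; }
};

bool MaybeObject::ToObject(Object** out) {
  if (IsFailure()) return false;
  *out = static_cast<Object*>(this);
  return true;
}

namespace {
Object some_object;      // stands in for a freshly allocated heap object
Failure retry_after_gc;  // stands in for Failure::RetryAfterGC(owner_)
}  // namespace

// Shaped like the patched free_list_.Allocate(): the MaybeObject* return
// type forces callers to unwrap before using the value as an Object*.
MaybeObject* Allocate(bool can_allocate) {
  if (!can_allocate) return &retry_after_gc;
  return &some_object;
}

int main() {
  // Caller side, mirroring the new code in the hunks above.
  Object* result;
  MaybeObject* maybe = Allocate(true);
  if (maybe->ToObject(&result)) {
    std::printf("allocation succeeded\n");  // result is a real Object*
  } else {
    std::printf("failure: retry after GC\n");
  }
  return 0;
}

The payoff is that a forgotten failure check becomes a compile error rather
than a latent crash: a MaybeObject* does not implicitly convert to Object*,
so code that skips ToObject() simply does not build.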