Index: src/serialize.cc
===================================================================
--- src/serialize.cc (revision 5696)
+++ src/serialize.cc (working copy)
@@ -555,14 +555,16 @@
   if (!SpaceIsLarge(space_index)) {
     ASSERT(!SpaceIsPaged(space_index) ||
            size <= Page::kPageSize - Page::kObjectStartOffset);
-    Object* new_allocation;
+    MaybeObject* maybe_new_allocation;
     if (space_index == NEW_SPACE) {
-      new_allocation = reinterpret_cast<NewSpace*>(space)->AllocateRaw(size);
+      maybe_new_allocation =
+          reinterpret_cast<NewSpace*>(space)->AllocateRaw(size);
     } else {
-      new_allocation = reinterpret_cast<PagedSpace*>(space)->AllocateRaw(size);
+      maybe_new_allocation =
+          reinterpret_cast<PagedSpace*>(space)->AllocateRaw(size);
     }
+    Object* new_allocation = maybe_new_allocation->ToObjectUnchecked();
     HeapObject* new_object = HeapObject::cast(new_allocation);
-    ASSERT(!new_object->IsFailure());
     address = new_object->address();
     high_water_[space_index] = address + size;
   } else {
@@ -571,14 +573,14 @@
     LargeObjectSpace* lo_space = reinterpret_cast<LargeObjectSpace*>(space);
     Object* new_allocation;
     if (space_index == kLargeData) {
-      new_allocation = lo_space->AllocateRaw(size);
+      new_allocation = lo_space->AllocateRaw(size)->ToObjectUnchecked();
     } else if (space_index == kLargeFixedArray) {
-      new_allocation = lo_space->AllocateRawFixedArray(size);
+      new_allocation =
+          lo_space->AllocateRawFixedArray(size)->ToObjectUnchecked();
     } else {
       ASSERT_EQ(kLargeCode, space_index);
-      new_allocation = lo_space->AllocateRawCode(size);
+      new_allocation = lo_space->AllocateRawCode(size)->ToObjectUnchecked();
     }
-    ASSERT(!new_allocation->IsFailure());
     HeapObject* new_object = HeapObject::cast(new_allocation);
     // Record all large objects in the same space.
     address = new_object->address();
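
For context, the pattern this hunk moves to is one where the raw allocation routines return a MaybeObject* that may encode a failure, and the call site converts it explicitly (here via ToObjectUnchecked()) instead of asserting !IsFailure() on the result afterwards. The following is a minimal, self-contained C++ sketch of that shape only; the classes and the AllocateRaw helper below are illustrative stand-ins, not V8's real types.

#include <cassert>

class Object {};   // stand-in for a successfully allocated heap object
class Failure {};  // stand-in for an allocation-failure sentinel

// Hypothetical MaybeObject analogue: holds either an Object or a Failure,
// forcing callers to convert explicitly before using the result.
class MaybeObject {
 public:
  static MaybeObject Success(Object* o) { return MaybeObject(o, nullptr); }
  static MaybeObject Fail(Failure* f) { return MaybeObject(nullptr, f); }

  bool IsFailure() const { return failure_ != nullptr; }

  // Used where the caller knows failure is impossible (e.g. space was
  // reserved up front, as during deserialization).
  Object* ToObjectUnchecked() const {
    assert(!IsFailure());
    return object_;
  }

 private:
  MaybeObject(Object* o, Failure* f) : object_(o), failure_(f) {}
  Object* object_;
  Failure* failure_;
};

// Hypothetical allocator in the new style: the return type itself carries
// the possibility of failure. (Leaks are ignored for brevity.)
MaybeObject AllocateRaw(int size) {
  if (size <= 0) return MaybeObject::Fail(new Failure());
  return MaybeObject::Success(new Object());
}

int main() {
  // New-style call site, mirroring the patch: convert explicitly where
  // failure is known to be impossible, rather than asserting afterwards.
  MaybeObject maybe = AllocateRaw(16);
  Object* obj = maybe.ToObjectUnchecked();
  (void)obj;
  return 0;
}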