| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 537 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 548 | 548 |
| 549 | 549 |
// This routine both allocates a new object, and also keeps
// track of where objects have been allocated so that we can
// fix back references when deserializing.
//
// space_index selects which heap space to allocate in (a paged/new
// space, or one of the kLarge* pseudo-spaces for large objects);
// space is the corresponding Space instance, and size is the object
// size in bytes. Returns the address of the newly reserved object.
// NOTE(review): allocation failure is not handled here —
// ToObjectUnchecked() assumes the reservation always succeeds;
// presumably the deserializer pre-reserves enough heap. Confirm
// against the caller before relying on this.
Address Deserializer::Allocate(int space_index, Space* space, int size) {
  Address address;
  if (!SpaceIsLarge(space_index)) {
    // Ordinary (non-large) allocation: objects in paged spaces must
    // fit on a single page.
    ASSERT(!SpaceIsPaged(space_index) ||
           size <= Page::kPageSize - Page::kObjectStartOffset);
    MaybeObject* maybe_new_allocation;
    if (space_index == NEW_SPACE) {
      maybe_new_allocation =
          reinterpret_cast<NewSpace*>(space)->AllocateRaw(size);
    } else {
      maybe_new_allocation =
          reinterpret_cast<PagedSpace*>(space)->AllocateRaw(size);
    }
    // Unchecked unwrap: treats the MaybeObject as a successful
    // allocation (see NOTE above about failure handling).
    Object* new_allocation = maybe_new_allocation->ToObjectUnchecked();
    HeapObject* new_object = HeapObject::cast(new_allocation);
    address = new_object->address();
    // Track the highest address used in this space so back references
    // can be resolved during deserialization.
    high_water_[space_index] = address + size;
  } else {
    // Large-object allocation: anything too big for a regular page.
    ASSERT(SpaceIsLarge(space_index));
    ASSERT(size > Page::kPageSize - Page::kObjectStartOffset);
    LargeObjectSpace* lo_space = reinterpret_cast<LargeObjectSpace*>(space);
    Object* new_allocation;
    // The large-object pseudo-space index encodes what kind of raw
    // allocation the large-object space must perform.
    if (space_index == kLargeData) {
      new_allocation = lo_space->AllocateRaw(size)->ToObjectUnchecked();
    } else if (space_index == kLargeFixedArray) {
      new_allocation =
          lo_space->AllocateRawFixedArray(size)->ToObjectUnchecked();
    } else {
      ASSERT_EQ(kLargeCode, space_index);
      new_allocation = lo_space->AllocateRawCode(size)->ToObjectUnchecked();
    }
    HeapObject* new_object = HeapObject::cast(new_allocation);
    // Record all large objects in the same space.
    address = new_object->address();
    pages_[LO_SPACE].Add(address);
  }
  // Remember the most recent allocation so back references to "the
  // last object" can be fixed up.
  last_object_address_ = address;
  return address;
}
| 590 | 592 |
| 591 | 593 |
| (...skipping 893 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1485 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); | 1487 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); |
| 1486 } | 1488 } |
| 1487 } | 1489 } |
| 1488 int allocation_address = fullness_[space]; | 1490 int allocation_address = fullness_[space]; |
| 1489 fullness_[space] = allocation_address + size; | 1491 fullness_[space] = allocation_address + size; |
| 1490 return allocation_address; | 1492 return allocation_address; |
| 1491 } | 1493 } |
| 1492 | 1494 |
| 1493 | 1495 |
| 1494 } } // namespace v8::internal | 1496 } } // namespace v8::internal |
| OLD | NEW |