| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 614 matching lines...) |
| 625 if (SpaceIsLarge(space)) { | 625 if (SpaceIsLarge(space)) { |
| 626 // Large spaces have one object per 'page'. | 626 // Large spaces have one object per 'page'. |
| 627 return HeapObject::FromAddress(pages_[LO_SPACE][offset]); | 627 return HeapObject::FromAddress(pages_[LO_SPACE][offset]); |
| 628 } | 628 } |
| 629 offset <<= kObjectAlignmentBits; | 629 offset <<= kObjectAlignmentBits; |
| 630 if (space == NEW_SPACE) { | 630 if (space == NEW_SPACE) { |
| 631 // New space has only one space - numbered 0. | 631 // New space has only one space - numbered 0. |
| 632 return HeapObject::FromAddress(pages_[space][0] + offset); | 632 return HeapObject::FromAddress(pages_[space][0] + offset); |
| 633 } | 633 } |
| 634 ASSERT(SpaceIsPaged(space)); | 634 ASSERT(SpaceIsPaged(space)); |
| 635 int page_of_pointee = offset >> Page::kPageSizeBits; | 635 int page_of_pointee = offset >> kPageSizeBits; |
| 636 Address object_address = pages_[space][page_of_pointee] + | 636 Address object_address = pages_[space][page_of_pointee] + |
| 637 (offset & Page::kPageAlignmentMask); | 637 (offset & Page::kPageAlignmentMask); |
| 638 return HeapObject::FromAddress(object_address); | 638 return HeapObject::FromAddress(object_address); |
| 639 } | 639 } |
| 640 | 640 |
| 641 | 641 |
| 642 void Deserializer::Deserialize() { | 642 void Deserializer::Deserialize() { |
| 643 // Don't GC while deserializing - just expand the heap. | 643 // Don't GC while deserializing - just expand the heap. |
| 644 AlwaysAllocateScope always_allocate; | 644 AlwaysAllocateScope always_allocate; |
| 645 // Don't use the free lists while deserializing. | 645 // Don't use the free lists while deserializing. |
| (...skipping 319 matching lines...) |
| 965 Object* o, | 965 Object* o, |
| 966 ReferenceRepresentation reference_representation) { | 966 ReferenceRepresentation reference_representation) { |
| 967 CHECK(o->IsHeapObject()); | 967 CHECK(o->IsHeapObject()); |
| 968 HeapObject* heap_object = HeapObject::cast(o); | 968 HeapObject* heap_object = HeapObject::cast(o); |
| 969 if (SerializationAddressMapper::IsMapped(heap_object)) { | 969 if (SerializationAddressMapper::IsMapped(heap_object)) { |
| 970 int space = SpaceOfAlreadySerializedObject(heap_object); | 970 int space = SpaceOfAlreadySerializedObject(heap_object); |
| 971 int address = SerializationAddressMapper::MappedTo(heap_object); | 971 int address = SerializationAddressMapper::MappedTo(heap_object); |
| 972 int offset = CurrentAllocationAddress(space) - address; | 972 int offset = CurrentAllocationAddress(space) - address; |
| 973 bool from_start = true; | 973 bool from_start = true; |
| 974 if (SpaceIsPaged(space)) { | 974 if (SpaceIsPaged(space)) { |
| 975 if ((CurrentAllocationAddress(space) >> Page::kPageSizeBits) == | 975 if ((CurrentAllocationAddress(space) >> kPageSizeBits) == |
| 976 (address >> Page::kPageSizeBits)) { | 976 (address >> kPageSizeBits)) { |
| 977 from_start = false; | 977 from_start = false; |
| 978 address = offset; | 978 address = offset; |
| 979 } | 979 } |
| 980 } else if (space == NEW_SPACE) { | 980 } else if (space == NEW_SPACE) { |
| 981 if (offset < address) { | 981 if (offset < address) { |
| 982 from_start = false; | 982 from_start = false; |
| 983 address = offset; | 983 address = offset; |
| 984 } | 984 } |
| 985 } | 985 } |
| 986 // If we are actually dealing with real offsets (and not a numbering of | 986 // If we are actually dealing with real offsets (and not a numbering of |
| (...skipping 234 matching lines...) |
| 1221 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); | 1221 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); |
| 1222 } | 1222 } |
| 1223 } | 1223 } |
| 1224 int allocation_address = fullness_[space]; | 1224 int allocation_address = fullness_[space]; |
| 1225 fullness_[space] = allocation_address + size; | 1225 fullness_[space] = allocation_address + size; |
| 1226 return allocation_address; | 1226 return allocation_address; |
| 1227 } | 1227 } |
| 1228 | 1228 |
| 1229 | 1229 |
| 1230 } } // namespace v8::internal | 1230 } } // namespace v8::internal |
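
The paged-space lookup in the first hunk (where `Page::kPageSizeBits` becomes the bare `kPageSizeBits`) first scales the serialized word offset by the object alignment, then splits it into a page index and an offset within that page. A minimal standalone sketch of that split, with placeholder constant values chosen only for illustration (the real `kObjectAlignmentBits`, `kPageSizeBits` and `Page::kPageAlignmentMask` come from the V8 headers and may differ):

```cpp
#include <cstdint>
#include <cstdio>

// Placeholder values for illustration only; the real constants are defined
// in the V8 headers and are not taken from this review.
const int kObjectAlignmentBits = 2;                            // assumed word alignment
const int kPageSizeBits = 13;                                  // assumed 8 KB pages
const uint32_t kPageAlignmentMask = (1u << kPageSizeBits) - 1;

// Mirrors the arithmetic in the hunk: word offset -> (page index, byte
// offset inside that page).
void DecodePagedOffset(uint32_t word_offset, int* page, uint32_t* offset_in_page) {
  uint32_t byte_offset = word_offset << kObjectAlignmentBits;  // scale to bytes
  *page = static_cast<int>(byte_offset >> kPageSizeBits);      // which page of the space
  *offset_in_page = byte_offset & kPageAlignmentMask;          // position on that page
}

int main() {
  int page;
  uint32_t offset_in_page;
  DecodePagedOffset(0x1234, &page, &offset_in_page);
  std::printf("page %d, offset 0x%x\n", page, offset_in_page);  // page 2, offset 0x8d0
  return 0;
}
```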
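The second hunk applies the same constant rename inside the serializer's back-reference encoding: an already-serialized object in a paged space is encoded relative to the current allocation address only when both addresses fall on the same page. A sketch of just that test, reusing the illustrative `kPageSizeBits` above:

```cpp
// True when two allocation offsets land on the same page, i.e. they agree in
// every bit above the page-size boundary (illustrative kPageSizeBits).
bool OnSamePage(uint32_t current_allocation_address, uint32_t mapped_address) {
  return (current_allocation_address >> kPageSizeBits) ==
         (mapped_address >> kPageSizeBits);
}
```

When the test succeeds, the hunk stores the distance `CurrentAllocationAddress(space) - address` instead of the from-start address, presumably so the encoded back-reference stays small; the NEW_SPACE branch makes the analogous choice whenever the distance is shorter than the absolute offset.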