| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 102 matching lines...) |
| 113 SnapshotByteSource(const byte* array, int length) | 113 SnapshotByteSource(const byte* array, int length) |
| 114 : data_(array), length_(length), position_(0) { } | 114 : data_(array), length_(length), position_(0) { } |
| 115 | 115 |
| 116 bool HasMore() { return position_ < length_; } | 116 bool HasMore() { return position_ < length_; } |
| 117 | 117 |
| 118 int Get() { | 118 int Get() { |
| 119 ASSERT(position_ < length_); | 119 ASSERT(position_ < length_); |
| 120 return data_[position_++]; | 120 return data_[position_++]; |
| 121 } | 121 } |
| 122 | 122 |
| 123 void CopyRaw(byte* to, int number_of_bytes) { | 123 inline void CopyRaw(byte* to, int number_of_bytes); |
| 124 memcpy(to, data_ + position_, number_of_bytes); | |
| 125 position_ += number_of_bytes; | |
| 126 } | |
| 127 | 124 |
| 128 int GetInt() { | 125 inline int GetInt(); |
| 129 // A little unwind to catch the really small ints. | |
| 130 int snapshot_byte = Get(); | |
| 131 if ((snapshot_byte & 0x80) == 0) { | |
| 132 return snapshot_byte; | |
| 133 } | |
| 134 int accumulator = (snapshot_byte & 0x7f) << 7; | |
| 135 while (true) { | |
| 136 snapshot_byte = Get(); | |
| 137 if ((snapshot_byte & 0x80) == 0) { | |
| 138 return accumulator | snapshot_byte; | |
| 139 } | |
| 140 accumulator = (accumulator | (snapshot_byte & 0x7f)) << 7; | |
| 141 } | |
| 142 UNREACHABLE(); | |
| 143 return accumulator; | |
| 144 } | |
| 145 | 126 |
| 146 bool AtEOF() { | 127 bool AtEOF() { |
| 147 return position_ == length_; | 128 return position_ == length_; |
| 148 } | 129 } |
| 149 | 130 |
| 150 int position() { return position_; } | 131 int position() { return position_; } |
| 151 | 132 |
| 152 private: | 133 private: |
| 153 const byte* data_; | 134 const byte* data_; |
| 154 int length_; | 135 int length_; |
| (...skipping 73 matching lines...) |
| 228 | 209 |
| 229 // A bitmask for getting the space out of an instruction. | 210 // A bitmask for getting the space out of an instruction. |
| 230 static const int kSpaceMask = 15; | 211 static const int kSpaceMask = 15; |
| 231 | 212 |
| 232 static inline bool SpaceIsLarge(int space) { return space >= kLargeData; } | 213 static inline bool SpaceIsLarge(int space) { return space >= kLargeData; } |
| 233 static inline bool SpaceIsPaged(int space) { | 214 static inline bool SpaceIsPaged(int space) { |
| 234 return space >= FIRST_PAGED_SPACE && space <= LAST_PAGED_SPACE; | 215 return space >= FIRST_PAGED_SPACE && space <= LAST_PAGED_SPACE; |
| 235 } | 216 } |
| 236 | 217 |
| 237 static int partial_snapshot_cache_length_; | 218 static int partial_snapshot_cache_length_; |
| 238 static const int kPartialSnapshotCacheCapacity = 1024; | 219 static const int kPartialSnapshotCacheCapacity = 1300; |
| 239 static Object* partial_snapshot_cache_[]; | 220 static Object* partial_snapshot_cache_[]; |
| 240 }; | 221 }; |
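A note on the constants just above: every serialization opcode byte carries its destination space in its low four bits, which is what kSpaceMask (15, i.e. 0b1111) strips out before SpaceIsLarge()/SpaceIsPaged() classify the result. A minimal sketch of that decoding step follows; DecodeSpace and instruction are hypothetical names used for illustration, not part of this header.

    // Hypothetical helper mirroring the mask/classification logic above.
    int DecodeSpace(int instruction) {
      int space = instruction & 15;  // kSpaceMask: keep only the low four bits
      // A result >= kLargeData would name one of the large-object categories;
      // values in FIRST_PAGED_SPACE..LAST_PAGED_SPACE are the ordinary paged spaces.
      return space;
    }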
| 241 | 222 |
| 242 | 223 |
| 224 int SnapshotByteSource::GetInt() { |
| 225 // A little unwind to catch the really small ints. |
| 226 int snapshot_byte = Get(); |
| 227 if ((snapshot_byte & 0x80) == 0) { |
| 228 return snapshot_byte; |
| 229 } |
| 230 int accumulator = (snapshot_byte & 0x7f) << 7; |
| 231 while (true) { |
| 232 snapshot_byte = Get(); |
| 233 if ((snapshot_byte & 0x80) == 0) { |
| 234 return accumulator | snapshot_byte; |
| 235 } |
| 236 accumulator = (accumulator | (snapshot_byte & 0x7f)) << 7; |
| 237 } |
| 238 UNREACHABLE(); |
| 239 return accumulator; |
| 240 } |
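GetInt() above reads a variable-length integer: seven payload bits per byte, a set high bit meaning another byte follows, most significant group first. A matching encoder, for reference, could look roughly like the sketch below; PutIntTo and the std::vector sink are assumptions for illustration, not the sink API this header pairs with.

    #include <vector>

    // Emit 'value' in the format GetInt() decodes: all bytes except the last
    // have the top bit set, and groups are written most significant first.
    void PutIntTo(unsigned int value, std::vector<unsigned char>* out) {
      unsigned char groups[5];                // enough for 32 bits at 7 bits/byte
      int count = 0;
      do {
        groups[count++] = value & 0x7f;       // collect groups, least significant first
        value >>= 7;
      } while (value != 0);
      for (int i = count - 1; i > 0; i--) {
        out->push_back(groups[i] | 0x80);     // continuation bit on all but the last
      }
      out->push_back(groups[0]);              // final group: high bit clear
    }

For example, 300 comes out as the two bytes 0x82 0x2C, which GetInt() folds back into (2 << 7) | 44 = 300.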
| 241 |
| 242 |
| 243 void SnapshotByteSource::CopyRaw(byte* to, int number_of_bytes) { |
| 244 memcpy(to, data_ + position_, number_of_bytes); |
| 245 position_ += number_of_bytes; |
| 246 } |
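Putting the byte source together, a small usage sketch of GetInt(), CopyRaw() and AtEOF() over a hand-built buffer (assumes this header is included; kData is arbitrary test data, not real snapshot contents):

    // One length byte (3 fits in a single 7-bit group) followed by a raw payload.
    const byte kData[] = { 0x03, 'r', 'a', 'w' };
    SnapshotByteSource source(kData, sizeof(kData));
    int length = source.GetInt();       // -> 3
    byte payload[3];
    source.CopyRaw(payload, length);    // straight memcpy of the next three bytes
    ASSERT(source.AtEOF());             // position_ now equals length_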
| 247 |
| 243 | 248 |
| 244 // A Deserializer reads a snapshot and reconstructs the Object graph it defines. | 249 // A Deserializer reads a snapshot and reconstructs the Object graph it defines. |
| 245 class Deserializer: public SerializerDeserializer { | 250 class Deserializer: public SerializerDeserializer { |
| 246 public: | 251 public: |
| 247 // Create a deserializer from a snapshot byte source. | 252 // Create a deserializer from a snapshot byte source. |
| 248 explicit Deserializer(SnapshotByteSource* source); | 253 explicit Deserializer(SnapshotByteSource* source); |
| 249 | 254 |
| 250 virtual ~Deserializer(); | 255 virtual ~Deserializer(); |
| 251 | 256 |
| 252 // Deserialize the snapshot into an empty heap. | 257 // Deserialize the snapshot into an empty heap. |
| (...skipping 104 matching lines...) |
| 357 | 362 |
| 358 HashMap* serialization_map_; | 363 HashMap* serialization_map_; |
| 359 AssertNoAllocation* no_allocation_; | 364 AssertNoAllocation* no_allocation_; |
| 360 DISALLOW_COPY_AND_ASSIGN(SerializationAddressMapper); | 365 DISALLOW_COPY_AND_ASSIGN(SerializationAddressMapper); |
| 361 }; | 366 }; |
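For orientation, the Deserializer described above is fed by wrapping snapshot bytes in a SnapshotByteSource. Its entry points fall in the elided lines, so the sketch below is an assumption about the shape of the call, not a quote of this header:

    // Hedged sketch: snapshot_data/snapshot_size stand in for the embedded
    // snapshot blob, and Deserialize() for the entry point declared in the
    // elided part of the class ("Deserialize the snapshot into an empty heap").
    SnapshotByteSource source(snapshot_data, snapshot_size);
    Deserializer deserializer(&source);
    deserializer.Deserialize();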
| 362 | 367 |
| 363 | 368 |
| 364 class Serializer : public SerializerDeserializer { | 369 class Serializer : public SerializerDeserializer { |
| 365 public: | 370 public: |
| 366 explicit Serializer(SnapshotByteSink* sink); | 371 explicit Serializer(SnapshotByteSink* sink); |
| 372 ~Serializer(); |
| 367 void VisitPointers(Object** start, Object** end); | 373 void VisitPointers(Object** start, Object** end); |
| 368 // You can call this after serialization to find out how much space was used | 374 // You can call this after serialization to find out how much space was used |
| 369 // in each space. | 375 // in each space. |
| 370 int CurrentAllocationAddress(int space) { | 376 int CurrentAllocationAddress(int space) { |
| 371 if (SpaceIsLarge(space)) return large_object_total_; | 377 if (SpaceIsLarge(space)) return large_object_total_; |
| 372 return fullness_[space]; | 378 return fullness_[space]; |
| 373 } | 379 } |
| 374 | 380 |
| 375 static void Enable() { | 381 static void Enable() { |
| 376 if (!serialization_enabled_) { | 382 if (!serialization_enabled_) { |
| (...skipping 108 matching lines...) |
| 485 | 491 |
| 486 // Serialize the objects reachable from a single object pointer. | 492 // Serialize the objects reachable from a single object pointer. |
| 487 virtual void Serialize(Object** o); | 493 virtual void Serialize(Object** o); |
| 488 virtual void SerializeObject(Object* o, | 494 virtual void SerializeObject(Object* o, |
| 489 ReferenceRepresentation representation); | 495 ReferenceRepresentation representation); |
| 490 | 496 |
| 491 protected: | 497 protected: |
| 492 virtual int RootIndex(HeapObject* o); | 498 virtual int RootIndex(HeapObject* o); |
| 493 virtual int PartialSnapshotCacheIndex(HeapObject* o); | 499 virtual int PartialSnapshotCacheIndex(HeapObject* o); |
| 494 virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) { | 500 virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) { |
| 495 return o->IsString() || o->IsSharedFunctionInfo(); | 501 // Scripts should be referred to only through shared function infos. We can't |
| 502 // allow them to be part of the partial snapshot because they contain a |
| 503 // unique ID, and deserializing several partial snapshots containing scripts |
| 504 // would cause dupes. |
| 505 ASSERT(!o->IsScript()); |
| 506 return o->IsString() || o->IsSharedFunctionInfo() || o->IsHeapNumber(); |
| 496 } | 507 } |
| 497 | 508 |
| 498 private: | 509 private: |
| 499 Serializer* startup_serializer_; | 510 Serializer* startup_serializer_; |
| 500 DISALLOW_COPY_AND_ASSIGN(PartialSerializer); | 511 DISALLOW_COPY_AND_ASSIGN(PartialSerializer); |
| 501 }; | 512 }; |
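Tying back to Serializer::CurrentAllocationAddress() above ("you can call this after serialization to find out how much space was used in each space"), a rough post-serialization query might read as follows; the serializer variable and printf reporting are illustrative only, and the serialization itself happens through entry points in the elided lines.

    // Assumes 'serializer' has already visited the roots. Paged-space indices
    // run from FIRST_PAGED_SPACE to LAST_PAGED_SPACE, as used by SpaceIsPaged().
    for (int space = FIRST_PAGED_SPACE; space <= LAST_PAGED_SPACE; space++) {
      printf("space %d: %d bytes used\n", space,
             serializer.CurrentAllocationAddress(space));
    }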
| 502 | 513 |
| 503 | 514 |
| 504 class StartupSerializer : public Serializer { | 515 class StartupSerializer : public Serializer { |
| 505 public: | 516 public: |
| (...skipping 17 matching lines...) |
| 523 SerializeWeakReferences(); | 534 SerializeWeakReferences(); |
| 524 } | 535 } |
| 525 | 536 |
| 526 private: | 537 private: |
| 527 virtual int RootIndex(HeapObject* o) { return kInvalidRootIndex; } | 538 virtual int RootIndex(HeapObject* o) { return kInvalidRootIndex; } |
| 528 virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) { | 539 virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) { |
| 529 return false; | 540 return false; |
| 530 } | 541 } |
| 531 }; | 542 }; |
| 532 | 543 |
| 544 |
| 533 } } // namespace v8::internal | 545 } } // namespace v8::internal |
| 534 | 546 |
| 535 #endif // V8_SERIALIZE_H_ | 547 #endif // V8_SERIALIZE_H_ |