| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_SERIALIZE_H_ | 5 #ifndef V8_SERIALIZE_H_ |
| 6 #define V8_SERIALIZE_H_ | 6 #define V8_SERIALIZE_H_ |
| 7 | 7 |
| 8 #include "src/hashmap.h" | 8 #include "src/hashmap.h" |
| 9 #include "src/heap-profiler.h" | 9 #include "src/heap-profiler.h" |
| 10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
| (...skipping 285 matching lines...) |
| 296 public: | 296 public: |
| 297 static void Iterate(Isolate* isolate, ObjectVisitor* visitor); | 297 static void Iterate(Isolate* isolate, ObjectVisitor* visitor); |
| 298 | 298 |
| 299 static int nop() { return kNop; } | 299 static int nop() { return kNop; } |
| 300 | 300 |
| 301 // No reservation for large object space necessary. | 301 // No reservation for large object space necessary. |
| 302 static const int kNumberOfPreallocatedSpaces = LO_SPACE; | 302 static const int kNumberOfPreallocatedSpaces = LO_SPACE; |
| 303 static const int kNumberOfSpaces = LAST_SPACE + 1; | 303 static const int kNumberOfSpaces = LAST_SPACE + 1; |
| 304 | 304 |
| 305 protected: | 305 protected: |
| 306 static bool CanBeDeferred(HeapObject* o) { |
| 307 return !o->IsString() && !o->IsScript(); |
| 308 } |
| 309 |
| 306 // ---------- byte code range 0x00..0x7f ---------- | 310 // ---------- byte code range 0x00..0x7f ---------- |
| 307 // Byte codes in this range represent Where, HowToCode and WhereToPoint. | 311 // Byte codes in this range represent Where, HowToCode and WhereToPoint. |
| 308 // Where the pointed-to object can be found: | 312 // Where the pointed-to object can be found: |
| 309 enum Where { | 313 enum Where { |
| 310 // 0x00..0x05 Allocate new object, in specified space. | 314 // 0x00..0x05 Allocate new object, in specified space. |
| 311 kNewObject = 0, | 315 kNewObject = 0, |
| 312 // 0x06 Unused (including 0x26, 0x46, 0x66). | 316 // 0x06 Unused (including 0x26, 0x46, 0x66). |
| 313 // 0x07 Unused (including 0x27, 0x47, 0x67). | 317 // 0x07 Unused (including 0x27, 0x47, 0x67). |
| 314 // 0x08..0x0d Reference to previous object from space. | 318 // 0x08..0x0d Reference to previous object from space. |
| 315 kBackref = 0x08, | 319 kBackref = 0x08, |
| (...skipping 43 matching lines...) |
| 359 // ---------- Misc ---------- | 363 // ---------- Misc ---------- |
| 360 // Skip. | 364 // Skip. |
| 361 static const int kSkip = 0x1d; | 365 static const int kSkip = 0x1d; |
| 362 // Internal reference encoded as offsets of pc and target from code entry. | 366 // Internal reference encoded as offsets of pc and target from code entry. |
| 363 static const int kInternalReference = 0x1e; | 367 static const int kInternalReference = 0x1e; |
| 364 static const int kInternalReferenceEncoded = 0x1f; | 368 static const int kInternalReferenceEncoded = 0x1f; |
| 365 // Do nothing, used for padding. | 369 // Do nothing, used for padding. |
| 366 static const int kNop = 0x3d; | 370 static const int kNop = 0x3d; |
| 367 // Move to next reserved chunk. | 371 // Move to next reserved chunk. |
| 368 static const int kNextChunk = 0x3e; | 372 static const int kNextChunk = 0x3e; |
| 373 // Deferring object content. |
| 374 static const int kDeferred = 0x3f; |
| 369 // A tag emitted at strategic points in the snapshot to delineate sections. | 375 // A tag emitted at strategic points in the snapshot to delineate sections. |
| 370 // If the deserializer does not find these at the expected moments then it | 376 // If the deserializer does not find these at the expected moments then it |
| 371 // is an indication that the snapshot and the VM do not fit together. | 377 // is an indication that the snapshot and the VM do not fit together. |
| 372 // Examine the build process for architecture, version or configuration | 378 // Examine the build process for architecture, version or configuration |
| 373 // mismatches. | 379 // mismatches. |
| 374 static const int kSynchronize = 0x5d; | 380 static const int kSynchronize = 0x5d; |
| 375 // Used for the source code of the natives, which is in the executable, but | 381 // Used for the source code of the natives, which is in the executable, but |
| 376 // is referred to from external strings in the snapshot. | 382 // is referred to from external strings in the snapshot. |
| 377 static const int kNativesStringResource = 0x5e; | 383 static const int kNativesStringResource = 0x5e; |
| 378 // Raw data of variable length. | 384 // Raw data of variable length. |
| (...skipping 160 matching lines...) |
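The bytecode layout above is easier to follow once the composition rule is spelled out: every code in the 0x00..0x7f range packs a Where value (with the allocation space folded into its low bits for kNewObject and kBackref) together with the HowToCode and WhereToPoint variants, which each contribute a fixed offset of 0x20 or 0x40; that is what the "including 0x26, 0x46, 0x66" remarks refer to. A minimal decode sketch follows, with the caveat that which of the two bits belongs to HowToCode and which to WhereToPoint is defined in the elided part of this enum:

    // Sketch only: decompose a serializer bytecode in the 0x00..0x7f range.
    // Assumes the 0x20 and 0x40 bits select the HowToCode/WhereToPoint
    // variants (one bit each) and the low five bits carry Where plus,
    // for kNewObject/kBackref, the allocation space.
    struct DecodedCase {
      int where_and_space;  // e.g. kNewObject + space, kBackref + space, kSkip
      bool variant_0x20;    // HowToCode or WhereToPoint variant bit
      bool variant_0x40;    // the other variant bit
    };

    inline DecodedCase DecodeCase(int bytecode) {
      DecodedCase d;
      d.where_and_space = bytecode & 0x1f;
      d.variant_0x20 = (bytecode & 0x20) != 0;
      d.variant_0x40 = (bytecode & 0x40) != 0;
      return d;
    }

Misc codes such as kSkip (0x1d), kNop (0x3d) and the new kDeferred (0x3f) occupy slots that would otherwise be unused in this scheme.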
| 539 bool deserializing_user_code() { return deserializing_user_code_; } | 545 bool deserializing_user_code() { return deserializing_user_code_; } |
| 540 | 546 |
| 541 void DecodeReservation(Vector<const SerializedData::Reservation> res); | 547 void DecodeReservation(Vector<const SerializedData::Reservation> res); |
| 542 | 548 |
| 543 bool ReserveSpace(); | 549 bool ReserveSpace(); |
| 544 | 550 |
| 545 void UnalignedCopy(Object** dest, Object** src) { | 551 void UnalignedCopy(Object** dest, Object** src) { |
| 546 memcpy(dest, src, sizeof(*src)); | 552 memcpy(dest, src, sizeof(*src)); |
| 547 } | 553 } |
| 548 | 554 |
| 549 // Allocation sites are present in the snapshot, and must be linked into | 555 void DeserializeDeferredObjects(); |
| 550 // a list at deserialization time. | |
| 551 void RelinkAllocationSite(AllocationSite* site); | |
| 552 | 556 |
| 553 // Fills in some heap data in an area from start to end (non-inclusive). The | 557 // Fills in some heap data in an area from start to end (non-inclusive). The |
| 554 // space id is used for the write barrier. The object_address is the address | 558 // space id is used for the write barrier. The object_address is the address |
| 555 // of the object we are writing into, or NULL if we are not writing into an | 559 // of the object we are writing into, or NULL if we are not writing into an |
| 556 // object, i.e. if we are writing a series of tagged values that are not on | 560 // object, i.e. if we are writing a series of tagged values that are not on |
| 557 // the heap. | 561 // the heap. Return false if the object content has been deferred. |
| 558 void ReadData(Object** start, Object** end, int space, | 562 bool ReadData(Object** start, Object** end, int space, |
| 559 Address object_address); | 563 Address object_address); |
| 560 void ReadObject(int space_number, Object** write_back); | 564 void ReadObject(int space_number, Object** write_back); |
| 561 Address Allocate(int space_index, int size); | 565 Address Allocate(int space_index, int size); |
| 562 | 566 |
| 563 // Special handling for serialized code like hooking up internalized strings. | 567 // Special handling for serialized code like hooking up internalized strings. |
| 564 HeapObject* ProcessNewObjectFromSerializedCode(HeapObject* obj); | 568 HeapObject* PostProcessNewObject(HeapObject* obj); |
| 569 |
| 570 void RelinkAllocationSite(AllocationSite* obj); |
| 565 | 571 |
| 566 // This returns the address of an object that has been described in the | 572 // This returns the address of an object that has been described in the |
| 567 // snapshot by chunk index and offset. | 573 // snapshot by chunk index and offset. |
| 568 HeapObject* GetBackReferencedObject(int space); | 574 HeapObject* GetBackReferencedObject(int space); |
| 569 | 575 |
| 570 // Cached current isolate. | 576 // Cached current isolate. |
| 571 Isolate* isolate_; | 577 Isolate* isolate_; |
| 572 | 578 |
| 573 // Objects from the attached object descriptions in the serialized user code. | 579 // Objects from the attached object descriptions in the serialized user code. |
| 574 Vector<Handle<Object> > attached_objects_; | 580 Vector<Handle<Object> > attached_objects_; |
| (...skipping 23 matching lines...) |
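To make the new Deserializer contract concrete: ReadData() now returns false when it encounters a kDeferred marker instead of an object body, and DeserializeDeferredObjects() later walks the deferred section and fills those bodies in. A rough sketch of that second pass, under the assumption that the deferred section is a list of (space, back reference, body) records terminated by kSynchronize, that sizes are encoded in words, and that the byte source is the source_ member declared in the elided part of this class:

    // Sketch, not the actual serialize.cc code: drain the deferred section.
    void Deserializer::DeserializeDeferredObjects() {
      for (int code = source_.Get(); code != kSynchronize;
           code = source_.Get()) {
        int space = code & 0x07;  // assumed encoding of the space
        HeapObject* object = GetBackReferencedObject(space);
        int size = source_.GetInt() << kPointerSizeLog2;  // size encoding assumed
        Address obj_address = object->address();
        // Skip the map slot, which was already written when the object was
        // allocated, and fill in the body that had been deferred.
        Object** start = reinterpret_cast<Object**>(obj_address + kPointerSize);
        Object** end = reinterpret_cast<Object**>(obj_address + size);
        ReadData(start, end, space, obj_address);
      }
    }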
| 598 | 604 |
| 599 // There can be only one serializer per V8 process. | 605 // There can be only one serializer per V8 process. |
| 600 class Serializer : public SerializerDeserializer { | 606 class Serializer : public SerializerDeserializer { |
| 601 public: | 607 public: |
| 602 Serializer(Isolate* isolate, SnapshotByteSink* sink); | 608 Serializer(Isolate* isolate, SnapshotByteSink* sink); |
| 603 ~Serializer(); | 609 ~Serializer(); |
| 604 void VisitPointers(Object** start, Object** end) OVERRIDE; | 610 void VisitPointers(Object** start, Object** end) OVERRIDE; |
| 605 | 611 |
| 606 void EncodeReservations(List<SerializedData::Reservation>* out) const; | 612 void EncodeReservations(List<SerializedData::Reservation>* out) const; |
| 607 | 613 |
| 614 void SerializeDeferredObjects(); |
| 615 |
| 608 Isolate* isolate() const { return isolate_; } | 616 Isolate* isolate() const { return isolate_; } |
| 609 | 617 |
| 610 BackReferenceMap* back_reference_map() { return &back_reference_map_; } | 618 BackReferenceMap* back_reference_map() { return &back_reference_map_; } |
| 611 RootIndexMap* root_index_map() { return &root_index_map_; } | 619 RootIndexMap* root_index_map() { return &root_index_map_; } |
| 612 | 620 |
| 613 protected: | 621 protected: |
| 614 class ObjectSerializer : public ObjectVisitor { | 622 class ObjectSerializer : public ObjectVisitor { |
| 615 public: | 623 public: |
| 616 ObjectSerializer(Serializer* serializer, Object* o, SnapshotByteSink* sink, | 624 ObjectSerializer(Serializer* serializer, Object* o, SnapshotByteSink* sink, |
| 617 HowToCode how_to_code, WhereToPoint where_to_point) | 625 HowToCode how_to_code, WhereToPoint where_to_point) |
| 618 : serializer_(serializer), | 626 : serializer_(serializer), |
| 619 object_(HeapObject::cast(o)), | 627 object_(HeapObject::cast(o)), |
| 620 sink_(sink), | 628 sink_(sink), |
| 621 reference_representation_(how_to_code + where_to_point), | 629 reference_representation_(how_to_code + where_to_point), |
| 622 bytes_processed_so_far_(0), | 630 bytes_processed_so_far_(0), |
| 623 is_code_object_(o->IsCode()), | 631 is_code_object_(o->IsCode()), |
| 624 code_has_been_output_(false) {} | 632 code_has_been_output_(false) {} |
| 625 void Serialize(); | 633 void Serialize(); |
| 634 void SerializeDeferred(); |
| 626 void VisitPointers(Object** start, Object** end); | 635 void VisitPointers(Object** start, Object** end); |
| 627 void VisitEmbeddedPointer(RelocInfo* target); | 636 void VisitEmbeddedPointer(RelocInfo* target); |
| 628 void VisitExternalReference(Address* p); | 637 void VisitExternalReference(Address* p); |
| 629 void VisitExternalReference(RelocInfo* rinfo); | 638 void VisitExternalReference(RelocInfo* rinfo); |
| 630 void VisitInternalReference(RelocInfo* rinfo); | 639 void VisitInternalReference(RelocInfo* rinfo); |
| 631 void VisitCodeTarget(RelocInfo* target); | 640 void VisitCodeTarget(RelocInfo* target); |
| 632 void VisitCodeEntry(Address entry_address); | 641 void VisitCodeEntry(Address entry_address); |
| 633 void VisitCell(RelocInfo* rinfo); | 642 void VisitCell(RelocInfo* rinfo); |
| 634 void VisitRuntimeEntry(RelocInfo* reloc); | 643 void VisitRuntimeEntry(RelocInfo* reloc); |
| 635 // Used for serializing the external strings that hold the natives source. | 644 // Used for serializing the external strings that hold the natives source. |
| (...skipping 21 matching lines...) |
| 657 | 666 |
| 658 Serializer* serializer_; | 667 Serializer* serializer_; |
| 659 HeapObject* object_; | 668 HeapObject* object_; |
| 660 SnapshotByteSink* sink_; | 669 SnapshotByteSink* sink_; |
| 661 int reference_representation_; | 670 int reference_representation_; |
| 662 int bytes_processed_so_far_; | 671 int bytes_processed_so_far_; |
| 663 bool is_code_object_; | 672 bool is_code_object_; |
| 664 bool code_has_been_output_; | 673 bool code_has_been_output_; |
| 665 }; | 674 }; |
| 666 | 675 |
| 676 class RecursionScope { |
| 677 public: |
| 678 explicit RecursionScope(Serializer* serializer) : serializer_(serializer) { |
| 679 serializer_->recursion_depth_++; |
| 680 } |
| 681 ~RecursionScope() { serializer_->recursion_depth_--; } |
| 682 bool ExceedsMaximum() { |
| 683 return serializer_->recursion_depth_ >= kMaxRecursionDepth; |
| 684 } |
| 685 |
| 686 private: |
| 687 static const int kMaxRecursionDepth = 32; |
| 688 Serializer* serializer_; |
| 689 }; |
| 690 |
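RecursionScope, CanBeDeferred(), QueueDeferredObject() and the kDeferred bytecode are the pieces of the stack-overflow fix. A sketch of how they could fit together in ObjectSerializer::Serialize(); the two helper names marked "illustrative" are not in this header, and the real division of labour is in serialize.cc:

    // Sketch only.
    void Serializer::ObjectSerializer::Serialize() {
      // Emit the allocation prologue (space, size, map); this also records a
      // back reference for object_ in the serializer's back_reference_map_.
      SerializeHeader();  // illustrative name

      RecursionScope recursion(serializer_);
      if (recursion.ExceedsMaximum() && CanBeDeferred(object_)) {
        // Too deep into the object graph: queue the body for later and tell
        // the deserializer that only the allocation happened here.
        serializer_->QueueDeferredObject(object_);
        sink_->Put(kDeferred, "Deferring object content");
        return;
      }
      // Shallow enough: serialize the body now, which recurses into the
      // object's fields via VisitPointers().
      SerializeBody();  // illustrative name
    }

SerializeDeferredObjects() then drains deferred_objects_ at the top level, where the recursion depth is back to zero, emitting each queued body via ObjectSerializer::SerializeDeferred().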
| 667 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, | 691 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, |
| 668 WhereToPoint where_to_point, int skip) = 0; | 692 WhereToPoint where_to_point, int skip) = 0; |
| 669 | 693 |
| 670 void PutRoot(int index, HeapObject* object, HowToCode how, WhereToPoint where, | 694 void PutRoot(int index, HeapObject* object, HowToCode how, WhereToPoint where, |
| 671 int skip); | 695 int skip); |
| 672 | 696 |
| 697 void PutBackReference(HeapObject* object, BackReference reference); |
| 698 |
| 673 // Returns true if the object was successfully serialized. | 699 // Returns true if the object was successfully serialized. |
| 674 bool SerializeKnownObject(HeapObject* obj, HowToCode how_to_code, | 700 bool SerializeKnownObject(HeapObject* obj, HowToCode how_to_code, |
| 675 WhereToPoint where_to_point, int skip); | 701 WhereToPoint where_to_point, int skip); |
| 676 | 702 |
| 677 inline void FlushSkip(int skip) { | 703 inline void FlushSkip(int skip) { |
| 678 if (skip != 0) { | 704 if (skip != 0) { |
| 679 sink_->Put(kSkip, "SkipFromSerializeObject"); | 705 sink_->Put(kSkip, "SkipFromSerializeObject"); |
| 680 sink_->PutInt(skip, "SkipDistanceFromSerializeObject"); | 706 sink_->PutInt(skip, "SkipDistanceFromSerializeObject"); |
| 681 } | 707 } |
| 682 } | 708 } |
| (...skipping 21 matching lines...) |
| 704 Code* CopyCode(Code* code); | 730 Code* CopyCode(Code* code); |
| 705 | 731 |
| 706 inline uint32_t max_chunk_size(int space) const { | 732 inline uint32_t max_chunk_size(int space) const { |
| 707 DCHECK_LE(0, space); | 733 DCHECK_LE(0, space); |
| 708 DCHECK_LT(space, kNumberOfSpaces); | 734 DCHECK_LT(space, kNumberOfSpaces); |
| 709 return max_chunk_size_[space]; | 735 return max_chunk_size_[space]; |
| 710 } | 736 } |
| 711 | 737 |
| 712 SnapshotByteSink* sink() const { return sink_; } | 738 SnapshotByteSink* sink() const { return sink_; } |
| 713 | 739 |
| 740 void QueueDeferredObject(HeapObject* obj) { |
| 741 DCHECK(back_reference_map_.Lookup(obj).is_valid()); |
| 742 deferred_objects_.Add(obj); |
| 743 } |
| 744 |
| 714 Isolate* isolate_; | 745 Isolate* isolate_; |
| 715 | 746 |
| 716 SnapshotByteSink* sink_; | 747 SnapshotByteSink* sink_; |
| 717 ExternalReferenceEncoder external_reference_encoder_; | 748 ExternalReferenceEncoder external_reference_encoder_; |
| 718 | 749 |
| 719 BackReferenceMap back_reference_map_; | 750 BackReferenceMap back_reference_map_; |
| 720 RootIndexMap root_index_map_; | 751 RootIndexMap root_index_map_; |
| 721 | 752 |
| 753 int recursion_depth_; |
| 754 |
| 722 friend class Deserializer; | 755 friend class Deserializer; |
| 723 friend class ObjectSerializer; | 756 friend class ObjectSerializer; |
| 757 friend class RecursionScope; |
| 724 friend class SnapshotData; | 758 friend class SnapshotData; |
| 725 | 759 |
| 726 private: | 760 private: |
| 727 CodeAddressMap* code_address_map_; | 761 CodeAddressMap* code_address_map_; |
| 728 // Objects from the same space are put into chunks for bulk-allocation | 762 // Objects from the same space are put into chunks for bulk-allocation |
| 729 // when deserializing. We have to make sure that each chunk fits into a | 763 // when deserializing. We have to make sure that each chunk fits into a |
| 730 // page. So we track the chunk size in pending_chunk_ of a space, but | 764 // page. So we track the chunk size in pending_chunk_ of a space, but |
| 731 // when it exceeds a page, we complete the current chunk and start a new one. | 765 // when it exceeds a page, we complete the current chunk and start a new one. |
| 732 uint32_t pending_chunk_[kNumberOfPreallocatedSpaces]; | 766 uint32_t pending_chunk_[kNumberOfPreallocatedSpaces]; |
| 733 List<uint32_t> completed_chunks_[kNumberOfPreallocatedSpaces]; | 767 List<uint32_t> completed_chunks_[kNumberOfPreallocatedSpaces]; |
| 734 uint32_t max_chunk_size_[kNumberOfPreallocatedSpaces]; | 768 uint32_t max_chunk_size_[kNumberOfPreallocatedSpaces]; |
| 735 | 769 |
| 736 // We map serialized large objects to indexes for back-referencing. | 770 // We map serialized large objects to indexes for back-referencing. |
| 737 uint32_t large_objects_total_size_; | 771 uint32_t large_objects_total_size_; |
| 738 uint32_t seen_large_objects_index_; | 772 uint32_t seen_large_objects_index_; |
| 739 | 773 |
| 740 List<byte> code_buffer_; | 774 List<byte> code_buffer_; |
| 741 | 775 |
| 776 // To handle stack overflow. |
| 777 List<HeapObject*> deferred_objects_; |
| 778 |
| 742 DISALLOW_COPY_AND_ASSIGN(Serializer); | 779 DISALLOW_COPY_AND_ASSIGN(Serializer); |
| 743 }; | 780 }; |
| 744 | 781 |
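The chunk bookkeeping described in the comment further up (pending_chunk_, completed_chunks_, max_chunk_size_ and the kNextChunk bytecode) can be summarized with a sketch of how an allocation might be accounted on the serializer side. The Allocate() signature, the exact bytes written for kNextChunk and the BackReference::Reference() factory used here are assumptions; the real code is in serialize.cc, and large objects are tracked separately via seen_large_objects_index_:

    // Sketch only: account a reservation of `size` bytes in `space`.
    BackReference Serializer::Allocate(AllocationSpace space, uint32_t size) {
      DCHECK(space >= 0 && space < kNumberOfPreallocatedSpaces);
      uint32_t new_chunk_size = pending_chunk_[space] + size;
      if (new_chunk_size > max_chunk_size(space)) {
        // The pending chunk would no longer fit into a page: seal it and tell
        // the deserializer to move on to the next reserved chunk.
        sink_->Put(kNextChunk, "NextChunk");
        sink_->PutInt(space, "NextChunkSpace");
        completed_chunks_[space].Add(pending_chunk_[space]);
        pending_chunk_[space] = 0;
        new_chunk_size = size;
      }
      uint32_t offset = pending_chunk_[space];
      pending_chunk_[space] = new_chunk_size;
      // A back reference is "space + chunk index + offset within that chunk".
      return BackReference::Reference(space, completed_chunks_[space].length(),
                                      offset);
    }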
| 745 | 782 |
| 746 class PartialSerializer : public Serializer { | 783 class PartialSerializer : public Serializer { |
| 747 public: | 784 public: |
| 748 PartialSerializer(Isolate* isolate, Serializer* startup_snapshot_serializer, | 785 PartialSerializer(Isolate* isolate, Serializer* startup_snapshot_serializer, |
| 749 SnapshotByteSink* sink) | 786 SnapshotByteSink* sink) |
| 750 : Serializer(isolate, sink), | 787 : Serializer(isolate, sink), |
| 751 startup_serializer_(startup_snapshot_serializer), | 788 startup_serializer_(startup_snapshot_serializer), |
| (...skipping 18 matching lines...) |
| 770 return o->IsName() || o->IsSharedFunctionInfo() || | 807 return o->IsName() || o->IsSharedFunctionInfo() || |
| 771 o->IsHeapNumber() || o->IsCode() || | 808 o->IsHeapNumber() || o->IsCode() || |
| 772 o->IsScopeInfo() || | 809 o->IsScopeInfo() || |
| 773 o->map() == | 810 o->map() == |
| 774 startup_serializer_->isolate()->heap()->fixed_cow_array_map(); | 811 startup_serializer_->isolate()->heap()->fixed_cow_array_map(); |
| 775 } | 812 } |
| 776 | 813 |
| 777 void SerializeOutdatedContextsAsFixedArray(); | 814 void SerializeOutdatedContextsAsFixedArray(); |
| 778 | 815 |
| 779 Serializer* startup_serializer_; | 816 Serializer* startup_serializer_; |
| 780 List<BackReference> outdated_contexts_; | 817 List<Context*> outdated_contexts_; |
| 781 Object* global_object_; | 818 Object* global_object_; |
| 782 PartialCacheIndexMap partial_cache_index_map_; | 819 PartialCacheIndexMap partial_cache_index_map_; |
| 783 DISALLOW_COPY_AND_ASSIGN(PartialSerializer); | 820 DISALLOW_COPY_AND_ASSIGN(PartialSerializer); |
| 784 }; | 821 }; |
| 785 | 822 |
| 786 | 823 |
| 787 class StartupSerializer : public Serializer { | 824 class StartupSerializer : public Serializer { |
| 788 public: | 825 public: |
| 789 StartupSerializer(Isolate* isolate, SnapshotByteSink* sink) | 826 StartupSerializer(Isolate* isolate, SnapshotByteSink* sink) |
| 790 : Serializer(isolate, sink), root_index_wave_front_(0) { | 827 : Serializer(isolate, sink), root_index_wave_front_(0) { |
| 791 // Clear the cache of objects used by the partial snapshot. After the | 828 // Clear the cache of objects used by the partial snapshot. After the |
| 792 // strong roots have been serialized we can create a partial snapshot | 829 // strong roots have been serialized we can create a partial snapshot |
| 793 // which will repopulate the cache with objects needed by that partial | 830 // which will repopulate the cache with objects needed by that partial |
| 794 // snapshot. | 831 // snapshot. |
| 795 isolate->partial_snapshot_cache()->Clear(); | 832 isolate->partial_snapshot_cache()->Clear(); |
| 796 InitializeCodeAddressMap(); | 833 InitializeCodeAddressMap(); |
| 797 } | 834 } |
| 798 | 835 |
| 799 // The StartupSerializer has to serialize the root array, which is slightly | 836 // The StartupSerializer has to serialize the root array, which is slightly |
| 800 // different. | 837 // different. |
| 801 void VisitPointers(Object** start, Object** end) OVERRIDE; | 838 void VisitPointers(Object** start, Object** end) OVERRIDE; |
| 802 | 839 |
| 803 // Serialize the current state of the heap. The order is: | 840 // Serialize the current state of the heap. The order is: |
| 804 // 1) Strong references. | 841 // 1) Strong references. |
| 805 // 2) Partial snapshot cache. | 842 // 2) Partial snapshot cache. |
| 806 // 3) Weak references (e.g. the string table). | 843 // 3) Weak references (e.g. the string table). |
| 807 virtual void SerializeStrongReferences(); | 844 virtual void SerializeStrongReferences(); |
| 808 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, | 845 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, |
| 809 WhereToPoint where_to_point, int skip) OVERRIDE; | 846 WhereToPoint where_to_point, int skip) OVERRIDE; |
| 810 void SerializeWeakReferences(); | 847 void SerializeWeakReferencesAndDeferred(); |
| 811 void Serialize() { | 848 void Serialize() { |
| 812 SerializeStrongReferences(); | 849 SerializeStrongReferences(); |
| 813 SerializeWeakReferences(); | 850 SerializeWeakReferencesAndDeferred(); |
| 814 Pad(); | |
| 815 } | 851 } |
| 816 | 852 |
| 817 private: | 853 private: |
| 818 intptr_t root_index_wave_front_; | 854 intptr_t root_index_wave_front_; |
| 819 DISALLOW_COPY_AND_ASSIGN(StartupSerializer); | 855 DISALLOW_COPY_AND_ASSIGN(StartupSerializer); |
| 820 }; | 856 }; |
| 821 | 857 |
| 822 | 858 |
| 823 class CodeSerializer : public Serializer { | 859 class CodeSerializer : public Serializer { |
| 824 public: | 860 public: |
| (...skipping 146 matching lines...) |
| 971 kNumInternalizedStringsOffset + kInt32Size; | 1007 kNumInternalizedStringsOffset + kInt32Size; |
| 972 static const int kNumCodeStubKeysOffset = kNumReservationsOffset + kInt32Size; | 1008 static const int kNumCodeStubKeysOffset = kNumReservationsOffset + kInt32Size; |
| 973 static const int kPayloadLengthOffset = kNumCodeStubKeysOffset + kInt32Size; | 1009 static const int kPayloadLengthOffset = kNumCodeStubKeysOffset + kInt32Size; |
| 974 static const int kChecksum1Offset = kPayloadLengthOffset + kInt32Size; | 1010 static const int kChecksum1Offset = kPayloadLengthOffset + kInt32Size; |
| 975 static const int kChecksum2Offset = kChecksum1Offset + kInt32Size; | 1011 static const int kChecksum2Offset = kChecksum1Offset + kInt32Size; |
| 976 static const int kHeaderSize = kChecksum2Offset + kInt32Size; | 1012 static const int kHeaderSize = kChecksum2Offset + kInt32Size; |
| 977 }; | 1013 }; |
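The offsets at the end of this class describe a flat header of 32-bit fields in front of the serialized code data. For orientation, reading one such field can be as simple as the sketch below; the helper name is illustrative and not part of the class:

    // Sketch: read one 32-bit header field at the given byte offset.
    // Needs <cstring> for memcpy.
    static uint32_t ReadHeaderField(const byte* data, int offset) {
      uint32_t value;
      memcpy(&value, data + offset, kInt32Size);  // tolerates unaligned data
      return value;
    }

    // e.g. the payload length and checksums of a cached code snapshot:
    //   uint32_t payload_length = ReadHeaderField(data, kPayloadLengthOffset);
    //   uint32_t checksum1      = ReadHeaderField(data, kChecksum1Offset);
    //   uint32_t checksum2      = ReadHeaderField(data, kChecksum2Offset);
    // The variable-length sections whose sizes these counts describe
    // (reservations, code stub keys, payload) follow after kHeaderSize.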
| 978 } } // namespace v8::internal | 1014 } } // namespace v8::internal |
| 979 | 1015 |
| 980 #endif // V8_SERIALIZE_H_ | 1016 #endif // V8_SERIALIZE_H_ |