| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_SNAPSHOT_SERIALIZE_H_ | 5 #ifndef V8_SNAPSHOT_SERIALIZE_H_ |
| 6 #define V8_SNAPSHOT_SERIALIZE_H_ | 6 #define V8_SNAPSHOT_SERIALIZE_H_ |
| 7 | 7 |
| 8 #include "src/hashmap.h" | 8 #include "src/hashmap.h" |
| 9 #include "src/heap-profiler.h" | 9 #include "src/heap-profiler.h" |
| 10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
| (...skipping 288 matching lines...) |
| 299 public: | 299 public: |
| 300 static void Iterate(Isolate* isolate, ObjectVisitor* visitor); | 300 static void Iterate(Isolate* isolate, ObjectVisitor* visitor); |
| 301 | 301 |
| 302 static int nop() { return kNop; } | 302 static int nop() { return kNop; } |
| 303 | 303 |
| 304 // No reservation for large object space necessary. | 304 // No reservation for large object space necessary. |
| 305 static const int kNumberOfPreallocatedSpaces = LAST_PAGED_SPACE + 1; | 305 static const int kNumberOfPreallocatedSpaces = LAST_PAGED_SPACE + 1; |
| 306 static const int kNumberOfSpaces = LAST_SPACE + 1; | 306 static const int kNumberOfSpaces = LAST_SPACE + 1; |
| 307 | 307 |
| 308 protected: | 308 protected: |
| 309 static bool CanBeDeferred(HeapObject* o) { |
| 310 return !o->IsString() && !o->IsScript(); |
| 311 } |
| 312 |
| 309 // ---------- byte code range 0x00..0x7f ---------- | 313 // ---------- byte code range 0x00..0x7f ---------- |
| 310 // Byte codes in this range represent Where, HowToCode and WhereToPoint. | 314 // Byte codes in this range represent Where, HowToCode and WhereToPoint. |
| 311 // Where the pointed-to object can be found: | 315 // Where the pointed-to object can be found: |
| 312 // The static assert below will trigger when the number of preallocated spaces | 316 // The static assert below will trigger when the number of preallocated spaces |
| 313 // changes. If that happens, update the bytecode ranges in the comments below. | 317 // changes. If that happens, update the bytecode ranges in the comments below. |
| 314 STATIC_ASSERT(5 == kNumberOfSpaces); | 318 STATIC_ASSERT(5 == kNumberOfSpaces); |
| 315 enum Where { | 319 enum Where { |
| 316 // 0x00..0x04 Allocate new object, in specified space. | 320 // 0x00..0x04 Allocate new object, in specified space. |
| 317 kNewObject = 0, | 321 kNewObject = 0, |
| 318 // 0x05 Unused (including 0x25, 0x45, 0x65). | 322 // 0x05 Unused (including 0x25, 0x45, 0x65). |
| (...skipping 47 matching lines...) |
| 366 // ---------- Misc ---------- | 370 // ---------- Misc ---------- |
| 367 // Skip. | 371 // Skip. |
| 368 static const int kSkip = 0x1d; | 372 static const int kSkip = 0x1d; |
| 369 // Internal reference encoded as offsets of pc and target from code entry. | 373 // Internal reference encoded as offsets of pc and target from code entry. |
| 370 static const int kInternalReference = 0x1e; | 374 static const int kInternalReference = 0x1e; |
| 371 static const int kInternalReferenceEncoded = 0x1f; | 375 static const int kInternalReferenceEncoded = 0x1f; |
| 372 // Do nothing, used for padding. | 376 // Do nothing, used for padding. |
| 373 static const int kNop = 0x3d; | 377 static const int kNop = 0x3d; |
| 374 // Move to next reserved chunk. | 378 // Move to next reserved chunk. |
| 375 static const int kNextChunk = 0x3e; | 379 static const int kNextChunk = 0x3e; |
| 380 // Deferring object content. |
| 381 static const int kDeferred = 0x3f; |
| 376 // A tag emitted at strategic points in the snapshot to delineate sections. | 382 // A tag emitted at strategic points in the snapshot to delineate sections. |
| 377 // If the deserializer does not find these at the expected moments then it | 383 // If the deserializer does not find these at the expected moments then it |
| 378 // is an indication that the snapshot and the VM do not fit together. | 384 // is an indication that the snapshot and the VM do not fit together. |
| 379 // Examine the build process for architecture, version or configuration | 385 // Examine the build process for architecture, version or configuration |
| 380 // mismatches. | 386 // mismatches. |
| 381 static const int kSynchronize = 0x5d; | 387 static const int kSynchronize = 0x5d; |
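
As a rough sketch of what the consistency check around kSynchronize amounts to on the reading side (the byte-source type below is a stand-in, not the real SnapshotByteSource API):

    #include <cassert>

    // Minimal stand-in for a snapshot byte source.
    struct ByteSource {
      const unsigned char* data;
      int position;
      int Get() { return data[position++]; }  // read one byte code
    };

    void ExpectSynchronize(ByteSource* source, int synchronize_tag) {
      // If this check fails, the snapshot was produced by a VM with a
      // different architecture, version or configuration than the reader.
      assert(source->Get() == synchronize_tag);
    }
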
| 382 // Used for the source code of the natives, which is in the executable, but | 388 // Used for the source code of the natives, which is in the executable, but |
| 383 // is referred to from external strings in the snapshot. | 389 // is referred to from external strings in the snapshot. |
| 384 static const int kNativesStringResource = 0x5e; | 390 static const int kNativesStringResource = 0x5e; |
| 385 // Raw data of variable length. | 391 // Raw data of variable length. |
| (...skipping 160 matching lines...) |
| 546 bool deserializing_user_code() { return deserializing_user_code_; } | 552 bool deserializing_user_code() { return deserializing_user_code_; } |
| 547 | 553 |
| 548 void DecodeReservation(Vector<const SerializedData::Reservation> res); | 554 void DecodeReservation(Vector<const SerializedData::Reservation> res); |
| 549 | 555 |
| 550 bool ReserveSpace(); | 556 bool ReserveSpace(); |
| 551 | 557 |
| 552 void UnalignedCopy(Object** dest, Object** src) { | 558 void UnalignedCopy(Object** dest, Object** src) { |
| 553 memcpy(dest, src, sizeof(*src)); | 559 memcpy(dest, src, sizeof(*src)); |
| 554 } | 560 } |
| 555 | 561 |
| 556 // Allocation sites are present in the snapshot, and must be linked into | 562 void DeserializeDeferredObjects(); |
| 557 // a list at deserialization time. | |
| 558 void RelinkAllocationSite(AllocationSite* site); | |
| 559 | 563 |
| 560 // Fills in some heap data in an area from start to end (non-inclusive). The | 564 // Fills in some heap data in an area from start to end (non-inclusive). The |
| 561 // space id is used for the write barrier. The object_address is the address | 565 // space id is used for the write barrier. The object_address is the address |
| 562 // of the object we are writing into, or NULL if we are not writing into an | 566 // of the object we are writing into, or NULL if we are not writing into an |
| 563 // object, i.e. if we are writing a series of tagged values that are not on | 567 // object, i.e. if we are writing a series of tagged values that are not on |
| 564 // the heap. | 568 // the heap. Return false if the object content has been deferred. |
| 565 void ReadData(Object** start, Object** end, int space, | 569 bool ReadData(Object** start, Object** end, int space, |
| 566 Address object_address); | 570 Address object_address); |
| 567 void ReadObject(int space_number, Object** write_back); | 571 void ReadObject(int space_number, Object** write_back); |
| 568 Address Allocate(int space_index, int size); | 572 Address Allocate(int space_index, int size); |
| 569 | 573 |
| 570 // Special handling for serialized code like hooking up internalized strings. | 574 // Special handling for serialized code like hooking up internalized strings. |
| 571 HeapObject* ProcessNewObjectFromSerializedCode(HeapObject* obj); | 575 HeapObject* PostProcessNewObject(HeapObject* obj); |
| 576 |
| 577 void RelinkAllocationSite(AllocationSite* obj); |
| 572 | 578 |
| 573 // This returns the address of an object that has been described in the | 579 // This returns the address of an object that has been described in the |
| 574 // snapshot by chunk index and offset. | 580 // snapshot by chunk index and offset. |
| 575 HeapObject* GetBackReferencedObject(int space); | 581 HeapObject* GetBackReferencedObject(int space); |
| 576 | 582 |
| 577 // Cached current isolate. | 583 // Cached current isolate. |
| 578 Isolate* isolate_; | 584 Isolate* isolate_; |
| 579 | 585 |
| 580 // Objects from the attached object descriptions in the serialized user code. | 586 // Objects from the attached object descriptions in the serialized user code. |
| 581 Vector<Handle<Object> > attached_objects_; | 587 Vector<Handle<Object> > attached_objects_; |
| (...skipping 23 matching lines...) |
| 605 | 611 |
| 606 // There can be only one serializer per V8 process. | 612 // There can be only one serializer per V8 process. |
| 607 class Serializer : public SerializerDeserializer { | 613 class Serializer : public SerializerDeserializer { |
| 608 public: | 614 public: |
| 609 Serializer(Isolate* isolate, SnapshotByteSink* sink); | 615 Serializer(Isolate* isolate, SnapshotByteSink* sink); |
| 610 ~Serializer(); | 616 ~Serializer(); |
| 611 void VisitPointers(Object** start, Object** end) override; | 617 void VisitPointers(Object** start, Object** end) override; |
| 612 | 618 |
| 613 void EncodeReservations(List<SerializedData::Reservation>* out) const; | 619 void EncodeReservations(List<SerializedData::Reservation>* out) const; |
| 614 | 620 |
| 621 void SerializeDeferredObjects(); |
| 622 |
| 615 Isolate* isolate() const { return isolate_; } | 623 Isolate* isolate() const { return isolate_; } |
| 616 | 624 |
| 617 BackReferenceMap* back_reference_map() { return &back_reference_map_; } | 625 BackReferenceMap* back_reference_map() { return &back_reference_map_; } |
| 618 RootIndexMap* root_index_map() { return &root_index_map_; } | 626 RootIndexMap* root_index_map() { return &root_index_map_; } |
| 619 | 627 |
| 620 #ifdef OBJECT_PRINT | 628 #ifdef OBJECT_PRINT |
| 621 void CountInstanceType(Map* map, int size); | 629 void CountInstanceType(Map* map, int size); |
| 622 #endif // OBJECT_PRINT | 630 #endif // OBJECT_PRINT |
| 623 | 631 |
| 624 protected: | 632 protected: |
| 625 class ObjectSerializer : public ObjectVisitor { | 633 class ObjectSerializer : public ObjectVisitor { |
| 626 public: | 634 public: |
| 627 ObjectSerializer(Serializer* serializer, Object* o, SnapshotByteSink* sink, | 635 ObjectSerializer(Serializer* serializer, Object* o, SnapshotByteSink* sink, |
| 628 HowToCode how_to_code, WhereToPoint where_to_point) | 636 HowToCode how_to_code, WhereToPoint where_to_point) |
| 629 : serializer_(serializer), | 637 : serializer_(serializer), |
| 630 object_(HeapObject::cast(o)), | 638 object_(HeapObject::cast(o)), |
| 631 sink_(sink), | 639 sink_(sink), |
| 632 reference_representation_(how_to_code + where_to_point), | 640 reference_representation_(how_to_code + where_to_point), |
| 633 bytes_processed_so_far_(0), | 641 bytes_processed_so_far_(0), |
| 634 is_code_object_(o->IsCode()), | 642 is_code_object_(o->IsCode()), |
| 635 code_has_been_output_(false) {} | 643 code_has_been_output_(false) {} |
| 636 void Serialize(); | 644 void Serialize(); |
| 645 void SerializeDeferred(); |
| 637 void VisitPointers(Object** start, Object** end); | 646 void VisitPointers(Object** start, Object** end); |
| 638 void VisitEmbeddedPointer(RelocInfo* target); | 647 void VisitEmbeddedPointer(RelocInfo* target); |
| 639 void VisitExternalReference(Address* p); | 648 void VisitExternalReference(Address* p); |
| 640 void VisitExternalReference(RelocInfo* rinfo); | 649 void VisitExternalReference(RelocInfo* rinfo); |
| 641 void VisitInternalReference(RelocInfo* rinfo); | 650 void VisitInternalReference(RelocInfo* rinfo); |
| 642 void VisitCodeTarget(RelocInfo* target); | 651 void VisitCodeTarget(RelocInfo* target); |
| 643 void VisitCodeEntry(Address entry_address); | 652 void VisitCodeEntry(Address entry_address); |
| 644 void VisitCell(RelocInfo* rinfo); | 653 void VisitCell(RelocInfo* rinfo); |
| 645 void VisitRuntimeEntry(RelocInfo* reloc); | 654 void VisitRuntimeEntry(RelocInfo* reloc); |
| 646 // Used for serializing the external strings that hold the natives source. | 655 // Used for serializing the external strings that hold the natives source. |
| (...skipping 21 matching lines...) |
| 668 | 677 |
| 669 Serializer* serializer_; | 678 Serializer* serializer_; |
| 670 HeapObject* object_; | 679 HeapObject* object_; |
| 671 SnapshotByteSink* sink_; | 680 SnapshotByteSink* sink_; |
| 672 int reference_representation_; | 681 int reference_representation_; |
| 673 int bytes_processed_so_far_; | 682 int bytes_processed_so_far_; |
| 674 bool is_code_object_; | 683 bool is_code_object_; |
| 675 bool code_has_been_output_; | 684 bool code_has_been_output_; |
| 676 }; | 685 }; |
| 677 | 686 |
| 687 class RecursionScope { |
| 688 public: |
| 689 explicit RecursionScope(Serializer* serializer) : serializer_(serializer) { |
| 690 serializer_->recursion_depth_++; |
| 691 } |
| 692 ~RecursionScope() { serializer_->recursion_depth_--; } |
| 693 bool ExceedsMaximum() { |
| 694 return serializer_->recursion_depth_ >= kMaxRecursionDepth; |
| 695 } |
| 696 |
| 697 private: |
| 698 static const int kMaxRecursionDepth = 32; |
| 699 Serializer* serializer_; |
| 700 }; |
| 701 |
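
A self-contained toy model of the RAII depth limit plus deferral pattern above; this is not V8 code, and Node, ToySerializer and the example limit are stand-ins chosen to mirror how RecursionScope, CanBeDeferred and the deferred_objects_ queue are meant to interact:

    #include <cstddef>
    #include <vector>

    struct Node { std::vector<Node*> children; };

    class ToySerializer {
     public:
      // Serialize a node; if recursion is already too deep, queue the node
      // instead of descending further (assumes an acyclic graph for brevity).
      void Serialize(Node* node) {
        RecursionScope scope(this);
        if (scope.ExceedsMaximum()) {
          deferred_.push_back(node);  // emit only a placeholder now, body later
          return;
        }
        for (Node* child : node->children) Serialize(child);
      }

      // Drain the queue; serializing a deferred body may defer further
      // objects, so iterate by index rather than by iterator.
      void SerializeDeferred() {
        for (std::size_t i = 0; i < deferred_.size(); i++) {
          for (Node* child : deferred_[i]->children) Serialize(child);
        }
      }

     private:
      class RecursionScope {
       public:
        explicit RecursionScope(ToySerializer* s) : s_(s) { s_->depth_++; }
        ~RecursionScope() { s_->depth_--; }
        bool ExceedsMaximum() const { return s_->depth_ >= kMaxDepth; }

       private:
        static const int kMaxDepth = 32;
        ToySerializer* s_;
      };

      int depth_ = 0;
      std::vector<Node*> deferred_;
    };
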
| 678 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, | 702 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, |
| 679 WhereToPoint where_to_point, int skip) = 0; | 703 WhereToPoint where_to_point, int skip) = 0; |
| 680 | 704 |
| 681 void PutRoot(int index, HeapObject* object, HowToCode how, WhereToPoint where, | 705 void PutRoot(int index, HeapObject* object, HowToCode how, WhereToPoint where, |
| 682 int skip); | 706 int skip); |
| 683 | 707 |
| 708 void PutBackReference(HeapObject* object, BackReference reference); |
| 709 |
| 684 // Returns true if the object was successfully serialized. | 710 // Returns true if the object was successfully serialized. |
| 685 bool SerializeKnownObject(HeapObject* obj, HowToCode how_to_code, | 711 bool SerializeKnownObject(HeapObject* obj, HowToCode how_to_code, |
| 686 WhereToPoint where_to_point, int skip); | 712 WhereToPoint where_to_point, int skip); |
| 687 | 713 |
| 688 inline void FlushSkip(int skip) { | 714 inline void FlushSkip(int skip) { |
| 689 if (skip != 0) { | 715 if (skip != 0) { |
| 690 sink_->Put(kSkip, "SkipFromSerializeObject"); | 716 sink_->Put(kSkip, "SkipFromSerializeObject"); |
| 691 sink_->PutInt(skip, "SkipDistanceFromSerializeObject"); | 717 sink_->PutInt(skip, "SkipDistanceFromSerializeObject"); |
| 692 } | 718 } |
| 693 } | 719 } |
| (...skipping 21 matching lines...) |
| 715 Code* CopyCode(Code* code); | 741 Code* CopyCode(Code* code); |
| 716 | 742 |
| 717 inline uint32_t max_chunk_size(int space) const { | 743 inline uint32_t max_chunk_size(int space) const { |
| 718 DCHECK_LE(0, space); | 744 DCHECK_LE(0, space); |
| 719 DCHECK_LT(space, kNumberOfSpaces); | 745 DCHECK_LT(space, kNumberOfSpaces); |
| 720 return max_chunk_size_[space]; | 746 return max_chunk_size_[space]; |
| 721 } | 747 } |
| 722 | 748 |
| 723 SnapshotByteSink* sink() const { return sink_; } | 749 SnapshotByteSink* sink() const { return sink_; } |
| 724 | 750 |
| 751 void QueueDeferredObject(HeapObject* obj) { |
| 752 DCHECK(back_reference_map_.Lookup(obj).is_valid()); |
| 753 deferred_objects_.Add(obj); |
| 754 } |
| 755 |
| 725 void OutputStatistics(const char* name); | 756 void OutputStatistics(const char* name); |
| 726 | 757 |
| 727 Isolate* isolate_; | 758 Isolate* isolate_; |
| 728 | 759 |
| 729 SnapshotByteSink* sink_; | 760 SnapshotByteSink* sink_; |
| 730 ExternalReferenceEncoder external_reference_encoder_; | 761 ExternalReferenceEncoder external_reference_encoder_; |
| 731 | 762 |
| 732 BackReferenceMap back_reference_map_; | 763 BackReferenceMap back_reference_map_; |
| 733 RootIndexMap root_index_map_; | 764 RootIndexMap root_index_map_; |
| 734 | 765 |
| 766 int recursion_depth_; |
| 767 |
| 735 friend class Deserializer; | 768 friend class Deserializer; |
| 736 friend class ObjectSerializer; | 769 friend class ObjectSerializer; |
| 770 friend class RecursionScope; |
| 737 friend class SnapshotData; | 771 friend class SnapshotData; |
| 738 | 772 |
| 739 private: | 773 private: |
| 740 CodeAddressMap* code_address_map_; | 774 CodeAddressMap* code_address_map_; |
| 741 // Objects from the same space are put into chunks for bulk-allocation | 775 // Objects from the same space are put into chunks for bulk-allocation |
| 742 // when deserializing. We have to make sure that each chunk fits into a | 776 // when deserializing. We have to make sure that each chunk fits into a |
| 743 // page. So we track the chunk size in pending_chunk_ of a space, but | 777 // page. So we track the chunk size in pending_chunk_ of a space, but |
| 744 // when it exceeds a page, we complete the current chunk and start a new one. | 778 // when it exceeds a page, we complete the current chunk and start a new one. |
| 745 uint32_t pending_chunk_[kNumberOfPreallocatedSpaces]; | 779 uint32_t pending_chunk_[kNumberOfPreallocatedSpaces]; |
| 746 List<uint32_t> completed_chunks_[kNumberOfPreallocatedSpaces]; | 780 List<uint32_t> completed_chunks_[kNumberOfPreallocatedSpaces]; |
| 747 uint32_t max_chunk_size_[kNumberOfPreallocatedSpaces]; | 781 uint32_t max_chunk_size_[kNumberOfPreallocatedSpaces]; |
| 748 | 782 |
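
A stand-alone sketch of the chunk bookkeeping described in the comment above (names and the size limit are placeholders, not the real Serializer code): object sizes accumulate into a pending chunk, and an object that would overflow the limit closes the current chunk and starts a new one.

    #include <cstdint>
    #include <vector>

    class ChunkTracker {
     public:
      explicit ChunkTracker(uint32_t max_chunk_size)
          : max_chunk_size_(max_chunk_size), pending_chunk_(0) {}

      // Reserve |size| bytes and return the chunk-relative offset, completing
      // the pending chunk first if the object would not fit into it.
      uint32_t Allocate(uint32_t size) {
        if (pending_chunk_ + size > max_chunk_size_) {
          completed_chunks_.push_back(pending_chunk_);  // close current chunk
          pending_chunk_ = 0;                           // start a new one
        }
        uint32_t offset = pending_chunk_;
        pending_chunk_ += size;
        return offset;
      }

     private:
      uint32_t max_chunk_size_;
      uint32_t pending_chunk_;
      std::vector<uint32_t> completed_chunks_;
    };
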
| 749 // We map serialized large objects to indexes for back-referencing. | 783 // We map serialized large objects to indexes for back-referencing. |
| 750 uint32_t large_objects_total_size_; | 784 uint32_t large_objects_total_size_; |
| 751 uint32_t seen_large_objects_index_; | 785 uint32_t seen_large_objects_index_; |
| 752 | 786 |
| 753 List<byte> code_buffer_; | 787 List<byte> code_buffer_; |
| 754 | 788 |
| 789 // To handle stack overflow. |
| 790 List<HeapObject*> deferred_objects_; |
| 791 |
| 755 #ifdef OBJECT_PRINT | 792 #ifdef OBJECT_PRINT |
| 756 static const int kInstanceTypes = 256; | 793 static const int kInstanceTypes = 256; |
| 757 int* instance_type_count_; | 794 int* instance_type_count_; |
| 758 size_t* instance_type_size_; | 795 size_t* instance_type_size_; |
| 759 #endif // OBJECT_PRINT | 796 #endif // OBJECT_PRINT |
| 760 | 797 |
| 761 DISALLOW_COPY_AND_ASSIGN(Serializer); | 798 DISALLOW_COPY_AND_ASSIGN(Serializer); |
| 762 }; | 799 }; |
| 763 | 800 |
| 764 | 801 |
| (...skipping 25 matching lines...) |
| 790 DCHECK(!o->IsScript()); | 827 DCHECK(!o->IsScript()); |
| 791 return o->IsName() || o->IsSharedFunctionInfo() || o->IsHeapNumber() || | 828 return o->IsName() || o->IsSharedFunctionInfo() || o->IsHeapNumber() || |
| 792 o->IsCode() || o->IsScopeInfo() || o->IsExecutableAccessorInfo() || | 829 o->IsCode() || o->IsScopeInfo() || o->IsExecutableAccessorInfo() || |
| 793 o->map() == | 830 o->map() == |
| 794 startup_serializer_->isolate()->heap()->fixed_cow_array_map(); | 831 startup_serializer_->isolate()->heap()->fixed_cow_array_map(); |
| 795 } | 832 } |
| 796 | 833 |
| 797 void SerializeOutdatedContextsAsFixedArray(); | 834 void SerializeOutdatedContextsAsFixedArray(); |
| 798 | 835 |
| 799 Serializer* startup_serializer_; | 836 Serializer* startup_serializer_; |
| 800 List<BackReference> outdated_contexts_; | 837 List<Context*> outdated_contexts_; |
| 801 Object* global_object_; | 838 Object* global_object_; |
| 802 PartialCacheIndexMap partial_cache_index_map_; | 839 PartialCacheIndexMap partial_cache_index_map_; |
| 803 DISALLOW_COPY_AND_ASSIGN(PartialSerializer); | 840 DISALLOW_COPY_AND_ASSIGN(PartialSerializer); |
| 804 }; | 841 }; |
| 805 | 842 |
| 806 | 843 |
| 807 class StartupSerializer : public Serializer { | 844 class StartupSerializer : public Serializer { |
| 808 public: | 845 public: |
| 809 StartupSerializer(Isolate* isolate, SnapshotByteSink* sink) | 846 StartupSerializer(Isolate* isolate, SnapshotByteSink* sink) |
| 810 : Serializer(isolate, sink), root_index_wave_front_(0) { | 847 : Serializer(isolate, sink), root_index_wave_front_(0) { |
| (...skipping 11 matching lines...) |
| 822 // different. | 859 // different. |
| 823 void VisitPointers(Object** start, Object** end) override; | 860 void VisitPointers(Object** start, Object** end) override; |
| 824 | 861 |
| 825 // Serialize the current state of the heap. The order is: | 862 // Serialize the current state of the heap. The order is: |
| 826 // 1) Strong references. | 863 // 1) Strong references. |
| 827 // 2) Partial snapshot cache. | 864 // 2) Partial snapshot cache. |
| 828 // 3) Weak references (e.g. the string table). | 865 // 3) Weak references (e.g. the string table). |
| 829 virtual void SerializeStrongReferences(); | 866 virtual void SerializeStrongReferences(); |
| 830 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, | 867 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, |
| 831 WhereToPoint where_to_point, int skip) override; | 868 WhereToPoint where_to_point, int skip) override; |
| 832 void SerializeWeakReferences(); | 869 void SerializeWeakReferencesAndDeferred(); |
| 833 void Serialize() { | 870 void Serialize() { |
| 834 SerializeStrongReferences(); | 871 SerializeStrongReferences(); |
| 835 SerializeWeakReferences(); | 872 SerializeWeakReferencesAndDeferred(); |
| 836 Pad(); | |
| 837 } | 873 } |
| 838 | 874 |
| 839 private: | 875 private: |
| 840 intptr_t root_index_wave_front_; | 876 intptr_t root_index_wave_front_; |
| 841 DISALLOW_COPY_AND_ASSIGN(StartupSerializer); | 877 DISALLOW_COPY_AND_ASSIGN(StartupSerializer); |
| 842 }; | 878 }; |
| 843 | 879 |
| 844 | 880 |
| 845 class CodeSerializer : public Serializer { | 881 class CodeSerializer : public Serializer { |
| 846 public: | 882 public: |
| (...skipping 148 matching lines...) |
| 995 kNumInternalizedStringsOffset + kInt32Size; | 1031 kNumInternalizedStringsOffset + kInt32Size; |
| 996 static const int kNumCodeStubKeysOffset = kNumReservationsOffset + kInt32Size; | 1032 static const int kNumCodeStubKeysOffset = kNumReservationsOffset + kInt32Size; |
| 997 static const int kPayloadLengthOffset = kNumCodeStubKeysOffset + kInt32Size; | 1033 static const int kPayloadLengthOffset = kNumCodeStubKeysOffset + kInt32Size; |
| 998 static const int kChecksum1Offset = kPayloadLengthOffset + kInt32Size; | 1034 static const int kChecksum1Offset = kPayloadLengthOffset + kInt32Size; |
| 999 static const int kChecksum2Offset = kChecksum1Offset + kInt32Size; | 1035 static const int kChecksum2Offset = kChecksum1Offset + kInt32Size; |
| 1000 static const int kHeaderSize = kChecksum2Offset + kInt32Size; | 1036 static const int kHeaderSize = kChecksum2Offset + kInt32Size; |
| 1001 }; | 1037 }; |
| 1002 } } // namespace v8::internal | 1038 } } // namespace v8::internal |
| 1003 | 1039 |
| 1004 #endif // V8_SNAPSHOT_SERIALIZE_H_ | 1040 #endif // V8_SNAPSHOT_SERIALIZE_H_ |