| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_SNAPSHOT_SERIALIZE_H_ | 5 #ifndef V8_SNAPSHOT_SERIALIZE_H_ |
| 6 #define V8_SNAPSHOT_SERIALIZE_H_ | 6 #define V8_SNAPSHOT_SERIALIZE_H_ |
| 7 | 7 |
| 8 #include "src/hashmap.h" | 8 #include "src/hashmap.h" |
| 9 #include "src/heap-profiler.h" | 9 #include "src/heap-profiler.h" |
| 10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
| (...skipping 288 matching lines...) |
| 299 public: | 299 public: |
| 300 static void Iterate(Isolate* isolate, ObjectVisitor* visitor); | 300 static void Iterate(Isolate* isolate, ObjectVisitor* visitor); |
| 301 | 301 |
| 302 static int nop() { return kNop; } | 302 static int nop() { return kNop; } |
| 303 | 303 |
| 304 // No reservation for large object space necessary. | 304 // No reservation for large object space necessary. |
| 305 static const int kNumberOfPreallocatedSpaces = LAST_PAGED_SPACE + 1; | 305 static const int kNumberOfPreallocatedSpaces = LAST_PAGED_SPACE + 1; |
| 306 static const int kNumberOfSpaces = LAST_SPACE + 1; | 306 static const int kNumberOfSpaces = LAST_SPACE + 1; |
| 307 | 307 |
| 308 protected: | 308 protected: |
| 309 static bool CanBeDeferred(HeapObject* o) { | |
| 310 return !o->IsString() && !o->IsScript(); | |
| 311 } | |
| 312 | |
| 313 // ---------- byte code range 0x00..0x7f ---------- | 309 // ---------- byte code range 0x00..0x7f ---------- |
| 314 // Byte codes in this range represent Where, HowToCode and WhereToPoint. | 310 // Byte codes in this range represent Where, HowToCode and WhereToPoint. |
| 315 // Where the pointed-to object can be found: | 311 // Where the pointed-to object can be found: |
| 316 // The static assert below will trigger when the number of preallocated spaces | 312 // The static assert below will trigger when the number of preallocated spaces |
| 317 // changes. If that happens, update the bytecode ranges in the comments below. | 313 // changes. If that happens, update the bytecode ranges in the comments below. |
| 318 STATIC_ASSERT(5 == kNumberOfSpaces); | 314 STATIC_ASSERT(5 == kNumberOfSpaces); |
| 319 enum Where { | 315 enum Where { |
| 320 // 0x00..0x04 Allocate new object, in specified space. | 316 // 0x00..0x04 Allocate new object, in specified space. |
| 321 kNewObject = 0, | 317 kNewObject = 0, |
| 322 // 0x05 Unused (including 0x25, 0x45, 0x65). | 318 // 0x05 Unused (including 0x25, 0x45, 0x65). |
| (...skipping 47 matching lines...) |
| 370 // ---------- Misc ---------- | 366 // ---------- Misc ---------- |
| 371 // Skip. | 367 // Skip. |
| 372 static const int kSkip = 0x1d; | 368 static const int kSkip = 0x1d; |
| 373 // Internal reference encoded as offsets of pc and target from code entry. | 369 // Internal reference encoded as offsets of pc and target from code entry. |
| 374 static const int kInternalReference = 0x1e; | 370 static const int kInternalReference = 0x1e; |
| 375 static const int kInternalReferenceEncoded = 0x1f; | 371 static const int kInternalReferenceEncoded = 0x1f; |
| 376 // Do nothing, used for padding. | 372 // Do nothing, used for padding. |
| 377 static const int kNop = 0x3d; | 373 static const int kNop = 0x3d; |
| 378 // Move to next reserved chunk. | 374 // Move to next reserved chunk. |
| 379 static const int kNextChunk = 0x3e; | 375 static const int kNextChunk = 0x3e; |
| 380 // Deferring object content. | |
| 381 static const int kDeferred = 0x3f; | |
| 382 // A tag emitted at strategic points in the snapshot to delineate sections. | 376 // A tag emitted at strategic points in the snapshot to delineate sections. |
| 383 // If the deserializer does not find these at the expected moments then it | 377 // If the deserializer does not find these at the expected moments then it |
| 384 // is an indication that the snapshot and the VM do not fit together. | 378 // is an indication that the snapshot and the VM do not fit together. |
| 385 // Examine the build process for architecture, version or configuration | 379 // Examine the build process for architecture, version or configuration |
| 386 // mismatches. | 380 // mismatches. |
| 387 static const int kSynchronize = 0x5d; | 381 static const int kSynchronize = 0x5d; |
| 388 // Used for the source code of the natives, which is in the executable, but | 382 // Used for the source code of the natives, which is in the executable, but |
| 389 // is referred to from external strings in the snapshot. | 383 // is referred to from external strings in the snapshot. |
| 390 static const int kNativesStringResource = 0x5e; | 384 static const int kNativesStringResource = 0x5e; |
| 391 // Raw data of variable length. | 385 // Raw data of variable length. |
| (...skipping 160 matching lines...) |
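For orientation on the byte codes above: the 0x00..0x7f range is the sum of the three dimensions the comment names (Where, HowToCode, WhereToPoint), plus a space index for the allocation and back-reference cases, which appears to be why related codes recur at 0x20 and 0x40 intervals (kSkip = 0x1d, kNop = 0x3d, kSynchronize = 0x5d, and the unused 0x05/0x25/0x45/0x65 slots). The sketch below only illustrates that arithmetic; the helper name and the assumption that HowToCode and WhereToPoint occupy the 0x20 and 0x40 bits are editorial guesses, not taken from this hunk.

    // Sketch only: composing a dispatch byte from the three dimensions plus a
    // space index. Assumes kPlain/kStartOfObject are 0 and kFromCode /
    // kInnerPointer are the 0x20 / 0x40 bits (values not shown in this hunk).
    static int ComposeByteCode(int where, int how_to_code, int where_to_point,
                               int space) {
      return where + how_to_code + where_to_point + space;
    }
    // e.g. ComposeByteCode(kNewObject, 0 /* kPlain */, 0 /* kStartOfObject */,
    //                      2 /* some paged space index */) lands in the
    // 0x00..0x04 "allocate new object in specified space" block.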
| 552 bool deserializing_user_code() { return deserializing_user_code_; } | 546 bool deserializing_user_code() { return deserializing_user_code_; } |
| 553 | 547 |
| 554 void DecodeReservation(Vector<const SerializedData::Reservation> res); | 548 void DecodeReservation(Vector<const SerializedData::Reservation> res); |
| 555 | 549 |
| 556 bool ReserveSpace(); | 550 bool ReserveSpace(); |
| 557 | 551 |
| 558 void UnalignedCopy(Object** dest, Object** src) { | 552 void UnalignedCopy(Object** dest, Object** src) { |
| 559 memcpy(dest, src, sizeof(*src)); | 553 memcpy(dest, src, sizeof(*src)); |
| 560 } | 554 } |
| 561 | 555 |
| 562 void DeserializeDeferredObjects(); | 556 // Allocation sites are present in the snapshot, and must be linked into |
| 557 // a list at deserialization time. |
| 558 void RelinkAllocationSite(AllocationSite* site); |
| 563 | 559 |
| 564 // Fills in some heap data in an area from start to end (non-inclusive). The | 560 // Fills in some heap data in an area from start to end (non-inclusive). The |
| 565 // space id is used for the write barrier. The object_address is the address | 561 // space id is used for the write barrier. The object_address is the address |
| 566 // of the object we are writing into, or NULL if we are not writing into an | 562 // of the object we are writing into, or NULL if we are not writing into an |
| 567 // object, i.e. if we are writing a series of tagged values that are not on | 563 // object, i.e. if we are writing a series of tagged values that are not on |
| 568 // the heap. Returns false if the object content has been deferred. | 564 // the heap. |
| 569 bool ReadData(Object** start, Object** end, int space, | 565 void ReadData(Object** start, Object** end, int space, |
| 570 Address object_address); | 566 Address object_address); |
| 571 void ReadObject(int space_number, Object** write_back); | 567 void ReadObject(int space_number, Object** write_back); |
| 572 Address Allocate(int space_index, int size); | 568 Address Allocate(int space_index, int size); |
| 573 | 569 |
| 574 // Special handling for serialized code like hooking up internalized strings. | 570 // Special handling for serialized code like hooking up internalized strings. |
| 575 HeapObject* PostProcessNewObject(HeapObject* obj); | 571 HeapObject* ProcessNewObjectFromSerializedCode(HeapObject* obj); |
| 576 | |
| 577 void RelinkAllocationSite(AllocationSite* obj); | |
| 578 | 572 |
| 579 // This returns the address of an object that has been described in the | 573 // This returns the address of an object that has been described in the |
| 580 // snapshot by chunk index and offset. | 574 // snapshot by chunk index and offset. |
| 581 HeapObject* GetBackReferencedObject(int space); | 575 HeapObject* GetBackReferencedObject(int space); |
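As the comment notes, a back reference identifies an already-deserialized object by the chunk it was allocated in and its offset within that chunk (plus the space). A minimal sketch of what resolving such a reference amounts to, using a hypothetical per-space table of chunk start addresses rather than this class's real bookkeeping:

    // Sketch only: resolving a (chunk index, chunk offset) back reference is a
    // lookup of the chunk's start address plus the offset inside that chunk.
    Address ResolveBackReference(Address* chunk_starts, uint32_t chunk_index,
                                 uint32_t chunk_offset) {
      return chunk_starts[chunk_index] + chunk_offset;
    }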
| 582 | 576 |
| 583 // Cached current isolate. | 577 // Cached current isolate. |
| 584 Isolate* isolate_; | 578 Isolate* isolate_; |
| 585 | 579 |
| 586 // Objects from the attached object descriptions in the serialized user code. | 580 // Objects from the attached object descriptions in the serialized user code. |
| 587 Vector<Handle<Object> > attached_objects_; | 581 Vector<Handle<Object> > attached_objects_; |
| (...skipping 23 matching lines...) |
| 611 | 605 |
| 612 // There can be only one serializer per V8 process. | 606 // There can be only one serializer per V8 process. |
| 613 class Serializer : public SerializerDeserializer { | 607 class Serializer : public SerializerDeserializer { |
| 614 public: | 608 public: |
| 615 Serializer(Isolate* isolate, SnapshotByteSink* sink); | 609 Serializer(Isolate* isolate, SnapshotByteSink* sink); |
| 616 ~Serializer(); | 610 ~Serializer(); |
| 617 void VisitPointers(Object** start, Object** end) override; | 611 void VisitPointers(Object** start, Object** end) override; |
| 618 | 612 |
| 619 void EncodeReservations(List<SerializedData::Reservation>* out) const; | 613 void EncodeReservations(List<SerializedData::Reservation>* out) const; |
| 620 | 614 |
| 621 void SerializeDeferredObjects(); | |
| 622 | |
| 623 Isolate* isolate() const { return isolate_; } | 615 Isolate* isolate() const { return isolate_; } |
| 624 | 616 |
| 625 BackReferenceMap* back_reference_map() { return &back_reference_map_; } | 617 BackReferenceMap* back_reference_map() { return &back_reference_map_; } |
| 626 RootIndexMap* root_index_map() { return &root_index_map_; } | 618 RootIndexMap* root_index_map() { return &root_index_map_; } |
| 627 | 619 |
| 628 #ifdef OBJECT_PRINT | 620 #ifdef OBJECT_PRINT |
| 629 void CountInstanceType(Map* map, int size); | 621 void CountInstanceType(Map* map, int size); |
| 630 #endif // OBJECT_PRINT | 622 #endif // OBJECT_PRINT |
| 631 | 623 |
| 632 protected: | 624 protected: |
| 633 class ObjectSerializer : public ObjectVisitor { | 625 class ObjectSerializer : public ObjectVisitor { |
| 634 public: | 626 public: |
| 635 ObjectSerializer(Serializer* serializer, Object* o, SnapshotByteSink* sink, | 627 ObjectSerializer(Serializer* serializer, Object* o, SnapshotByteSink* sink, |
| 636 HowToCode how_to_code, WhereToPoint where_to_point) | 628 HowToCode how_to_code, WhereToPoint where_to_point) |
| 637 : serializer_(serializer), | 629 : serializer_(serializer), |
| 638 object_(HeapObject::cast(o)), | 630 object_(HeapObject::cast(o)), |
| 639 sink_(sink), | 631 sink_(sink), |
| 640 reference_representation_(how_to_code + where_to_point), | 632 reference_representation_(how_to_code + where_to_point), |
| 641 bytes_processed_so_far_(0), | 633 bytes_processed_so_far_(0), |
| 642 is_code_object_(o->IsCode()), | 634 is_code_object_(o->IsCode()), |
| 643 code_has_been_output_(false) {} | 635 code_has_been_output_(false) {} |
| 644 void Serialize(); | 636 void Serialize(); |
| 645 void SerializeDeferred(); | |
| 646 void VisitPointers(Object** start, Object** end); | 637 void VisitPointers(Object** start, Object** end); |
| 647 void VisitEmbeddedPointer(RelocInfo* target); | 638 void VisitEmbeddedPointer(RelocInfo* target); |
| 648 void VisitExternalReference(Address* p); | 639 void VisitExternalReference(Address* p); |
| 649 void VisitExternalReference(RelocInfo* rinfo); | 640 void VisitExternalReference(RelocInfo* rinfo); |
| 650 void VisitInternalReference(RelocInfo* rinfo); | 641 void VisitInternalReference(RelocInfo* rinfo); |
| 651 void VisitCodeTarget(RelocInfo* target); | 642 void VisitCodeTarget(RelocInfo* target); |
| 652 void VisitCodeEntry(Address entry_address); | 643 void VisitCodeEntry(Address entry_address); |
| 653 void VisitCell(RelocInfo* rinfo); | 644 void VisitCell(RelocInfo* rinfo); |
| 654 void VisitRuntimeEntry(RelocInfo* reloc); | 645 void VisitRuntimeEntry(RelocInfo* reloc); |
| 655 // Used for serializing the external strings that hold the natives source. | 646 // Used for serializing the external strings that hold the natives source. |
| (...skipping 21 matching lines...) |
| 677 | 668 |
| 678 Serializer* serializer_; | 669 Serializer* serializer_; |
| 679 HeapObject* object_; | 670 HeapObject* object_; |
| 680 SnapshotByteSink* sink_; | 671 SnapshotByteSink* sink_; |
| 681 int reference_representation_; | 672 int reference_representation_; |
| 682 int bytes_processed_so_far_; | 673 int bytes_processed_so_far_; |
| 683 bool is_code_object_; | 674 bool is_code_object_; |
| 684 bool code_has_been_output_; | 675 bool code_has_been_output_; |
| 685 }; | 676 }; |
| 686 | 677 |
| 687 class RecursionScope { | |
| 688 public: | |
| 689 explicit RecursionScope(Serializer* serializer) : serializer_(serializer) { | |
| 690 serializer_->recursion_depth_++; | |
| 691 } | |
| 692 ~RecursionScope() { serializer_->recursion_depth_--; } | |
| 693 bool ExceedsMaximum() { | |
| 694 return serializer_->recursion_depth_ >= kMaxRecursionDepth; | |
| 695 } | |
| 696 | |
| 697 private: | |
| 698 static const int kMaxRecursionDepth = 32; | |
| 699 Serializer* serializer_; | |
| 700 }; | |
| 701 | |
| 702 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, | 678 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, |
| 703 WhereToPoint where_to_point, int skip) = 0; | 679 WhereToPoint where_to_point, int skip) = 0; |
| 704 | 680 |
| 705 void PutRoot(int index, HeapObject* object, HowToCode how, WhereToPoint where, | 681 void PutRoot(int index, HeapObject* object, HowToCode how, WhereToPoint where, |
| 706 int skip); | 682 int skip); |
| 707 | 683 |
| 708 void PutBackReference(HeapObject* object, BackReference reference); | |
| 709 | |
| 710 // Returns true if the object was successfully serialized. | 684 // Returns true if the object was successfully serialized. |
| 711 bool SerializeKnownObject(HeapObject* obj, HowToCode how_to_code, | 685 bool SerializeKnownObject(HeapObject* obj, HowToCode how_to_code, |
| 712 WhereToPoint where_to_point, int skip); | 686 WhereToPoint where_to_point, int skip); |
| 713 | 687 |
| 714 inline void FlushSkip(int skip) { | 688 inline void FlushSkip(int skip) { |
| 715 if (skip != 0) { | 689 if (skip != 0) { |
| 716 sink_->Put(kSkip, "SkipFromSerializeObject"); | 690 sink_->Put(kSkip, "SkipFromSerializeObject"); |
| 717 sink_->PutInt(skip, "SkipDistanceFromSerializeObject"); | 691 sink_->PutInt(skip, "SkipDistanceFromSerializeObject"); |
| 718 } | 692 } |
| 719 } | 693 } |
| (...skipping 21 matching lines...) |
| 741 Code* CopyCode(Code* code); | 715 Code* CopyCode(Code* code); |
| 742 | 716 |
| 743 inline uint32_t max_chunk_size(int space) const { | 717 inline uint32_t max_chunk_size(int space) const { |
| 744 DCHECK_LE(0, space); | 718 DCHECK_LE(0, space); |
| 745 DCHECK_LT(space, kNumberOfSpaces); | 719 DCHECK_LT(space, kNumberOfSpaces); |
| 746 return max_chunk_size_[space]; | 720 return max_chunk_size_[space]; |
| 747 } | 721 } |
| 748 | 722 |
| 749 SnapshotByteSink* sink() const { return sink_; } | 723 SnapshotByteSink* sink() const { return sink_; } |
| 750 | 724 |
| 751 void QueueDeferredObject(HeapObject* obj) { | |
| 752 DCHECK(back_reference_map_.Lookup(obj).is_valid()); | |
| 753 deferred_objects_.Add(obj); | |
| 754 } | |
| 755 | |
| 756 void OutputStatistics(const char* name); | 725 void OutputStatistics(const char* name); |
| 757 | 726 |
| 758 Isolate* isolate_; | 727 Isolate* isolate_; |
| 759 | 728 |
| 760 SnapshotByteSink* sink_; | 729 SnapshotByteSink* sink_; |
| 761 ExternalReferenceEncoder external_reference_encoder_; | 730 ExternalReferenceEncoder external_reference_encoder_; |
| 762 | 731 |
| 763 BackReferenceMap back_reference_map_; | 732 BackReferenceMap back_reference_map_; |
| 764 RootIndexMap root_index_map_; | 733 RootIndexMap root_index_map_; |
| 765 | 734 |
| 766 int recursion_depth_; | |
| 767 | |
| 768 friend class Deserializer; | 735 friend class Deserializer; |
| 769 friend class ObjectSerializer; | 736 friend class ObjectSerializer; |
| 770 friend class RecursionScope; | |
| 771 friend class SnapshotData; | 737 friend class SnapshotData; |
| 772 | 738 |
| 773 private: | 739 private: |
| 774 CodeAddressMap* code_address_map_; | 740 CodeAddressMap* code_address_map_; |
| 775 // Objects from the same space are put into chunks for bulk-allocation | 741 // Objects from the same space are put into chunks for bulk-allocation |
| 776 // when deserializing. We have to make sure that each chunk fits into a | 742 // when deserializing. We have to make sure that each chunk fits into a |
| 777 // page. So we track the chunk size in pending_chunk_ for each space, and | 743 // page. So we track the chunk size in pending_chunk_ for each space, and |
| 778 // when it would exceed a page, we complete the current chunk and start a new one. | 744 // when it would exceed a page, we complete the current chunk and start a new one. |
| 779 uint32_t pending_chunk_[kNumberOfPreallocatedSpaces]; | 745 uint32_t pending_chunk_[kNumberOfPreallocatedSpaces]; |
| 780 List<uint32_t> completed_chunks_[kNumberOfPreallocatedSpaces]; | 746 List<uint32_t> completed_chunks_[kNumberOfPreallocatedSpaces]; |
| 781 uint32_t max_chunk_size_[kNumberOfPreallocatedSpaces]; | 747 uint32_t max_chunk_size_[kNumberOfPreallocatedSpaces]; |
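The comment above describes the reservation bookkeeping these three members support: sizes accumulate per space in pending_chunk_, and a chunk is sealed into completed_chunks_ before it would outgrow what fits on one page. A minimal sketch of that logic, assuming a helper name (AccountForReservation) that does not exist in this header:

    // Sketch only: per-space chunk accounting for an object of 'size' bytes.
    void AccountForReservation(int space, uint32_t size) {
      uint32_t limit = max_chunk_size_[space];
      DCHECK(size <= limit);  // a single object must fit into one chunk
      if (pending_chunk_[space] + size > limit) {
        // Close the current chunk and start a fresh one for this object.
        completed_chunks_[space].Add(pending_chunk_[space]);
        pending_chunk_[space] = 0;
      }
      pending_chunk_[space] += size;
    }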
| 782 | 748 |
| 783 // We map serialized large objects to indexes for back-referencing. | 749 // We map serialized large objects to indexes for back-referencing. |
| 784 uint32_t large_objects_total_size_; | 750 uint32_t large_objects_total_size_; |
| 785 uint32_t seen_large_objects_index_; | 751 uint32_t seen_large_objects_index_; |
| 786 | 752 |
| 787 List<byte> code_buffer_; | 753 List<byte> code_buffer_; |
| 788 | 754 |
| 789 // To handle stack overflow. | |
| 790 List<HeapObject*> deferred_objects_; | |
| 791 | |
| 792 #ifdef OBJECT_PRINT | 755 #ifdef OBJECT_PRINT |
| 793 static const int kInstanceTypes = 256; | 756 static const int kInstanceTypes = 256; |
| 794 int* instance_type_count_; | 757 int* instance_type_count_; |
| 795 size_t* instance_type_size_; | 758 size_t* instance_type_size_; |
| 796 #endif // OBJECT_PRINT | 759 #endif // OBJECT_PRINT |
| 797 | 760 |
| 798 DISALLOW_COPY_AND_ASSIGN(Serializer); | 761 DISALLOW_COPY_AND_ASSIGN(Serializer); |
| 799 }; | 762 }; |
| 800 | 763 |
| 801 | 764 |
| (...skipping 25 matching lines...) |
| 827 DCHECK(!o->IsScript()); | 790 DCHECK(!o->IsScript()); |
| 828 return o->IsName() || o->IsSharedFunctionInfo() || o->IsHeapNumber() || | 791 return o->IsName() || o->IsSharedFunctionInfo() || o->IsHeapNumber() || |
| 829 o->IsCode() || o->IsScopeInfo() || o->IsExecutableAccessorInfo() || | 792 o->IsCode() || o->IsScopeInfo() || o->IsExecutableAccessorInfo() || |
| 830 o->map() == | 793 o->map() == |
| 831 startup_serializer_->isolate()->heap()->fixed_cow_array_map(); | 794 startup_serializer_->isolate()->heap()->fixed_cow_array_map(); |
| 832 } | 795 } |
| 833 | 796 |
| 834 void SerializeOutdatedContextsAsFixedArray(); | 797 void SerializeOutdatedContextsAsFixedArray(); |
| 835 | 798 |
| 836 Serializer* startup_serializer_; | 799 Serializer* startup_serializer_; |
| 837 List<Context*> outdated_contexts_; | 800 List<BackReference> outdated_contexts_; |
| 838 Object* global_object_; | 801 Object* global_object_; |
| 839 PartialCacheIndexMap partial_cache_index_map_; | 802 PartialCacheIndexMap partial_cache_index_map_; |
| 840 DISALLOW_COPY_AND_ASSIGN(PartialSerializer); | 803 DISALLOW_COPY_AND_ASSIGN(PartialSerializer); |
| 841 }; | 804 }; |
| 842 | 805 |
| 843 | 806 |
| 844 class StartupSerializer : public Serializer { | 807 class StartupSerializer : public Serializer { |
| 845 public: | 808 public: |
| 846 StartupSerializer(Isolate* isolate, SnapshotByteSink* sink) | 809 StartupSerializer(Isolate* isolate, SnapshotByteSink* sink) |
| 847 : Serializer(isolate, sink), root_index_wave_front_(0) { | 810 : Serializer(isolate, sink), root_index_wave_front_(0) { |
| (...skipping 11 matching lines...) |
| 859 // different. | 822 // different. |
| 860 void VisitPointers(Object** start, Object** end) override; | 823 void VisitPointers(Object** start, Object** end) override; |
| 861 | 824 |
| 862 // Serialize the current state of the heap. The order is: | 825 // Serialize the current state of the heap. The order is: |
| 863 // 1) Strong references. | 826 // 1) Strong references. |
| 864 // 2) Partial snapshot cache. | 827 // 2) Partial snapshot cache. |
| 865 // 3) Weak references (e.g. the string table). | 828 // 3) Weak references (e.g. the string table). |
| 866 virtual void SerializeStrongReferences(); | 829 virtual void SerializeStrongReferences(); |
| 867 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, | 830 virtual void SerializeObject(HeapObject* o, HowToCode how_to_code, |
| 868 WhereToPoint where_to_point, int skip) override; | 831 WhereToPoint where_to_point, int skip) override; |
| 869 void SerializeWeakReferencesAndDeferred(); | 832 void SerializeWeakReferences(); |
| 870 void Serialize() { | 833 void Serialize() { |
| 871 SerializeStrongReferences(); | 834 SerializeStrongReferences(); |
| 872 SerializeWeakReferencesAndDeferred(); | 835 SerializeWeakReferences(); |
| 836 Pad(); |
| 873 } | 837 } |
| 874 | 838 |
| 875 private: | 839 private: |
| 876 intptr_t root_index_wave_front_; | 840 intptr_t root_index_wave_front_; |
| 877 DISALLOW_COPY_AND_ASSIGN(StartupSerializer); | 841 DISALLOW_COPY_AND_ASSIGN(StartupSerializer); |
| 878 }; | 842 }; |
| 879 | 843 |
| 880 | 844 |
| 881 class CodeSerializer : public Serializer { | 845 class CodeSerializer : public Serializer { |
| 882 public: | 846 public: |
| (...skipping 148 matching lines...) |
| 1031 kNumInternalizedStringsOffset + kInt32Size; | 995 kNumInternalizedStringsOffset + kInt32Size; |
| 1032 static const int kNumCodeStubKeysOffset = kNumReservationsOffset + kInt32Size; | 996 static const int kNumCodeStubKeysOffset = kNumReservationsOffset + kInt32Size; |
| 1033 static const int kPayloadLengthOffset = kNumCodeStubKeysOffset + kInt32Size; | 997 static const int kPayloadLengthOffset = kNumCodeStubKeysOffset + kInt32Size; |
| 1034 static const int kChecksum1Offset = kPayloadLengthOffset + kInt32Size; | 998 static const int kChecksum1Offset = kPayloadLengthOffset + kInt32Size; |
| 1035 static const int kChecksum2Offset = kChecksum1Offset + kInt32Size; | 999 static const int kChecksum2Offset = kChecksum1Offset + kInt32Size; |
| 1036 static const int kHeaderSize = kChecksum2Offset + kInt32Size; | 1000 static const int kHeaderSize = kChecksum2Offset + kInt32Size; |
| 1037 }; | 1001 }; |
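The ...Offset constants above describe a fixed header of consecutive 32-bit fields at the front of a serialized code blob, ending at kHeaderSize where the payload begins. A sketch of how a field at one of these offsets could be read; the helper name is illustrative, not the class's actual accessor:

    // Sketch only: header fields are 32-bit values at fixed byte offsets from
    // the start of the data; memcpy sidesteps unaligned loads.
    static uint32_t ReadHeaderField(const byte* data, int offset) {
      uint32_t value;
      memcpy(&value, data + offset, sizeof(value));
      return value;
    }
    // e.g. ReadHeaderField(data, kPayloadLengthOffset) gives the payload size,
    // and the payload itself starts at data + kHeaderSize.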
| 1038 } } // namespace v8::internal | 1002 } } // namespace v8::internal |
| 1039 | 1003 |
| 1040 #endif // V8_SNAPSHOT_SERIALIZE_H_ | 1004 #endif // V8_SNAPSHOT_SERIALIZE_H_ |