OLD | NEW |
---|---|
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 26 matching lines...) | |
37 #include "runtime.h" | 37 #include "runtime.h" |
38 #include "serialize.h" | 38 #include "serialize.h" |
39 #include "stub-cache.h" | 39 #include "stub-cache.h" |
40 #include "v8threads.h" | 40 #include "v8threads.h" |
41 #include "top.h" | 41 #include "top.h" |
42 #include "bootstrapper.h" | 42 #include "bootstrapper.h" |
43 | 43 |
44 namespace v8 { | 44 namespace v8 { |
45 namespace internal { | 45 namespace internal { |
46 | 46 |
47 // 32-bit encoding: a RelativeAddress must be able to fit in a | |
48 // pointer: it is encoded as an Address with (from LS to MS bits): | |
49 // - 2 bits identifying this as a HeapObject. | |
50 // - 4 bits to encode the AllocationSpace (including special values for | |
51 // code and fixed arrays in LO space) | |
52 // - 27 bits identifying a word in the space, in one of three formats: | |
53 // - paged spaces: 16 bits of page number, 11 bits of word offset in page | |
54 // - NEW space: 27 bits of word offset | |
55 // - LO space: 27 bits of page number | |
56 | |
57 const int kSpaceShift = kHeapObjectTagSize; | |
58 const int kSpaceBits = 4; | |
59 const int kSpaceMask = (1 << kSpaceBits) - 1; | |
60 | |
61 const int kOffsetShift = kSpaceShift + kSpaceBits; | |
62 const int kOffsetBits = 11; | |
63 const int kOffsetMask = (1 << kOffsetBits) - 1; | |
64 | |
65 const int kPageShift = kOffsetShift + kOffsetBits; | |
66 const int kPageBits = 32 - (kOffsetBits + kSpaceBits + kHeapObjectTagSize); | |
67 const int kPageMask = (1 << kPageBits) - 1; | |
68 | |
69 const int kPageAndOffsetShift = kOffsetShift; | |
70 const int kPageAndOffsetBits = kPageBits + kOffsetBits; | |
71 const int kPageAndOffsetMask = (1 << kPageAndOffsetBits) - 1; | |
72 | |
73 // These values are special allocation space tags used for | |
74 // serialization. | |
75 // Mark the pages executable on platforms that support it. | |
76 const int kLargeCode = LAST_SPACE + 1; | |
77 // Allocate extra remembered-set bits. | |
78 const int kLargeFixedArray = LAST_SPACE + 2; | |
79 | |
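The constants below implement the bit layout spelled out in the header comment above. As a quick illustration, here is a minimal standalone sketch of that packing scheme; the tag width and tag value (2 bits, tag 1) are assumptions for illustration only, the real definitions live in V8's globals.h.

```cpp
// Standalone sketch of the 32-bit RelativeAddress layout described above.
// The tag width/value are assumed (2 bits, tag 1); real values come from
// V8's globals.h.
#include <cassert>
#include <cstdint>

namespace {
const int kTagSize = 2;      // low bits: HeapObject tag (assumed)
const int kTag = 1;
const int kSpaceBits = 4;    // next 4 bits: allocation space
const int kOffsetBits = 11;  // then 11 bits: word offset in page
                             // remaining bits: page number

uint32_t Pack(int space, int page, int word_offset) {
  return (static_cast<uint32_t>(page) << (kTagSize + kSpaceBits + kOffsetBits)) |
         (static_cast<uint32_t>(word_offset) << (kTagSize + kSpaceBits)) |
         (static_cast<uint32_t>(space) << kTagSize) | kTag;
}
}  // namespace

int main() {
  uint32_t encoded = Pack(/*space=*/3, /*page=*/5, /*word_offset=*/17);
  assert(((encoded >> kTagSize) & ((1 << kSpaceBits) - 1)) == 3);                 // space
  assert(((encoded >> (kTagSize + kSpaceBits)) & ((1 << kOffsetBits) - 1)) == 17);  // word
  assert((encoded >> (kTagSize + kSpaceBits + kOffsetBits)) == 5);                // page
  return 0;
}
```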
80 | |
81 static inline AllocationSpace GetSpace(Address addr) { | |
82 const intptr_t encoded = reinterpret_cast<intptr_t>(addr); | |
83 int space_number = (static_cast<int>(encoded >> kSpaceShift) & kSpaceMask); | |
84 if (space_number > LAST_SPACE) space_number = LO_SPACE; | |
85 return static_cast<AllocationSpace>(space_number); | |
86 } | |
87 | |
88 | |
89 static inline bool IsLargeExecutableObject(Address addr) { | |
90 const intptr_t encoded = reinterpret_cast<intptr_t>(addr); | |
91 const int space_number = | |
92 (static_cast<int>(encoded >> kSpaceShift) & kSpaceMask); | |
93 return (space_number == kLargeCode); | |
94 } | |
95 | |
96 | |
97 static inline bool IsLargeFixedArray(Address addr) { | |
98 const intptr_t encoded = reinterpret_cast<intptr_t>(addr); | |
99 const int space_number = | |
100 (static_cast<int>(encoded >> kSpaceShift) & kSpaceMask); | |
101 return (space_number == kLargeFixedArray); | |
102 } | |
103 | |
104 | |
105 static inline int PageIndex(Address addr) { | |
106 const intptr_t encoded = reinterpret_cast<intptr_t>(addr); | |
107 return static_cast<int>(encoded >> kPageShift) & kPageMask; | |
108 } | |
109 | |
110 | |
111 static inline int PageOffset(Address addr) { | |
112 const intptr_t encoded = reinterpret_cast<intptr_t>(addr); | |
113 const int offset = static_cast<int>(encoded >> kOffsetShift) & kOffsetMask; | |
114 return offset << kObjectAlignmentBits; | |
115 } | |
116 | |
117 | |
118 static inline int NewSpaceOffset(Address addr) { | |
119 const intptr_t encoded = reinterpret_cast<intptr_t>(addr); | |
120 const int page_offset = | |
121 static_cast<int>(encoded >> kPageAndOffsetShift) & kPageAndOffsetMask; | |
122 return page_offset << kObjectAlignmentBits; | |
123 } | |
124 | |
125 | |
126 static inline int LargeObjectIndex(Address addr) { | |
127 const intptr_t encoded = reinterpret_cast<intptr_t>(addr); | |
128 return static_cast<int>(encoded >> kPageAndOffsetShift) & kPageAndOffsetMask; | |
129 } | |
130 | |
131 | |
132 // A RelativeAddress encodes a heap address that is independent of | |
133 // the actual memory addresses in the real heap. The general case (for the | |
134 // OLD, CODE and MAP spaces) is as a (space id, page number, page offset) | |
135 // triple. The NEW space has page number == 0, because there are no | |
136 // pages. The LARGE_OBJECT space has page offset = 0, since there is | |
137 // exactly one object per page. RelativeAddresses are encodable as | |
138 // Addresses, so that they can replace the map() pointers of | |
139 // HeapObjects. The encoded Addresses are themselves tagged as HeapObjects | |
140 // and allow for marking (see is_marked(), mark(), clear_mark()) as | |
141 // used by the Mark-Compact collector. | |
142 | |
143 class RelativeAddress { | |
144 public: | |
145 RelativeAddress(AllocationSpace space, | |
146 int page_index, | |
147 int page_offset) | |
148 : space_(space), page_index_(page_index), page_offset_(page_offset) { | |
149 // Assert that the space encoding (plus the two pseudo-spaces for | |
150 // special large objects) fits in the available bits. | |
151 ASSERT(((LAST_SPACE + 2) & ~kSpaceMask) == 0); | |
152 ASSERT(space <= LAST_SPACE && space >= 0); | |
153 } | |
154 | |
155 // Return the encoding of 'this' as an Address. Decode with constructor. | |
156 Address Encode() const; | |
157 | |
158 AllocationSpace space() const { | |
159 if (space_ > LAST_SPACE) return LO_SPACE; | |
160 return static_cast<AllocationSpace>(space_); | |
161 } | |
162 int page_index() const { return page_index_; } | |
163 int page_offset() const { return page_offset_; } | |
164 | |
165 bool in_paged_space() const { | |
166 return space_ == CODE_SPACE || | |
167 space_ == OLD_POINTER_SPACE || | |
168 space_ == OLD_DATA_SPACE || | |
169 space_ == MAP_SPACE || | |
170 space_ == CELL_SPACE; | |
171 } | |
172 | |
173 void next_address(int offset) { page_offset_ += offset; } | |
174 void next_page(int init_offset = 0) { | |
175 page_index_++; | |
176 page_offset_ = init_offset; | |
177 } | |
178 | |
179 #ifdef DEBUG | |
180 void Verify(); | |
181 #endif | |
182 | |
183 void set_to_large_code_object() { | |
184 ASSERT(space_ == LO_SPACE); | |
185 space_ = kLargeCode; | |
186 } | |
187 void set_to_large_fixed_array() { | |
188 ASSERT(space_ == LO_SPACE); | |
189 space_ = kLargeFixedArray; | |
190 } | |
191 | |
192 | |
193 private: | |
194 int space_; | |
195 int page_index_; | |
196 int page_offset_; | |
197 }; | |
198 | |
199 | |
200 Address RelativeAddress::Encode() const { | |
201 ASSERT(page_index_ >= 0); | |
202 int word_offset = 0; | |
203 int result = 0; | |
204 switch (space_) { | |
205 case MAP_SPACE: | |
206 case CELL_SPACE: | |
207 case OLD_POINTER_SPACE: | |
208 case OLD_DATA_SPACE: | |
209 case CODE_SPACE: | |
210 ASSERT_EQ(0, page_index_ & ~kPageMask); | |
211 word_offset = page_offset_ >> kObjectAlignmentBits; | |
212 ASSERT_EQ(0, word_offset & ~kOffsetMask); | |
213 result = (page_index_ << kPageShift) | (word_offset << kOffsetShift); | |
214 break; | |
215 case NEW_SPACE: | |
216 ASSERT_EQ(0, page_index_); | |
217 word_offset = page_offset_ >> kObjectAlignmentBits; | |
218 ASSERT_EQ(0, word_offset & ~kPageAndOffsetMask); | |
219 result = word_offset << kPageAndOffsetShift; | |
220 break; | |
221 case LO_SPACE: | |
222 case kLargeCode: | |
223 case kLargeFixedArray: | |
224 ASSERT_EQ(0, page_offset_); | |
225 ASSERT_EQ(0, page_index_ & ~kPageAndOffsetMask); | |
226 result = page_index_ << kPageAndOffsetShift; | |
227 break; | |
228 } | |
229 // OR in AllocationSpace and kHeapObjectTag | |
230 ASSERT_EQ(0, space_ & ~kSpaceMask); | |
231 result |= (space_ << kSpaceShift) | kHeapObjectTag; | |
232 return reinterpret_cast<Address>(result); | |
233 } | |
234 | |
235 | |
236 #ifdef DEBUG | |
237 void RelativeAddress::Verify() { | |
238 ASSERT(page_offset_ >= 0 && page_index_ >= 0); | |
239 switch (space_) { | |
240 case MAP_SPACE: | |
241 case CELL_SPACE: | |
242 case OLD_POINTER_SPACE: | |
243 case OLD_DATA_SPACE: | |
244 case CODE_SPACE: | |
245 ASSERT(Page::kObjectStartOffset <= page_offset_ && | |
246 page_offset_ <= Page::kPageSize); | |
247 break; | |
248 case NEW_SPACE: | |
249 ASSERT(page_index_ == 0); | |
250 break; | |
251 case LO_SPACE: | |
252 case kLargeCode: | |
253 case kLargeFixedArray: | |
254 ASSERT(page_offset_ == 0); | |
255 break; | |
256 } | |
257 } | |
258 #endif | |
259 | |
260 enum GCTreatment { | |
261 DataObject, // Object that cannot contain a reference to new space. | |
262 PointerObject, // Object that can contain a reference to new space. | |
263 CodeObject // Object that contains executable code. | |
264 }; | |
265 | |
266 // A SimulatedHeapSpace simulates the allocation of objects in a page in | |
267 // the heap. It uses linear allocation - that is, it doesn't simulate the | |
268 // use of a free list. This simulated | |
269 // allocation must exactly match that done by Heap. | |
270 | |
271 class SimulatedHeapSpace { | |
272 public: | |
273 // The default constructor initializes to an invalid state. | |
274 SimulatedHeapSpace(): current_(LAST_SPACE, -1, -1) {} | |
275 | |
276 // Sets 'this' to the first address in 'space' that would be | |
277 // returned by allocation in an empty heap. | |
278 void InitEmptyHeap(AllocationSpace space); | |
279 | |
280 // Sets 'this' to the next address in 'space' that would be returned | |
281 // by allocation in the current heap. Intended only for testing | |
282 // serialization and deserialization in the current address space. | |
283 void InitCurrentHeap(AllocationSpace space); | |
284 | |
285 // Returns the RelativeAddress where the next | |
286 // object of 'size' bytes will be allocated, and updates 'this' to | |
287 // point to the next free address beyond that object. | |
288 RelativeAddress Allocate(int size, GCTreatment special_gc_treatment); | |
289 | |
290 private: | |
291 RelativeAddress current_; | |
292 }; | |
293 | |
294 | |
295 void SimulatedHeapSpace::InitEmptyHeap(AllocationSpace space) { | |
296 switch (space) { | |
297 case MAP_SPACE: | |
298 case CELL_SPACE: | |
299 case OLD_POINTER_SPACE: | |
300 case OLD_DATA_SPACE: | |
301 case CODE_SPACE: | |
302 current_ = RelativeAddress(space, 0, Page::kObjectStartOffset); | |
303 break; | |
304 case NEW_SPACE: | |
305 case LO_SPACE: | |
306 current_ = RelativeAddress(space, 0, 0); | |
307 break; | |
308 } | |
309 } | |
310 | |
311 | |
312 void SimulatedHeapSpace::InitCurrentHeap(AllocationSpace space) { | |
313 switch (space) { | |
314 case MAP_SPACE: | |
315 case CELL_SPACE: | |
316 case OLD_POINTER_SPACE: | |
317 case OLD_DATA_SPACE: | |
318 case CODE_SPACE: { | |
319 PagedSpace* ps; | |
320 if (space == MAP_SPACE) { | |
321 ps = Heap::map_space(); | |
322 } else if (space == CELL_SPACE) { | |
323 ps = Heap::cell_space(); | |
324 } else if (space == OLD_POINTER_SPACE) { | |
325 ps = Heap::old_pointer_space(); | |
326 } else if (space == OLD_DATA_SPACE) { | |
327 ps = Heap::old_data_space(); | |
328 } else { | |
329 ASSERT(space == CODE_SPACE); | |
330 ps = Heap::code_space(); | |
331 } | |
332 Address top = ps->top(); | |
333 Page* top_page = Page::FromAllocationTop(top); | |
334 int page_index = 0; | |
335 PageIterator it(ps, PageIterator::PAGES_IN_USE); | |
336 while (it.has_next()) { | |
337 if (it.next() == top_page) break; | |
338 page_index++; | |
339 } | |
340 current_ = RelativeAddress(space, | |
341 page_index, | |
342 top_page->Offset(top)); | |
343 break; | |
344 } | |
345 case NEW_SPACE: | |
346 current_ = RelativeAddress(space, | |
347 0, | |
348 static_cast<int>(Heap::NewSpaceTop() | |
349 - Heap::NewSpaceStart())); | |
350 break; | |
351 case LO_SPACE: | |
352 int page_index = 0; | |
353 for (LargeObjectIterator it(Heap::lo_space()); it.has_next(); it.next()) { | |
354 page_index++; | |
355 } | |
356 current_ = RelativeAddress(space, page_index, 0); | |
357 break; | |
358 } | |
359 } | |
360 | |
361 | |
362 RelativeAddress SimulatedHeapSpace::Allocate(int size, | |
363 GCTreatment special_gc_treatment) { | |
364 #ifdef DEBUG | |
365 current_.Verify(); | |
366 #endif | |
367 int alloc_size = OBJECT_SIZE_ALIGN(size); | |
368 if (current_.in_paged_space() && | |
369 current_.page_offset() + alloc_size > Page::kPageSize) { | |
370 ASSERT(alloc_size <= Page::kMaxHeapObjectSize); | |
371 current_.next_page(Page::kObjectStartOffset); | |
372 } | |
373 RelativeAddress result = current_; | |
374 if (current_.space() == LO_SPACE) { | |
375 current_.next_page(); | |
376 if (special_gc_treatment == CodeObject) { | |
377 result.set_to_large_code_object(); | |
378 } else if (special_gc_treatment == PointerObject) { | |
379 result.set_to_large_fixed_array(); | |
380 } | |
381 } else { | |
382 current_.next_address(alloc_size); | |
383 } | |
384 #ifdef DEBUG | |
385 current_.Verify(); | |
386 result.Verify(); | |
387 #endif | |
388 return result; | |
389 } | |
390 | |
391 // ----------------------------------------------------------------------------- | 47 // ----------------------------------------------------------------------------- |
392 // Coding of external references. | 48 // Coding of external references. |
393 | 49 |
394 // The encoding of an external reference. The type is in the high word. | 50 // The encoding of an external reference. The type is in the high word. |
395 // The id is in the low word. | 51 // The id is in the low word. |
396 static uint32_t EncodeExternal(TypeCode type, uint16_t id) { | 52 static uint32_t EncodeExternal(TypeCode type, uint16_t id) { |
397 return static_cast<uint32_t>(type) << 16 | id; | 53 return static_cast<uint32_t>(type) << 16 | id; |
398 } | 54 } |
399 | 55 |
400 | 56 |
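EncodeExternal above packs the type into the upper 16 bits and the id into the lower 16 bits. A tiny round-trip sketch, with made-up type/id values (the real TypeCode enum lives elsewhere in the serializer headers):

```cpp
// Round-trip sketch of the (type, id) packing used by EncodeExternal.
// The numeric type/id values here are arbitrary illustrations.
#include <cassert>
#include <cstdint>

static uint32_t Encode(uint16_t type, uint16_t id) {
  return static_cast<uint32_t>(type) << 16 | id;
}

int main() {
  uint32_t code = Encode(/*type=*/4, /*id=*/258);
  assert((code >> 16) == 4);       // type lives in the high half-word
  assert((code & 0xffff) == 258);  // id lives in the low half-word
  return 0;
}
```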
(...skipping 421 matching lines...) | |
822 | 478 |
823 | 479 |
824 ExternalReferenceDecoder::~ExternalReferenceDecoder() { | 480 ExternalReferenceDecoder::~ExternalReferenceDecoder() { |
825 for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) { | 481 for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) { |
826 DeleteArray(encodings_[type]); | 482 DeleteArray(encodings_[type]); |
827 } | 483 } |
828 DeleteArray(encodings_); | 484 DeleteArray(encodings_); |
829 } | 485 } |
830 | 486 |
831 | 487 |
832 //------------------------------------------------------------------------------ | |
833 // Implementation of Serializer | |
834 | |
835 | |
836 // Helper class to write the bytes of the serialized heap. | |
837 | |
838 class SnapshotWriter { | |
839 public: | |
840 SnapshotWriter() { | |
841 len_ = 0; | |
842 max_ = 8 << 10; // 8K initial size | |
843 str_ = NewArray<byte>(max_); | |
844 } | |
845 | |
846 ~SnapshotWriter() { | |
847 DeleteArray(str_); | |
848 } | |
849 | |
850 void GetBytes(byte** str, int* len) { | |
851 *str = NewArray<byte>(len_); | |
852 memcpy(*str, str_, len_); | |
853 *len = len_; | |
854 } | |
855 | |
856 void Reserve(int bytes, int pos); | |
857 | |
858 void PutC(char c) { | |
859 InsertC(c, len_); | |
860 } | |
861 | |
862 void PutInt(int i) { | |
863 InsertInt(i, len_); | |
864 } | |
865 | |
866 void PutAddress(Address p) { | |
867 PutBytes(reinterpret_cast<byte*>(&p), sizeof(p)); | |
868 } | |
869 | |
870 void PutBytes(const byte* a, int size) { | |
871 InsertBytes(a, len_, size); | |
872 } | |
873 | |
874 void PutString(const char* s) { | |
875 InsertString(s, len_); | |
876 } | |
877 | |
878 int InsertC(char c, int pos) { | |
879 Reserve(1, pos); | |
880 str_[pos] = c; | |
881 len_++; | |
882 return pos + 1; | |
883 } | |
884 | |
885 int InsertInt(int i, int pos) { | |
886 return InsertBytes(reinterpret_cast<byte*>(&i), pos, sizeof(i)); | |
887 } | |
888 | |
889 int InsertBytes(const byte* a, int pos, int size) { | |
890 Reserve(size, pos); | |
891 memcpy(&str_[pos], a, size); | |
892 len_ += size; | |
893 return pos + size; | |
894 } | |
895 | |
896 int InsertString(const char* s, int pos); | |
897 | |
898 int length() { return len_; } | |
899 | |
900 Address position() { return reinterpret_cast<Address>(&str_[len_]); } | |
901 | |
902 private: | |
903 byte* str_; // the snapshot | |
904 int len_; // the current length of str_ | |
905 int max_; // the allocated size of str_ | |
906 }; | |
907 | |
908 | |
909 void SnapshotWriter::Reserve(int bytes, int pos) { | |
910 CHECK(0 <= pos && pos <= len_); | |
911 while (len_ + bytes >= max_) { | |
912 max_ *= 2; | |
913 byte* old = str_; | |
914 str_ = NewArray<byte>(max_); | |
915 memcpy(str_, old, len_); | |
916 DeleteArray(old); | |
917 } | |
918 if (pos < len_) { | |
919 byte* old = str_; | |
920 str_ = NewArray<byte>(max_); | |
921 memcpy(str_, old, pos); | |
922 memcpy(str_ + pos + bytes, old + pos, len_ - pos); | |
923 DeleteArray(old); | |
924 } | |
925 } | |
926 | |
927 int SnapshotWriter::InsertString(const char* s, int pos) { | |
928 int size = StrLength(s); | |
929 pos = InsertC('[', pos); | |
930 pos = InsertInt(size, pos); | |
931 pos = InsertC(']', pos); | |
932 return InsertBytes(reinterpret_cast<const byte*>(s), pos, size); | |
933 } | |
934 | |
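For reference, PutString above frames a string as '[' followed by the raw length bytes, ']', and then the characters. A rough sketch of that framing, using a std::vector in place of the hand-grown buffer and assuming the length is written as a raw native-endian int (as InsertInt does via memcpy); this is a simplified stand-in, not the class above.

```cpp
// Sketch of the byte framing produced by SnapshotWriter::PutString,
// simplified to a std::vector; not the real class.
#include <cassert>
#include <cstring>
#include <vector>

static void PutString(std::vector<unsigned char>* out, const char* s) {
  int size = static_cast<int>(strlen(s));
  out->push_back('[');
  unsigned char len_bytes[sizeof(int)];
  memcpy(len_bytes, &size, sizeof(int));          // raw native-endian length
  out->insert(out->end(), len_bytes, len_bytes + sizeof(int));
  out->push_back(']');
  out->insert(out->end(), s, s + size);           // the characters themselves
}

int main() {
  std::vector<unsigned char> buf;
  PutString(&buf, "abc");
  assert(buf.size() == 1 + sizeof(int) + 1 + 3);  // '[' <int 3> ']' "abc"
  assert(buf[0] == '[' && buf[buf.size() - 3] == 'a');
  return 0;
}
```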
935 | |
936 class ReferenceUpdater: public ObjectVisitor { | |
937 public: | |
938 ReferenceUpdater(HeapObject* obj, Serializer* serializer) | |
939 : obj_address_(obj->address()), | |
940 serializer_(serializer), | |
941 reference_encoder_(serializer->reference_encoder_), | |
942 offsets_(8), | |
943 addresses_(8), | |
944 offsets_32_bit_(0), | |
945 data_32_bit_(0) { | |
946 } | |
947 | |
948 virtual void VisitPointers(Object** start, Object** end) { | |
949 for (Object** p = start; p < end; ++p) { | |
950 if ((*p)->IsHeapObject()) { | |
951 offsets_.Add( | |
952 static_cast<int>(reinterpret_cast<Address>(p) - obj_address_)); | |
953 Address a = serializer_->GetSavedAddress(HeapObject::cast(*p)); | |
954 addresses_.Add(a); | |
955 } | |
956 } | |
957 } | |
958 | |
959 virtual void VisitCodeTarget(RelocInfo* rinfo) { | |
960 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | |
961 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | |
962 Address encoded_target = serializer_->GetSavedAddress(target); | |
963 // All calls and jumps are to code objects that encode into 32 bits. | |
964 offsets_32_bit_.Add( | |
965 static_cast<int>(rinfo->target_address_address() - obj_address_)); | |
966 uint32_t small_target = | |
967 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(encoded_target)); | |
968 ASSERT(reinterpret_cast<uintptr_t>(encoded_target) == small_target); | |
969 data_32_bit_.Add(small_target); | |
970 } | |
971 | |
972 | |
973 virtual void VisitExternalReferences(Address* start, Address* end) { | |
974 for (Address* p = start; p < end; ++p) { | |
975 uint32_t code = reference_encoder_->Encode(*p); | |
976 CHECK(*p == NULL ? code == 0 : code != 0); | |
977 offsets_.Add( | |
978 static_cast<int>(reinterpret_cast<Address>(p) - obj_address_)); | |
979 addresses_.Add(reinterpret_cast<Address>(code)); | |
980 } | |
981 } | |
982 | |
983 virtual void VisitRuntimeEntry(RelocInfo* rinfo) { | |
984 Address target = rinfo->target_address(); | |
985 uint32_t encoding = reference_encoder_->Encode(target); | |
986 CHECK(target == NULL ? encoding == 0 : encoding != 0); | |
987 offsets_.Add( | |
988 static_cast<int>(rinfo->target_address_address() - obj_address_)); | |
989 addresses_.Add(reinterpret_cast<Address>(encoding)); | |
990 } | |
991 | |
992 void Update(Address start_address) { | |
993 for (int i = 0; i < offsets_.length(); i++) { | |
994 memcpy(start_address + offsets_[i], &addresses_[i], sizeof(Address)); | |
995 } | |
996 for (int i = 0; i < offsets_32_bit_.length(); i++) { | |
997 memcpy(start_address + offsets_32_bit_[i], &data_32_bit_[i], | |
998 sizeof(uint32_t)); | |
999 } | |
1000 } | |
1001 | |
1002 private: | |
1003 Address obj_address_; | |
1004 Serializer* serializer_; | |
1005 ExternalReferenceEncoder* reference_encoder_; | |
1006 List<int> offsets_; | |
1007 List<Address> addresses_; | |
1008 // Some updates are 32-bit even on a 64-bit platform. | |
1009 // We keep a separate list of them on 64-bit platforms. | |
1010 List<int> offsets_32_bit_; | |
1011 List<uint32_t> data_32_bit_; | |
1012 }; | |
1013 | |
1014 | |
1015 // Helper functions for a map of encoded heap object addresses. | |
1016 static uint32_t HeapObjectHash(HeapObject* key) { | |
1017 uint32_t low32bits = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key)); | |
1018 return low32bits >> 2; | |
1019 } | |
1020 | |
1021 | |
1022 static bool MatchHeapObject(void* key1, void* key2) { | |
1023 return key1 == key2; | |
1024 } | |
1025 | |
1026 | |
1027 Serializer::Serializer() | |
1028 : global_handles_(4), | |
1029 saved_addresses_(MatchHeapObject) { | |
1030 root_ = true; | |
1031 roots_ = 0; | |
1032 objects_ = 0; | |
1033 reference_encoder_ = NULL; | |
1034 writer_ = new SnapshotWriter(); | |
1035 for (int i = 0; i <= LAST_SPACE; i++) { | |
1036 allocator_[i] = new SimulatedHeapSpace(); | |
1037 } | |
1038 } | |
1039 | |
1040 | |
1041 Serializer::~Serializer() { | |
1042 for (int i = 0; i <= LAST_SPACE; i++) { | |
1043 delete allocator_[i]; | |
1044 } | |
1045 if (reference_encoder_) delete reference_encoder_; | |
1046 delete writer_; | |
1047 } | |
1048 | |
1049 | |
1050 bool Serializer::serialization_enabled_ = false; | 488 bool Serializer::serialization_enabled_ = false; |
1051 bool Serializer::too_late_to_enable_now_ = false; | 489 bool Serializer::too_late_to_enable_now_ = false; |
1052 | 490 |
1053 | 491 |
1054 #ifdef DEBUG | 492 Deserializer::Deserializer(SnapshotByteSource* source) |
1055 static const int kMaxTagLength = 32; | |
1056 | |
1057 void Serializer::Synchronize(const char* tag) { | |
1058 if (FLAG_debug_serialization) { | |
1059 int length = StrLength(tag); | |
1060 ASSERT(length <= kMaxTagLength); | |
1061 writer_->PutC('S'); | |
1062 writer_->PutInt(length); | |
1063 writer_->PutBytes(reinterpret_cast<const byte*>(tag), length); | |
1064 } | |
1065 } | |
1066 #endif | |
1067 | |
1068 | |
1069 void Serializer::InitializeAllocators() { | |
1070 for (int i = 0; i <= LAST_SPACE; i++) { | |
1071 allocator_[i]->InitEmptyHeap(static_cast<AllocationSpace>(i)); | |
1072 } | |
1073 } | |
1074 | |
1075 | |
1076 bool Serializer::IsVisited(HeapObject* obj) { | |
1077 HashMap::Entry* entry = | |
1078 saved_addresses_.Lookup(obj, HeapObjectHash(obj), false); | |
1079 return entry != NULL; | |
1080 } | |
1081 | |
1082 | |
1083 Address Serializer::GetSavedAddress(HeapObject* obj) { | |
1084 HashMap::Entry* entry = | |
1085 saved_addresses_.Lookup(obj, HeapObjectHash(obj), false); | |
1086 ASSERT(entry != NULL); | |
1087 return reinterpret_cast<Address>(entry->value); | |
1088 } | |
1089 | |
1090 | |
1091 void Serializer::SaveAddress(HeapObject* obj, Address addr) { | |
1092 HashMap::Entry* entry = | |
1093 saved_addresses_.Lookup(obj, HeapObjectHash(obj), true); | |
1094 entry->value = addr; | |
1095 } | |
1096 | |
1097 | |
1098 void Serializer::Serialize() { | |
1099 // No active threads. | |
1100 CHECK_EQ(NULL, ThreadState::FirstInUse()); | |
1101 // No active or weak handles. | |
1102 CHECK(HandleScopeImplementer::instance()->blocks()->is_empty()); | |
1103 CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles()); | |
1104 // We need a counter function during serialization to resolve the | |
1105 // references to counters in the code on the heap. | |
1106 CHECK(StatsTable::HasCounterFunction()); | |
1107 CHECK(enabled()); | |
1108 InitializeAllocators(); | |
1109 reference_encoder_ = new ExternalReferenceEncoder(); | |
1110 PutHeader(); | |
1111 Heap::IterateRoots(this, VISIT_ONLY_STRONG); | |
1112 PutLog(); | |
1113 PutContextStack(); | |
1114 Disable(); | |
1115 } | |
1116 | |
1117 | |
1118 void Serializer::Finalize(byte** str, int* len) { | |
1119 writer_->GetBytes(str, len); | |
1120 } | |
1121 | |
1122 | |
1123 // Serialize objects by writing them into the stream. | |
1124 | |
1125 void Serializer::VisitPointers(Object** start, Object** end) { | |
1126 bool root = root_; | |
1127 root_ = false; | |
1128 for (Object** p = start; p < end; ++p) { | |
1129 bool serialized; | |
1130 Address a = Encode(*p, &serialized); | |
1131 if (root) { | |
1132 roots_++; | |
1133 // If the object was not just serialized, | |
1134 // write its encoded address instead. | |
1135 if (!serialized) PutEncodedAddress(a); | |
1136 } | |
1137 } | |
1138 root_ = root; | |
1139 } | |
1140 | |
1141 | |
1142 void Serializer::VisitCodeTarget(RelocInfo* rinfo) { | |
1143 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | |
1144 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | |
1145 bool serialized; | |
1146 Encode(target, &serialized); | |
1147 } | |
1148 | |
1149 | |
1150 class GlobalHandlesRetriever: public ObjectVisitor { | |
1151 public: | |
1152 explicit GlobalHandlesRetriever(List<Object**>* handles) | |
1153 : global_handles_(handles) {} | |
1154 | |
1155 virtual void VisitPointers(Object** start, Object** end) { | |
1156 for (; start != end; ++start) { | |
1157 global_handles_->Add(start); | |
1158 } | |
1159 } | |
1160 | |
1161 private: | |
1162 List<Object**>* global_handles_; | |
1163 }; | |
1164 | |
1165 | |
1166 void Serializer::PutFlags() { | |
1167 writer_->PutC('F'); | |
1168 List<const char*>* argv = FlagList::argv(); | |
1169 writer_->PutInt(argv->length()); | |
1170 writer_->PutC('['); | |
1171 for (int i = 0; i < argv->length(); i++) { | |
1172 if (i > 0) writer_->PutC('|'); | |
1173 writer_->PutString((*argv)[i]); | |
1174 DeleteArray((*argv)[i]); | |
1175 } | |
1176 writer_->PutC(']'); | |
1177 flags_end_ = writer_->length(); | |
1178 delete argv; | |
1179 } | |
1180 | |
1181 | |
1182 void Serializer::PutHeader() { | |
1183 PutFlags(); | |
1184 writer_->PutC('D'); | |
1185 #ifdef DEBUG | |
1186 writer_->PutC(FLAG_debug_serialization ? '1' : '0'); | |
1187 #else | |
1188 writer_->PutC('0'); | |
1189 #endif | |
1190 #ifdef V8_NATIVE_REGEXP | |
1191 writer_->PutC('N'); | |
1192 #else // Interpreted regexp | |
1193 writer_->PutC('I'); | |
1194 #endif | |
1195 // Write sizes of paged memory spaces. Allocate extra space for the old | |
1196 // and code spaces, because objects in new space will be promoted to them. | |
1197 writer_->PutC('S'); | |
1198 writer_->PutC('['); | |
1199 writer_->PutInt(Heap::old_pointer_space()->Size() + | |
1200 Heap::new_space()->Size()); | |
1201 writer_->PutC('|'); | |
1202 writer_->PutInt(Heap::old_data_space()->Size() + Heap::new_space()->Size()); | |
1203 writer_->PutC('|'); | |
1204 writer_->PutInt(Heap::code_space()->Size() + Heap::new_space()->Size()); | |
1205 writer_->PutC('|'); | |
1206 writer_->PutInt(Heap::map_space()->Size()); | |
1207 writer_->PutC('|'); | |
1208 writer_->PutInt(Heap::cell_space()->Size()); | |
1209 writer_->PutC(']'); | |
1210 // Write global handles. | |
1211 writer_->PutC('G'); | |
1212 writer_->PutC('['); | |
1213 GlobalHandlesRetriever ghr(&global_handles_); | |
1214 GlobalHandles::IterateStrongRoots(&ghr); | |
1215 for (int i = 0; i < global_handles_.length(); i++) { | |
1216 writer_->PutC('N'); | |
1217 } | |
1218 writer_->PutC(']'); | |
1219 } | |
1220 | |
1221 | |
1222 void Serializer::PutLog() { | |
1223 #ifdef ENABLE_LOGGING_AND_PROFILING | |
1224 if (FLAG_log_code) { | |
1225 Logger::TearDown(); | |
1226 int pos = writer_->InsertC('L', flags_end_); | |
1227 bool exists; | |
1228 Vector<const char> log = ReadFile(FLAG_logfile, &exists); | |
1229 writer_->InsertString(log.start(), pos); | |
1230 log.Dispose(); | |
1231 } | |
1232 #endif | |
1233 } | |
1234 | |
1235 | |
1236 static int IndexOf(const List<Object**>& list, Object** element) { | |
1237 for (int i = 0; i < list.length(); i++) { | |
1238 if (list[i] == element) return i; | |
1239 } | |
1240 return -1; | |
1241 } | |
1242 | |
1243 | |
1244 void Serializer::PutGlobalHandleStack(const List<Handle<Object> >& stack) { | |
1245 writer_->PutC('['); | |
1246 writer_->PutInt(stack.length()); | |
1247 for (int i = stack.length() - 1; i >= 0; i--) { | |
1248 writer_->PutC('|'); | |
1249 int gh_index = IndexOf(global_handles_, stack[i].location()); | |
1250 CHECK_GE(gh_index, 0); | |
1251 writer_->PutInt(gh_index); | |
1252 } | |
1253 writer_->PutC(']'); | |
1254 } | |
1255 | |
1256 | |
1257 void Serializer::PutContextStack() { | |
1258 List<Context*> contexts(2); | |
1259 while (HandleScopeImplementer::instance()->HasSavedContexts()) { | |
1260 Context* context = | |
1261 HandleScopeImplementer::instance()->RestoreContext(); | |
1262 contexts.Add(context); | |
1263 } | |
1264 for (int i = contexts.length() - 1; i >= 0; i--) { | |
1265 HandleScopeImplementer::instance()->SaveContext(contexts[i]); | |
1266 } | |
1267 writer_->PutC('C'); | |
1268 writer_->PutC('['); | |
1269 writer_->PutInt(contexts.length()); | |
1270 if (!contexts.is_empty()) { | |
1271 Object** start = reinterpret_cast<Object**>(&contexts.first()); | |
1272 VisitPointers(start, start + contexts.length()); | |
1273 } | |
1274 writer_->PutC(']'); | |
1275 } | |
1276 | |
1277 void Serializer::PutEncodedAddress(Address addr) { | |
1278 writer_->PutC('P'); | |
1279 writer_->PutAddress(addr); | |
1280 } | |
1281 | |
1282 | |
1283 Address Serializer::Encode(Object* o, bool* serialized) { | |
1284 *serialized = false; | |
1285 if (o->IsSmi()) { | |
1286 return reinterpret_cast<Address>(o); | |
1287 } else { | |
1288 HeapObject* obj = HeapObject::cast(o); | |
1289 if (IsVisited(obj)) { | |
1290 return GetSavedAddress(obj); | |
1291 } else { | |
1292 // First visit: serialize the object. | |
1293 *serialized = true; | |
1294 return PutObject(obj); | |
1295 } | |
1296 } | |
1297 } | |
1298 | |
1299 | |
1300 Address Serializer::PutObject(HeapObject* obj) { | |
1301 Map* map = obj->map(); | |
1302 InstanceType type = map->instance_type(); | |
1303 int size = obj->SizeFromMap(map); | |
1304 | |
1305 // Simulate the allocation of obj to predict where it will be | |
1306 // allocated during deserialization. | |
1307 Address addr = Allocate(obj).Encode(); | |
1308 | |
1309 SaveAddress(obj, addr); | |
1310 | |
1311 if (type == CODE_TYPE) { | |
1312 LOG(CodeMoveEvent(obj->address(), addr)); | |
1313 } | |
1314 | |
1315 // Write out the object prologue: type, size, and simulated address of obj. | |
1316 writer_->PutC('['); | |
1317 CHECK_EQ(0, static_cast<int>(size & kObjectAlignmentMask)); | |
1318 writer_->PutInt(type); | |
1319 writer_->PutInt(size >> kObjectAlignmentBits); | |
1320 PutEncodedAddress(addr); // encodes AllocationSpace | |
1321 | |
1322 // Visit all the pointers in the object other than the map. This | |
1323 // will recursively serialize any as-yet-unvisited objects. | |
1324 obj->Iterate(this); | |
1325 | |
1326 // Mark end of recursively embedded objects, start of object body. | |
1327 writer_->PutC('|'); | |
1328 // Write out the raw contents of the object. No compression, but | |
1329 // fast to deserialize. | |
1330 writer_->PutBytes(obj->address(), size); | |
1331 // Update pointers and external references in the written object. | |
1332 ReferenceUpdater updater(obj, this); | |
1333 obj->Iterate(&updater); | |
1334 updater.Update(writer_->position() - size); | |
1335 | |
1336 #ifdef DEBUG | |
1337 if (FLAG_debug_serialization) { | |
1338 // Write out the object epilogue to catch synchronization errors. | |
1339 PutEncodedAddress(addr); | |
1340 writer_->PutC(']'); | |
1341 } | |
1342 #endif | |
1343 | |
1344 objects_++; | |
1345 return addr; | |
1346 } | |
1347 | |
1348 | |
1349 RelativeAddress Serializer::Allocate(HeapObject* obj) { | |
1350 // Find out which AllocationSpace 'obj' is in. | |
1351 AllocationSpace s; | |
1352 bool found = false; | |
1353 for (int i = FIRST_SPACE; !found && i <= LAST_SPACE; i++) { | |
1354 s = static_cast<AllocationSpace>(i); | |
1355 found = Heap::InSpace(obj, s); | |
1356 } | |
1357 CHECK(found); | |
1358 int size = obj->Size(); | |
1359 if (s == NEW_SPACE) { | |
1360 if (size > Heap::MaxObjectSizeInPagedSpace()) { | |
1361 s = LO_SPACE; | |
1362 } else { | |
1363 OldSpace* space = Heap::TargetSpace(obj); | |
1364 ASSERT(space == Heap::old_pointer_space() || | |
1365 space == Heap::old_data_space()); | |
1366 s = (space == Heap::old_pointer_space()) ? | |
1367 OLD_POINTER_SPACE : | |
1368 OLD_DATA_SPACE; | |
1369 } | |
1370 } | |
1371 GCTreatment gc_treatment = DataObject; | |
1372 if (obj->IsFixedArray()) gc_treatment = PointerObject; | |
1373 else if (obj->IsCode()) gc_treatment = CodeObject; | |
1374 return allocator_[s]->Allocate(size, gc_treatment); | |
1375 } | |
1376 | |
1377 | |
1378 //------------------------------------------------------------------------------ | |
1379 // Implementation of Deserializer | |
1380 | |
1381 | |
1382 static const int kInitArraySize = 32; | |
1383 | |
1384 | |
1385 Deserializer::Deserializer(const byte* str, int len) | |
1386 : reader_(str, len), | |
1387 map_pages_(kInitArraySize), | |
1388 cell_pages_(kInitArraySize), | |
1389 old_pointer_pages_(kInitArraySize), | |
1390 old_data_pages_(kInitArraySize), | |
1391 code_pages_(kInitArraySize), | |
1392 large_objects_(kInitArraySize), | |
1393 global_handles_(4) { | |
1394 root_ = true; | |
1395 roots_ = 0; | |
1396 objects_ = 0; | |
1397 reference_decoder_ = NULL; | |
1398 #ifdef DEBUG | |
1399 expect_debug_information_ = false; | |
1400 #endif | |
1401 } | |
1402 | |
1403 | |
1404 Deserializer::~Deserializer() { | |
1405 if (reference_decoder_) delete reference_decoder_; | |
1406 } | |
1407 | |
1408 | |
1409 void Deserializer::ExpectEncodedAddress(Address expected) { | |
1410 Address a = GetEncodedAddress(); | |
1411 USE(a); | |
1412 ASSERT(a == expected); | |
1413 } | |
1414 | |
1415 | |
1416 #ifdef DEBUG | |
1417 void Deserializer::Synchronize(const char* tag) { | |
1418 if (expect_debug_information_) { | |
1419 char buf[kMaxTagLength]; | |
1420 reader_.ExpectC('S'); | |
1421 int length = reader_.GetInt(); | |
1422 ASSERT(length <= kMaxTagLength); | |
1423 reader_.GetBytes(reinterpret_cast<Address>(buf), length); | |
1424 ASSERT_EQ(StrLength(tag), length); | |
1425 ASSERT(strncmp(tag, buf, length) == 0); | |
1426 } | |
1427 } | |
1428 #endif | |
1429 | |
1430 | |
1431 class NoGlobalHandlesChecker : public ObjectVisitor { | |
1432 public: | |
1433 virtual void VisitPointers(Object** start, Object** end) { | |
1434 ASSERT(false); | |
1435 } | |
1436 }; | |
1437 | |
1438 | |
1439 class GlobalHandleDestroyer : public ObjectVisitor { | |
1440 void VisitPointers(Object**start, Object**end) { | |
1441 while (start < end) { | |
1442 GlobalHandles::Destroy(start++); | |
1443 } | |
1444 } | |
1445 }; | |
1446 | |
1447 | |
1448 void Deserializer::Deserialize() { | |
1449 // No global handles. | |
1450 NoGlobalHandlesChecker checker; | |
1451 GlobalHandles::IterateStrongRoots(&checker); | |
1452 // No active threads. | |
1453 ASSERT_EQ(NULL, ThreadState::FirstInUse()); | |
1454 // No active handles. | |
1455 ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty()); | |
1456 reference_decoder_ = new ExternalReferenceDecoder(); | |
1457 // By setting linear allocation only, we forbid the use of free list | |
1458 // allocation which is not predicted by SimulatedAddress. | |
1459 GetHeader(); | |
1460 Heap::IterateRoots(this, VISIT_ONLY_STRONG); | |
1461 GetContextStack(); | |
1462 // Any global handles that have been set up by deserialization are leaked | |
1463 // since no one is keeping track of them. So we discard them now. | |
1464 GlobalHandleDestroyer destroyer; | |
1465 GlobalHandles::IterateStrongRoots(&destroyer); | |
1466 } | |
1467 | |
1468 | |
1469 void Deserializer::VisitPointers(Object** start, Object** end) { | |
1470 bool root = root_; | |
1471 root_ = false; | |
1472 for (Object** p = start; p < end; ++p) { | |
1473 if (root) { | |
1474 roots_++; | |
1475 // Read the next object or encoded | |
1476 // pointer from the stream. | |
1477 int c = reader_.GetC(); | |
1478 if (c == '[') { | |
1479 *p = GetObject(); // embedded object | |
1480 } else { | |
1481 ASSERT(c == 'P'); // pointer to previously serialized object | |
1482 *p = Resolve(reader_.GetAddress()); | |
1483 } | |
1484 } else { | |
1485 // A pointer internal to a HeapObject that we've already | |
1486 // read: resolve it to a true address (or Smi) | |
1487 *p = Resolve(reinterpret_cast<Address>(*p)); | |
1488 } | |
1489 } | |
1490 root_ = root; | |
1491 } | |
1492 | |
1493 | |
1494 void Deserializer::VisitCodeTarget(RelocInfo* rinfo) { | |
1495 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | |
1496 // On all platforms, the encoded code object address is only 32 bits. | |
1497 Address encoded_address = reinterpret_cast<Address>(Memory::uint32_at( | |
1498 reinterpret_cast<Address>(rinfo->target_object_address()))); | |
1499 Code* target_object = reinterpret_cast<Code*>(Resolve(encoded_address)); | |
1500 rinfo->set_target_address(target_object->instruction_start()); | |
1501 } | |
1502 | |
1503 | |
1504 void Deserializer::VisitExternalReferences(Address* start, Address* end) { | |
1505 for (Address* p = start; p < end; ++p) { | |
1506 uint32_t code = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(*p)); | |
1507 *p = reference_decoder_->Decode(code); | |
1508 } | |
1509 } | |
1510 | |
1511 | |
1512 void Deserializer::VisitRuntimeEntry(RelocInfo* rinfo) { | |
1513 uint32_t* pc = reinterpret_cast<uint32_t*>(rinfo->target_address_address()); | |
1514 uint32_t encoding = *pc; | |
1515 Address target = reference_decoder_->Decode(encoding); | |
1516 rinfo->set_target_address(target); | |
1517 } | |
1518 | |
1519 | |
1520 void Deserializer::GetFlags() { | |
1521 reader_.ExpectC('F'); | |
1522 int argc = reader_.GetInt() + 1; | |
1523 char** argv = NewArray<char*>(argc); | |
1524 reader_.ExpectC('['); | |
1525 for (int i = 1; i < argc; i++) { | |
1526 if (i > 1) reader_.ExpectC('|'); | |
1527 argv[i] = reader_.GetString(); | |
1528 } | |
1529 reader_.ExpectC(']'); | |
1530 has_log_ = false; | |
1531 for (int i = 1; i < argc; i++) { | |
1532 if (strcmp("--log_code", argv[i]) == 0) { | |
1533 has_log_ = true; | |
1534 } else if (strcmp("--nouse_ic", argv[i]) == 0) { | |
1535 FLAG_use_ic = false; | |
1536 } else if (strcmp("--debug_code", argv[i]) == 0) { | |
1537 FLAG_debug_code = true; | |
1538 } else if (strcmp("--nolazy", argv[i]) == 0) { | |
1539 FLAG_lazy = false; | |
1540 } | |
1541 DeleteArray(argv[i]); | |
1542 } | |
1543 | |
1544 DeleteArray(argv); | |
1545 } | |
1546 | |
1547 | |
1548 void Deserializer::GetLog() { | |
1549 if (has_log_) { | |
1550 reader_.ExpectC('L'); | |
1551 char* snapshot_log = reader_.GetString(); | |
1552 #ifdef ENABLE_LOGGING_AND_PROFILING | |
1553 if (FLAG_log_code) { | |
1554 LOG(Preamble(snapshot_log)); | |
1555 } | |
1556 #endif | |
1557 DeleteArray(snapshot_log); | |
1558 } | |
1559 } | |
1560 | |
1561 | |
1562 static void InitPagedSpace(PagedSpace* space, | |
1563 int capacity, | |
1564 List<Page*>* page_list) { | |
1565 if (!space->EnsureCapacity(capacity)) { | |
1566 V8::FatalProcessOutOfMemory("InitPagedSpace"); | |
1567 } | |
1568 PageIterator it(space, PageIterator::ALL_PAGES); | |
1569 while (it.has_next()) page_list->Add(it.next()); | |
1570 } | |
1571 | |
1572 | |
1573 void Deserializer::GetHeader() { | |
1574 reader_.ExpectC('D'); | |
1575 #ifdef DEBUG | |
1576 expect_debug_information_ = reader_.GetC() == '1'; | |
1577 #else | |
1578 // In release mode, don't attempt to read a snapshot containing | |
1579 // synchronization tags. | |
1580 if (reader_.GetC() != '0') FATAL("Snapshot contains synchronization tags."); | |
1581 #endif | |
1582 #ifdef V8_NATIVE_REGEXP | |
1583 reader_.ExpectC('N'); | |
1584 #else // Interpreted regexp. | |
1585 reader_.ExpectC('I'); | |
1586 #endif | |
1587 // Ensure sufficient capacity in paged memory spaces to avoid growth | |
1588 // during deserialization. | |
1589 reader_.ExpectC('S'); | |
1590 reader_.ExpectC('['); | |
1591 InitPagedSpace(Heap::old_pointer_space(), | |
1592 reader_.GetInt(), | |
1593 &old_pointer_pages_); | |
1594 reader_.ExpectC('|'); | |
1595 InitPagedSpace(Heap::old_data_space(), reader_.GetInt(), &old_data_pages_); | |
1596 reader_.ExpectC('|'); | |
1597 InitPagedSpace(Heap::code_space(), reader_.GetInt(), &code_pages_); | |
1598 reader_.ExpectC('|'); | |
1599 InitPagedSpace(Heap::map_space(), reader_.GetInt(), &map_pages_); | |
1600 reader_.ExpectC('|'); | |
1601 InitPagedSpace(Heap::cell_space(), reader_.GetInt(), &cell_pages_); | |
1602 reader_.ExpectC(']'); | |
1603 // Create placeholders for global handles, to be filled in later during | |
1604 // IterateRoots. | |
1605 reader_.ExpectC('G'); | |
1606 reader_.ExpectC('['); | |
1607 int c = reader_.GetC(); | |
1608 while (c != ']') { | |
1609 ASSERT(c == 'N'); | |
1610 global_handles_.Add(GlobalHandles::Create(NULL).location()); | |
1611 c = reader_.GetC(); | |
1612 } | |
1613 } | |
1614 | |
1615 | |
1616 void Deserializer::GetGlobalHandleStack(List<Handle<Object> >* stack) { | |
1617 reader_.ExpectC('['); | |
1618 int length = reader_.GetInt(); | |
1619 for (int i = 0; i < length; i++) { | |
1620 reader_.ExpectC('|'); | |
1621 int gh_index = reader_.GetInt(); | |
1622 stack->Add(global_handles_[gh_index]); | |
1623 } | |
1624 reader_.ExpectC(']'); | |
1625 } | |
1626 | |
1627 | |
1628 void Deserializer::GetContextStack() { | |
1629 reader_.ExpectC('C'); | |
1630 CHECK_EQ(reader_.GetC(), '['); | |
1631 int count = reader_.GetInt(); | |
1632 List<Context*> entered_contexts(count); | |
1633 if (count > 0) { | |
1634 Object** start = reinterpret_cast<Object**>(&entered_contexts.first()); | |
1635 VisitPointers(start, start + count); | |
1636 } | |
1637 reader_.ExpectC(']'); | |
1638 for (int i = 0; i < count; i++) { | |
1639 HandleScopeImplementer::instance()->SaveContext(entered_contexts[i]); | |
1640 } | |
1641 } | |
1642 | |
1643 | |
1644 Address Deserializer::GetEncodedAddress() { | |
1645 reader_.ExpectC('P'); | |
1646 return reader_.GetAddress(); | |
1647 } | |
1648 | |
1649 | |
1650 Object* Deserializer::GetObject() { | |
1651 // Read the prologue: type, size and encoded address. | |
1652 InstanceType type = static_cast<InstanceType>(reader_.GetInt()); | |
1653 int size = reader_.GetInt() << kObjectAlignmentBits; | |
1654 Address a = GetEncodedAddress(); | |
1655 | |
1656 // Get a raw object of the right size in the right space. | |
1657 AllocationSpace space = GetSpace(a); | |
1658 Object* o; | |
1659 if (IsLargeExecutableObject(a)) { | |
1660 o = Heap::lo_space()->AllocateRawCode(size); | |
1661 } else if (IsLargeFixedArray(a)) { | |
1662 o = Heap::lo_space()->AllocateRawFixedArray(size); | |
1663 } else { | |
1664 AllocationSpace retry_space = (space == NEW_SPACE) | |
1665 ? Heap::TargetSpaceId(type) | |
1666 : space; | |
1667 o = Heap::AllocateRaw(size, space, retry_space); | |
1668 } | |
1669 ASSERT(!o->IsFailure()); | |
1670 // Check that the simulation of heap allocation was correct. | |
1671 ASSERT(o == Resolve(a)); | |
1672 | |
1673 // Read any recursively embedded objects. | |
1674 int c = reader_.GetC(); | |
1675 while (c == '[') { | |
1676 GetObject(); | |
1677 c = reader_.GetC(); | |
1678 } | |
1679 ASSERT(c == '|'); | |
1680 | |
1681 HeapObject* obj = reinterpret_cast<HeapObject*>(o); | |
1682 // Read the uninterpreted contents of the object after the map | |
1683 reader_.GetBytes(obj->address(), size); | |
1684 #ifdef DEBUG | |
1685 if (expect_debug_information_) { | |
1686 // Read in the epilogue to check that we're still synchronized | |
1687 ExpectEncodedAddress(a); | |
1688 reader_.ExpectC(']'); | |
1689 } | |
1690 #endif | |
1691 | |
1692 // Resolve the encoded pointers we just read in. | |
1693 // Same as obj->Iterate(this), but doesn't rely on the map pointer being set. | |
1694 VisitPointer(reinterpret_cast<Object**>(obj->address())); | |
1695 obj->IterateBody(type, size, this); | |
1696 | |
1697 if (type == CODE_TYPE) { | |
1698 LOG(CodeMoveEvent(a, obj->address())); | |
1699 } | |
1700 objects_++; | |
1701 return o; | |
1702 } | |
1703 | |
1704 | |
1705 static inline Object* ResolvePaged(int page_index, | |
1706 int page_offset, | |
1707 PagedSpace* space, | |
1708 List<Page*>* page_list) { | |
1709 ASSERT(page_index < page_list->length()); | |
1710 Address address = (*page_list)[page_index]->OffsetToAddress(page_offset); | |
1711 return HeapObject::FromAddress(address); | |
1712 } | |
1713 | |
1714 | |
1715 template<typename T> | |
1716 void ConcatReversed(List<T>* target, const List<T>& source) { | |
1717 for (int i = source.length() - 1; i >= 0; i--) { | |
1718 target->Add(source[i]); | |
1719 } | |
1720 } | |
1721 | |
1722 | |
1723 Object* Deserializer::Resolve(Address encoded) { | |
1724 Object* o = reinterpret_cast<Object*>(encoded); | |
1725 if (o->IsSmi()) return o; | |
1726 | |
1727 // Encoded addresses of HeapObjects always have 'HeapObject' tags. | |
1728 ASSERT(o->IsHeapObject()); | |
1729 switch (GetSpace(encoded)) { | |
1730 // For Map space and Old space, we cache the known Pages in map_pages, | |
1731 // old_pointer_pages and old_data_pages. Even though MapSpace keeps a list | |
1732 // of page addresses, we don't rely on it since GetObject uses AllocateRaw, | |
1733 // and that appears not to update the page list. | |
1734 case MAP_SPACE: | |
1735 return ResolvePaged(PageIndex(encoded), PageOffset(encoded), | |
1736 Heap::map_space(), &map_pages_); | |
1737 case CELL_SPACE: | |
1738 return ResolvePaged(PageIndex(encoded), PageOffset(encoded), | |
1739 Heap::cell_space(), &cell_pages_); | |
1740 case OLD_POINTER_SPACE: | |
1741 return ResolvePaged(PageIndex(encoded), PageOffset(encoded), | |
1742 Heap::old_pointer_space(), &old_pointer_pages_); | |
1743 case OLD_DATA_SPACE: | |
1744 return ResolvePaged(PageIndex(encoded), PageOffset(encoded), | |
1745 Heap::old_data_space(), &old_data_pages_); | |
1746 case CODE_SPACE: | |
1747 return ResolvePaged(PageIndex(encoded), PageOffset(encoded), | |
1748 Heap::code_space(), &code_pages_); | |
1749 case NEW_SPACE: | |
1750 return HeapObject::FromAddress(Heap::NewSpaceStart() + | |
1751 NewSpaceOffset(encoded)); | |
1752 case LO_SPACE: | |
1753 // Cache the known large_objects, allocated one per 'page' | |
1754 int index = LargeObjectIndex(encoded); | |
1755 if (index >= large_objects_.length()) { | |
1756 int new_object_count = | |
1757 Heap::lo_space()->PageCount() - large_objects_.length(); | |
1758 List<Object*> new_objects(new_object_count); | |
1759 LargeObjectIterator it(Heap::lo_space()); | |
1760 for (int i = 0; i < new_object_count; i++) { | |
1761 new_objects.Add(it.next()); | |
1762 } | |
1763 #ifdef DEBUG | |
1764 for (int i = large_objects_.length() - 1; i >= 0; i--) { | |
1765 ASSERT(it.next() == large_objects_[i]); | |
1766 } | |
1767 #endif | |
1768 ConcatReversed(&large_objects_, new_objects); | |
1769 ASSERT(index < large_objects_.length()); | |
1770 } | |
1771 return large_objects_[index]; // s.page_offset() is ignored. | |
1772 } | |
1773 UNREACHABLE(); | |
1774 return NULL; | |
1775 } | |
1776 | |
1777 | |
1778 Deserializer2::Deserializer2(SnapshotByteSource* source) | |
1779 : source_(source), | 493 : source_(source), |
1780 external_reference_decoder_(NULL) { | 494 external_reference_decoder_(NULL) { |
1781 } | 495 } |
1782 | 496 |
1783 | 497 |
1784 // This routine both allocates a new object, and also keeps | 498 // This routine both allocates a new object, and also keeps |
1785 // track of where objects have been allocated so that we can | 499 // track of where objects have been allocated so that we can |
1786 // fix back references when deserializing. | 500 // fix back references when deserializing. |
1787 Address Deserializer2::Allocate(int space_index, Space* space, int size) { | 501 Address Deserializer::Allocate(int space_index, Space* space, int size) { |
1788 Address address; | 502 Address address; |
1789 if (!SpaceIsLarge(space_index)) { | 503 if (!SpaceIsLarge(space_index)) { |
1790 ASSERT(!SpaceIsPaged(space_index) || | 504 ASSERT(!SpaceIsPaged(space_index) || |
1791 size <= Page::kPageSize - Page::kObjectStartOffset); | 505 size <= Page::kPageSize - Page::kObjectStartOffset); |
1792 Object* new_allocation; | 506 Object* new_allocation; |
1793 if (space_index == NEW_SPACE) { | 507 if (space_index == NEW_SPACE) { |
1794 new_allocation = reinterpret_cast<NewSpace*>(space)->AllocateRaw(size); | 508 new_allocation = reinterpret_cast<NewSpace*>(space)->AllocateRaw(size); |
1795 } else { | 509 } else { |
1796 new_allocation = reinterpret_cast<PagedSpace*>(space)->AllocateRaw(size); | 510 new_allocation = reinterpret_cast<PagedSpace*>(space)->AllocateRaw(size); |
1797 } | 511 } |
(...skipping 20 matching lines...) | |
1818 address = new_object->address(); | 532 address = new_object->address(); |
1819 high_water_[LO_SPACE] = address + size; | 533 high_water_[LO_SPACE] = address + size; |
1820 } | 534 } |
1821 last_object_address_ = address; | 535 last_object_address_ = address; |
1822 return address; | 536 return address; |
1823 } | 537 } |
1824 | 538 |
1825 | 539 |
1826 // This returns the address of an object that has been described in the | 540 // This returns the address of an object that has been described in the |
1827 // snapshot as being offset bytes back in a particular space. | 541 // snapshot as being offset bytes back in a particular space. |
1828 HeapObject* Deserializer2::GetAddressFromEnd(int space) { | 542 HeapObject* Deserializer::GetAddressFromEnd(int space) { |
1829 int offset = source_->GetInt(); | 543 int offset = source_->GetInt(); |
1830 ASSERT(!SpaceIsLarge(space)); | 544 ASSERT(!SpaceIsLarge(space)); |
1831 offset <<= kObjectAlignmentBits; | 545 offset <<= kObjectAlignmentBits; |
1832 return HeapObject::FromAddress(high_water_[space] - offset); | 546 return HeapObject::FromAddress(high_water_[space] - offset); |
1833 } | 547 } |
1834 | 548 |
1835 | 549 |
1836 // This returns the address of an object that has been described in the | 550 // This returns the address of an object that has been described in the |
1837 // snapshot as being offset bytes into a particular space. | 551 // snapshot as being offset bytes into a particular space. |
1838 HeapObject* Deserializer2::GetAddressFromStart(int space) { | 552 HeapObject* Deserializer::GetAddressFromStart(int space) { |
1839 int offset = source_->GetInt(); | 553 int offset = source_->GetInt(); |
1840 if (SpaceIsLarge(space)) { | 554 if (SpaceIsLarge(space)) { |
1841 // Large spaces have one object per 'page'. | 555 // Large spaces have one object per 'page'. |
1842 return HeapObject::FromAddress(pages_[LO_SPACE][offset]); | 556 return HeapObject::FromAddress(pages_[LO_SPACE][offset]); |
1843 } | 557 } |
1844 offset <<= kObjectAlignmentBits; | 558 offset <<= kObjectAlignmentBits; |
1845 if (space == NEW_SPACE) { | 559 if (space == NEW_SPACE) { |
1846 // New space has only one space - numbered 0. | 560 // New space has only one space - numbered 0. |
1847 return HeapObject::FromAddress(pages_[space][0] + offset); | 561 return HeapObject::FromAddress(pages_[space][0] + offset); |
1848 } | 562 } |
1849 ASSERT(SpaceIsPaged(space)); | 563 ASSERT(SpaceIsPaged(space)); |
1850 int page_of_pointee = offset >> Page::kPageSizeBits; | 564 int page_of_pointee = offset >> Page::kPageSizeBits; |
1851 Address object_address = pages_[space][page_of_pointee] + | 565 Address object_address = pages_[space][page_of_pointee] + |
1852 (offset & Page::kPageAlignmentMask); | 566 (offset & Page::kPageAlignmentMask); |
1853 return HeapObject::FromAddress(object_address); | 567 return HeapObject::FromAddress(object_address); |
1854 } | 568 } |
1855 | 569 |
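In the paged case above, the word-aligned offset splits into a page index (high bits) and an offset within that page (low bits). A worked example, assuming purely for illustration a page size of 8 KB, i.e. kPageSizeBits == 13; the real constant is defined in the heap's spaces code.

```cpp
// Worked example of the page/offset split in GetAddressFromStart,
// assuming 8 KB pages (kPageSizeBits == 13) purely for illustration.
#include <cassert>
#include <cstdint>

int main() {
  const int kPageSizeBits = 13;                            // assumed: 8 KB pages
  const uint32_t kPageAlignmentMask = (1u << kPageSizeBits) - 1;

  uint32_t offset = 0x5010;                                // byte offset into the space
  int page_of_pointee = offset >> kPageSizeBits;           // 0x5010 / 8K  -> page 2
  uint32_t in_page_offset = offset & kPageAlignmentMask;   // 0x5010 % 8K  -> 0x1010

  assert(page_of_pointee == 2);
  assert(in_page_offset == 0x1010);
  return 0;
}
```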
1856 | 570 |
1857 void Deserializer2::Deserialize() { | 571 void Deserializer::Deserialize() { |
1858 // Don't GC while deserializing - just expand the heap. | 572 // Don't GC while deserializing - just expand the heap. |
1859 AlwaysAllocateScope always_allocate; | 573 AlwaysAllocateScope always_allocate; |
1860 // Don't use the free lists while deserializing. | 574 // Don't use the free lists while deserializing. |
1861 LinearAllocationScope allocate_linearly; | 575 LinearAllocationScope allocate_linearly; |
1862 // No active threads. | 576 // No active threads. |
1863 ASSERT_EQ(NULL, ThreadState::FirstInUse()); | 577 ASSERT_EQ(NULL, ThreadState::FirstInUse()); |
1864 // No active handles. | 578 // No active handles. |
1865 ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty()); | 579 ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty()); |
1866 ASSERT(external_reference_decoder_ == NULL); | 580 ASSERT(external_reference_decoder_ == NULL); |
1867 external_reference_decoder_ = new ExternalReferenceDecoder(); | 581 external_reference_decoder_ = new ExternalReferenceDecoder(); |
1868 Heap::IterateRoots(this, VISIT_ONLY_STRONG); | 582 Heap::IterateRoots(this, VISIT_ONLY_STRONG); |
1869 ASSERT(source_->AtEOF()); | 583 ASSERT(source_->AtEOF()); |
1870 delete external_reference_decoder_; | 584 delete external_reference_decoder_; |
1871 external_reference_decoder_ = NULL; | 585 external_reference_decoder_ = NULL; |
1872 } | 586 } |
1873 | 587 |
1874 | 588 |
1875 // This is called on the roots. It is the driver of the deserialization | 589 // This is called on the roots. It is the driver of the deserialization |
1876 // process. It is also called on the body of each function. | 590 // process. It is also called on the body of each function. |
1877 void Deserializer2::VisitPointers(Object** start, Object** end) { | 591 void Deserializer::VisitPointers(Object** start, Object** end) { |
1878 // The space must be new space. Any other space would cause ReadChunk to try | 592 // The space must be new space. Any other space would cause ReadChunk to try |
1879 // to update the remembered set using NULL as the address. | 593 // to update the remembered set using NULL as the address. |
1880 ReadChunk(start, end, NEW_SPACE, NULL); | 594 ReadChunk(start, end, NEW_SPACE, NULL); |
1881 } | 595 } |
1882 | 596 |
1883 | 597 |
1884 // This routine writes the new object into the pointer provided and then | 598 // This routine writes the new object into the pointer provided and then |
1885 // returns true if the new object was in young space and false otherwise. | 599 // returns true if the new object was in young space and false otherwise. |
1886 // The reason for this strange interface is that otherwise the object is | 600 // The reason for this strange interface is that otherwise the object is |
1887 // written very late, which means the ByteArray map is not set up by the | 601 // written very late, which means the ByteArray map is not set up by the |
1888 // time we need to use it to mark the space at the end of a page free (by | 602 // time we need to use it to mark the space at the end of a page free (by |
1889 // making it into a byte array). | 603 // making it into a byte array). |
1890 void Deserializer2::ReadObject(int space_number, | 604 void Deserializer::ReadObject(int space_number, |
1891 Space* space, | 605 Space* space, |
1892 Object** write_back) { | 606 Object** write_back) { |
1893 int size = source_->GetInt() << kObjectAlignmentBits; | 607 int size = source_->GetInt() << kObjectAlignmentBits; |
1894 Address address = Allocate(space_number, space, size); | 608 Address address = Allocate(space_number, space, size); |
1895 *write_back = HeapObject::FromAddress(address); | 609 *write_back = HeapObject::FromAddress(address); |
1896 Object** current = reinterpret_cast<Object**>(address); | 610 Object** current = reinterpret_cast<Object**>(address); |
1897 Object** limit = current + (size >> kPointerSizeLog2); | 611 Object** limit = current + (size >> kPointerSizeLog2); |
1898 ReadChunk(current, limit, space_number, address); | 612 ReadChunk(current, limit, space_number, address); |
1899 } | 613 } |
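
Note that the size travels in words: ObjectSerializer::Serialize below writes size >> kObjectAlignmentBits and the GetInt() call above shifts it back, which keeps the variable-length integers short because heap object sizes are always a multiple of the alignment. A tiny round-trip sketch, assuming the 2-bit alignment of a 32-bit build:

#include <cassert>

// Assumed constant for the sketch: with 4-byte object alignment the two
// low bits of any object size are zero, so sizes can travel as word counts.
const int kObjectAlignmentBits = 2;

int main() {
  int size_in_bytes = 36;                                  // some object size
  int wire_value = size_in_bytes >> kObjectAlignmentBits;  // what the serializer puts
  int decoded = wire_value << kObjectAlignmentBits;        // what ReadObject recovers
  assert(wire_value == 9);            // 9 words on the wire instead of 36 bytes
  assert(decoded == size_in_bytes);
  (void)wire_value;
  (void)decoded;
  return 0;
}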
1900 | 614 |
1901 | 615 |
1902 #define ONE_CASE_PER_SPACE(base_tag) \ | 616 #define ONE_CASE_PER_SPACE(base_tag) \ |
1903 case (base_tag) + NEW_SPACE: /* NOLINT */ \ | 617 case (base_tag) + NEW_SPACE: /* NOLINT */ \ |
1904 case (base_tag) + OLD_POINTER_SPACE: /* NOLINT */ \ | 618 case (base_tag) + OLD_POINTER_SPACE: /* NOLINT */ \ |
1905 case (base_tag) + OLD_DATA_SPACE: /* NOLINT */ \ | 619 case (base_tag) + OLD_DATA_SPACE: /* NOLINT */ \ |
1906 case (base_tag) + CODE_SPACE: /* NOLINT */ \ | 620 case (base_tag) + CODE_SPACE: /* NOLINT */ \ |
1907 case (base_tag) + MAP_SPACE: /* NOLINT */ \ | 621 case (base_tag) + MAP_SPACE: /* NOLINT */ \ |
1908 case (base_tag) + CELL_SPACE: /* NOLINT */ \ | 622 case (base_tag) + CELL_SPACE: /* NOLINT */ \ |
1909 case (base_tag) + kLargeData: /* NOLINT */ \ | 623 case (base_tag) + kLargeData: /* NOLINT */ \ |
1910 case (base_tag) + kLargeCode: /* NOLINT */ \ | 624 case (base_tag) + kLargeCode: /* NOLINT */ \ |
1911 case (base_tag) + kLargeFixedArray: /* NOLINT */ | 625 case (base_tag) + kLargeFixedArray: /* NOLINT */ |
1912 | 626 |
1913 | 627 |
1914 void Deserializer2::ReadChunk(Object** current, | 628 void Deserializer::ReadChunk(Object** current, |
1915 Object** limit, | 629 Object** limit, |
1916 int space, | 630 int space, |
1917 Address address) { | 631 Address address) { |
1918 while (current < limit) { | 632 while (current < limit) { |
1919 int data = source_->Get(); | 633 int data = source_->Get(); |
1920 switch (data) { | 634 switch (data) { |
1921 #define RAW_CASE(index, size) \ | 635 #define RAW_CASE(index, size) \ |
1922 case RAW_DATA_SERIALIZATION + index: { \ | 636 case RAW_DATA_SERIALIZATION + index: { \ |
1923 byte* raw_data_out = reinterpret_cast<byte*>(current); \ | 637 byte* raw_data_out = reinterpret_cast<byte*>(current); \ |
1924 source_->CopyRaw(raw_data_out, size); \ | 638 source_->CopyRaw(raw_data_out, size); \ |
1925 current = reinterpret_cast<Object**>(raw_data_out + size); \ | 639 current = reinterpret_cast<Object**>(raw_data_out + size); \ |
1926 break; \ | 640 break; \ |
1927 } | 641 } |
(...skipping 166 matching lines...)
2094 for (int shift = max_shift; shift > 0; shift -= 7) { | 808 for (int shift = max_shift; shift > 0; shift -= 7) { |
2095 if (integer >= static_cast<uintptr_t>(1u) << shift) { | 809 if (integer >= static_cast<uintptr_t>(1u) << shift) { |
2096 Put(((integer >> shift) & 0x7f) | 0x80, "IntPart"); | 810 Put(((integer >> shift) & 0x7f) | 0x80, "IntPart"); |
2097 } | 811 } |
2098 } | 812 } |
2099 PutSection(integer & 0x7f, "IntLastPart"); | 813 PutSection(integer & 0x7f, "IntLastPart"); |
2100 } | 814 } |
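
The loop above produces a variable-length integer: seven payload bits per byte, most significant group first, with the 0x80 bit set on every byte except the last (written by PutSection). The matching decoder lives in the elided part of the file; the following is a self-contained sketch of the scheme as visible here, where the max_shift computation is an assumption since it falls in the skipped lines:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Encoder: emit the integer most-significant 7-bit group first; every byte
// except the last carries the 0x80 continuation flag, as in PutInt above.
static void EncodeVarInt(uintptr_t value, std::vector<uint8_t>* out) {
  const int max_shift = (static_cast<int>(sizeof(uintptr_t)) * 8 / 7) * 7;
  for (int shift = max_shift; shift > 0; shift -= 7) {
    if (value >= static_cast<uintptr_t>(1u) << shift) {
      out->push_back(static_cast<uint8_t>(((value >> shift) & 0x7f) | 0x80));
    }
  }
  out->push_back(static_cast<uint8_t>(value & 0x7f));
}

// Decoder: accumulate 7 bits per byte until a byte without the flag appears.
static uintptr_t DecodeVarInt(const std::vector<uint8_t>& in, size_t* pos) {
  uintptr_t result = 0;
  uint8_t b;
  do {
    b = in[(*pos)++];
    result = (result << 7) | (b & 0x7f);
  } while ((b & 0x80) != 0);
  return result;
}

int main() {
  std::vector<uint8_t> buffer;
  EncodeVarInt(300, &buffer);        // 300 -> 0x82 0x2c (two 7-bit groups)
  assert(buffer.size() == 2);
  size_t pos = 0;
  uintptr_t decoded = DecodeVarInt(buffer, &pos);
  assert(decoded == 300);
  (void)decoded;
  return 0;
}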
2101 | 815 |
2102 #ifdef DEBUG | 816 #ifdef DEBUG |
2103 | 817 |
2104 void Deserializer2::Synchronize(const char* tag) { | 818 void Deserializer::Synchronize(const char* tag) { |
2105 int data = source_->Get(); | 819 int data = source_->Get(); |
2106 // If this assert fails, it indicates a mismatch between | 820 // If this assert fails, it indicates a mismatch between |
2107 // the number of GC roots when serializing and deserializing. | 821 // the number of GC roots when serializing and deserializing. |
2108 ASSERT(data == SYNCHRONIZE); | 822 CHECK(data == SYNCHRONIZE); |
Christian Plesner Hansen
2009/11/13 13:43:36
Why is this not an ASSERT? Also CHECK_EQ?
2109 do { | 823 do { |
2110 int character = source_->Get(); | 824 int character = source_->Get(); |
2111 if (character == 0) break; | 825 if (character == 0) break; |
2112 if (FLAG_debug_serialization) { | 826 if (FLAG_debug_serialization) { |
2113 PrintF("%c", character); | 827 PrintF("%c", character); |
2114 } | 828 } |
2115 } while (true); | 829 } while (true); |
2116 if (FLAG_debug_serialization) { | 830 if (FLAG_debug_serialization) { |
2117 PrintF("\n"); | 831 PrintF("\n"); |
2118 } | 832 } |
2119 } | 833 } |
2120 | 834 |
2121 | 835 |
2122 void Serializer2::Synchronize(const char* tag) { | 836 void Serializer::Synchronize(const char* tag) { |
2123 sink_->Put(SYNCHRONIZE, tag); | 837 sink_->Put(SYNCHRONIZE, tag); |
2124 int character; | 838 int character; |
2125 do { | 839 do { |
2126 character = *tag++; | 840 character = *tag++; |
2127 sink_->PutSection(character, "TagCharacter"); | 841 sink_->PutSection(character, "TagCharacter"); |
2128 } while (character != 0); | 842 } while (character != 0); |
2129 } | 843 } |
2130 | 844 |
2131 #endif | 845 #endif |
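
Between them, the two Synchronize methods define the debug-only framing used to catch root-count mismatches: the serializer emits a SYNCHRONIZE opcode followed by the NUL-terminated tag string, and the deserializer checks the opcode and consumes characters up to the terminating zero. A minimal sketch of that byte layout over plain buffers; the opcode value and the tag text below are placeholders, not the real snapshot constants:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

const uint8_t kSynchronize = 0xf0;  // placeholder opcode value

// Writer: the opcode, then every character of the tag including the final 0,
// mirroring Serializer::Synchronize above.
static void WriteSyncTag(std::vector<uint8_t>* out, const char* tag) {
  out->push_back(kSynchronize);
  int character;
  do {
    character = static_cast<unsigned char>(*tag++);
    out->push_back(static_cast<uint8_t>(character));
  } while (character != 0);
}

// Reader: verify the opcode, then collect characters up to the 0,
// mirroring Deserializer::Synchronize above.
static std::string ReadSyncTag(const std::vector<uint8_t>& in, size_t* pos) {
  uint8_t opcode = in[(*pos)++];
  assert(opcode == kSynchronize);
  (void)opcode;
  std::string tag;
  while (uint8_t c = in[(*pos)++]) tag.push_back(static_cast<char>(c));
  return tag;
}

int main() {
  std::vector<uint8_t> buffer;
  WriteSyncTag(&buffer, "roots");          // illustrative tag text
  size_t pos = 0;
  std::string tag = ReadSyncTag(buffer, &pos);
  assert(tag == "roots");
  assert(pos == buffer.size());            // opcode + 5 characters + terminator
  (void)tag;
  return 0;
}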
2132 | 846 |
2133 Serializer2::Serializer2(SnapshotByteSink* sink) | 847 Serializer::Serializer(SnapshotByteSink* sink) |
2134 : sink_(sink), | 848 : sink_(sink), |
2135 current_root_index_(0), | 849 current_root_index_(0), |
2136 external_reference_encoder_(NULL) { | 850 external_reference_encoder_(NULL) { |
2137 for (int i = 0; i <= LAST_SPACE; i++) { | 851 for (int i = 0; i <= LAST_SPACE; i++) { |
2138 fullness_[i] = 0; | 852 fullness_[i] = 0; |
2139 } | 853 } |
2140 } | 854 } |
2141 | 855 |
2142 | 856 |
2143 void Serializer2::Serialize() { | 857 void Serializer::Serialize() { |
2144 // No active threads. | 858 // No active threads. |
2145 CHECK_EQ(NULL, ThreadState::FirstInUse()); | 859 CHECK_EQ(NULL, ThreadState::FirstInUse()); |
2146 // No active or weak handles. | 860 // No active or weak handles. |
2147 CHECK(HandleScopeImplementer::instance()->blocks()->is_empty()); | 861 CHECK(HandleScopeImplementer::instance()->blocks()->is_empty()); |
2148 CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles()); | 862 CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles()); |
2149 ASSERT(external_reference_encoder_ == NULL); | 863 ASSERT(external_reference_encoder_ == NULL); |
2150 external_reference_encoder_ = new ExternalReferenceEncoder(); | 864 external_reference_encoder_ = new ExternalReferenceEncoder(); |
2151 Heap::IterateRoots(this, VISIT_ONLY_STRONG); | 865 Heap::IterateRoots(this, VISIT_ONLY_STRONG); |
2152 delete external_reference_encoder_; | 866 delete external_reference_encoder_; |
2153 external_reference_encoder_ = NULL; | 867 external_reference_encoder_ = NULL; |
2154 } | 868 } |
2155 | 869 |
2156 | 870 |
2157 void Serializer2::VisitPointers(Object** start, Object** end) { | 871 void Serializer::VisitPointers(Object** start, Object** end) { |
2158 for (Object** current = start; current < end; current++) { | 872 for (Object** current = start; current < end; current++) { |
2159 if ((*current)->IsSmi()) { | 873 if ((*current)->IsSmi()) { |
2160 sink_->Put(RAW_DATA_SERIALIZATION, "RawData"); | 874 sink_->Put(RAW_DATA_SERIALIZATION, "RawData"); |
2161 sink_->PutInt(kPointerSize, "length"); | 875 sink_->PutInt(kPointerSize, "length"); |
2162 for (int i = 0; i < kPointerSize; i++) { | 876 for (int i = 0; i < kPointerSize; i++) { |
2163 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte"); | 877 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte"); |
2164 } | 878 } |
2165 } else { | 879 } else { |
2166 SerializeObject(*current, TAGGED_REPRESENTATION); | 880 SerializeObject(*current, TAGGED_REPRESENTATION); |
2167 } | 881 } |
2168 } | 882 } |
2169 } | 883 } |
2170 | 884 |
2171 | 885 |
2172 void Serializer2::SerializeObject( | 886 void Serializer::SerializeObject( |
2173 Object* o, | 887 Object* o, |
2174 ReferenceRepresentation reference_representation) { | 888 ReferenceRepresentation reference_representation) { |
2175 ASSERT(o->IsHeapObject()); | 889 ASSERT(o->IsHeapObject()); |
2176 HeapObject* heap_object = HeapObject::cast(o); | 890 HeapObject* heap_object = HeapObject::cast(o); |
2177 MapWord map_word = heap_object->map_word(); | 891 MapWord map_word = heap_object->map_word(); |
2178 if (map_word.IsSerializationAddress()) { | 892 if (map_word.IsSerializationAddress()) { |
2179 int space = SpaceOfAlreadySerializedObject(heap_object); | 893 int space = SpaceOfAlreadySerializedObject(heap_object); |
2180 int address = map_word.ToSerializationAddress(); | 894 int address = map_word.ToSerializationAddress(); |
2181 int offset = CurrentAllocationAddress(space) - address; | 895 int offset = CurrentAllocationAddress(space) - address; |
2182 bool from_start = true; | 896 bool from_start = true; |
(...skipping 43 matching lines...)
2226 ObjectSerializer serializer(this, | 940 ObjectSerializer serializer(this, |
2227 heap_object, | 941 heap_object, |
2228 sink_, | 942 sink_, |
2229 reference_representation); | 943 reference_representation); |
2230 serializer.Serialize(); | 944 serializer.Serialize(); |
2231 } | 945 } |
2232 } | 946 } |
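
When map_word.IsSerializationAddress() is true the object has already been emitted: the code computes how far back from the current allocation point in that space it lives and, in the elided lines, writes a back reference instead of serializing the object a second time; only objects seen for the first time fall through to ObjectSerializer::Serialize. A simplified model of that bookkeeping, using a side table keyed by object pointer where the real serializer stores the serialization address in the object's map word (class and method names are invented for the sketch):

#include <cassert>
#include <unordered_map>

// Simplified back-reference table for one space.
class BackReferenceTable {
 public:
  // Returns true and fills |*offset| if |object| was serialized before.
  // |current_allocation| is the simulated allocation address in the space.
  bool Lookup(const void* object, int current_allocation, int* offset) const {
    auto it = addresses_.find(object);
    if (it == addresses_.end()) return false;
    *offset = current_allocation - it->second;  // distance back from here
    return true;
  }

  // Records where |object| was placed the first time it was serialized.
  void Record(const void* object, int serialization_address) {
    addresses_[object] = serialization_address;
  }

 private:
  std::unordered_map<const void*, int> addresses_;
};

int main() {
  BackReferenceTable table;
  int dummy = 0;
  table.Record(&dummy, 64);   // first encounter: object placed at offset 64
  int offset = 0;
  bool found = table.Lookup(&dummy, 256, &offset);
  assert(found && offset == 192);  // back reference: 256 - 64
  (void)found;
  return 0;
}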
2233 | 947 |
2234 | 948 |
2235 | 949 |
2236 void Serializer2::ObjectSerializer::Serialize() { | 950 void Serializer::ObjectSerializer::Serialize() { |
2237 int space = Serializer2::SpaceOfObject(object_); | 951 int space = Serializer::SpaceOfObject(object_); |
2238 int size = object_->Size(); | 952 int size = object_->Size(); |
2239 | 953 |
2240 if (reference_representation_ == TAGGED_REPRESENTATION) { | 954 if (reference_representation_ == TAGGED_REPRESENTATION) { |
2241 sink_->Put(OBJECT_SERIALIZATION + space, "ObjectSerialization"); | 955 sink_->Put(OBJECT_SERIALIZATION + space, "ObjectSerialization"); |
2242 } else { | 956 } else { |
2243 ASSERT(reference_representation_ == CODE_TARGET_REPRESENTATION); | 957 ASSERT(reference_representation_ == CODE_TARGET_REPRESENTATION); |
2244 sink_->Put(CODE_OBJECT_SERIALIZATION + space, "ObjectSerialization"); | 958 sink_->Put(CODE_OBJECT_SERIALIZATION + space, "ObjectSerialization"); |
2245 } | 959 } |
2246 sink_->PutInt(size >> kObjectAlignmentBits, "Size in words"); | 960 sink_->PutInt(size >> kObjectAlignmentBits, "Size in words"); |
2247 | 961 |
(...skipping 12 matching lines...)
2260 serializer_->SerializeObject(map, TAGGED_REPRESENTATION); | 974 serializer_->SerializeObject(map, TAGGED_REPRESENTATION); |
2261 | 975 |
2262 // Serialize the rest of the object. | 976 // Serialize the rest of the object. |
2263 ASSERT(bytes_processed_so_far_ == 0); | 977 ASSERT(bytes_processed_so_far_ == 0); |
2264 bytes_processed_so_far_ = kPointerSize; | 978 bytes_processed_so_far_ = kPointerSize; |
2265 object_->IterateBody(map->instance_type(), size, this); | 979 object_->IterateBody(map->instance_type(), size, this); |
2266 OutputRawData(object_->address() + size); | 980 OutputRawData(object_->address() + size); |
2267 } | 981 } |
2268 | 982 |
2269 | 983 |
2270 void Serializer2::ObjectSerializer::VisitPointers(Object** start, | 984 void Serializer::ObjectSerializer::VisitPointers(Object** start, |
2271 Object** end) { | 985 Object** end) { |
2272 Object** current = start; | 986 Object** current = start; |
2273 while (current < end) { | 987 while (current < end) { |
2274 while (current < end && (*current)->IsSmi()) current++; | 988 while (current < end && (*current)->IsSmi()) current++; |
2275 if (current < end) OutputRawData(reinterpret_cast<Address>(current)); | 989 if (current < end) OutputRawData(reinterpret_cast<Address>(current)); |
2276 | 990 |
2277 while (current < end && !(*current)->IsSmi()) { | 991 while (current < end && !(*current)->IsSmi()) { |
2278 serializer_->SerializeObject(*current, TAGGED_REPRESENTATION); | 992 serializer_->SerializeObject(*current, TAGGED_REPRESENTATION); |
2279 bytes_processed_so_far_ += kPointerSize; | 993 bytes_processed_so_far_ += kPointerSize; |
2280 current++; | 994 current++; |
2281 } | 995 } |
2282 } | 996 } |
2283 } | 997 } |
2284 | 998 |
2285 | 999 |
2286 void Serializer2::ObjectSerializer::VisitExternalReferences(Address* start, | 1000 void Serializer::ObjectSerializer::VisitExternalReferences(Address* start, |
2287 Address* end) { | 1001 Address* end) { |
2288 Address references_start = reinterpret_cast<Address>(start); | 1002 Address references_start = reinterpret_cast<Address>(start); |
2289 OutputRawData(references_start); | 1003 OutputRawData(references_start); |
2290 | 1004 |
2291 for (Address* current = start; current < end; current++) { | 1005 for (Address* current = start; current < end; current++) { |
2292 sink_->Put(EXTERNAL_REFERENCE_SERIALIZATION, "ExternalReference"); | 1006 sink_->Put(EXTERNAL_REFERENCE_SERIALIZATION, "ExternalReference"); |
2293 int reference_id = serializer_->EncodeExternalReference(*current); | 1007 int reference_id = serializer_->EncodeExternalReference(*current); |
2294 sink_->PutInt(reference_id, "reference id"); | 1008 sink_->PutInt(reference_id, "reference id"); |
2295 } | 1009 } |
2296 bytes_processed_so_far_ += static_cast<int>((end - start) * kPointerSize); | 1010 bytes_processed_so_far_ += static_cast<int>((end - start) * kPointerSize); |
2297 } | 1011 } |
2298 | 1012 |
2299 | 1013 |
2300 void Serializer2::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) { | 1014 void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) { |
2301 Address target_start = rinfo->target_address_address(); | 1015 Address target_start = rinfo->target_address_address(); |
2302 OutputRawData(target_start); | 1016 OutputRawData(target_start); |
2303 Address target = rinfo->target_address(); | 1017 Address target = rinfo->target_address(); |
2304 uint32_t encoding = serializer_->EncodeExternalReference(target); | 1018 uint32_t encoding = serializer_->EncodeExternalReference(target); |
2305 CHECK(target == NULL ? encoding == 0 : encoding != 0); | 1019 CHECK(target == NULL ? encoding == 0 : encoding != 0); |
2306 sink_->Put(EXTERNAL_BRANCH_TARGET_SERIALIZATION, "ExternalReference"); | 1020 sink_->Put(EXTERNAL_BRANCH_TARGET_SERIALIZATION, "ExternalReference"); |
2307 sink_->PutInt(encoding, "reference id"); | 1021 sink_->PutInt(encoding, "reference id"); |
2308 bytes_processed_so_far_ += Assembler::kExternalTargetSize; | 1022 bytes_processed_so_far_ += Assembler::kExternalTargetSize; |
2309 } | 1023 } |
2310 | 1024 |
2311 | 1025 |
2312 void Serializer2::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) { | 1026 void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) { |
2313 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | 1027 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); |
2314 Address target_start = rinfo->target_address_address(); | 1028 Address target_start = rinfo->target_address_address(); |
2315 OutputRawData(target_start); | 1029 OutputRawData(target_start); |
2316 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 1030 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
2317 serializer_->SerializeObject(target, CODE_TARGET_REPRESENTATION); | 1031 serializer_->SerializeObject(target, CODE_TARGET_REPRESENTATION); |
2318 bytes_processed_so_far_ += Assembler::kCallTargetSize; | 1032 bytes_processed_so_far_ += Assembler::kCallTargetSize; |
2319 } | 1033 } |
2320 | 1034 |
2321 | 1035 |
2322 void Serializer2::ObjectSerializer::VisitExternalAsciiString( | 1036 void Serializer::ObjectSerializer::VisitExternalAsciiString( |
2323 v8::String::ExternalAsciiStringResource** resource_pointer) { | 1037 v8::String::ExternalAsciiStringResource** resource_pointer) { |
2324 Address references_start = reinterpret_cast<Address>(resource_pointer); | 1038 Address references_start = reinterpret_cast<Address>(resource_pointer); |
2325 OutputRawData(references_start); | 1039 OutputRawData(references_start); |
2326 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { | 1040 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { |
2327 // Use raw_unchecked when maps are munged. | 1041 // Use raw_unchecked when maps are munged. |
2328 Object* source = Heap::raw_unchecked_natives_source_cache()->get(i); | 1042 Object* source = Heap::raw_unchecked_natives_source_cache()->get(i); |
2329 if (!source->IsUndefined()) { | 1043 if (!source->IsUndefined()) { |
2330 // Don't use cast when maps are munged. | 1044 // Don't use cast when maps are munged. |
2331 ExternalAsciiString* string = | 1045 ExternalAsciiString* string = |
2332 reinterpret_cast<ExternalAsciiString*>(source); | 1046 reinterpret_cast<ExternalAsciiString*>(source); |
2333 typedef v8::String::ExternalAsciiStringResource Resource; | 1047 typedef v8::String::ExternalAsciiStringResource Resource; |
2334 Resource* resource = string->resource(); | 1048 Resource* resource = string->resource(); |
2335 if (resource == *resource_pointer) { | 1049 if (resource == *resource_pointer) { |
2336 sink_->Put(NATIVES_STRING_RESOURCE, "NativesStringResource"); | 1050 sink_->Put(NATIVES_STRING_RESOURCE, "NativesStringResource"); |
2337 sink_->PutSection(i, "NativesStringResourceEnd"); | 1051 sink_->PutSection(i, "NativesStringResourceEnd"); |
2338 bytes_processed_so_far_ += sizeof(resource); | 1052 bytes_processed_so_far_ += sizeof(resource); |
2339 return; | 1053 return; |
2340 } | 1054 } |
2341 } | 1055 } |
2342 } | 1056 } |
2343 // One of the strings in the natives cache should match the resource. We | 1057 // One of the strings in the natives cache should match the resource. We |
2344 // can't serialize any other kinds of external strings. | 1058 // can't serialize any other kinds of external strings. |
2345 UNREACHABLE(); | 1059 UNREACHABLE(); |
2346 } | 1060 } |
2347 | 1061 |
2348 | 1062 |
2349 void Serializer2::ObjectSerializer::OutputRawData(Address up_to) { | 1063 void Serializer::ObjectSerializer::OutputRawData(Address up_to) { |
2350 Address object_start = object_->address(); | 1064 Address object_start = object_->address(); |
2351 int up_to_offset = static_cast<int>(up_to - object_start); | 1065 int up_to_offset = static_cast<int>(up_to - object_start); |
2352 int skipped = up_to_offset - bytes_processed_so_far_; | 1066 int skipped = up_to_offset - bytes_processed_so_far_; |
2353 // This assert will fail if the reloc info gives us the target_address_address | 1067 // This assert will fail if the reloc info gives us the target_address_address |
2354 // locations in a non-ascending order. Luckily that doesn't happen. | 1068 // locations in a non-ascending order. Luckily that doesn't happen. |
2355 ASSERT(skipped >= 0); | 1069 ASSERT(skipped >= 0); |
2356 if (skipped != 0) { | 1070 if (skipped != 0) { |
2357 Address base = object_start + bytes_processed_so_far_; | 1071 Address base = object_start + bytes_processed_so_far_; |
2358 #define RAW_CASE(index, length) \ | 1072 #define RAW_CASE(index, length) \ |
2359 if (skipped == length) { \ | 1073 if (skipped == length) { \ |
2360 sink_->PutSection(RAW_DATA_SERIALIZATION + index, "RawDataFixed"); \ | 1074 sink_->PutSection(RAW_DATA_SERIALIZATION + index, "RawDataFixed"); \ |
2361 } else /* NOLINT */ | 1075 } else /* NOLINT */ |
2362 COMMON_RAW_LENGTHS(RAW_CASE) | 1076 COMMON_RAW_LENGTHS(RAW_CASE) |
2363 #undef RAW_CASE | 1077 #undef RAW_CASE |
2364 { /* NOLINT */ | 1078 { /* NOLINT */ |
2365 sink_->Put(RAW_DATA_SERIALIZATION, "RawData"); | 1079 sink_->Put(RAW_DATA_SERIALIZATION, "RawData"); |
2366 sink_->PutInt(skipped, "length"); | 1080 sink_->PutInt(skipped, "length"); |
2367 } | 1081 } |
2368 for (int i = 0; i < skipped; i++) { | 1082 for (int i = 0; i < skipped; i++) { |
2369 unsigned int data = base[i]; | 1083 unsigned int data = base[i]; |
2370 sink_->PutSection(data, "Byte"); | 1084 sink_->PutSection(data, "Byte"); |
2371 } | 1085 } |
2372 bytes_processed_so_far_ += skipped; | 1086 bytes_processed_so_far_ += skipped; |
2373 } | 1087 } |
2374 } | 1088 } |
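
OutputRawData emits the bytes between the last visited field and |up_to|. A handful of common lengths (expanded from COMMON_RAW_LENGTHS) each get a dedicated one-byte opcode so no length needs to follow; any other length falls back to the generic RAW_DATA_SERIALIZATION form with an explicit length. A freestanding sketch of that two-tier encoding, with invented opcode values, an assumed list of common lengths, and a one-byte length where the real code uses PutInt:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Placeholder opcode and an assumed set of "common" lengths; the real
// values come from RAW_DATA_SERIALIZATION and COMMON_RAW_LENGTHS.
const uint8_t kRawData = 0x30;
const int kCommonRawLengths[] = {4, 8, 12, 16};

// Emit |skipped| bytes starting at |base|: a common length is folded into
// the opcode itself, any other length is written out explicitly.
static void OutputRaw(std::vector<uint8_t>* out, const uint8_t* base,
                      int skipped) {
  bool length_in_opcode = false;
  for (size_t i = 0; i < sizeof(kCommonRawLengths) / sizeof(int); i++) {
    if (skipped == kCommonRawLengths[i]) {
      out->push_back(static_cast<uint8_t>(kRawData + 1 + i));
      length_in_opcode = true;
      break;
    }
  }
  if (!length_in_opcode) {
    out->push_back(kRawData);                       // generic form...
    out->push_back(static_cast<uint8_t>(skipped));  // ...followed by the length
  }
  for (int i = 0; i < skipped; i++) out->push_back(base[i]);
}

int main() {
  const uint8_t data[20] = {0};
  std::vector<uint8_t> out;
  OutputRaw(&out, data, 8);    // common length: 1 opcode byte + 8 data bytes
  assert(out.size() == 9);
  OutputRaw(&out, data, 6);    // uncommon: opcode + length byte + 6 data bytes
  assert(out.size() == 9 + 8);
  return 0;
}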
2375 | 1089 |
2376 | 1090 |
2377 int Serializer2::SpaceOfObject(HeapObject* object) { | 1091 int Serializer::SpaceOfObject(HeapObject* object) { |
2378 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { | 1092 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { |
2379 AllocationSpace s = static_cast<AllocationSpace>(i); | 1093 AllocationSpace s = static_cast<AllocationSpace>(i); |
2380 if (Heap::InSpace(object, s)) { | 1094 if (Heap::InSpace(object, s)) { |
2381 if (i == LO_SPACE) { | 1095 if (i == LO_SPACE) { |
2382 if (object->IsCode()) { | 1096 if (object->IsCode()) { |
2383 return kLargeCode; | 1097 return kLargeCode; |
2384 } else if (object->IsFixedArray()) { | 1098 } else if (object->IsFixedArray()) { |
2385 return kLargeFixedArray; | 1099 return kLargeFixedArray; |
2386 } else { | 1100 } else { |
2387 return kLargeData; | 1101 return kLargeData; |
2388 } | 1102 } |
2389 } | 1103 } |
2390 return i; | 1104 return i; |
2391 } | 1105 } |
2392 } | 1106 } |
2393 UNREACHABLE(); | 1107 UNREACHABLE(); |
2394 return 0; | 1108 return 0; |
2395 } | 1109 } |
2396 | 1110 |
2397 | 1111 |
2398 int Serializer2::SpaceOfAlreadySerializedObject(HeapObject* object) { | 1112 int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) { |
2399 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { | 1113 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { |
2400 AllocationSpace s = static_cast<AllocationSpace>(i); | 1114 AllocationSpace s = static_cast<AllocationSpace>(i); |
2401 if (Heap::InSpace(object, s)) { | 1115 if (Heap::InSpace(object, s)) { |
2402 return i; | 1116 return i; |
2403 } | 1117 } |
2404 } | 1118 } |
2405 UNREACHABLE(); | 1119 UNREACHABLE(); |
2406 return 0; | 1120 return 0; |
2407 } | 1121 } |
2408 | 1122 |
2409 | 1123 |
2410 int Serializer2::Allocate(int space, int size, bool* new_page) { | 1124 int Serializer::Allocate(int space, int size, bool* new_page) { |
2411 ASSERT(space >= 0 && space < kNumberOfSpaces); | 1125 ASSERT(space >= 0 && space < kNumberOfSpaces); |
2412 if (SpaceIsLarge(space)) { | 1126 if (SpaceIsLarge(space)) { |
2413 // In large object space we merely number the objects instead of trying to | 1127 // In large object space we merely number the objects instead of trying to |
2414 // determine some sort of address. | 1128 // determine some sort of address. |
2415 *new_page = true; | 1129 *new_page = true; |
2416 return fullness_[LO_SPACE]++; | 1130 return fullness_[LO_SPACE]++; |
2417 } | 1131 } |
2418 *new_page = false; | 1132 *new_page = false; |
2419 if (fullness_[space] == 0) { | 1133 if (fullness_[space] == 0) { |
2420 *new_page = true; | 1134 *new_page = true; |
(...skipping 13 matching lines...)
2434 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); | 1148 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); |
2435 } | 1149 } |
2436 } | 1150 } |
2437 int allocation_address = fullness_[space]; | 1151 int allocation_address = fullness_[space]; |
2438 fullness_[space] = allocation_address + size; | 1152 fullness_[space] = allocation_address + size; |
2439 return allocation_address; | 1153 return allocation_address; |
2440 } | 1154 } |
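
Allocate never touches the real heap; it only simulates where the deserializer will place each object, so the stream can refer to objects by (space, offset). Large objects are simply numbered, the first allocation in a paged space opens a page, and the RoundUp call in the elided lines suggests that an object that would straddle a page boundary is pushed onto a fresh page. A small self-contained model of that bookkeeping for one paged space, with an assumed page size and the large-object case left out:

#include <cassert>

const int kPageSize = 8 * 1024;  // assumed page size for the sketch

// Simulated allocation cursor for one paged space, mirroring fullness_[space].
class SimulatedSpace {
 public:
  SimulatedSpace() : fullness_(0) {}

  // Returns the offset at which an object of |size| bytes will be placed and
  // reports whether that allocation opened a fresh page.
  int Allocate(int size, bool* new_page) {
    *new_page = false;
    if (fullness_ == 0) *new_page = true;  // very first object in this space
    int offset_in_page = fullness_ % kPageSize;
    if (offset_in_page != 0 && offset_in_page + size > kPageSize) {
      // Object would not fit on the current page: round up to the next one.
      fullness_ += kPageSize - offset_in_page;
      *new_page = true;
    }
    int address = fullness_;
    fullness_ += size;
    return address;
  }

 private:
  int fullness_;  // bytes "allocated" so far in this space
};

int main() {
  SimulatedSpace space;
  bool new_page = false;
  int first = space.Allocate(64, &new_page);
  assert(first == 0 && new_page);             // opens the first page
  int second = space.Allocate(kPageSize, &new_page);
  assert(second == kPageSize && new_page);    // too big to share page one
  (void)first;
  (void)second;
  return 0;
}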
2441 | 1155 |
2442 | 1156 |
2443 } } // namespace v8::internal | 1157 } } // namespace v8::internal |