| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/platform/platform.h" | 9 #include "src/base/platform/platform.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 810 matching lines...) |
| 821 string->SetForwardedInternalizedString(canonical); | 821 string->SetForwardedInternalizedString(canonical); |
| 822 return canonical; | 822 return canonical; |
| 823 } | 823 } |
| 824 } | 824 } |
| 825 return obj; | 825 return obj; |
| 826 } | 826 } |
| 827 | 827 |
| 828 | 828 |
| 829 HeapObject* Deserializer::GetBackReferencedObject(int space) { | 829 HeapObject* Deserializer::GetBackReferencedObject(int space) { |
| 830 HeapObject* obj; | 830 HeapObject* obj; |
| | 831 BackReference back_reference(source_.GetInt()); |
| 831 if (space == LO_SPACE) { | 832 if (space == LO_SPACE) { |
| 832 uint32_t index = source_.GetInt(); | 833 CHECK(back_reference.chunk_index() == 0); |
| | 834 uint32_t index = back_reference.large_object_index(); |
| 833 obj = deserialized_large_objects_[index]; | 835 obj = deserialized_large_objects_[index]; |
| 834 } else { | 836 } else { |
| 835 BackReference back_reference(source_.GetInt()); | |
| 836 DCHECK(space < kNumberOfPreallocatedSpaces); | 837 DCHECK(space < kNumberOfPreallocatedSpaces); |
| 837 uint32_t chunk_index = back_reference.chunk_index(); | 838 uint32_t chunk_index = back_reference.chunk_index(); |
| 838 DCHECK_LE(chunk_index, current_chunk_[space]); | 839 DCHECK_LE(chunk_index, current_chunk_[space]); |
| 839 uint32_t chunk_offset = back_reference.chunk_offset(); | 840 uint32_t chunk_offset = back_reference.chunk_offset(); |
| 840 obj = HeapObject::FromAddress(reservations_[space][chunk_index].start + | 841 obj = HeapObject::FromAddress(reservations_[space][chunk_index].start + |
| 841 chunk_offset); | 842 chunk_offset); |
| 842 } | 843 } |
| 843 if (deserializing_user_code() && obj->IsInternalizedString()) { | 844 if (deserializing_user_code() && obj->IsInternalizedString()) { |
| 844 obj = String::cast(obj)->GetForwardedInternalizedString(); | 845 obj = String::cast(obj)->GetForwardedInternalizedString(); |
| 845 } | 846 } |
| (...skipping 462 matching lines...) |
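The reshuffled hunk above decodes the back reference once, before branching on the space, so large-object references now travel in the same packed form as paged-space references, and the new CHECK rejects a payload whose chunk bits are nonzero. A minimal sketch of such a packed encoding; the 16/16 field split is an illustrative assumption, not V8's actual BackReference bit layout:

    #include <cstdint>

    // Hypothetical packed back reference; the 16/16 split below is an
    // illustrative assumption, not V8's real BackReference layout.
    struct PackedBackReference {
      explicit PackedBackReference(uint32_t bits) : bits_(bits) {}

      // Paged spaces: which reserved chunk, and the offset inside it.
      uint32_t chunk_index() const { return bits_ >> 16; }
      uint32_t chunk_offset() const { return bits_ & 0xFFFF; }

      // Large object space: the payload reuses the low bits as an index
      // into the list of deserialized large objects, so the chunk bits
      // must be zero -- which is what the new CHECK above enforces.
      uint32_t large_object_index() const { return bits_ & 0xFFFF; }

     private:
      uint32_t bits_;
    };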
| 1308 | 1309 |
| 1309 case kNextChunk: { | 1310 case kNextChunk: { |
| 1310 int space = source_.Get(); | 1311 int space = source_.Get(); |
| 1311 DCHECK(space < kNumberOfPreallocatedSpaces); | 1312 DCHECK(space < kNumberOfPreallocatedSpaces); |
| 1312 int chunk_index = current_chunk_[space]; | 1313 int chunk_index = current_chunk_[space]; |
| 1313 const Heap::Reservation& reservation = reservations_[space]; | 1314 const Heap::Reservation& reservation = reservations_[space]; |
| 1314 // Make sure the current chunk is indeed exhausted. | 1315 // Make sure the current chunk is indeed exhausted. |
| 1315 CHECK_EQ(reservation[chunk_index].end, high_water_[space]); | 1316 CHECK_EQ(reservation[chunk_index].end, high_water_[space]); |
| 1316 // Move to next reserved chunk. | 1317 // Move to next reserved chunk. |
| 1317 chunk_index = ++current_chunk_[space]; | 1318 chunk_index = ++current_chunk_[space]; |
| 1318 DCHECK_LT(chunk_index, reservation.length()); | 1319 CHECK_LT(chunk_index, reservation.length()); |
| 1319 high_water_[space] = reservation[chunk_index].start; | 1320 high_water_[space] = reservation[chunk_index].start; |
| 1320 break; | 1321 break; |
| 1321 } | 1322 } |
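Promoting DCHECK_LT to CHECK_LT here is not cosmetic: kNextChunk is driven by the snapshot byte stream, and a corrupted stream could name a chunk index past the reservation, so the bounds check has to survive release builds. A compressed restatement of the bookkeeping, with invented stand-in types in place of Heap::Reservation:

    #include <cstdlib>
    #include <vector>

    // Stand-ins for V8's release-mode check macros.
    #define CHECK_EQ(a, b) do { if ((a) != (b)) std::abort(); } while (0)
    #define CHECK_LT(a, b) do { if (!((a) < (b))) std::abort(); } while (0)

    struct Chunk { char* start; char* end; };  // stand-in for a reservation entry

    struct SpaceState {
      std::vector<Chunk> reservation;  // chunks reserved up front
      size_t current_chunk = 0;        // chunk currently being filled
      char* high_water = nullptr;      // next free address in that chunk
    };

    void AdvanceToNextChunk(SpaceState* s) {
      // The current chunk must be exactly exhausted before moving on.
      CHECK_EQ(s->reservation[s->current_chunk].end, s->high_water);
      ++s->current_chunk;
      // Must hold even in release builds: the index is derived from
      // (potentially untrusted) snapshot data.
      CHECK_LT(s->current_chunk, s->reservation.size());
      s->high_water = s->reservation[s->current_chunk].start;
    }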
| 1322 | 1323 |
| 1323 FOUR_CASES(kHotObjectWithSkip) | 1324 FOUR_CASES(kHotObjectWithSkip) |
| 1324 FOUR_CASES(kHotObjectWithSkip + 4) { | 1325 FOUR_CASES(kHotObjectWithSkip + 4) { |
| 1325 int skip = source_.GetInt(); | 1326 int skip = source_.GetInt(); |
| 1326 current = reinterpret_cast<Object**>( | 1327 current = reinterpret_cast<Object**>( |
| 1327 reinterpret_cast<Address>(current) + skip); | 1328 reinterpret_cast<Address>(current) + skip); |
| 1328 // Fall through. | 1329 // Fall through. |
| 1329 } | 1330 } |
| 1330 FOUR_CASES(kHotObject) | 1331 FOUR_CASES(kHotObject) |
| 1331 FOUR_CASES(kHotObject + 4) { | 1332 FOUR_CASES(kHotObject + 4) { |
| 1332 int index = data & kHotObjectIndexMask; | 1333 int index = data & kHotObjectIndexMask; |
| 1333 Object* hot_object = hot_objects_.Get(index); | 1334 Object* hot_object = hot_objects_.Get(index); |
| 1334 UnalignedCopy(current, &hot_object); | 1335 UnalignedCopy(current, &hot_object); |
| 1335 if (write_barrier_needed && isolate->heap()->InNewSpace(hot_object)) { | 1336 if (write_barrier_needed && isolate->heap()->InNewSpace(hot_object)) { |
| 1336 Address current_address = reinterpret_cast<Address>(current); | 1337 Address current_address = reinterpret_cast<Address>(current); |
| 1337 isolate->heap()->RecordWrite( | 1338 isolate->heap()->RecordWrite( |
| 1338 current_object_address, | 1339 current_object_address, |
| 1339 static_cast<int>(current_address - current_object_address)); | 1340 static_cast<int>(current_address - current_object_address)); |
| 1340 } | 1341 } |
| 1341 current++; | 1342 current++; |
| 1342 break; | 1343 break; |
| 1343 } | 1344 } |
| 1344 | 1345 |
| 1345 case kSynchronize: { | 1346 case kSynchronize: { |
| 1346 // If we get here then that indicates that you have a mismatch between | 1347 // If we get here then that indicates that you have a mismatch between |
| 1347 // the number of GC roots when serializing and deserializing. | 1348 // the number of GC roots when serializing and deserializing. |
| 1348 UNREACHABLE(); | 1349 CHECK(false); |
| 1349 } | 1350 } |
| 1350 | 1351 |
| 1351 default: | 1352 default: |
| 1352 UNREACHABLE(); | 1353 CHECK(false); |
| 1353 } | 1354 } |
| 1354 } | 1355 } |
| 1355 DCHECK_EQ(limit, current); | 1356 CHECK_EQ(limit, current); |
| 1356 } | 1357 } |
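The thread running through this hunk is that assertions guarding data read from the snapshot stream are upgraded from debug-only to release-mode checks, and UNREACHABLE() gives way to CHECK(false) for the same reason, presumably because UNREACHABLE() carried no release-mode guarantee in this vintage of V8. The semantic difference, sketched with simplified macro definitions (an approximation of V8's, not copied from it):

    #include <cstdlib>

    // Release-mode check: always evaluated, aborts on failure.
    #define CHECK(cond) do { if (!(cond)) std::abort(); } while (0)

    #ifdef DEBUG
    #define DCHECK(cond) CHECK(cond)   // same as CHECK in debug builds
    #else
    #define DCHECK(cond) ((void)0)     // compiled away in release builds
    #endif

    int Dispatch(int byte_code) {
      switch (byte_code) {
        case 0:
          return 42;          // a known opcode
        default:
          CHECK(false);       // malformed stream: aborts in every build,
          return -1;          // where a DCHECK would pass silently in release
      }
    }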
| 1357 | 1358 |
| 1358 | 1359 |
| 1359 Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink) | 1360 Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink) |
| 1360 : isolate_(isolate), | 1361 : isolate_(isolate), |
| 1361 sink_(sink), | 1362 sink_(sink), |
| 1362 external_reference_encoder_(new ExternalReferenceEncoder(isolate)), | 1363 external_reference_encoder_(new ExternalReferenceEncoder(isolate)), |
| 1363 root_index_map_(isolate), | 1364 root_index_map_(isolate), |
| 1364 code_address_map_(NULL), | 1365 code_address_map_(NULL), |
| 1365 large_objects_total_size_(0), | 1366 large_objects_total_size_(0), |
| (...skipping 75 matching lines...) |
| 1441 sink_->PutInt(size >> kObjectAlignmentBits, "FixedArray size in words"); | 1442 sink_->PutInt(size >> kObjectAlignmentBits, "FixedArray size in words"); |
| 1442 Map* map = isolate_->heap()->fixed_array_map(); | 1443 Map* map = isolate_->heap()->fixed_array_map(); |
| 1443 SerializeObject(map, kPlain, kStartOfObject, 0); | 1444 SerializeObject(map, kPlain, kStartOfObject, 0); |
| 1444 Smi* length_smi = Smi::FromInt(length); | 1445 Smi* length_smi = Smi::FromInt(length); |
| 1445 sink_->Put(kOnePointerRawData, "Smi"); | 1446 sink_->Put(kOnePointerRawData, "Smi"); |
| 1446 for (int i = 0; i < kPointerSize; i++) { | 1447 for (int i = 0; i < kPointerSize; i++) { |
| 1447 sink_->Put(reinterpret_cast<byte*>(&length_smi)[i], "Byte"); | 1448 sink_->Put(reinterpret_cast<byte*>(&length_smi)[i], "Byte"); |
| 1448 } | 1449 } |
| 1449 for (int i = 0; i < length; i++) { | 1450 for (int i = 0; i < length; i++) { |
| 1450 BackReference back_ref = outdated_contexts_[i]; | 1451 BackReference back_ref = outdated_contexts_[i]; |
| | 1452 DCHECK(BackReferenceIsAlreadyAllocated(back_ref)); |
| 1451 sink_->Put(kBackref + back_ref.space(), "BackRef"); | 1453 sink_->Put(kBackref + back_ref.space(), "BackRef"); |
| 1452 sink_->PutInt(back_ref.reference(), "BackRefValue"); | 1454 sink_->PutInt(back_ref.reference(), "BackRefValue"); |
| 1453 } | 1455 } |
| 1454 } | 1456 } |
| 1455 } | 1457 } |
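For readers following along: this block fakes a FixedArray directly in the byte stream -- the size header, the fixed-array map, the length Smi emitted byte by byte as one-pointer raw data, then one back reference per outdated context, now guarded by the new DCHECK. A sketch of the raw-Smi step, assuming the 32-bit Smi tagging scheme (value << 1, tag bit zero); the Sink type is invented:

    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct Sink {  // invented stand-in for SnapshotByteSink
      std::vector<uint8_t> bytes;
      void Put(uint8_t b) { bytes.push_back(b); }
    };

    // Emit a Smi's raw tagged-pointer bytes, as the length field above is
    // written. Assumes the 32-bit encoding (value << 1); 64-bit V8 of
    // this era used value << 32 instead.
    void PutSmiRaw(Sink* sink, intptr_t value) {
      intptr_t tagged = value << 1;  // Smi tag bit is 0
      uint8_t raw[sizeof(intptr_t)];
      std::memcpy(raw, &tagged, sizeof(raw));
      for (size_t i = 0; i < sizeof(raw); i++) sink->Put(raw[i]);
    }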
| 1456 | 1458 |
| 1457 | 1459 |
| 1458 bool Serializer::ShouldBeSkipped(Object** current) { | 1460 bool Serializer::ShouldBeSkipped(Object** current) { |
| 1459 Object** roots = isolate()->heap()->roots_array_start(); | 1461 Object** roots = isolate()->heap()->roots_array_start(); |
| 1460 return current == &roots[Heap::kStoreBufferTopRootIndex] | 1462 return current == &roots[Heap::kStoreBufferTopRootIndex] |
| (...skipping 78 matching lines...) |
| 1539 int length = isolate->serialize_partial_snapshot_cache_length(); | 1541 int length = isolate->serialize_partial_snapshot_cache_length(); |
| 1540 isolate->PushToPartialSnapshotCache(heap_object); | 1542 isolate->PushToPartialSnapshotCache(heap_object); |
| 1541 startup_serializer_->VisitPointer(reinterpret_cast<Object**>(&heap_object)); | 1543 startup_serializer_->VisitPointer(reinterpret_cast<Object**>(&heap_object)); |
| 1542 // We don't recurse from the startup snapshot generator into the partial | 1544 // We don't recurse from the startup snapshot generator into the partial |
| 1543 // snapshot generator. | 1545 // snapshot generator. |
| 1544 DCHECK(length == isolate->serialize_partial_snapshot_cache_length() - 1); | 1546 DCHECK(length == isolate->serialize_partial_snapshot_cache_length() - 1); |
| 1545 return length; | 1547 return length; |
| 1546 } | 1548 } |
| 1547 | 1549 |
| 1548 | 1550 |
| | 1551 #ifdef DEBUG |
| | 1552 bool Serializer::BackReferenceIsAlreadyAllocated(BackReference reference) { |
| | 1553 DCHECK(reference.is_valid()); |
| | 1554 DCHECK(!reference.is_source()); |
| | 1555 DCHECK(!reference.is_global_proxy()); |
| | 1556 AllocationSpace space = reference.space(); |
| | 1557 int chunk_index = reference.chunk_index(); |
| | 1558 if (space == LO_SPACE) { |
| | 1559 return chunk_index == 0 && |
| | 1560 reference.large_object_index() < seen_large_objects_index_; |
| | 1561 } else if (chunk_index == completed_chunks_[space].length()) { |
| | 1562 return reference.chunk_offset() < pending_chunk_[space]; |
| | 1563 } else { |
| | 1564 return chunk_index < completed_chunks_[space].length() && |
| | 1565 reference.chunk_offset() < completed_chunks_[space][chunk_index]; |
| | 1566 } |
| | 1567 } |
| | 1568 #endif // DEBUG |
| | 1569 |
| | 1570 |
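The new debug helper separates three cases per space: large objects (the index must be below the count serialized so far), the chunk still being filled (the offset must be below pending_chunk_, i.e. the bytes allocated in it so far), and completed chunks (the offset must be below the recorded chunk size). A standalone restatement of the paged-space half with plain containers in place of the serializer's fields, to make the case split explicit:

    #include <cstdint>
    #include <vector>

    // Paged-space half of the validity rule; container types are
    // stand-ins for the serializer's completed_chunks_/pending_chunk_.
    bool OffsetIsAlreadyAllocated(uint32_t chunk_index, uint32_t chunk_offset,
                                  const std::vector<uint32_t>& completed_sizes,
                                  uint32_t pending_chunk_size) {
      if (chunk_index == completed_sizes.size()) {
        // The chunk currently being filled: only offsets below the bytes
        // allocated so far point at real objects.
        return chunk_offset < pending_chunk_size;
      }
      // A completed chunk: the offset must lie inside its recorded size.
      return chunk_index < completed_sizes.size() &&
             chunk_offset < completed_sizes[chunk_index];
    }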
| 1549 bool Serializer::SerializeKnownObject(HeapObject* obj, HowToCode how_to_code, | 1571 bool Serializer::SerializeKnownObject(HeapObject* obj, HowToCode how_to_code, |
| 1550 WhereToPoint where_to_point, int skip) { | 1572 WhereToPoint where_to_point, int skip) { |
| 1551 if (how_to_code == kPlain && where_to_point == kStartOfObject) { | 1573 if (how_to_code == kPlain && where_to_point == kStartOfObject) { |
| 1552 // Encode a reference to a hot object by its index in the working set. | 1574 // Encode a reference to a hot object by its index in the working set. |
| 1553 int index = hot_objects_.Find(obj); | 1575 int index = hot_objects_.Find(obj); |
| 1554 if (index != HotObjectsList::kNotFound) { | 1576 if (index != HotObjectsList::kNotFound) { |
| 1555 DCHECK(index >= 0 && index <= kMaxHotObjectIndex); | 1577 DCHECK(index >= 0 && index <= kMaxHotObjectIndex); |
| 1556 if (FLAG_trace_serializer) { | 1578 if (FLAG_trace_serializer) { |
| 1557 PrintF(" Encoding hot object %d:", index); | 1579 PrintF(" Encoding hot object %d:", index); |
| 1558 obj->ShortPrint(); | 1580 obj->ShortPrint(); |
| (...skipping 34 matching lines...) |
| 1593 } | 1615 } |
| 1594 | 1616 |
| 1595 AllocationSpace space = back_reference.space(); | 1617 AllocationSpace space = back_reference.space(); |
| 1596 if (skip == 0) { | 1618 if (skip == 0) { |
| 1597 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRef"); | 1619 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRef"); |
| 1598 } else { | 1620 } else { |
| 1599 sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space, | 1621 sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space, |
| 1600 "BackRefWithSkip"); | 1622 "BackRefWithSkip"); |
| 1601 sink_->PutInt(skip, "BackRefSkipDistance"); | 1623 sink_->PutInt(skip, "BackRefSkipDistance"); |
| 1602 } | 1624 } |
| | 1625 DCHECK(BackReferenceIsAlreadyAllocated(back_reference)); |
| 1603 sink_->PutInt(back_reference.reference(), "BackRefValue"); | 1626 sink_->PutInt(back_reference.reference(), "BackRefValue"); |
| 1604 | 1627 |
| 1605 hot_objects_.Add(obj); | 1628 hot_objects_.Add(obj); |
| 1606 } | 1629 } |
| 1607 return true; | 1630 return true; |
| 1608 } | 1631 } |
| 1609 return false; | 1632 return false; |
| 1610 } | 1633 } |
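Hot objects are the cheapest encoding in this function: a recently serialized object is named by its index in a small working set, costing a single opcode byte, before the back-reference and skip encodings are even considered. The serializer and deserializer must mutate the set in lockstep for the indices to agree. A minimal sketch of such a working set; the capacity of eight matches the kHotObject .. kHotObject + 7 opcode range above, but the class itself is invented:

    // Invented fixed-size working set of recently seen objects; both ends
    // of the snapshot pipe must perform the same Add() calls in the same
    // order, or Get(index) will disagree with Find(obj).
    template <typename T, int kSize = 8>
    class HotList {
     public:
      static const int kNotFound = -1;

      void Add(T* obj) {           // overwrite the oldest slot
        entries_[next_] = obj;
        next_ = (next_ + 1) % kSize;
      }

      T* Get(int index) const { return entries_[index]; }

      int Find(T* obj) const {     // linear scan; the set is tiny
        for (int i = 0; i < kSize; i++) {
          if (entries_[i] == obj) return i;
        }
        return kNotFound;
      }

     private:
      T* entries_[kSize] = {nullptr};
      int next_ = 0;
    };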
| 1611 | 1634 |
| 1612 | 1635 |
| (...skipping 955 matching lines...) |
| 2568 return GetHeaderValue(kNumInternalizedStringsOffset); | 2591 return GetHeaderValue(kNumInternalizedStringsOffset); |
| 2569 } | 2592 } |
| 2570 | 2593 |
| 2571 Vector<const uint32_t> SerializedCodeData::CodeStubKeys() const { | 2594 Vector<const uint32_t> SerializedCodeData::CodeStubKeys() const { |
| 2572 int reservations_size = GetHeaderValue(kReservationsOffset) * kInt32Size; | 2595 int reservations_size = GetHeaderValue(kReservationsOffset) * kInt32Size; |
| 2573 const byte* start = data_ + kHeaderSize + reservations_size; | 2596 const byte* start = data_ + kHeaderSize + reservations_size; |
| 2574 return Vector<const uint32_t>(reinterpret_cast<const uint32_t*>(start), | 2597 return Vector<const uint32_t>(reinterpret_cast<const uint32_t*>(start), |
| 2575 GetHeaderValue(kNumCodeStubKeysOffset)); | 2598 GetHeaderValue(kNumCodeStubKeysOffset)); |
| 2576 } | 2599 } |
| 2577 } } // namespace v8::internal | 2600 } } // namespace v8::internal |