Chromium Code Reviews

Side by Side Diff: src/snapshot/serialize.cc

Issue 1139113002: Revert of Prevent stack overflow in the serializer/deserializer. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #include "src/accessors.h" 7 #include "src/accessors.h"
8 #include "src/api.h" 8 #include "src/api.h"
9 #include "src/base/platform/platform.h" 9 #include "src/base/platform/platform.h"
10 #include "src/bootstrapper.h" 10 #include "src/bootstrapper.h"
(...skipping 542 matching lines...)
553 Initialize(isolate); 553 Initialize(isolate);
554 if (!ReserveSpace()) V8::FatalProcessOutOfMemory("deserializing context"); 554 if (!ReserveSpace()) V8::FatalProcessOutOfMemory("deserializing context");
555 // No active threads. 555 // No active threads.
556 DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse()); 556 DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse());
557 // No active handles. 557 // No active handles.
558 DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty()); 558 DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty());
559 isolate_->heap()->IterateSmiRoots(this); 559 isolate_->heap()->IterateSmiRoots(this);
560 isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG); 560 isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
561 isolate_->heap()->RepairFreeListsAfterDeserialization(); 561 isolate_->heap()->RepairFreeListsAfterDeserialization();
562 isolate_->heap()->IterateWeakRoots(this, VISIT_ALL); 562 isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
563 DeserializeDeferredObjects();
564 563
565 isolate_->heap()->set_native_contexts_list( 564 isolate_->heap()->set_native_contexts_list(
566 isolate_->heap()->undefined_value()); 565 isolate_->heap()->undefined_value());
567 566
568 // The allocation site list is built during root iteration, but if no sites 567 // The allocation site list is built during root iteration, but if no sites
569 // were encountered then it needs to be initialized to undefined. 568 // were encountered then it needs to be initialized to undefined.
570 if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) { 569 if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
571 isolate_->heap()->set_allocation_sites_list( 570 isolate_->heap()->set_allocation_sites_list(
572 isolate_->heap()->undefined_value()); 571 isolate_->heap()->undefined_value());
573 } 572 }
(...skipping 29 matching lines...)
603 602
604 DisallowHeapAllocation no_gc; 603 DisallowHeapAllocation no_gc;
605 // Keep track of the code space start and end pointers in case new 604 // Keep track of the code space start and end pointers in case new
606 // code objects were deserialized 605 // code objects were deserialized
607 OldSpace* code_space = isolate_->heap()->code_space(); 606 OldSpace* code_space = isolate_->heap()->code_space();
608 Address start_address = code_space->top(); 607 Address start_address = code_space->top();
609 Object* root; 608 Object* root;
610 Object* outdated_contexts; 609 Object* outdated_contexts;
611 VisitPointer(&root); 610 VisitPointer(&root);
612 VisitPointer(&outdated_contexts); 611 VisitPointer(&outdated_contexts);
613 DeserializeDeferredObjects();
614 612
615 // There's no code deserialized here. If this assert fires 613 // There's no code deserialized here. If this assert fires
616 // then that's changed and logging should be added to notify 614 // then that's changed and logging should be added to notify
617 // the profiler et al of the new code. 615 // the profiler et al of the new code.
618 CHECK_EQ(start_address, code_space->top()); 616 CHECK_EQ(start_address, code_space->top());
619 CHECK(outdated_contexts->IsFixedArray()); 617 CHECK(outdated_contexts->IsFixedArray());
620 *outdated_contexts_out = 618 *outdated_contexts_out =
621 Handle<FixedArray>(FixedArray::cast(outdated_contexts), isolate); 619 Handle<FixedArray>(FixedArray::cast(outdated_contexts), isolate);
622 return Handle<Object>(root, isolate); 620 return Handle<Object>(root, isolate);
623 } 621 }
624 622
625 623
626 MaybeHandle<SharedFunctionInfo> Deserializer::DeserializeCode( 624 MaybeHandle<SharedFunctionInfo> Deserializer::DeserializeCode(
627 Isolate* isolate) { 625 Isolate* isolate) {
628 Initialize(isolate); 626 Initialize(isolate);
629 if (!ReserveSpace()) { 627 if (!ReserveSpace()) {
630 return Handle<SharedFunctionInfo>(); 628 return Handle<SharedFunctionInfo>();
631 } else { 629 } else {
632 deserializing_user_code_ = true; 630 deserializing_user_code_ = true;
633 DisallowHeapAllocation no_gc; 631 DisallowHeapAllocation no_gc;
634 Object* root; 632 Object* root;
635 VisitPointer(&root); 633 VisitPointer(&root);
636 DeserializeDeferredObjects();
637 return Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(root)); 634 return Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(root));
638 } 635 }
639 } 636 }
640 637
641 638
642 Deserializer::~Deserializer() { 639 Deserializer::~Deserializer() {
643 // TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed. 640 // TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed.
644 // DCHECK(source_.AtEOF()); 641 // DCHECK(source_.AtEOF());
645 attached_objects_.Dispose(); 642 attached_objects_.Dispose();
646 } 643 }
647 644
648 645
649 // This is called on the roots. It is the driver of the deserialization 646 // This is called on the roots. It is the driver of the deserialization
650 // process. It is also called on the body of each function. 647 // process. It is also called on the body of each function.
651 void Deserializer::VisitPointers(Object** start, Object** end) { 648 void Deserializer::VisitPointers(Object** start, Object** end) {
652 // The space must be new space. Any other space would cause ReadChunk to try 649 // The space must be new space. Any other space would cause ReadChunk to try
653 // to update the remembered set using NULL as the address. 650 // to update the remembered set using NULL as the address.
654 ReadData(start, end, NEW_SPACE, NULL); 651 ReadData(start, end, NEW_SPACE, NULL);
655 } 652 }
656 653
657 654
658 void Deserializer::DeserializeDeferredObjects() { 655 void Deserializer::RelinkAllocationSite(AllocationSite* site) {
659 for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) { 656 if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
660 int space = code & kSpaceMask; 657 site->set_weak_next(isolate_->heap()->undefined_value());
661 DCHECK(space <= kNumberOfSpaces); 658 } else {
662 DCHECK(code - space == kNewObject); 659 site->set_weak_next(isolate_->heap()->allocation_sites_list());
663 HeapObject* object = GetBackReferencedObject(space);
664 int size = source_.GetInt() << kPointerSizeLog2;
665 Address obj_address = object->address();
666 Object** start = reinterpret_cast<Object**>(obj_address + kPointerSize);
667 Object** end = reinterpret_cast<Object**>(obj_address + size);
668 bool filled = ReadData(start, end, space, obj_address);
669 CHECK(filled);
670 if (object->IsAllocationSite()) {
671 RelinkAllocationSite(AllocationSite::cast(object));
672 }
673 } 660 }
661 isolate_->heap()->set_allocation_sites_list(site);
674 } 662 }
675 663
676 664
677 // Used to insert a deserialized internalized string into the string table. 665 // Used to insert a deserialized internalized string into the string table.
678 class StringTableInsertionKey : public HashTableKey { 666 class StringTableInsertionKey : public HashTableKey {
679 public: 667 public:
680 explicit StringTableInsertionKey(String* string) 668 explicit StringTableInsertionKey(String* string)
681 : string_(string), hash_(HashForObject(string)) { 669 : string_(string), hash_(HashForObject(string)) {
682 DCHECK(string->IsInternalizedString()); 670 DCHECK(string->IsInternalizedString());
683 } 671 }
(...skipping 14 matching lines...)
698 686
699 MUST_USE_RESULT virtual Handle<Object> AsHandle(Isolate* isolate) override { 687 MUST_USE_RESULT virtual Handle<Object> AsHandle(Isolate* isolate) override {
700 return handle(string_, isolate); 688 return handle(string_, isolate);
701 } 689 }
702 690
703 String* string_; 691 String* string_;
704 uint32_t hash_; 692 uint32_t hash_;
705 }; 693 };
706 694
707 695
708 HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj) { 696 HeapObject* Deserializer::ProcessNewObjectFromSerializedCode(HeapObject* obj) {
709 DCHECK(deserializing_user_code());
710 if (obj->IsString()) { 697 if (obj->IsString()) {
711 String* string = String::cast(obj); 698 String* string = String::cast(obj);
712 // Uninitialize hash field as the hash seed may have changed. 699 // Uninitialize hash field as the hash seed may have changed.
713 string->set_hash_field(String::kEmptyHashField); 700 string->set_hash_field(String::kEmptyHashField);
714 if (string->IsInternalizedString()) { 701 if (string->IsInternalizedString()) {
715 DisallowHeapAllocation no_gc; 702 DisallowHeapAllocation no_gc;
716 HandleScope scope(isolate_); 703 HandleScope scope(isolate_);
717 StringTableInsertionKey key(string); 704 StringTableInsertionKey key(string);
718 String* canonical = *StringTable::LookupKey(isolate_, &key); 705 String* canonical = *StringTable::LookupKey(isolate_, &key);
719 string->SetForwardedInternalizedString(canonical); 706 string->SetForwardedInternalizedString(canonical);
720 return canonical; 707 return canonical;
721 } 708 }
722 } else if (obj->IsScript()) { 709 } else if (obj->IsScript()) {
723 Script::cast(obj)->set_id(isolate_->heap()->NextScriptId()); 710 Script::cast(obj)->set_id(isolate_->heap()->NextScriptId());
724 } else {
725 DCHECK(CanBeDeferred(obj));
726 } 711 }
727 return obj; 712 return obj;
728 } 713 }
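The canonicalization above works because StringTable::LookupKey() either finds an existing internalized string equal to the deserialized one or inserts the deserialized copy itself, so the pointer the deserializer keeps always refers to a single canonical copy. A minimal stand-alone sketch of that look-up-or-insert pattern, with std::unordered_set standing in for the V8 string table (illustrative only, not the real API):

#include <string>
#include <unordered_set>

// std::string stands in for a deserialized V8 String; the set plays the
// role of the string table.
const std::string* Canonicalize(std::unordered_set<std::string>* table,
                                const std::string& deserialized) {
  // insert() is a no-op when an equal entry already exists, so callers
  // always end up pointing at the one canonical copy.
  auto result = table->insert(deserialized);
  return &*result.first;
}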
729 714
730 715
731 void Deserializer::RelinkAllocationSite(AllocationSite* obj) {
732 DCHECK(obj->IsAllocationSite());
733 // Allocation sites are present in the snapshot, and must be linked into
734 // a list at deserialization time.
735 AllocationSite* site = AllocationSite::cast(obj);
736 // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
737 // as a (weak) root. If this root is relocated correctly,
738 // RelinkAllocationSite() isn't necessary.
739 if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
740 site->set_weak_next(isolate_->heap()->undefined_value());
741 } else {
742 site->set_weak_next(isolate_->heap()->allocation_sites_list());
743 }
744 isolate_->heap()->set_allocation_sites_list(site);
745 }
746
747
748 HeapObject* Deserializer::GetBackReferencedObject(int space) { 716 HeapObject* Deserializer::GetBackReferencedObject(int space) {
749 HeapObject* obj; 717 HeapObject* obj;
750 BackReference back_reference(source_.GetInt()); 718 BackReference back_reference(source_.GetInt());
751 if (space == LO_SPACE) { 719 if (space == LO_SPACE) {
752 CHECK(back_reference.chunk_index() == 0); 720 CHECK(back_reference.chunk_index() == 0);
753 uint32_t index = back_reference.large_object_index(); 721 uint32_t index = back_reference.large_object_index();
754 obj = deserialized_large_objects_[index]; 722 obj = deserialized_large_objects_[index];
755 } else { 723 } else {
756 DCHECK(space < kNumberOfPreallocatedSpaces); 724 DCHECK(space < kNumberOfPreallocatedSpaces);
757 uint32_t chunk_index = back_reference.chunk_index(); 725 uint32_t chunk_index = back_reference.chunk_index();
(...skipping 35 matching lines...)
793 obj = isolate_->heap()->DoubleAlignForDeserialization(obj, reserved_size); 761 obj = isolate_->heap()->DoubleAlignForDeserialization(obj, reserved_size);
794 address = obj->address(); 762 address = obj->address();
795 } 763 }
796 764
797 isolate_->heap()->OnAllocationEvent(obj, size); 765 isolate_->heap()->OnAllocationEvent(obj, size);
798 Object** current = reinterpret_cast<Object**>(address); 766 Object** current = reinterpret_cast<Object**>(address);
799 Object** limit = current + (size >> kPointerSizeLog2); 767 Object** limit = current + (size >> kPointerSizeLog2);
800 if (FLAG_log_snapshot_positions) { 768 if (FLAG_log_snapshot_positions) {
801 LOG(isolate_, SnapshotPositionEvent(address, source_.position())); 769 LOG(isolate_, SnapshotPositionEvent(address, source_.position()));
802 } 770 }
771 ReadData(current, limit, space_number, address);
803 772
804 if (ReadData(current, limit, space_number, address)) { 773 // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
805 // Only post process if object content has not been deferred. 774 // as a (weak) root. If this root is relocated correctly,
806 if (obj->IsAllocationSite()) { 775 // RelinkAllocationSite() isn't necessary.
807 RelinkAllocationSite(AllocationSite::cast(obj)); 776 if (obj->IsAllocationSite()) RelinkAllocationSite(AllocationSite::cast(obj));
808 }
809 }
810 777
811 if (deserializing_user_code()) obj = PostProcessNewObject(obj); 778 // Fix up strings from serialized user code.
779 if (deserializing_user_code()) obj = ProcessNewObjectFromSerializedCode(obj);
812 780
813 Object* write_back_obj = obj; 781 Object* write_back_obj = obj;
814 UnalignedCopy(write_back, &write_back_obj); 782 UnalignedCopy(write_back, &write_back_obj);
815 #ifdef DEBUG 783 #ifdef DEBUG
816 if (obj->IsCode()) { 784 if (obj->IsCode()) {
817 DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE); 785 DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE);
786 #ifdef VERIFY_HEAP
787 obj->ObjectVerify();
788 #endif // VERIFY_HEAP
818 } else { 789 } else {
819 DCHECK(space_number != CODE_SPACE); 790 DCHECK(space_number != CODE_SPACE);
820 } 791 }
821 #endif // DEBUG 792 #endif // DEBUG
822 } 793 }
823 794
824 795
825 // We know the space requirements before deserialization and can 796 // We know the space requirements before deserialization and can
826 // pre-allocate that reserved space. During deserialization, all we need 797 // pre-allocate that reserved space. During deserialization, all we need
827 // to do is to bump up the pointer for each space in the reserved 798 // to do is to bump up the pointer for each space in the reserved
(...skipping 23 matching lines...)
851 // Assert that the current reserved chunk is still big enough. 822 // Assert that the current reserved chunk is still big enough.
852 const Heap::Reservation& reservation = reservations_[space_index]; 823 const Heap::Reservation& reservation = reservations_[space_index];
853 int chunk_index = current_chunk_[space_index]; 824 int chunk_index = current_chunk_[space_index];
854 CHECK_LE(high_water_[space_index], reservation[chunk_index].end); 825 CHECK_LE(high_water_[space_index], reservation[chunk_index].end);
855 #endif 826 #endif
856 return address; 827 return address;
857 } 828 }
858 } 829 }
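As the comment above the skipped lines explains, all space is reserved before deserialization begins, so Allocate() amounts to bumping a per-space high-water pointer, with the snapshot stream telling the deserializer when to step to the next reserved chunk (the "Move to next reserved chunk" case in ReadData below). A simplified, self-contained sketch of that bump-pointer scheme, using stand-in types rather than the real V8 ones and skipping chunks automatically instead of on an explicit opcode:

#include <cstddef>
#include <cstdint>
#include <utility>
#include <vector>

struct Chunk {
  uintptr_t start;
  uintptr_t end;
};

class BumpAllocator {
 public:
  explicit BumpAllocator(std::vector<Chunk> chunks)
      : chunks_(std::move(chunks)),
        current_chunk_(0),
        high_water_(chunks_.empty() ? 0 : chunks_[0].start) {}

  // Returns the start address of `size` bytes, or 0 when the reservation
  // is exhausted.
  uintptr_t Allocate(size_t size) {
    while (current_chunk_ < chunks_.size()) {
      if (high_water_ + size <= chunks_[current_chunk_].end) {
        uintptr_t address = high_water_;
        high_water_ += size;  // bump the pointer
        return address;
      }
      // Current chunk exhausted: continue in the next reserved chunk.
      ++current_chunk_;
      if (current_chunk_ < chunks_.size()) {
        high_water_ = chunks_[current_chunk_].start;
      }
    }
    return 0;
  }

 private:
  std::vector<Chunk> chunks_;
  size_t current_chunk_;
  uintptr_t high_water_;
};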
859 830
860 831
861 bool Deserializer::ReadData(Object** current, Object** limit, int source_space, 832 void Deserializer::ReadData(Object** current, Object** limit, int source_space,
862 Address current_object_address) { 833 Address current_object_address) {
863 Isolate* const isolate = isolate_; 834 Isolate* const isolate = isolate_;
864 // Write barrier support costs around 1% in startup time. In fact there 835 // Write barrier support costs around 1% in startup time. In fact there
865 // are no new space objects in current boot snapshots, so it's not needed, 836 // are no new space objects in current boot snapshots, so it's not needed,
866 // but that may change. 837 // but that may change.
867 bool write_barrier_needed = 838 bool write_barrier_needed =
868 (current_object_address != NULL && source_space != NEW_SPACE && 839 (current_object_address != NULL && source_space != NEW_SPACE &&
869 source_space != CODE_SPACE); 840 source_space != CODE_SPACE);
870 while (current < limit) { 841 while (current < limit) {
871 byte data = source_.Get(); 842 byte data = source_.Get();
(...skipping 236 matching lines...)
1108 const Heap::Reservation& reservation = reservations_[space]; 1079 const Heap::Reservation& reservation = reservations_[space];
1109 // Make sure the current chunk is indeed exhausted. 1080 // Make sure the current chunk is indeed exhausted.
1110 CHECK_EQ(reservation[chunk_index].end, high_water_[space]); 1081 CHECK_EQ(reservation[chunk_index].end, high_water_[space]);
1111 // Move to next reserved chunk. 1082 // Move to next reserved chunk.
1112 chunk_index = ++current_chunk_[space]; 1083 chunk_index = ++current_chunk_[space];
1113 CHECK_LT(chunk_index, reservation.length()); 1084 CHECK_LT(chunk_index, reservation.length());
1114 high_water_[space] = reservation[chunk_index].start; 1085 high_water_[space] = reservation[chunk_index].start;
1115 break; 1086 break;
1116 } 1087 }
1117 1088
1118 case kDeferred: {
1119 // Deferred can only occur right after the heap object header.
1120 DCHECK(current == reinterpret_cast<Object**>(current_object_address +
1121 kPointerSize));
1122 current = limit;
1123 return false;
1124 }
1125
1126 case kSynchronize: 1089 case kSynchronize:
1127 // If we get here then that indicates that you have a mismatch between 1090 // If we get here then that indicates that you have a mismatch between
1128 // the number of GC roots when serializing and deserializing. 1091 // the number of GC roots when serializing and deserializing.
1129 CHECK(false); 1092 CHECK(false);
1130 break; 1093 break;
1131 1094
1132 case kNativesStringResource: { 1095 case kNativesStringResource: {
1133 DCHECK(!isolate_->heap()->deserialization_complete()); 1096 DCHECK(!isolate_->heap()->deserialization_complete());
1134 int index = source_.Get(); 1097 int index = source_.Get();
1135 Vector<const char> source_vector = Natives::GetScriptSource(index); 1098 Vector<const char> source_vector = Natives::GetScriptSource(index);
(...skipping 86 matching lines...)
1222 } 1185 }
1223 1186
1224 #undef SIXTEEN_CASES 1187 #undef SIXTEEN_CASES
1225 #undef FOUR_CASES 1188 #undef FOUR_CASES
1226 1189
1227 default: 1190 default:
1228 CHECK(false); 1191 CHECK(false);
1229 } 1192 }
1230 } 1193 }
1231 CHECK_EQ(limit, current); 1194 CHECK_EQ(limit, current);
1232 return true;
1233 } 1195 }
1234 1196
1235 1197
1236 Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink) 1198 Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink)
1237 : isolate_(isolate), 1199 : isolate_(isolate),
1238 sink_(sink), 1200 sink_(sink),
1239 external_reference_encoder_(isolate), 1201 external_reference_encoder_(isolate),
1240 root_index_map_(isolate), 1202 root_index_map_(isolate),
1241 recursion_depth_(0),
1242 code_address_map_(NULL), 1203 code_address_map_(NULL),
1243 large_objects_total_size_(0), 1204 large_objects_total_size_(0),
1244 seen_large_objects_index_(0) { 1205 seen_large_objects_index_(0) {
1245 // The serializer is meant to be used only to generate initial heap images 1206 // The serializer is meant to be used only to generate initial heap images
1246 // from a context in which there is only one isolate. 1207 // from a context in which there is only one isolate.
1247 for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) { 1208 for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) {
1248 pending_chunk_[i] = 0; 1209 pending_chunk_[i] = 0;
1249 max_chunk_size_[i] = static_cast<uint32_t>( 1210 max_chunk_size_[i] = static_cast<uint32_t>(
1250 MemoryAllocator::PageAreaSize(static_cast<AllocationSpace>(i))); 1211 MemoryAllocator::PageAreaSize(static_cast<AllocationSpace>(i)));
1251 } 1212 }
(...skipping 55 matching lines...)
1307 PrintF("%10d %10" V8_PTR_PREFIX "d %s\n", instance_type_count_[Name], \ 1268 PrintF("%10d %10" V8_PTR_PREFIX "d %s\n", instance_type_count_[Name], \
1308 instance_type_size_[Name], #Name); \ 1269 instance_type_size_[Name], #Name); \
1309 } 1270 }
1310 INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE) 1271 INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
1311 #undef PRINT_INSTANCE_TYPE 1272 #undef PRINT_INSTANCE_TYPE
1312 PrintF("\n"); 1273 PrintF("\n");
1313 #endif // OBJECT_PRINT 1274 #endif // OBJECT_PRINT
1314 } 1275 }
1315 1276
1316 1277
1317 void Serializer::SerializeDeferredObjects() {
1318 while (deferred_objects_.length() > 0) {
1319 HeapObject* obj = deferred_objects_.RemoveLast();
1320 ObjectSerializer obj_serializer(this, obj, sink_, kPlain, kStartOfObject);
1321 obj_serializer.SerializeDeferred();
1322 }
1323 sink_->Put(kSynchronize, "Finished with deferred objects");
1324 }
1325
1326
1327 void StartupSerializer::SerializeStrongReferences() { 1278 void StartupSerializer::SerializeStrongReferences() {
1328 Isolate* isolate = this->isolate(); 1279 Isolate* isolate = this->isolate();
1329 // No active threads. 1280 // No active threads.
1330 CHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse()); 1281 CHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse());
1331 // No active or weak handles. 1282 // No active or weak handles.
1332 CHECK(isolate->handle_scope_implementer()->blocks()->is_empty()); 1283 CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
1333 CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles()); 1284 CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
1334 CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles()); 1285 CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles());
1335 // We don't support serializing installed extensions. 1286 // We don't support serializing installed extensions.
1336 CHECK(!isolate->has_installed_extensions()); 1287 CHECK(!isolate->has_installed_extensions());
(...skipping 24 matching lines...)
1361 1312
1362 1313
1363 void PartialSerializer::Serialize(Object** o) { 1314 void PartialSerializer::Serialize(Object** o) {
1364 if ((*o)->IsContext()) { 1315 if ((*o)->IsContext()) {
1365 Context* context = Context::cast(*o); 1316 Context* context = Context::cast(*o);
1366 global_object_ = context->global_object(); 1317 global_object_ = context->global_object();
1367 back_reference_map()->AddGlobalProxy(context->global_proxy()); 1318 back_reference_map()->AddGlobalProxy(context->global_proxy());
1368 } 1319 }
1369 VisitPointer(o); 1320 VisitPointer(o);
1370 SerializeOutdatedContextsAsFixedArray(); 1321 SerializeOutdatedContextsAsFixedArray();
1371 SerializeDeferredObjects();
1372 Pad(); 1322 Pad();
1373 } 1323 }
1374 1324
1375 1325
1376 void PartialSerializer::SerializeOutdatedContextsAsFixedArray() { 1326 void PartialSerializer::SerializeOutdatedContextsAsFixedArray() {
1377 int length = outdated_contexts_.length(); 1327 int length = outdated_contexts_.length();
1378 if (length == 0) { 1328 if (length == 0) {
1379 FixedArray* empty = isolate_->heap()->empty_fixed_array(); 1329 FixedArray* empty = isolate_->heap()->empty_fixed_array();
1380 SerializeObject(empty, kPlain, kStartOfObject, 0); 1330 SerializeObject(empty, kPlain, kStartOfObject, 0);
1381 } else { 1331 } else {
1382 // Serialize an imaginary fixed array containing outdated contexts. 1332 // Serialize an imaginary fixed array containing outdated contexts.
1383 int size = FixedArray::SizeFor(length); 1333 int size = FixedArray::SizeFor(length);
1384 Allocate(NEW_SPACE, size); 1334 Allocate(NEW_SPACE, size);
1385 sink_->Put(kNewObject + NEW_SPACE, "emulated FixedArray"); 1335 sink_->Put(kNewObject + NEW_SPACE, "emulated FixedArray");
1386 sink_->PutInt(size >> kObjectAlignmentBits, "FixedArray size in words"); 1336 sink_->PutInt(size >> kObjectAlignmentBits, "FixedArray size in words");
1387 Map* map = isolate_->heap()->fixed_array_map(); 1337 Map* map = isolate_->heap()->fixed_array_map();
1388 SerializeObject(map, kPlain, kStartOfObject, 0); 1338 SerializeObject(map, kPlain, kStartOfObject, 0);
1389 Smi* length_smi = Smi::FromInt(length); 1339 Smi* length_smi = Smi::FromInt(length);
1390 sink_->Put(kOnePointerRawData, "Smi"); 1340 sink_->Put(kOnePointerRawData, "Smi");
1391 for (int i = 0; i < kPointerSize; i++) { 1341 for (int i = 0; i < kPointerSize; i++) {
1392 sink_->Put(reinterpret_cast<byte*>(&length_smi)[i], "Byte"); 1342 sink_->Put(reinterpret_cast<byte*>(&length_smi)[i], "Byte");
1393 } 1343 }
1394 for (int i = 0; i < length; i++) { 1344 for (int i = 0; i < length; i++) {
1395 Context* context = outdated_contexts_[i]; 1345 BackReference back_ref = outdated_contexts_[i];
1396 BackReference back_reference = back_reference_map_.Lookup(context); 1346 DCHECK(BackReferenceIsAlreadyAllocated(back_ref));
1397 sink_->Put(kBackref + back_reference.space(), "BackRef"); 1347 sink_->Put(kBackref + back_ref.space(), "BackRef");
1398 PutBackReference(context, back_reference); 1348 sink_->PutInt(back_ref.reference(), "BackRefValue");
1399 } 1349 }
1400 } 1350 }
1401 } 1351 }
1402 1352
1403 1353
1404 bool Serializer::ShouldBeSkipped(Object** current) { 1354 bool Serializer::ShouldBeSkipped(Object** current) {
1405 Object** roots = isolate()->heap()->roots_array_start(); 1355 Object** roots = isolate()->heap()->roots_array_start();
1406 return current == &roots[Heap::kStoreBufferTopRootIndex] 1356 return current == &roots[Heap::kStoreBufferTopRootIndex]
1407 || current == &roots[Heap::kStackLimitRootIndex] 1357 || current == &roots[Heap::kStackLimitRootIndex]
1408 || current == &roots[Heap::kRealStackLimitRootIndex]; 1358 || current == &roots[Heap::kRealStackLimitRootIndex];
(...skipping 142 matching lines...)
1551 } 1501 }
1552 1502
1553 AllocationSpace space = back_reference.space(); 1503 AllocationSpace space = back_reference.space();
1554 if (skip == 0) { 1504 if (skip == 0) {
1555 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRef"); 1505 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRef");
1556 } else { 1506 } else {
1557 sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space, 1507 sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space,
1558 "BackRefWithSkip"); 1508 "BackRefWithSkip");
1559 sink_->PutInt(skip, "BackRefSkipDistance"); 1509 sink_->PutInt(skip, "BackRefSkipDistance");
1560 } 1510 }
1561 PutBackReference(obj, back_reference); 1511 DCHECK(BackReferenceIsAlreadyAllocated(back_reference));
1512 sink_->PutInt(back_reference.reference(), "BackRefValue");
1513
1514 hot_objects_.Add(obj);
1562 } 1515 }
1563 return true; 1516 return true;
1564 } 1517 }
1565 return false; 1518 return false;
1566 } 1519 }
1567 1520
1568 1521
1569 void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code, 1522 void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
1570 WhereToPoint where_to_point, int skip) { 1523 WhereToPoint where_to_point, int skip) {
1571 DCHECK(!obj->IsJSFunction()); 1524 DCHECK(!obj->IsJSFunction());
(...skipping 15 matching lines...)
1587 1540
1588 FlushSkip(skip); 1541 FlushSkip(skip);
1589 1542
1590 // Object has not yet been serialized. Serialize it here. 1543 // Object has not yet been serialized. Serialize it here.
1591 ObjectSerializer object_serializer(this, obj, sink_, how_to_code, 1544 ObjectSerializer object_serializer(this, obj, sink_, how_to_code,
1592 where_to_point); 1545 where_to_point);
1593 object_serializer.Serialize(); 1546 object_serializer.Serialize();
1594 } 1547 }
1595 1548
1596 1549
1597 void StartupSerializer::SerializeWeakReferencesAndDeferred() { 1550 void StartupSerializer::SerializeWeakReferences() {
1598 // This phase comes right after the serialization (of the snapshot). 1551 // This phase comes right after the serialization (of the snapshot).
1599 // After we have done the partial serialization the partial snapshot cache 1552 // After we have done the partial serialization the partial snapshot cache
1600 // will contain some references needed to decode the partial snapshot. We 1553 // will contain some references needed to decode the partial snapshot. We
1601 // add one entry with 'undefined' which is the sentinel that the deserializer 1554 // add one entry with 'undefined' which is the sentinel that the deserializer
1602 // uses to know it is done deserializing the array. 1555 // uses to know it is done deserializing the array.
1603 Object* undefined = isolate()->heap()->undefined_value(); 1556 Object* undefined = isolate()->heap()->undefined_value();
1604 VisitPointer(&undefined); 1557 VisitPointer(&undefined);
1605 isolate()->heap()->IterateWeakRoots(this, VISIT_ALL); 1558 isolate()->heap()->IterateWeakRoots(this, VISIT_ALL);
1606 SerializeDeferredObjects();
1607 Pad(); 1559 Pad();
1608 } 1560 }
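The 'undefined' sentinel described in the comment above means the deserializer never needs an explicit entry count for the partial snapshot cache: it simply keeps reading entries until it sees the sentinel. A tiny illustrative sketch of that convention, with std::optional playing the role of the undefined marker (not real V8 code):

#include <functional>
#include <optional>
#include <vector>

// read_next() stands in for "decode one more cache entry from the
// stream"; std::nullopt plays the role of the 'undefined' sentinel.
std::vector<int> ReadCacheUntilSentinel(
    const std::function<std::optional<int>()>& read_next) {
  std::vector<int> cache;
  while (std::optional<int> entry = read_next()) {
    cache.push_back(*entry);
  }
  return cache;
}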
1609 1561
1610 1562
1611 void Serializer::PutRoot(int root_index, 1563 void Serializer::PutRoot(int root_index,
1612 HeapObject* object, 1564 HeapObject* object,
1613 SerializerDeserializer::HowToCode how_to_code, 1565 SerializerDeserializer::HowToCode how_to_code,
1614 SerializerDeserializer::WhereToPoint where_to_point, 1566 SerializerDeserializer::WhereToPoint where_to_point,
1615 int skip) { 1567 int skip) {
1616 if (FLAG_trace_serializer) { 1568 if (FLAG_trace_serializer) {
(...skipping 12 matching lines...)
1629 sink_->PutInt(skip, "SkipInPutRoot"); 1581 sink_->PutInt(skip, "SkipInPutRoot");
1630 } 1582 }
1631 } else { 1583 } else {
1632 FlushSkip(skip); 1584 FlushSkip(skip);
1633 sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization"); 1585 sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
1634 sink_->PutInt(root_index, "root_index"); 1586 sink_->PutInt(root_index, "root_index");
1635 } 1587 }
1636 } 1588 }
1637 1589
1638 1590
1639 void Serializer::PutBackReference(HeapObject* object, BackReference reference) {
1640 DCHECK(BackReferenceIsAlreadyAllocated(reference));
1641 sink_->PutInt(reference.reference(), "BackRefValue");
1642 hot_objects_.Add(object);
1643 }
1644
1645
1646 void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code, 1591 void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
1647 WhereToPoint where_to_point, int skip) { 1592 WhereToPoint where_to_point, int skip) {
1648 if (obj->IsMap()) { 1593 if (obj->IsMap()) {
1649 // The code-caches link to context-specific code objects, which 1594 // The code-caches link to context-specific code objects, which
1650 // the startup and context serializers cannot currently handle. 1595 // the startup and context serializers cannot currently handle.
1651 DCHECK(Map::cast(obj)->code_cache() == obj->GetHeap()->empty_fixed_array()); 1596 DCHECK(Map::cast(obj)->code_cache() == obj->GetHeap()->empty_fixed_array());
1652 } 1597 }
1653 1598
1654 // Replace typed arrays by undefined. 1599 // Replace typed arrays by undefined.
1655 if (obj->IsJSTypedArray()) obj = isolate_->heap()->undefined_value(); 1600 if (obj->IsJSTypedArray()) obj = isolate_->heap()->undefined_value();
(...skipping 33 matching lines...)
1689 } 1634 }
1690 1635
1691 // Object has not yet been serialized. Serialize it here. 1636 // Object has not yet been serialized. Serialize it here.
1692 ObjectSerializer serializer(this, obj, sink_, how_to_code, where_to_point); 1637 ObjectSerializer serializer(this, obj, sink_, how_to_code, where_to_point);
1693 serializer.Serialize(); 1638 serializer.Serialize();
1694 1639
1695 if (obj->IsContext() && 1640 if (obj->IsContext() &&
1696 Context::cast(obj)->global_object() == global_object_) { 1641 Context::cast(obj)->global_object() == global_object_) {
1697 // Context refers to the current global object. This reference will 1642 // Context refers to the current global object. This reference will
1698 // become outdated after deserialization. 1643 // become outdated after deserialization.
1699 outdated_contexts_.Add(Context::cast(obj)); 1644 BackReference back_reference = back_reference_map_.Lookup(obj);
1645 DCHECK(back_reference.is_valid());
1646 outdated_contexts_.Add(back_reference);
1700 } 1647 }
1701 } 1648 }
1702 1649
1703 1650
1704 void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space, 1651 void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space,
1705 int size, Map* map) { 1652 int size, Map* map) {
1706 if (serializer_->code_address_map_) { 1653 if (serializer_->code_address_map_) {
1707 const char* code_name = 1654 const char* code_name =
1708 serializer_->code_address_map_->Lookup(object_->address()); 1655 serializer_->code_address_map_->Lookup(object_->address());
1709 LOG(serializer_->isolate_, 1656 LOG(serializer_->isolate_,
(...skipping 146 matching lines...)
1856 int size = object_->Size(); 1803 int size = object_->Size();
1857 Map* map = object_->map(); 1804 Map* map = object_->map();
1858 AllocationSpace space = 1805 AllocationSpace space =
1859 MemoryChunk::FromAddress(object_->address())->owner()->identity(); 1806 MemoryChunk::FromAddress(object_->address())->owner()->identity();
1860 SerializePrologue(space, size, map); 1807 SerializePrologue(space, size, map);
1861 1808
1862 // Serialize the rest of the object. 1809 // Serialize the rest of the object.
1863 CHECK_EQ(0, bytes_processed_so_far_); 1810 CHECK_EQ(0, bytes_processed_so_far_);
1864 bytes_processed_so_far_ = kPointerSize; 1811 bytes_processed_so_far_ = kPointerSize;
1865 1812
1866 RecursionScope recursion(serializer_);
1867 // Objects that are immediately post processed during deserialization
1868 // cannot be deferred, since post processing requires the object content.
1869 if (recursion.ExceedsMaximum() && CanBeDeferred(object_)) {
1870 serializer_->QueueDeferredObject(object_);
1871 sink_->Put(kDeferred, "Deferring object content");
1872 return;
1873 }
1874
1875 object_->IterateBody(map->instance_type(), size, this); 1813 object_->IterateBody(map->instance_type(), size, this);
1876 OutputRawData(object_->address() + size); 1814 OutputRawData(object_->address() + size);
1877 } 1815 }
1878
1879
1880 void Serializer::ObjectSerializer::SerializeDeferred() {
1881 if (FLAG_trace_serializer) {
1882 PrintF(" Encoding deferred heap object: ");
1883 object_->ShortPrint();
1884 PrintF("\n");
1885 }
1886
1887 int size = object_->Size();
1888 Map* map = object_->map();
1889 BackReference reference = serializer_->back_reference_map()->Lookup(object_);
1890
1891 // Serialize the rest of the object.
1892 CHECK_EQ(0, bytes_processed_so_far_);
1893 bytes_processed_so_far_ = kPointerSize;
1894
1895 sink_->Put(kNewObject + reference.space(), "deferred object");
1896 serializer_->PutBackReference(object_, reference);
1897 sink_->PutInt(size >> kPointerSizeLog2, "deferred object size");
1898
1899 object_->IterateBody(map->instance_type(), size, this);
1900 OutputRawData(object_->address() + size);
1901 }
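Taken together, the RecursionScope check in Serialize() above, QueueDeferredObject(), SerializeDeferredObjects() and SerializeDeferred() form the mechanism this CL reverts: past a maximum recursion depth, only the object itself is emitted in place and its body is queued, then written out iteratively afterwards so the serializer's stack depth stays bounded. A simplified sketch of that pattern with stand-in types (no back references, opcodes or space bookkeeping):

#include <vector>

struct Obj {
  std::vector<Obj*> children;
};

class DeferringSerializer {
 public:
  void SerializeRoot(Obj* root) {
    SerializeObject(root);
    // Drain the queue iteratively; queued bodies never deepen the stack
    // beyond kMaxDepth.
    while (!deferred_.empty()) {
      Obj* obj = deferred_.back();
      deferred_.pop_back();
      SerializeBody(obj);
    }
  }

 private:
  static const int kMaxDepth = 4;

  void SerializeObject(Obj* obj) {
    ++depth_;
    EmitHeader(obj);
    if (depth_ > kMaxDepth) {
      deferred_.push_back(obj);  // body is written later, not recursively
    } else {
      SerializeBody(obj);
    }
    --depth_;
  }

  void SerializeBody(Obj* obj) {
    for (Obj* child : obj->children) SerializeObject(child);
  }

  void EmitHeader(Obj*) { /* emit the object's allocation/header here */ }

  int depth_ = 0;
  std::vector<Obj*> deferred_;
};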
1902 1816
1903 1817
1904 void Serializer::ObjectSerializer::VisitPointers(Object** start, 1818 void Serializer::ObjectSerializer::VisitPointers(Object** start,
1905 Object** end) { 1819 Object** end) {
1906 Object** current = start; 1820 Object** current = start;
1907 while (current < end) { 1821 while (current < end) {
1908 while (current < end && (*current)->IsSmi()) current++; 1822 while (current < end && (*current)->IsSmi()) current++;
1909 if (current < end) OutputRawData(reinterpret_cast<Address>(current)); 1823 if (current < end) OutputRawData(reinterpret_cast<Address>(current));
1910 1824
1911 while (current < end && !(*current)->IsSmi()) { 1825 while (current < end && !(*current)->IsSmi()) {
(...skipping 301 matching lines...)
2213 if (script->IsScript()) Script::cast(script)->name()->ShortPrint(); 2127 if (script->IsScript()) Script::cast(script)->name()->ShortPrint();
2214 PrintF("]\n"); 2128 PrintF("]\n");
2215 } 2129 }
2216 2130
2217 // Serialize code object. 2131 // Serialize code object.
2218 SnapshotByteSink sink(info->code()->CodeSize() * 2); 2132 SnapshotByteSink sink(info->code()->CodeSize() * 2);
2219 CodeSerializer cs(isolate, &sink, *source, info->code()); 2133 CodeSerializer cs(isolate, &sink, *source, info->code());
2220 DisallowHeapAllocation no_gc; 2134 DisallowHeapAllocation no_gc;
2221 Object** location = Handle<Object>::cast(info).location(); 2135 Object** location = Handle<Object>::cast(info).location();
2222 cs.VisitPointer(location); 2136 cs.VisitPointer(location);
2223 cs.SerializeDeferredObjects();
2224 cs.Pad(); 2137 cs.Pad();
2225 2138
2226 SerializedCodeData data(sink.data(), cs); 2139 SerializedCodeData data(sink.data(), cs);
2227 ScriptData* script_data = data.GetScriptData(); 2140 ScriptData* script_data = data.GetScriptData();
2228 2141
2229 if (FLAG_profile_deserialization) { 2142 if (FLAG_profile_deserialization) {
2230 double ms = timer.Elapsed().InMillisecondsF(); 2143 double ms = timer.Elapsed().InMillisecondsF();
2231 int length = script_data->length(); 2144 int length = script_data->length();
2232 PrintF("[Serializing to %d bytes took %0.3f ms]\n", length, ms); 2145 PrintF("[Serializing to %d bytes took %0.3f ms]\n", length, ms);
2233 } 2146 }
(...skipping 445 matching lines...)
2679 DisallowHeapAllocation no_gc; 2592 DisallowHeapAllocation no_gc;
2680 SerializedCodeData* scd = new SerializedCodeData(cached_data); 2593 SerializedCodeData* scd = new SerializedCodeData(cached_data);
2681 SanityCheckResult r = scd->SanityCheck(isolate, source); 2594 SanityCheckResult r = scd->SanityCheck(isolate, source);
2682 if (r == CHECK_SUCCESS) return scd; 2595 if (r == CHECK_SUCCESS) return scd;
2683 cached_data->Reject(); 2596 cached_data->Reject();
2684 source->GetIsolate()->counters()->code_cache_reject_reason()->AddSample(r); 2597 source->GetIsolate()->counters()->code_cache_reject_reason()->AddSample(r);
2685 delete scd; 2598 delete scd;
2686 return NULL; 2599 return NULL;
2687 } 2600 }
2688 } } // namespace v8::internal 2601 } } // namespace v8::internal