Chromium Code Reviews

Side by Side Diff: src/snapshot/serialize.cc

Issue 1181053003: Version 4.3.61.28 (cherry-pick) (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@4.3
Patch Set: Created 5 years, 6 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #include "src/accessors.h" 7 #include "src/accessors.h"
8 #include "src/api.h" 8 #include "src/api.h"
9 #include "src/base/platform/platform.h" 9 #include "src/base/platform/platform.h"
10 #include "src/bootstrapper.h" 10 #include "src/bootstrapper.h"
(...skipping 551 matching lines...)
562 Initialize(isolate); 562 Initialize(isolate);
563 if (!ReserveSpace()) V8::FatalProcessOutOfMemory("deserializing context"); 563 if (!ReserveSpace()) V8::FatalProcessOutOfMemory("deserializing context");
564 // No active threads. 564 // No active threads.
565 DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse()); 565 DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse());
566 // No active handles. 566 // No active handles.
567 DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty()); 567 DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty());
568 isolate_->heap()->IterateSmiRoots(this); 568 isolate_->heap()->IterateSmiRoots(this);
569 isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG); 569 isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
570 isolate_->heap()->RepairFreeListsAfterDeserialization(); 570 isolate_->heap()->RepairFreeListsAfterDeserialization();
571 isolate_->heap()->IterateWeakRoots(this, VISIT_ALL); 571 isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
572 DeserializeDeferredObjects();
572 573
573 isolate_->heap()->set_native_contexts_list( 574 isolate_->heap()->set_native_contexts_list(
574 isolate_->heap()->undefined_value()); 575 isolate_->heap()->undefined_value());
575 isolate_->heap()->set_array_buffers_list( 576 isolate_->heap()->set_array_buffers_list(
576 isolate_->heap()->undefined_value()); 577 isolate_->heap()->undefined_value());
577 isolate->heap()->set_new_array_buffer_views_list( 578 isolate->heap()->set_new_array_buffer_views_list(
578 isolate_->heap()->undefined_value()); 579 isolate_->heap()->undefined_value());
579 580
580 // The allocation site list is built during root iteration, but if no sites 581 // The allocation site list is built during root iteration, but if no sites
581 // were encountered then it needs to be initialized to undefined. 582 // were encountered then it needs to be initialized to undefined.
(...skipping 33 matching lines...)
615 616
616 DisallowHeapAllocation no_gc; 617 DisallowHeapAllocation no_gc;
617 // Keep track of the code space start and end pointers in case new 618 // Keep track of the code space start and end pointers in case new
618 // code objects were unserialized 619 // code objects were unserialized
619 OldSpace* code_space = isolate_->heap()->code_space(); 620 OldSpace* code_space = isolate_->heap()->code_space();
620 Address start_address = code_space->top(); 621 Address start_address = code_space->top();
621 Object* root; 622 Object* root;
622 Object* outdated_contexts; 623 Object* outdated_contexts;
623 VisitPointer(&root); 624 VisitPointer(&root);
624 VisitPointer(&outdated_contexts); 625 VisitPointer(&outdated_contexts);
626 DeserializeDeferredObjects();
625 627
626 // There's no code deserialized here. If this assert fires 628 // There's no code deserialized here. If this assert fires
627 // then that's changed and logging should be added to notify 629 // then that's changed and logging should be added to notify
628 // the profiler et al of the new code. 630 // the profiler et al of the new code.
629 CHECK_EQ(start_address, code_space->top()); 631 CHECK_EQ(start_address, code_space->top());
630 CHECK(outdated_contexts->IsFixedArray()); 632 CHECK(outdated_contexts->IsFixedArray());
631 *outdated_contexts_out = 633 *outdated_contexts_out =
632 Handle<FixedArray>(FixedArray::cast(outdated_contexts), isolate); 634 Handle<FixedArray>(FixedArray::cast(outdated_contexts), isolate);
633 return Handle<Object>(root, isolate); 635 return Handle<Object>(root, isolate);
634 } 636 }
635 637
636 638
637 MaybeHandle<SharedFunctionInfo> Deserializer::DeserializeCode( 639 MaybeHandle<SharedFunctionInfo> Deserializer::DeserializeCode(
638 Isolate* isolate) { 640 Isolate* isolate) {
639 Initialize(isolate); 641 Initialize(isolate);
640 if (!ReserveSpace()) { 642 if (!ReserveSpace()) {
641 return Handle<SharedFunctionInfo>(); 643 return Handle<SharedFunctionInfo>();
642 } else { 644 } else {
643 deserializing_user_code_ = true; 645 deserializing_user_code_ = true;
644 DisallowHeapAllocation no_gc; 646 DisallowHeapAllocation no_gc;
645 Object* root; 647 Object* root;
646 VisitPointer(&root); 648 VisitPointer(&root);
649 DeserializeDeferredObjects();
647 return Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(root)); 650 return Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(root));
648 } 651 }
649 } 652 }
650 653
651 654
652 Deserializer::~Deserializer() { 655 Deserializer::~Deserializer() {
653 // TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed. 656 // TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed.
654 // DCHECK(source_.AtEOF()); 657 // DCHECK(source_.AtEOF());
655 attached_objects_.Dispose(); 658 attached_objects_.Dispose();
656 } 659 }
657 660
658 661
659 // This is called on the roots. It is the driver of the deserialization 662 // This is called on the roots. It is the driver of the deserialization
660 // process. It is also called on the body of each function. 663 // process. It is also called on the body of each function.
661 void Deserializer::VisitPointers(Object** start, Object** end) { 664 void Deserializer::VisitPointers(Object** start, Object** end) {
662 // The space must be new space. Any other space would cause ReadChunk to try 665 // The space must be new space. Any other space would cause ReadChunk to try
663 // to update the remembered set using NULL as the address. 666 // to update the remembered set using NULL as the address.
664 ReadData(start, end, NEW_SPACE, NULL); 667 ReadData(start, end, NEW_SPACE, NULL);
665 } 668 }
666 669
667 670
668 void Deserializer::RelinkAllocationSite(AllocationSite* site) { 671 void Deserializer::DeserializeDeferredObjects() {
669 if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) { 672 for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
670 site->set_weak_next(isolate_->heap()->undefined_value()); 673 int space = code & kSpaceMask;
671 } else { 674 DCHECK(space <= kNumberOfSpaces);
672 site->set_weak_next(isolate_->heap()->allocation_sites_list()); 675 DCHECK(code - space == kNewObject);
676 HeapObject* object = GetBackReferencedObject(space);
677 int size = source_.GetInt() << kPointerSizeLog2;
678 Address obj_address = object->address();
679 Object** start = reinterpret_cast<Object**>(obj_address + kPointerSize);
680 Object** end = reinterpret_cast<Object**>(obj_address + size);
681 bool filled = ReadData(start, end, space, obj_address);
682 CHECK(filled);
683 if (object->IsAllocationSite()) {
684 RelinkAllocationSite(AllocationSite::cast(object));
685 }
673 } 686 }
674 isolate_->heap()->set_allocation_sites_list(site);
675 } 687 }
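
Note on the new DeserializeDeferredObjects() loop above: it consumes back-to-back records until it reaches kSynchronize, where each record is a kNewObject tag carrying the space, a back reference to the already-allocated object, the body size in words, and then the body itself. The standalone sketch below mirrors that stream shape; the tag values are invented for illustration and are not V8's actual serializer constants.

// Illustrative decoder for the deferred-object records consumed by the loop
// above: (kNewObject + space, back reference, size in words, body words)*,
// terminated by kSynchronize. Tag values here are assumptions, not V8's
// actual serializer byte codes.
#include <cstddef>
#include <cstdio>
#include <vector>

namespace {
constexpr int kSynchronize = 0xFF;  // assumed terminator tag
constexpr int kSpaceMask = 0x07;    // assumed: low bits of the tag name a space
constexpr int kNewObject = 0x00;    // assumed base tag

struct Record { int space; int back_ref; int size_in_words; };

std::vector<Record> DecodeDeferred(const std::vector<int>& stream) {
  std::vector<Record> records;
  std::size_t pos = 0;
  // Same shape as DeserializeDeferredObjects(): read tags until kSynchronize.
  for (int code = stream[pos++]; code != kSynchronize; code = stream[pos++]) {
    Record r;
    r.space = code & kSpaceMask;      // code - space must equal kNewObject
    r.back_ref = stream[pos++];       // which already-allocated object to fill
    r.size_in_words = stream[pos++];  // length of the deferred body
    pos += static_cast<std::size_t>(r.size_in_words);  // skip the body payload
    records.push_back(r);
  }
  return records;
}
}  // namespace

int main() {
  // One deferred object in "space 2", back reference 7, three body words.
  const std::vector<int> stream = {kNewObject + 2, 7, 3, 11, 22, 33,
                                   kSynchronize};
  for (const Record& r : DecodeDeferred(stream))
    std::printf("space=%d backref=%d words=%d\n", r.space, r.back_ref,
                r.size_in_words);
}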
676 688
677 689
678 // Used to insert a deserialized internalized string into the string table. 690 // Used to insert a deserialized internalized string into the string table.
679 class StringTableInsertionKey : public HashTableKey { 691 class StringTableInsertionKey : public HashTableKey {
680 public: 692 public:
681 explicit StringTableInsertionKey(String* string) 693 explicit StringTableInsertionKey(String* string)
682 : string_(string), hash_(HashForObject(string)) { 694 : string_(string), hash_(HashForObject(string)) {
683 DCHECK(string->IsInternalizedString()); 695 DCHECK(string->IsInternalizedString());
684 } 696 }
(...skipping 15 matching lines...)
700 MUST_USE_RESULT virtual Handle<Object> AsHandle(Isolate* isolate) 712 MUST_USE_RESULT virtual Handle<Object> AsHandle(Isolate* isolate)
701 OVERRIDE { 713 OVERRIDE {
702 return handle(string_, isolate); 714 return handle(string_, isolate);
703 } 715 }
704 716
705 String* string_; 717 String* string_;
706 uint32_t hash_; 718 uint32_t hash_;
707 }; 719 };
708 720
709 721
710 HeapObject* Deserializer::ProcessNewObjectFromSerializedCode(HeapObject* obj) { 722 HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj) {
723 DCHECK(deserializing_user_code());
711 if (obj->IsString()) { 724 if (obj->IsString()) {
712 String* string = String::cast(obj); 725 String* string = String::cast(obj);
713 // Uninitialize hash field as the hash seed may have changed. 726 // Uninitialize hash field as the hash seed may have changed.
714 string->set_hash_field(String::kEmptyHashField); 727 string->set_hash_field(String::kEmptyHashField);
715 if (string->IsInternalizedString()) { 728 if (string->IsInternalizedString()) {
716 DisallowHeapAllocation no_gc; 729 DisallowHeapAllocation no_gc;
717 HandleScope scope(isolate_); 730 HandleScope scope(isolate_);
718 StringTableInsertionKey key(string); 731 StringTableInsertionKey key(string);
719 String* canonical = *StringTable::LookupKey(isolate_, &key); 732 String* canonical = *StringTable::LookupKey(isolate_, &key);
720 string->SetForwardedInternalizedString(canonical); 733 string->SetForwardedInternalizedString(canonical);
721 return canonical; 734 return canonical;
722 } 735 }
723 } else if (obj->IsScript()) { 736 } else if (obj->IsScript()) {
724 Script::cast(obj)->set_id(isolate_->heap()->NextScriptId()); 737 Script::cast(obj)->set_id(isolate_->heap()->NextScriptId());
738 } else {
739 DCHECK(CanBeDeferred(obj));
725 } 740 }
726 return obj; 741 return obj;
727 } 742 }
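
PostProcessNewObject() forwards a freshly deserialized internalized string to the canonical copy already present in the isolate's string table, so that identity comparisons on internalized strings keep holding when code is deserialized into a live heap. A toy hash-consing sketch of the same idea, using standard containers rather than V8's StringTable:

// Toy illustration (not V8 API) of why deserialized internalized strings are
// redirected to the live string table: internalized strings must stay unique
// so that pointer comparison still works after deserialization.
#include <cstdio>
#include <string>
#include <unordered_set>

class StringTable {
 public:
  // Returns the canonical copy, inserting the string if it is new.
  const std::string* LookupOrInsert(const std::string& s) {
    return &*table_.insert(s).first;
  }

 private:
  std::unordered_set<std::string> table_;
};

int main() {
  StringTable table;
  const std::string* live = table.LookupOrInsert("foo");  // already in the heap
  // A freshly deserialized copy of "foo" must be forwarded to the canonical
  // one, otherwise two distinct pointers would represent the same name.
  const std::string* deserialized = table.LookupOrInsert(std::string("foo"));
  std::printf("%s\n", live == deserialized ? "canonicalized" : "duplicated");
}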
728 743
729 744
745 void Deserializer::RelinkAllocationSite(AllocationSite* obj) {
746 DCHECK(obj->IsAllocationSite());
747 // Allocation sites are present in the snapshot, and must be linked into
748 // a list at deserialization time.
749 AllocationSite* site = AllocationSite::cast(obj);
750 // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
751 // as a (weak) root. If this root is relocated correctly,
752 // RelinkAllocationSite() isn't necessary.
753 if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
754 site->set_weak_next(isolate_->heap()->undefined_value());
755 } else {
756 site->set_weak_next(isolate_->heap()->allocation_sites_list());
757 }
758 isolate_->heap()->set_allocation_sites_list(site);
759 }
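
RelinkAllocationSite() is a plain list prepend through the weak_next field, with Smi zero acting as the empty-list sentinel. The same pattern with toy types:

// Illustrative sketch of the relinking pattern: each deserialized site is
// pushed onto the front of a singly linked list via its weak_next field.
// The types here are stand-ins, not V8's.
#include <cstdio>

struct Site {
  int id;
  Site* weak_next = nullptr;
};

struct Heap {
  Site* allocation_sites_list = nullptr;  // empty-list sentinel (Smi 0 in V8)
};

void RelinkAllocationSite(Heap* heap, Site* site) {
  // The new site points at the old head (or the empty sentinel), then becomes
  // the new head, so the list ends up ordered newest-first.
  site->weak_next = heap->allocation_sites_list;
  heap->allocation_sites_list = site;
}

int main() {
  Heap heap;
  Site a{1}, b{2};
  RelinkAllocationSite(&heap, &a);
  RelinkAllocationSite(&heap, &b);
  for (Site* s = heap.allocation_sites_list; s; s = s->weak_next)
    std::printf("site %d\n", s->id);  // prints 2 then 1
}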
760
761
730 HeapObject* Deserializer::GetBackReferencedObject(int space) { 762 HeapObject* Deserializer::GetBackReferencedObject(int space) {
731 HeapObject* obj; 763 HeapObject* obj;
732 BackReference back_reference(source_.GetInt()); 764 BackReference back_reference(source_.GetInt());
733 if (space == LO_SPACE) { 765 if (space == LO_SPACE) {
734 CHECK(back_reference.chunk_index() == 0); 766 CHECK(back_reference.chunk_index() == 0);
735 uint32_t index = back_reference.large_object_index(); 767 uint32_t index = back_reference.large_object_index();
736 obj = deserialized_large_objects_[index]; 768 obj = deserialized_large_objects_[index];
737 } else { 769 } else {
738 DCHECK(space < kNumberOfPreallocatedSpaces); 770 DCHECK(space < kNumberOfPreallocatedSpaces);
739 uint32_t chunk_index = back_reference.chunk_index(); 771 uint32_t chunk_index = back_reference.chunk_index();
(...skipping 35 matching lines...)
775 obj = isolate_->heap()->DoubleAlignForDeserialization(obj, reserved_size); 807 obj = isolate_->heap()->DoubleAlignForDeserialization(obj, reserved_size);
776 address = obj->address(); 808 address = obj->address();
777 } 809 }
778 810
779 isolate_->heap()->OnAllocationEvent(obj, size); 811 isolate_->heap()->OnAllocationEvent(obj, size);
780 Object** current = reinterpret_cast<Object**>(address); 812 Object** current = reinterpret_cast<Object**>(address);
781 Object** limit = current + (size >> kPointerSizeLog2); 813 Object** limit = current + (size >> kPointerSizeLog2);
782 if (FLAG_log_snapshot_positions) { 814 if (FLAG_log_snapshot_positions) {
783 LOG(isolate_, SnapshotPositionEvent(address, source_.position())); 815 LOG(isolate_, SnapshotPositionEvent(address, source_.position()));
784 } 816 }
785 ReadData(current, limit, space_number, address);
786 817
787 // TODO(mvstanton): consider treating the heap()->allocation_sites_list() 818 if (ReadData(current, limit, space_number, address)) {
788 // as a (weak) root. If this root is relocated correctly, 819 // Only post process if object content has not been deferred.
789 // RelinkAllocationSite() isn't necessary. 820 if (obj->IsAllocationSite()) {
790 if (obj->IsAllocationSite()) RelinkAllocationSite(AllocationSite::cast(obj)); 821 RelinkAllocationSite(AllocationSite::cast(obj));
822 }
791 823
792 // Fix up strings from serialized user code. 824 if (deserializing_user_code()) obj = PostProcessNewObject(obj);
793 if (deserializing_user_code()) obj = ProcessNewObjectFromSerializedCode(obj); 825 }
794 826
795 Object* write_back_obj = obj; 827 Object* write_back_obj = obj;
796 UnalignedCopy(write_back, &write_back_obj); 828 UnalignedCopy(write_back, &write_back_obj);
797 #ifdef DEBUG 829 #ifdef DEBUG
798 if (obj->IsCode()) { 830 if (obj->IsCode()) {
799 DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE); 831 DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE);
800 #ifdef VERIFY_HEAP
801 obj->ObjectVerify();
802 #endif // VERIFY_HEAP
803 } else { 832 } else {
804 DCHECK(space_number != CODE_SPACE); 833 DCHECK(space_number != CODE_SPACE);
805 } 834 }
806 #endif // DEBUG 835 #endif // DEBUG
807 } 836 }
808 837
809 838
810 // We know the space requirements before deserialization and can 839 // We know the space requirements before deserialization and can
811 // pre-allocate that reserved space. During deserialization, all we need 840 // pre-allocate that reserved space. During deserialization, all we need
812 // to do is to bump up the pointer for each space in the reserved 841 // to do is to bump up the pointer for each space in the reserved
(...skipping 23 matching lines...)
836 // Assert that the current reserved chunk is still big enough. 865 // Assert that the current reserved chunk is still big enough.
837 const Heap::Reservation& reservation = reservations_[space_index]; 866 const Heap::Reservation& reservation = reservations_[space_index];
838 int chunk_index = current_chunk_[space_index]; 867 int chunk_index = current_chunk_[space_index];
839 CHECK_LE(high_water_[space_index], reservation[chunk_index].end); 868 CHECK_LE(high_water_[space_index], reservation[chunk_index].end);
840 #endif 869 #endif
841 return address; 870 return address;
842 } 871 }
843 } 872 }
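
Allocation during deserialization is bump-pointer arithmetic over chunks that the serializer reserved up front; when the current chunk is exactly exhausted, the high-water mark moves to the next reserved chunk, which is what the CHECKs above verify. A minimal sketch of such an allocator, with invented types:

// Minimal sketch (not V8 code) of bump-pointer allocation out of
// pre-reserved chunks: hand out addresses from the current chunk and move to
// the next reserved chunk when the current one is exhausted.
#include <cassert>
#include <cstdio>
#include <vector>

struct Chunk { char* start; char* end; };

class ReservedSpace {
 public:
  explicit ReservedSpace(std::vector<Chunk> chunks)
      : chunks_(std::move(chunks)), high_water_(chunks_[0].start) {}

  char* Allocate(std::size_t size) {
    if (high_water_ + size > chunks_[chunk_index_].end) {
      // The serializer reserved exact sizes up front, so the current chunk
      // must be exactly full and a next chunk must exist.
      assert(high_water_ == chunks_[chunk_index_].end);
      ++chunk_index_;
      assert(chunk_index_ < chunks_.size());
      high_water_ = chunks_[chunk_index_].start;
    }
    char* address = high_water_;
    high_water_ += size;  // bump the pointer
    return address;
  }

 private:
  std::vector<Chunk> chunks_;
  std::size_t chunk_index_ = 0;
  char* high_water_;
};

int main() {
  static char backing[64];
  ReservedSpace space({{backing, backing + 32}, {backing + 32, backing + 64}});
  std::printf("%p %p %p\n", static_cast<void*>(space.Allocate(16)),
              static_cast<void*>(space.Allocate(16)),
              static_cast<void*>(space.Allocate(16)));  // third is in chunk 2
}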
844 873
845 874
846 void Deserializer::ReadData(Object** current, Object** limit, int source_space, 875 bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
847 Address current_object_address) { 876 Address current_object_address) {
848 Isolate* const isolate = isolate_; 877 Isolate* const isolate = isolate_;
849 // Write barrier support costs around 1% in startup time. In fact there 878 // Write barrier support costs around 1% in startup time. In fact there
850 // are no new space objects in current boot snapshots, so it's not needed, 879 // are no new space objects in current boot snapshots, so it's not needed,
851 // but that may change. 880 // but that may change.
852 bool write_barrier_needed = 881 bool write_barrier_needed =
853 (current_object_address != NULL && source_space != NEW_SPACE && 882 (current_object_address != NULL && source_space != NEW_SPACE &&
854 source_space != CELL_SPACE && source_space != CODE_SPACE && 883 source_space != CELL_SPACE && source_space != CODE_SPACE &&
855 source_space != OLD_DATA_SPACE); 884 source_space != OLD_DATA_SPACE);
856 while (current < limit) { 885 while (current < limit) {
(...skipping 239 matching lines...)
1096 const Heap::Reservation& reservation = reservations_[space]; 1125 const Heap::Reservation& reservation = reservations_[space];
1097 // Make sure the current chunk is indeed exhausted. 1126 // Make sure the current chunk is indeed exhausted.
1098 CHECK_EQ(reservation[chunk_index].end, high_water_[space]); 1127 CHECK_EQ(reservation[chunk_index].end, high_water_[space]);
1099 // Move to next reserved chunk. 1128 // Move to next reserved chunk.
1100 chunk_index = ++current_chunk_[space]; 1129 chunk_index = ++current_chunk_[space];
1101 CHECK_LT(chunk_index, reservation.length()); 1130 CHECK_LT(chunk_index, reservation.length());
1102 high_water_[space] = reservation[chunk_index].start; 1131 high_water_[space] = reservation[chunk_index].start;
1103 break; 1132 break;
1104 } 1133 }
1105 1134
1135 case kDeferred: {
1136 // Deferred can only occur right after the heap object header.
1137 DCHECK(current == reinterpret_cast<Object**>(current_object_address +
1138 kPointerSize));
1139 HeapObject* obj = HeapObject::FromAddress(current_object_address);
1140 // If the deferred object is a map, its instance type may be used
1141 // during deserialization. Initialize it with a temporary value.
1142 if (obj->IsMap()) Map::cast(obj)->set_instance_type(FILLER_TYPE);
1143 current = limit;
1144 return false;
1145 }
1146
1106 case kSynchronize: 1147 case kSynchronize:
1107 // If we get here then that indicates that you have a mismatch between 1148 // If we get here then that indicates that you have a mismatch between
1108 // the number of GC roots when serializing and deserializing. 1149 // the number of GC roots when serializing and deserializing.
1109 CHECK(false); 1150 CHECK(false);
1110 break; 1151 break;
1111 1152
1112 case kNativesStringResource: { 1153 case kNativesStringResource: {
1113 DCHECK(!isolate_->heap()->deserialization_complete()); 1154 DCHECK(!isolate_->heap()->deserialization_complete());
1114 int index = source_.Get(); 1155 int index = source_.Get();
1115 Vector<const char> source_vector = Natives::GetScriptSource(index); 1156 Vector<const char> source_vector = Natives::GetScriptSource(index);
(...skipping 86 matching lines...)
1202 } 1243 }
1203 1244
1204 #undef SIXTEEN_CASES 1245 #undef SIXTEEN_CASES
1205 #undef FOUR_CASES 1246 #undef FOUR_CASES
1206 1247
1207 default: 1248 default:
1208 CHECK(false); 1249 CHECK(false);
1209 } 1250 }
1210 } 1251 }
1211 CHECK_EQ(limit, current); 1252 CHECK_EQ(limit, current);
1253 return true;
1212 } 1254 }
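
ReadData() now returns a bool: true when the whole [current, limit) range was filled, false when a kDeferred marker was encountered and the object body will only be filled in later by DeserializeDeferredObjects(). A toy reader with the same contract (the sentinel value is an assumption):

// Sketch (toy format, not V8's) of the contract added to ReadData in this
// patch: fill [start, end) from the stream and return true, or stop early on
// a "deferred" marker and return false so the caller skips post-processing.
#include <cstddef>
#include <cstdio>
#include <vector>

namespace {
constexpr int kDeferredMarker = -1;  // assumed sentinel, not a V8 constant

bool ReadBody(const std::vector<int>& stream, std::size_t* pos,
              int* start, int* end) {
  for (int* current = start; current < end; ++current) {
    int value = stream[(*pos)++];
    if (value == kDeferredMarker) return false;  // body arrives later
    *current = value;
  }
  return true;  // range fully filled, safe to post-process the object
}
}  // namespace

int main() {
  const std::vector<int> stream = {1, 2, kDeferredMarker};
  int body[3] = {0, 0, 0};
  std::size_t pos = 0;
  const bool filled = ReadBody(stream, &pos, body, body + 3);
  std::printf("filled=%d body[0]=%d body[1]=%d\n", filled, body[0], body[1]);
}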
1213 1255
1214 1256
1215 Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink) 1257 Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink)
1216 : isolate_(isolate), 1258 : isolate_(isolate),
1217 sink_(sink), 1259 sink_(sink),
1218 external_reference_encoder_(isolate), 1260 external_reference_encoder_(isolate),
1219 root_index_map_(isolate), 1261 root_index_map_(isolate),
1262 recursion_depth_(0),
1220 code_address_map_(NULL), 1263 code_address_map_(NULL),
1221 large_objects_total_size_(0), 1264 large_objects_total_size_(0),
1222 seen_large_objects_index_(0) { 1265 seen_large_objects_index_(0) {
1223 // The serializer is meant to be used only to generate initial heap images 1266 // The serializer is meant to be used only to generate initial heap images
1224 // from a context in which there is only one isolate. 1267 // from a context in which there is only one isolate.
1225 for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) { 1268 for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) {
1226 pending_chunk_[i] = 0; 1269 pending_chunk_[i] = 0;
1227 max_chunk_size_[i] = static_cast<uint32_t>( 1270 max_chunk_size_[i] = static_cast<uint32_t>(
1228 MemoryAllocator::PageAreaSize(static_cast<AllocationSpace>(i))); 1271 MemoryAllocator::PageAreaSize(static_cast<AllocationSpace>(i)));
1229 } 1272 }
1230 } 1273 }
1231 1274
1232 1275
1233 Serializer::~Serializer() { 1276 Serializer::~Serializer() {
1234 if (code_address_map_ != NULL) delete code_address_map_; 1277 if (code_address_map_ != NULL) delete code_address_map_;
1235 } 1278 }
1236 1279
1237 1280
1281 void Serializer::SerializeDeferredObjects() {
1282 while (deferred_objects_.length() > 0) {
1283 HeapObject* obj = deferred_objects_.RemoveLast();
1284 ObjectSerializer obj_serializer(this, obj, sink_, kPlain, kStartOfObject);
1285 obj_serializer.SerializeDeferred();
1286 }
1287 sink_->Put(kSynchronize, "Finished with deferred objects");
1288 }
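
SerializeDeferredObjects() drains the deferral queue last-in-first-out after the main serialization pass; anything deferred while writing those bodies lands back on the same queue and is drained too, and the section is closed with kSynchronize. A self-contained sketch of the queue-and-drain pattern, with invented types and an arbitrary depth limit:

// Minimal sketch (invented types) of the deferral queue pattern used above:
// objects queued during the main pass are drained afterwards, and anything
// they defer in turn lands on the same queue and is drained as well.
#include <cstdio>
#include <string>
#include <vector>

struct Object { std::string name; int depth; };

class Serializer {
 public:
  void Serialize(const Object& obj) {
    if (obj.depth > kMaxDepth) {   // too deep: emit a placeholder now and
      deferred_.push_back(obj);    // serialize the body later
      std::printf("deferred %s\n", obj.name.c_str());
      return;
    }
    std::printf("serialized %s\n", obj.name.c_str());
  }

  void SerializeDeferredObjects() {
    while (!deferred_.empty()) {
      Object obj = deferred_.back();  // LIFO, like RemoveLast() above
      deferred_.pop_back();
      obj.depth = 0;                  // a deferred body starts a fresh pass
      Serialize(obj);
    }
    std::printf("synchronize\n");     // terminator, cf. kSynchronize
  }

 private:
  static constexpr int kMaxDepth = 4;
  std::vector<Object> deferred_;
};

int main() {
  Serializer s;
  s.Serialize({"shallow", 1});
  s.Serialize({"deep", 7});
  s.SerializeDeferredObjects();
}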
1289
1290
1238 void StartupSerializer::SerializeStrongReferences() { 1291 void StartupSerializer::SerializeStrongReferences() {
1239 Isolate* isolate = this->isolate(); 1292 Isolate* isolate = this->isolate();
1240 // No active threads. 1293 // No active threads.
1241 CHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse()); 1294 CHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse());
1242 // No active or weak handles. 1295 // No active or weak handles.
1243 CHECK(isolate->handle_scope_implementer()->blocks()->is_empty()); 1296 CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
1244 CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles()); 1297 CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
1245 CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles()); 1298 CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles());
1246 // We don't support serializing installed extensions. 1299 // We don't support serializing installed extensions.
1247 CHECK(!isolate->has_installed_extensions()); 1300 CHECK(!isolate->has_installed_extensions());
(...skipping 24 matching lines...)
1272 1325
1273 1326
1274 void PartialSerializer::Serialize(Object** o) { 1327 void PartialSerializer::Serialize(Object** o) {
1275 if ((*o)->IsContext()) { 1328 if ((*o)->IsContext()) {
1276 Context* context = Context::cast(*o); 1329 Context* context = Context::cast(*o);
1277 global_object_ = context->global_object(); 1330 global_object_ = context->global_object();
1278 back_reference_map()->AddGlobalProxy(context->global_proxy()); 1331 back_reference_map()->AddGlobalProxy(context->global_proxy());
1279 } 1332 }
1280 VisitPointer(o); 1333 VisitPointer(o);
1281 SerializeOutdatedContextsAsFixedArray(); 1334 SerializeOutdatedContextsAsFixedArray();
1335 SerializeDeferredObjects();
1282 Pad(); 1336 Pad();
1283 } 1337 }
1284 1338
1285 1339
1286 void PartialSerializer::SerializeOutdatedContextsAsFixedArray() { 1340 void PartialSerializer::SerializeOutdatedContextsAsFixedArray() {
1287 int length = outdated_contexts_.length(); 1341 int length = outdated_contexts_.length();
1288 if (length == 0) { 1342 if (length == 0) {
1289 FixedArray* empty = isolate_->heap()->empty_fixed_array(); 1343 FixedArray* empty = isolate_->heap()->empty_fixed_array();
1290 SerializeObject(empty, kPlain, kStartOfObject, 0); 1344 SerializeObject(empty, kPlain, kStartOfObject, 0);
1291 } else { 1345 } else {
1292 // Serialize an imaginary fixed array containing outdated contexts. 1346 // Serialize an imaginary fixed array containing outdated contexts.
1293 int size = FixedArray::SizeFor(length); 1347 int size = FixedArray::SizeFor(length);
1294 Allocate(NEW_SPACE, size); 1348 Allocate(NEW_SPACE, size);
1295 sink_->Put(kNewObject + NEW_SPACE, "emulated FixedArray"); 1349 sink_->Put(kNewObject + NEW_SPACE, "emulated FixedArray");
1296 sink_->PutInt(size >> kObjectAlignmentBits, "FixedArray size in words"); 1350 sink_->PutInt(size >> kObjectAlignmentBits, "FixedArray size in words");
1297 Map* map = isolate_->heap()->fixed_array_map(); 1351 Map* map = isolate_->heap()->fixed_array_map();
1298 SerializeObject(map, kPlain, kStartOfObject, 0); 1352 SerializeObject(map, kPlain, kStartOfObject, 0);
1299 Smi* length_smi = Smi::FromInt(length); 1353 Smi* length_smi = Smi::FromInt(length);
1300 sink_->Put(kOnePointerRawData, "Smi"); 1354 sink_->Put(kOnePointerRawData, "Smi");
1301 for (int i = 0; i < kPointerSize; i++) { 1355 for (int i = 0; i < kPointerSize; i++) {
1302 sink_->Put(reinterpret_cast<byte*>(&length_smi)[i], "Byte"); 1356 sink_->Put(reinterpret_cast<byte*>(&length_smi)[i], "Byte");
1303 } 1357 }
1304 for (int i = 0; i < length; i++) { 1358 for (int i = 0; i < length; i++) {
1305 BackReference back_ref = outdated_contexts_[i]; 1359 Context* context = outdated_contexts_[i];
1306 DCHECK(BackReferenceIsAlreadyAllocated(back_ref)); 1360 BackReference back_reference = back_reference_map_.Lookup(context);
1307 sink_->Put(kBackref + back_ref.space(), "BackRef"); 1361 sink_->Put(kBackref + back_reference.space(), "BackRef");
1308 sink_->PutInt(back_ref.reference(), "BackRefValue"); 1362 PutBackReference(context, back_reference);
1309 } 1363 }
1310 } 1364 }
1311 } 1365 }
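
The length Smi of the emulated FixedArray is written one raw byte at a time through a reinterpret_cast, so the snapshot carries the host representation of that word. A small sketch of this kind of raw word emission:

// Small sketch of the raw-byte emission used for the length Smi above:
// a machine word is written to the byte sink one byte at a time via
// reinterpret_cast, so the stream carries the host's byte order.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

using byte = std::uint8_t;

void PutWordRaw(std::vector<byte>* sink, std::intptr_t word) {
  const byte* raw = reinterpret_cast<const byte*>(&word);
  for (std::size_t i = 0; i < sizeof(word); i++) sink->push_back(raw[i]);
}

int main() {
  std::vector<byte> sink;
  PutWordRaw(&sink, 0x2a);  // e.g. a small tagged integer
  for (byte b : sink) std::printf("%02x ", b);
  std::printf("\n");
}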
1312 1366
1313 1367
1314 bool Serializer::ShouldBeSkipped(Object** current) { 1368 bool Serializer::ShouldBeSkipped(Object** current) {
1315 Object** roots = isolate()->heap()->roots_array_start(); 1369 Object** roots = isolate()->heap()->roots_array_start();
1316 return current == &roots[Heap::kStoreBufferTopRootIndex] 1370 return current == &roots[Heap::kStoreBufferTopRootIndex]
1317 || current == &roots[Heap::kStackLimitRootIndex] 1371 || current == &roots[Heap::kStackLimitRootIndex]
1318 || current == &roots[Heap::kRealStackLimitRootIndex]; 1372 || current == &roots[Heap::kRealStackLimitRootIndex];
(...skipping 142 matching lines...)
1461 } 1515 }
1462 1516
1463 AllocationSpace space = back_reference.space(); 1517 AllocationSpace space = back_reference.space();
1464 if (skip == 0) { 1518 if (skip == 0) {
1465 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRef"); 1519 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRef");
1466 } else { 1520 } else {
1467 sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space, 1521 sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space,
1468 "BackRefWithSkip"); 1522 "BackRefWithSkip");
1469 sink_->PutInt(skip, "BackRefSkipDistance"); 1523 sink_->PutInt(skip, "BackRefSkipDistance");
1470 } 1524 }
1471 DCHECK(BackReferenceIsAlreadyAllocated(back_reference)); 1525 PutBackReference(obj, back_reference);
1472 sink_->PutInt(back_reference.reference(), "BackRefValue");
1473
1474 hot_objects_.Add(obj);
1475 } 1526 }
1476 return true; 1527 return true;
1477 } 1528 }
1478 return false; 1529 return false;
1479 } 1530 }
1480 1531
1481 1532
1482 void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code, 1533 void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
1483 WhereToPoint where_to_point, int skip) { 1534 WhereToPoint where_to_point, int skip) {
1484 DCHECK(!obj->IsJSFunction()); 1535 DCHECK(!obj->IsJSFunction());
(...skipping 15 matching lines...)
1500 1551
1501 FlushSkip(skip); 1552 FlushSkip(skip);
1502 1553
1503 // Object has not yet been serialized. Serialize it here. 1554 // Object has not yet been serialized. Serialize it here.
1504 ObjectSerializer object_serializer(this, obj, sink_, how_to_code, 1555 ObjectSerializer object_serializer(this, obj, sink_, how_to_code,
1505 where_to_point); 1556 where_to_point);
1506 object_serializer.Serialize(); 1557 object_serializer.Serialize();
1507 } 1558 }
1508 1559
1509 1560
1510 void StartupSerializer::SerializeWeakReferences() { 1561 void StartupSerializer::SerializeWeakReferencesAndDeferred() {
1511 // This phase comes right after the serialization (of the snapshot). 1562 // This phase comes right after the serialization (of the snapshot).
1512 // After we have done the partial serialization the partial snapshot cache 1563 // After we have done the partial serialization the partial snapshot cache
1513 // will contain some references needed to decode the partial snapshot. We 1564 // will contain some references needed to decode the partial snapshot. We
1514 // add one entry with 'undefined' which is the sentinel that the deserializer 1565 // add one entry with 'undefined' which is the sentinel that the deserializer
1515 // uses to know it is done deserializing the array. 1566 // uses to know it is done deserializing the array.
1516 Object* undefined = isolate()->heap()->undefined_value(); 1567 Object* undefined = isolate()->heap()->undefined_value();
1517 VisitPointer(&undefined); 1568 VisitPointer(&undefined);
1518 isolate()->heap()->IterateWeakRoots(this, VISIT_ALL); 1569 isolate()->heap()->IterateWeakRoots(this, VISIT_ALL);
1570 SerializeDeferredObjects();
1519 Pad(); 1571 Pad();
1520 } 1572 }
1521 1573
1522 1574
1523 void Serializer::PutRoot(int root_index, 1575 void Serializer::PutRoot(int root_index,
1524 HeapObject* object, 1576 HeapObject* object,
1525 SerializerDeserializer::HowToCode how_to_code, 1577 SerializerDeserializer::HowToCode how_to_code,
1526 SerializerDeserializer::WhereToPoint where_to_point, 1578 SerializerDeserializer::WhereToPoint where_to_point,
1527 int skip) { 1579 int skip) {
1528 if (FLAG_trace_serializer) { 1580 if (FLAG_trace_serializer) {
(...skipping 12 matching lines...)
1541 sink_->PutInt(skip, "SkipInPutRoot"); 1593 sink_->PutInt(skip, "SkipInPutRoot");
1542 } 1594 }
1543 } else { 1595 } else {
1544 FlushSkip(skip); 1596 FlushSkip(skip);
1545 sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization"); 1597 sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
1546 sink_->PutInt(root_index, "root_index"); 1598 sink_->PutInt(root_index, "root_index");
1547 } 1599 }
1548 } 1600 }
1549 1601
1550 1602
1603 void Serializer::PutBackReference(HeapObject* object, BackReference reference) {
1604 DCHECK(BackReferenceIsAlreadyAllocated(reference));
1605 sink_->PutInt(reference.reference(), "BackRefValue");
1606 hot_objects_.Add(object);
1607 }
1608
1609
1551 void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code, 1610 void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
1552 WhereToPoint where_to_point, int skip) { 1611 WhereToPoint where_to_point, int skip) {
1553 if (obj->IsMap()) { 1612 if (obj->IsMap()) {
1554 // The code-caches link to context-specific code objects, which 1613 // The code-caches link to context-specific code objects, which
1555 // the startup and context serializes cannot currently handle. 1614 // the startup and context serializes cannot currently handle.
1556 DCHECK(Map::cast(obj)->code_cache() == obj->GetHeap()->empty_fixed_array()); 1615 DCHECK(Map::cast(obj)->code_cache() == obj->GetHeap()->empty_fixed_array());
1557 } 1616 }
1558 1617
1559 // Replace typed arrays by undefined. 1618 // Replace typed arrays by undefined.
1560 if (obj->IsJSTypedArray()) obj = isolate_->heap()->undefined_value(); 1619 if (obj->IsJSTypedArray()) obj = isolate_->heap()->undefined_value();
(...skipping 27 matching lines...)
1588 FlushSkip(skip); 1647 FlushSkip(skip);
1589 1648
1590 // Object has not yet been serialized. Serialize it here. 1649 // Object has not yet been serialized. Serialize it here.
1591 ObjectSerializer serializer(this, obj, sink_, how_to_code, where_to_point); 1650 ObjectSerializer serializer(this, obj, sink_, how_to_code, where_to_point);
1592 serializer.Serialize(); 1651 serializer.Serialize();
1593 1652
1594 if (obj->IsContext() && 1653 if (obj->IsContext() &&
1595 Context::cast(obj)->global_object() == global_object_) { 1654 Context::cast(obj)->global_object() == global_object_) {
1596 // Context refers to the current global object. This reference will 1655 // Context refers to the current global object. This reference will
1597 // become outdated after deserialization. 1656 // become outdated after deserialization.
1598 BackReference back_reference = back_reference_map_.Lookup(obj); 1657 outdated_contexts_.Add(Context::cast(obj));
1599 DCHECK(back_reference.is_valid());
1600 outdated_contexts_.Add(back_reference);
1601 } 1658 }
1602 } 1659 }
1603 1660
1604 1661
1605 void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space, 1662 void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space,
1606 int size, Map* map) { 1663 int size, Map* map) {
1607 if (serializer_->code_address_map_) { 1664 if (serializer_->code_address_map_) {
1608 const char* code_name = 1665 const char* code_name =
1609 serializer_->code_address_map_->Lookup(object_->address()); 1666 serializer_->code_address_map_->Lookup(object_->address());
1610 LOG(serializer_->isolate_, 1667 LOG(serializer_->isolate_,
(...skipping 103 matching lines...)
1714 void Serializer::ObjectSerializer::Serialize() { 1771 void Serializer::ObjectSerializer::Serialize() {
1715 if (FLAG_trace_serializer) { 1772 if (FLAG_trace_serializer) {
1716 PrintF(" Encoding heap object: "); 1773 PrintF(" Encoding heap object: ");
1717 object_->ShortPrint(); 1774 object_->ShortPrint();
1718 PrintF("\n"); 1775 PrintF("\n");
1719 } 1776 }
1720 1777
1721 // We cannot serialize typed array objects correctly. 1778 // We cannot serialize typed array objects correctly.
1722 DCHECK(!object_->IsJSTypedArray()); 1779 DCHECK(!object_->IsJSTypedArray());
1723 1780
1781 // We don't expect fillers.
1782 DCHECK(!object_->IsFiller());
1783
1724 if (object_->IsScript()) { 1784 if (object_->IsScript()) {
1725 // Clear cached line ends. 1785 // Clear cached line ends.
1726 Object* undefined = serializer_->isolate()->heap()->undefined_value(); 1786 Object* undefined = serializer_->isolate()->heap()->undefined_value();
1727 Script::cast(object_)->set_line_ends(undefined); 1787 Script::cast(object_)->set_line_ends(undefined);
1728 } 1788 }
1729 1789
1730 if (object_->IsExternalString()) { 1790 if (object_->IsExternalString()) {
1731 Heap* heap = serializer_->isolate()->heap(); 1791 Heap* heap = serializer_->isolate()->heap();
1732 if (object_->map() != heap->native_source_string_map()) { 1792 if (object_->map() != heap->native_source_string_map()) {
1733 // Usually we cannot recreate resources for external strings. To work 1793 // Usually we cannot recreate resources for external strings. To work
(...skipping 10 matching lines...) Expand all
1744 int size = object_->Size(); 1804 int size = object_->Size();
1745 Map* map = object_->map(); 1805 Map* map = object_->map();
1746 AllocationSpace space = 1806 AllocationSpace space =
1747 MemoryChunk::FromAddress(object_->address())->owner()->identity(); 1807 MemoryChunk::FromAddress(object_->address())->owner()->identity();
1748 SerializePrologue(space, size, map); 1808 SerializePrologue(space, size, map);
1749 1809
1750 // Serialize the rest of the object. 1810 // Serialize the rest of the object.
1751 CHECK_EQ(0, bytes_processed_so_far_); 1811 CHECK_EQ(0, bytes_processed_so_far_);
1752 bytes_processed_so_far_ = kPointerSize; 1812 bytes_processed_so_far_ = kPointerSize;
1753 1813
1814 RecursionScope recursion(serializer_);
1815 // Objects that are immediately post processed during deserialization
1816 // cannot be deferred, since post processing requires the object content.
1817 if (recursion.ExceedsMaximum() && CanBeDeferred(object_)) {
1818 serializer_->QueueDeferredObject(object_);
1819 sink_->Put(kDeferred, "Deferring object content");
1820 return;
1821 }
1822
1754 object_->IterateBody(map->instance_type(), size, this); 1823 object_->IterateBody(map->instance_type(), size, this);
1755 OutputRawData(object_->address() + size); 1824 OutputRawData(object_->address() + size);
1756 } 1825 }
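
Serialize() now opens a RecursionScope, and once the depth limit is exceeded for an object that may be deferred it emits only kDeferred and queues the object, which keeps deep object graphs from growing the serializer's native stack. A toy RAII depth guard showing the idea; the limit and names are illustrative, not V8's:

// Toy sketch of the RecursionScope idea: an RAII guard that tracks how deep
// object serialization has recursed, so the serializer can defer the body of
// deep objects instead of recursing further.
#include <cstdio>

class Serializer;

class RecursionScope {
 public:
  explicit RecursionScope(Serializer* serializer);
  ~RecursionScope();
  bool ExceedsMaximum() const;

 private:
  static const int kDepthLimit = 4;  // assumed limit, not V8's value
  Serializer* serializer_;
};

class Serializer {
 public:
  int recursion_depth = 0;

  void SerializeObject(int nesting) {
    RecursionScope recursion(this);
    if (recursion.ExceedsMaximum()) {
      std::printf("defer at nesting %d\n", nesting);
      return;  // body is queued and written out later
    }
    std::printf("serialize at nesting %d\n", nesting);
    if (nesting < 10) SerializeObject(nesting + 1);  // pointee objects
  }
};

RecursionScope::RecursionScope(Serializer* serializer)
    : serializer_(serializer) {
  serializer_->recursion_depth++;
}
RecursionScope::~RecursionScope() { serializer_->recursion_depth--; }
bool RecursionScope::ExceedsMaximum() const {
  return serializer_->recursion_depth > kDepthLimit;
}

int main() {
  Serializer s;
  s.SerializeObject(0);
}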
1826
1827
1828 void Serializer::ObjectSerializer::SerializeDeferred() {
1829 if (FLAG_trace_serializer) {
1830 PrintF(" Encoding deferred heap object: ");
1831 object_->ShortPrint();
1832 PrintF("\n");
1833 }
1834
1835 int size = object_->Size();
1836 Map* map = object_->map();
1837 BackReference reference = serializer_->back_reference_map()->Lookup(object_);
1838
1839 // Serialize the rest of the object.
1840 CHECK_EQ(0, bytes_processed_so_far_);
1841 bytes_processed_so_far_ = kPointerSize;
1842
1843 sink_->Put(kNewObject + reference.space(), "deferred object");
1844 serializer_->PutBackReference(object_, reference);
1845 sink_->PutInt(size >> kPointerSizeLog2, "deferred object size");
1846
1847 object_->IterateBody(map->instance_type(), size, this);
1848 OutputRawData(object_->address() + size);
1849 }
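
SerializeDeferred() emits the record consumed by the decoder sketched earlier: a kNewObject tag for the object's space, the back reference to its reservation, the size in words, and then the body. An encoder counterpart under the same assumed tag values:

// Counterpart sketch to the decoder shown after DeserializeDeferredObjects():
// each deferred body is written as (kNewObject + space, back reference,
// size in words, body words), and the queue is terminated by kSynchronize.
// Tag values are again assumptions, not V8's byte codes.
#include <cstdio>
#include <vector>

namespace {
constexpr int kSynchronize = 0xFF;  // assumed terminator tag
constexpr int kNewObject = 0x00;    // assumed base tag

struct Deferred { int space; int back_ref; std::vector<int> body; };

std::vector<int> EncodeDeferred(const std::vector<Deferred>& queue) {
  std::vector<int> sink;
  for (const Deferred& d : queue) {
    sink.push_back(kNewObject + d.space);             // "deferred object"
    sink.push_back(d.back_ref);                       // where it was allocated
    sink.push_back(static_cast<int>(d.body.size()));  // "deferred object size"
    sink.insert(sink.end(), d.body.begin(), d.body.end());
  }
  sink.push_back(kSynchronize);  // "Finished with deferred objects"
  return sink;
}
}  // namespace

int main() {
  const std::vector<int> stream = EncodeDeferred({{2, 7, {11, 22, 33}}});
  for (int b : stream) std::printf("%d ", b);
  std::printf("\n");
}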
1757 1850
1758 1851
1759 void Serializer::ObjectSerializer::VisitPointers(Object** start, 1852 void Serializer::ObjectSerializer::VisitPointers(Object** start,
1760 Object** end) { 1853 Object** end) {
1761 Object** current = start; 1854 Object** current = start;
1762 while (current < end) { 1855 while (current < end) {
1763 while (current < end && (*current)->IsSmi()) current++; 1856 while (current < end && (*current)->IsSmi()) current++;
1764 if (current < end) OutputRawData(reinterpret_cast<Address>(current)); 1857 if (current < end) OutputRawData(reinterpret_cast<Address>(current));
1765 1858
1766 while (current < end && !(*current)->IsSmi()) { 1859 while (current < end && !(*current)->IsSmi()) {
(...skipping 301 matching lines...)
2068 if (script->IsScript()) Script::cast(script)->name()->ShortPrint(); 2161 if (script->IsScript()) Script::cast(script)->name()->ShortPrint();
2069 PrintF("]\n"); 2162 PrintF("]\n");
2070 } 2163 }
2071 2164
2072 // Serialize code object. 2165 // Serialize code object.
2073 SnapshotByteSink sink(info->code()->CodeSize() * 2); 2166 SnapshotByteSink sink(info->code()->CodeSize() * 2);
2074 CodeSerializer cs(isolate, &sink, *source, info->code()); 2167 CodeSerializer cs(isolate, &sink, *source, info->code());
2075 DisallowHeapAllocation no_gc; 2168 DisallowHeapAllocation no_gc;
2076 Object** location = Handle<Object>::cast(info).location(); 2169 Object** location = Handle<Object>::cast(info).location();
2077 cs.VisitPointer(location); 2170 cs.VisitPointer(location);
2171 cs.SerializeDeferredObjects();
2078 cs.Pad(); 2172 cs.Pad();
2079 2173
2080 SerializedCodeData data(sink.data(), cs); 2174 SerializedCodeData data(sink.data(), cs);
2081 ScriptData* script_data = data.GetScriptData(); 2175 ScriptData* script_data = data.GetScriptData();
2082 2176
2083 if (FLAG_profile_deserialization) { 2177 if (FLAG_profile_deserialization) {
2084 double ms = timer.Elapsed().InMillisecondsF(); 2178 double ms = timer.Elapsed().InMillisecondsF();
2085 int length = script_data->length(); 2179 int length = script_data->length();
2086 PrintF("[Serializing to %d bytes took %0.3f ms]\n", length, ms); 2180 PrintF("[Serializing to %d bytes took %0.3f ms]\n", length, ms);
2087 } 2181 }
(...skipping 445 matching lines...)
2533 DisallowHeapAllocation no_gc; 2627 DisallowHeapAllocation no_gc;
2534 SerializedCodeData* scd = new SerializedCodeData(cached_data); 2628 SerializedCodeData* scd = new SerializedCodeData(cached_data);
2535 SanityCheckResult r = scd->SanityCheck(isolate, source); 2629 SanityCheckResult r = scd->SanityCheck(isolate, source);
2536 if (r == CHECK_SUCCESS) return scd; 2630 if (r == CHECK_SUCCESS) return scd;
2537 cached_data->Reject(); 2631 cached_data->Reject();
2538 source->GetIsolate()->counters()->code_cache_reject_reason()->AddSample(r); 2632 source->GetIsolate()->counters()->code_cache_reject_reason()->AddSample(r);
2539 delete scd; 2633 delete scd;
2540 return NULL; 2634 return NULL;
2541 } 2635 }
2542 } } // namespace v8::internal 2636 } } // namespace v8::internal