| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/platform/platform.h" | 9 #include "src/base/platform/platform.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 498 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 509 for (auto& r : res) { | 509 for (auto& r : res) { |
| 510 reservations_[current_space].Add({r.chunk_size(), NULL, NULL}); | 510 reservations_[current_space].Add({r.chunk_size(), NULL, NULL}); |
| 511 if (r.is_last()) current_space++; | 511 if (r.is_last()) current_space++; |
| 512 } | 512 } |
| 513 DCHECK_EQ(kNumberOfSpaces, current_space); | 513 DCHECK_EQ(kNumberOfSpaces, current_space); |
| 514 for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) current_chunk_[i] = 0; | 514 for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) current_chunk_[i] = 0; |
| 515 } | 515 } |
| 516 | 516 |
| 517 | 517 |
| 518 void Deserializer::FlushICacheForNewCodeObjects() { | 518 void Deserializer::FlushICacheForNewCodeObjects() { |
| 519 PageIterator it(isolate_->heap()->code_space()); | 519 if (!deserializing_user_code_) { |
| 520 while (it.has_next()) { | 520 // The entire isolate is newly deserialized. Simply flush all code pages. |
| 521 Page* p = it.next(); | 521 PageIterator it(isolate_->heap()->code_space()); |
| 522 CpuFeatures::FlushICache(p->area_start(), p->area_end() - p->area_start()); | 522 while (it.has_next()) { |
| 523 Page* p = it.next(); |
| 524 CpuFeatures::FlushICache(p->area_start(), |
| 525 p->area_end() - p->area_start()); |
| 526 } |
| 527 } |
| 528 for (Code* code : new_code_objects_) { |
| 529 CpuFeatures::FlushICache(code->instruction_start(), |
| 530 code->instruction_size()); |
| 523 } | 531 } |
| 524 } | 532 } |
| 525 | 533 |
| 526 | 534 |
| 527 bool Deserializer::ReserveSpace() { | 535 bool Deserializer::ReserveSpace() { |
| 528 #ifdef DEBUG | 536 #ifdef DEBUG |
| 529 for (int i = NEW_SPACE; i < kNumberOfSpaces; ++i) { | 537 for (int i = NEW_SPACE; i < kNumberOfSpaces; ++i) { |
| 530 CHECK(reservations_[i].length() > 0); | 538 CHECK(reservations_[i].length() > 0); |
| 531 } | 539 } |
| 532 #endif // DEBUG | 540 #endif // DEBUG |
| (...skipping 16 matching lines...) Expand all Loading... |
| 549 } | 557 } |
| 550 | 558 |
| 551 | 559 |
| 552 void Deserializer::Deserialize(Isolate* isolate) { | 560 void Deserializer::Deserialize(Isolate* isolate) { |
| 553 Initialize(isolate); | 561 Initialize(isolate); |
| 554 if (!ReserveSpace()) V8::FatalProcessOutOfMemory("deserializing context"); | 562 if (!ReserveSpace()) V8::FatalProcessOutOfMemory("deserializing context"); |
| 555 // No active threads. | 563 // No active threads. |
| 556 DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse()); | 564 DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse()); |
| 557 // No active handles. | 565 // No active handles. |
| 558 DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty()); | 566 DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty()); |
| 559 isolate_->heap()->IterateSmiRoots(this); | 567 |
| 560 isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG); | 568 { |
| 561 isolate_->heap()->RepairFreeListsAfterDeserialization(); | 569 DisallowHeapAllocation no_gc; |
| 562 isolate_->heap()->IterateWeakRoots(this, VISIT_ALL); | 570 isolate_->heap()->IterateSmiRoots(this); |
| 571 isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG); |
| 572 isolate_->heap()->RepairFreeListsAfterDeserialization(); |
| 573 isolate_->heap()->IterateWeakRoots(this, VISIT_ALL); |
| 574 DeserializeDeferredObjects(); |
| 575 } |
| 563 | 576 |
| 564 isolate_->heap()->set_native_contexts_list( | 577 isolate_->heap()->set_native_contexts_list( |
| 565 isolate_->heap()->undefined_value()); | 578 isolate_->heap()->undefined_value()); |
| 566 | 579 |
| 567 // The allocation site list is built during root iteration, but if no sites | 580 // The allocation site list is built during root iteration, but if no sites |
| 568 // were encountered then it needs to be initialized to undefined. | 581 // were encountered then it needs to be initialized to undefined. |
| 569 if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) { | 582 if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) { |
| 570 isolate_->heap()->set_allocation_sites_list( | 583 isolate_->heap()->set_allocation_sites_list( |
| 571 isolate_->heap()->undefined_value()); | 584 isolate_->heap()->undefined_value()); |
| 572 } | 585 } |
| (...skipping 28 matching lines...) Expand all Loading... |
| 601 SetAttachedObjects(attached_objects); | 614 SetAttachedObjects(attached_objects); |
| 602 | 615 |
| 603 DisallowHeapAllocation no_gc; | 616 DisallowHeapAllocation no_gc; |
| 604 // Keep track of the code space start and end pointers in case new | 617 // Keep track of the code space start and end pointers in case new |
| 605 // code objects were unserialized | 618 // code objects were unserialized |
| 606 OldSpace* code_space = isolate_->heap()->code_space(); | 619 OldSpace* code_space = isolate_->heap()->code_space(); |
| 607 Address start_address = code_space->top(); | 620 Address start_address = code_space->top(); |
| 608 Object* root; | 621 Object* root; |
| 609 Object* outdated_contexts; | 622 Object* outdated_contexts; |
| 610 VisitPointer(&root); | 623 VisitPointer(&root); |
| 624 DeserializeDeferredObjects(); |
| 611 VisitPointer(&outdated_contexts); | 625 VisitPointer(&outdated_contexts); |
| 612 | 626 |
| 613 // There's no code deserialized here. If this assert fires | 627 // There's no code deserialized here. If this assert fires then that's |
| 614 // then that's changed and logging should be added to notify | 628 // changed and logging should be added to notify the profiler et al of the |
| 615 // the profiler et al of the new code. | 629 // new code, which also has to be flushed from instruction cache. |
| 616 CHECK_EQ(start_address, code_space->top()); | 630 CHECK_EQ(start_address, code_space->top()); |
| 617 CHECK(outdated_contexts->IsFixedArray()); | 631 CHECK(outdated_contexts->IsFixedArray()); |
| 618 *outdated_contexts_out = | 632 *outdated_contexts_out = |
| 619 Handle<FixedArray>(FixedArray::cast(outdated_contexts), isolate); | 633 Handle<FixedArray>(FixedArray::cast(outdated_contexts), isolate); |
| 620 return Handle<Object>(root, isolate); | 634 return Handle<Object>(root, isolate); |
| 621 } | 635 } |
| 622 | 636 |
| 623 | 637 |
| 624 MaybeHandle<SharedFunctionInfo> Deserializer::DeserializeCode( | 638 MaybeHandle<SharedFunctionInfo> Deserializer::DeserializeCode( |
| 625 Isolate* isolate) { | 639 Isolate* isolate) { |
| 626 Initialize(isolate); | 640 Initialize(isolate); |
| 627 if (!ReserveSpace()) { | 641 if (!ReserveSpace()) { |
| 628 return Handle<SharedFunctionInfo>(); | 642 return Handle<SharedFunctionInfo>(); |
| 629 } else { | 643 } else { |
| 630 deserializing_user_code_ = true; | 644 deserializing_user_code_ = true; |
| 631 DisallowHeapAllocation no_gc; | 645 HandleScope scope(isolate); |
| 632 Object* root; | 646 Handle<SharedFunctionInfo> result; |
| 633 VisitPointer(&root); | 647 { |
| 634 return Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(root)); | 648 DisallowHeapAllocation no_gc; |
| 649 Object* root; |
| 650 VisitPointer(&root); |
| 651 DeserializeDeferredObjects(); |
| 652 result = Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(root)); |
| 653 } |
| 654 CommitNewInternalizedStrings(isolate); |
| 655 return scope.CloseAndEscape(result); |
| 635 } | 656 } |
| 636 } | 657 } |
| 637 | 658 |
| 638 | 659 |
| 639 Deserializer::~Deserializer() { | 660 Deserializer::~Deserializer() { |
| 640 // TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed. | 661 // TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed. |
| 641 // DCHECK(source_.AtEOF()); | 662 // DCHECK(source_.AtEOF()); |
| 642 attached_objects_.Dispose(); | 663 attached_objects_.Dispose(); |
| 643 } | 664 } |
| 644 | 665 |
| 645 | 666 |
| 646 // This is called on the roots. It is the driver of the deserialization | 667 // This is called on the roots. It is the driver of the deserialization |
| 647 // process. It is also called on the body of each function. | 668 // process. It is also called on the body of each function. |
| 648 void Deserializer::VisitPointers(Object** start, Object** end) { | 669 void Deserializer::VisitPointers(Object** start, Object** end) { |
| 649 // The space must be new space. Any other space would cause ReadChunk to try | 670 // The space must be new space. Any other space would cause ReadChunk to try |
| 650 // to update the remembered set using NULL as the address. | 671 // to update the remembered set using NULL as the address. |
| 651 ReadData(start, end, NEW_SPACE, NULL); | 672 ReadData(start, end, NEW_SPACE, NULL); |
| 652 } | 673 } |
| 653 | 674 |
| 654 | 675 |
| 655 void Deserializer::RelinkAllocationSite(AllocationSite* site) { | 676 void Deserializer::DeserializeDeferredObjects() { |
| 656 if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) { | 677 for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) { |
| 657 site->set_weak_next(isolate_->heap()->undefined_value()); | 678 int space = code & kSpaceMask; |
| 658 } else { | 679 DCHECK(space <= kNumberOfSpaces); |
| 659 site->set_weak_next(isolate_->heap()->allocation_sites_list()); | 680 DCHECK(code - space == kNewObject); |
| 681 HeapObject* object = GetBackReferencedObject(space); |
| 682 int size = source_.GetInt() << kPointerSizeLog2; |
| 683 Address obj_address = object->address(); |
| 684 Object** start = reinterpret_cast<Object**>(obj_address + kPointerSize); |
| 685 Object** end = reinterpret_cast<Object**>(obj_address + size); |
| 686 bool filled = ReadData(start, end, space, obj_address); |
| 687 CHECK(filled); |
| 688 DCHECK(CanBeDeferred(object)); |
| 689 PostProcessNewObject(object, space); |
| 660 } | 690 } |
| 661 isolate_->heap()->set_allocation_sites_list(site); | |
| 662 } | 691 } |
| 663 | 692 |
| 664 | 693 |
| 665 // Used to insert a deserialized internalized string into the string table. | 694 // Used to insert a deserialized internalized string into the string table. |
| 666 class StringTableInsertionKey : public HashTableKey { | 695 class StringTableInsertionKey : public HashTableKey { |
| 667 public: | 696 public: |
| 668 explicit StringTableInsertionKey(String* string) | 697 explicit StringTableInsertionKey(String* string) |
| 669 : string_(string), hash_(HashForObject(string)) { | 698 : string_(string), hash_(HashForObject(string)) { |
| 670 DCHECK(string->IsInternalizedString()); | 699 DCHECK(string->IsInternalizedString()); |
| 671 } | 700 } |
| 672 | 701 |
| 673 bool IsMatch(Object* string) override { | 702 bool IsMatch(Object* string) override { |
| 674 // We know that all entries in a hash table had their hash keys created. | 703 // We know that all entries in a hash table had their hash keys created. |
| 675 // Use that knowledge to have fast failure. | 704 // Use that knowledge to have fast failure. |
| 676 if (hash_ != HashForObject(string)) return false; | 705 if (hash_ != HashForObject(string)) return false; |
| 677 // We want to compare the content of two internalized strings here. | 706 // We want to compare the content of two internalized strings here. |
| 678 return string_->SlowEquals(String::cast(string)); | 707 return string_->SlowEquals(String::cast(string)); |
| 679 } | 708 } |
| 680 | 709 |
| 681 uint32_t Hash() override { return hash_; } | 710 uint32_t Hash() override { return hash_; } |
| 682 | 711 |
| 683 uint32_t HashForObject(Object* key) override { | 712 uint32_t HashForObject(Object* key) override { |
| 684 return String::cast(key)->Hash(); | 713 return String::cast(key)->Hash(); |
| 685 } | 714 } |
| 686 | 715 |
| 687 MUST_USE_RESULT virtual Handle<Object> AsHandle(Isolate* isolate) override { | 716 MUST_USE_RESULT virtual Handle<Object> AsHandle(Isolate* isolate) override { |
| 688 return handle(string_, isolate); | 717 return handle(string_, isolate); |
| 689 } | 718 } |
| 690 | 719 |
| 720 private: |
| 691 String* string_; | 721 String* string_; |
| 692 uint32_t hash_; | 722 uint32_t hash_; |
| 723 DisallowHeapAllocation no_gc; |
| 693 }; | 724 }; |
| 694 | 725 |
| 695 | 726 |
| 696 HeapObject* Deserializer::ProcessNewObjectFromSerializedCode(HeapObject* obj) { | 727 HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj, int space) { |
| 697 if (obj->IsString()) { | 728 if (deserializing_user_code()) { |
| 698 String* string = String::cast(obj); | 729 if (obj->IsString()) { |
| 699 // Uninitialize hash field as the hash seed may have changed. | 730 String* string = String::cast(obj); |
| 700 string->set_hash_field(String::kEmptyHashField); | 731 // Uninitialize hash field as the hash seed may have changed. |
| 701 if (string->IsInternalizedString()) { | 732 string->set_hash_field(String::kEmptyHashField); |
| 702 DisallowHeapAllocation no_gc; | 733 if (string->IsInternalizedString()) { |
| 703 HandleScope scope(isolate_); | 734 // Canonicalize the internalized string. If it already exists in the |
| 704 StringTableInsertionKey key(string); | 735 // string table, set it to forward to the existing one. |
| 705 String* canonical = *StringTable::LookupKey(isolate_, &key); | 736 StringTableInsertionKey key(string); |
| 706 string->SetForwardedInternalizedString(canonical); | 737 String* canonical = StringTable::LookupKeyIfExists(isolate_, &key); |
| 707 return canonical; | 738 if (canonical == NULL) { |
| 739 new_internalized_strings_.Add(handle(string)); |
| 740 return string; |
| 741 } else { |
| 742 string->SetForwardedInternalizedString(canonical); |
| 743 return canonical; |
| 744 } |
| 745 } |
| 746 } else if (obj->IsScript()) { |
| 747 // Assign a new script id to avoid collision. |
| 748 Script::cast(obj)->set_id(isolate_->heap()->NextScriptId()); |
| 749 } else { |
| 750 DCHECK(CanBeDeferred(obj)); |
| 708 } | 751 } |
| 709 } else if (obj->IsScript()) { | 752 } |
| 710 Script::cast(obj)->set_id(isolate_->heap()->NextScriptId()); | 753 if (obj->IsAllocationSite()) { |
| 754 DCHECK(obj->IsAllocationSite()); |
| 755 // Allocation sites are present in the snapshot, and must be linked into |
| 756 // a list at deserialization time. |
| 757 AllocationSite* site = AllocationSite::cast(obj); |
| 758 // TODO(mvstanton): consider treating the heap()->allocation_sites_list() |
| 759 // as a (weak) root. If this root is relocated correctly, this becomes |
| 760 // unnecessary. |
| 761 if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) { |
| 762 site->set_weak_next(isolate_->heap()->undefined_value()); |
| 763 } else { |
| 764 site->set_weak_next(isolate_->heap()->allocation_sites_list()); |
| 765 } |
| 766 isolate_->heap()->set_allocation_sites_list(site); |
| 767 } else if (obj->IsCode()) { |
| 768 // We flush all code pages after deserializing the startup snapshot. In that |
| 769 // case, we only need to remember code objects in the large object space. |
| 770 // When deserializing user code, remember each individual code object. |
| 771 if (deserializing_user_code() || space == LO_SPACE) { |
| 772 new_code_objects_.Add(Code::cast(obj)); |
| 773 } |
| 711 } | 774 } |
| 712 return obj; | 775 return obj; |
| 713 } | 776 } |
| 714 | 777 |
| 715 | 778 |
| 779 void Deserializer::CommitNewInternalizedStrings(Isolate* isolate) { |
| 780 StringTable::EnsureCapacityForDeserialization( |
| 781 isolate, new_internalized_strings_.length()); |
| 782 for (Handle<String> string : new_internalized_strings_) { |
| 783 StringTableInsertionKey key(*string); |
| 784 DCHECK_NULL(StringTable::LookupKeyIfExists(isolate, &key)); |
| 785 StringTable::LookupKey(isolate, &key); |
| 786 } |
| 787 } |
| 788 |
| 789 |
| 716 HeapObject* Deserializer::GetBackReferencedObject(int space) { | 790 HeapObject* Deserializer::GetBackReferencedObject(int space) { |
| 717 HeapObject* obj; | 791 HeapObject* obj; |
| 718 BackReference back_reference(source_.GetInt()); | 792 BackReference back_reference(source_.GetInt()); |
| 719 if (space == LO_SPACE) { | 793 if (space == LO_SPACE) { |
| 720 CHECK(back_reference.chunk_index() == 0); | 794 CHECK(back_reference.chunk_index() == 0); |
| 721 uint32_t index = back_reference.large_object_index(); | 795 uint32_t index = back_reference.large_object_index(); |
| 722 obj = deserialized_large_objects_[index]; | 796 obj = deserialized_large_objects_[index]; |
| 723 } else { | 797 } else { |
| 724 DCHECK(space < kNumberOfPreallocatedSpaces); | 798 DCHECK(space < kNumberOfPreallocatedSpaces); |
| 725 uint32_t chunk_index = back_reference.chunk_index(); | 799 uint32_t chunk_index = back_reference.chunk_index(); |
| (...skipping 24 matching lines...) Expand all Loading... |
| 750 int size = next_int << kObjectAlignmentBits; | 824 int size = next_int << kObjectAlignmentBits; |
| 751 address = Allocate(space_number, size); | 825 address = Allocate(space_number, size); |
| 752 obj = HeapObject::FromAddress(address); | 826 obj = HeapObject::FromAddress(address); |
| 753 | 827 |
| 754 isolate_->heap()->OnAllocationEvent(obj, size); | 828 isolate_->heap()->OnAllocationEvent(obj, size); |
| 755 Object** current = reinterpret_cast<Object**>(address); | 829 Object** current = reinterpret_cast<Object**>(address); |
| 756 Object** limit = current + (size >> kPointerSizeLog2); | 830 Object** limit = current + (size >> kPointerSizeLog2); |
| 757 if (FLAG_log_snapshot_positions) { | 831 if (FLAG_log_snapshot_positions) { |
| 758 LOG(isolate_, SnapshotPositionEvent(address, source_.position())); | 832 LOG(isolate_, SnapshotPositionEvent(address, source_.position())); |
| 759 } | 833 } |
| 760 ReadData(current, limit, space_number, address); | |
| 761 | 834 |
| 762 // TODO(mvstanton): consider treating the heap()->allocation_sites_list() | 835 if (ReadData(current, limit, space_number, address)) { |
| 763 // as a (weak) root. If this root is relocated correctly, | 836 // Only post process if object content has not been deferred. |
| 764 // RelinkAllocationSite() isn't necessary. | 837 obj = PostProcessNewObject(obj, space_number); |
| 765 if (obj->IsAllocationSite()) RelinkAllocationSite(AllocationSite::cast(obj)); | 838 } |
| 766 | |
| 767 // Fix up strings from serialized user code. | |
| 768 if (deserializing_user_code()) obj = ProcessNewObjectFromSerializedCode(obj); | |
| 769 | 839 |
| 770 Object* write_back_obj = obj; | 840 Object* write_back_obj = obj; |
| 771 UnalignedCopy(write_back, &write_back_obj); | 841 UnalignedCopy(write_back, &write_back_obj); |
| 772 #ifdef DEBUG | 842 #ifdef DEBUG |
| 773 if (obj->IsCode()) { | 843 if (obj->IsCode()) { |
| 774 DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE); | 844 DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE); |
| 775 #ifdef VERIFY_HEAP | |
| 776 obj->ObjectVerify(); | |
| 777 #endif // VERIFY_HEAP | |
| 778 } else { | 845 } else { |
| 779 DCHECK(space_number != CODE_SPACE); | 846 DCHECK(space_number != CODE_SPACE); |
| 780 } | 847 } |
| 781 #endif // DEBUG | 848 #endif // DEBUG |
| 782 } | 849 } |
| 783 | 850 |
| 784 | 851 |
| 785 // We know the space requirements before deserialization and can | 852 // We know the space requirements before deserialization and can |
| 786 // pre-allocate that reserved space. During deserialization, all we need | 853 // pre-allocate that reserved space. During deserialization, all we need |
| 787 // to do is to bump up the pointer for each space in the reserved | 854 // to do is to bump up the pointer for each space in the reserved |
| (...skipping 23 matching lines...) Expand all Loading... |
| 811 // Assert that the current reserved chunk is still big enough. | 878 // Assert that the current reserved chunk is still big enough. |
| 812 const Heap::Reservation& reservation = reservations_[space_index]; | 879 const Heap::Reservation& reservation = reservations_[space_index]; |
| 813 int chunk_index = current_chunk_[space_index]; | 880 int chunk_index = current_chunk_[space_index]; |
| 814 CHECK_LE(high_water_[space_index], reservation[chunk_index].end); | 881 CHECK_LE(high_water_[space_index], reservation[chunk_index].end); |
| 815 #endif | 882 #endif |
| 816 return address; | 883 return address; |
| 817 } | 884 } |
| 818 } | 885 } |
| 819 | 886 |
| 820 | 887 |
| 821 void Deserializer::ReadData(Object** current, Object** limit, int source_space, | 888 bool Deserializer::ReadData(Object** current, Object** limit, int source_space, |
| 822 Address current_object_address) { | 889 Address current_object_address) { |
| 823 Isolate* const isolate = isolate_; | 890 Isolate* const isolate = isolate_; |
| 824 // Write barrier support costs around 1% in startup time. In fact there | 891 // Write barrier support costs around 1% in startup time. In fact there |
| 825 // are no new space objects in current boot snapshots, so it's not needed, | 892 // are no new space objects in current boot snapshots, so it's not needed, |
| 826 // but that may change. | 893 // but that may change. |
| 827 bool write_barrier_needed = | 894 bool write_barrier_needed = |
| 828 (current_object_address != NULL && source_space != NEW_SPACE && | 895 (current_object_address != NULL && source_space != NEW_SPACE && |
| 829 source_space != CODE_SPACE); | 896 source_space != CODE_SPACE); |
| 830 while (current < limit) { | 897 while (current < limit) { |
| 831 byte data = source_.Get(); | 898 byte data = source_.Get(); |
| (...skipping 236 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1068 const Heap::Reservation& reservation = reservations_[space]; | 1135 const Heap::Reservation& reservation = reservations_[space]; |
| 1069 // Make sure the current chunk is indeed exhausted. | 1136 // Make sure the current chunk is indeed exhausted. |
| 1070 CHECK_EQ(reservation[chunk_index].end, high_water_[space]); | 1137 CHECK_EQ(reservation[chunk_index].end, high_water_[space]); |
| 1071 // Move to next reserved chunk. | 1138 // Move to next reserved chunk. |
| 1072 chunk_index = ++current_chunk_[space]; | 1139 chunk_index = ++current_chunk_[space]; |
| 1073 CHECK_LT(chunk_index, reservation.length()); | 1140 CHECK_LT(chunk_index, reservation.length()); |
| 1074 high_water_[space] = reservation[chunk_index].start; | 1141 high_water_[space] = reservation[chunk_index].start; |
| 1075 break; | 1142 break; |
| 1076 } | 1143 } |
| 1077 | 1144 |
| 1145 case kDeferred: { |
| 1146 // Deferred can only occur right after the heap object header. |
| 1147 DCHECK(current == reinterpret_cast<Object**>(current_object_address + |
| 1148 kPointerSize)); |
| 1149 HeapObject* obj = HeapObject::FromAddress(current_object_address); |
| 1150 // If the deferred object is a map, its instance type may be used |
| 1151 // during deserialization. Initialize it with a temporary value. |
| 1152 if (obj->IsMap()) Map::cast(obj)->set_instance_type(FILLER_TYPE); |
| 1153 current = limit; |
| 1154 return false; |
| 1155 } |
| 1156 |
| 1078 case kSynchronize: | 1157 case kSynchronize: |
| 1079 // If we get here then that indicates that you have a mismatch between | 1158 // If we get here then that indicates that you have a mismatch between |
| 1080 // the number of GC roots when serializing and deserializing. | 1159 // the number of GC roots when serializing and deserializing. |
| 1081 CHECK(false); | 1160 CHECK(false); |
| 1082 break; | 1161 break; |
| 1083 | 1162 |
| 1084 case kNativesStringResource: { | 1163 case kNativesStringResource: { |
| 1085 DCHECK(!isolate_->heap()->deserialization_complete()); | 1164 DCHECK(!isolate_->heap()->deserialization_complete()); |
| 1086 int index = source_.Get(); | 1165 int index = source_.Get(); |
| 1087 Vector<const char> source_vector = Natives::GetScriptSource(index); | 1166 Vector<const char> source_vector = Natives::GetScriptSource(index); |
| (...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1174 } | 1253 } |
| 1175 | 1254 |
| 1176 #undef SIXTEEN_CASES | 1255 #undef SIXTEEN_CASES |
| 1177 #undef FOUR_CASES | 1256 #undef FOUR_CASES |
| 1178 | 1257 |
| 1179 default: | 1258 default: |
| 1180 CHECK(false); | 1259 CHECK(false); |
| 1181 } | 1260 } |
| 1182 } | 1261 } |
| 1183 CHECK_EQ(limit, current); | 1262 CHECK_EQ(limit, current); |
| 1263 return true; |
| 1184 } | 1264 } |
| 1185 | 1265 |
| 1186 | 1266 |
| 1187 Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink) | 1267 Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink) |
| 1188 : isolate_(isolate), | 1268 : isolate_(isolate), |
| 1189 sink_(sink), | 1269 sink_(sink), |
| 1190 external_reference_encoder_(isolate), | 1270 external_reference_encoder_(isolate), |
| 1191 root_index_map_(isolate), | 1271 root_index_map_(isolate), |
| 1272 recursion_depth_(0), |
| 1192 code_address_map_(NULL), | 1273 code_address_map_(NULL), |
| 1193 large_objects_total_size_(0), | 1274 large_objects_total_size_(0), |
| 1194 seen_large_objects_index_(0) { | 1275 seen_large_objects_index_(0) { |
| 1195 // The serializer is meant to be used only to generate initial heap images | 1276 // The serializer is meant to be used only to generate initial heap images |
| 1196 // from a context in which there is only one isolate. | 1277 // from a context in which there is only one isolate. |
| 1197 for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) { | 1278 for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) { |
| 1198 pending_chunk_[i] = 0; | 1279 pending_chunk_[i] = 0; |
| 1199 max_chunk_size_[i] = static_cast<uint32_t>( | 1280 max_chunk_size_[i] = static_cast<uint32_t>( |
| 1200 MemoryAllocator::PageAreaSize(static_cast<AllocationSpace>(i))); | 1281 MemoryAllocator::PageAreaSize(static_cast<AllocationSpace>(i))); |
| 1201 } | 1282 } |
| (...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1257 PrintF("%10d %10" V8_PTR_PREFIX "d %s\n", instance_type_count_[Name], \ | 1338 PrintF("%10d %10" V8_PTR_PREFIX "d %s\n", instance_type_count_[Name], \ |
| 1258 instance_type_size_[Name], #Name); \ | 1339 instance_type_size_[Name], #Name); \ |
| 1259 } | 1340 } |
| 1260 INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE) | 1341 INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE) |
| 1261 #undef PRINT_INSTANCE_TYPE | 1342 #undef PRINT_INSTANCE_TYPE |
| 1262 PrintF("\n"); | 1343 PrintF("\n"); |
| 1263 #endif // OBJECT_PRINT | 1344 #endif // OBJECT_PRINT |
| 1264 } | 1345 } |
| 1265 | 1346 |
| 1266 | 1347 |
| 1348 void Serializer::SerializeDeferredObjects() { |
| 1349 while (deferred_objects_.length() > 0) { |
| 1350 HeapObject* obj = deferred_objects_.RemoveLast(); |
| 1351 ObjectSerializer obj_serializer(this, obj, sink_, kPlain, kStartOfObject); |
| 1352 obj_serializer.SerializeDeferred(); |
| 1353 } |
| 1354 sink_->Put(kSynchronize, "Finished with deferred objects"); |
| 1355 } |
| 1356 |
| 1357 |
| 1267 void StartupSerializer::SerializeStrongReferences() { | 1358 void StartupSerializer::SerializeStrongReferences() { |
| 1268 Isolate* isolate = this->isolate(); | 1359 Isolate* isolate = this->isolate(); |
| 1269 // No active threads. | 1360 // No active threads. |
| 1270 CHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse()); | 1361 CHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse()); |
| 1271 // No active or weak handles. | 1362 // No active or weak handles. |
| 1272 CHECK(isolate->handle_scope_implementer()->blocks()->is_empty()); | 1363 CHECK(isolate->handle_scope_implementer()->blocks()->is_empty()); |
| 1273 CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles()); | 1364 CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles()); |
| 1274 CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles()); | 1365 CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles()); |
| 1275 // We don't support serializing installed extensions. | 1366 // We don't support serializing installed extensions. |
| 1276 CHECK(!isolate->has_installed_extensions()); | 1367 CHECK(!isolate->has_installed_extensions()); |
| (...skipping 23 matching lines...) Expand all Loading... |
| 1300 } | 1391 } |
| 1301 | 1392 |
| 1302 | 1393 |
| 1303 void PartialSerializer::Serialize(Object** o) { | 1394 void PartialSerializer::Serialize(Object** o) { |
| 1304 if ((*o)->IsContext()) { | 1395 if ((*o)->IsContext()) { |
| 1305 Context* context = Context::cast(*o); | 1396 Context* context = Context::cast(*o); |
| 1306 global_object_ = context->global_object(); | 1397 global_object_ = context->global_object(); |
| 1307 back_reference_map()->AddGlobalProxy(context->global_proxy()); | 1398 back_reference_map()->AddGlobalProxy(context->global_proxy()); |
| 1308 } | 1399 } |
| 1309 VisitPointer(o); | 1400 VisitPointer(o); |
| 1401 SerializeDeferredObjects(); |
| 1310 SerializeOutdatedContextsAsFixedArray(); | 1402 SerializeOutdatedContextsAsFixedArray(); |
| 1311 Pad(); | 1403 Pad(); |
| 1312 } | 1404 } |
| 1313 | 1405 |
| 1314 | 1406 |
| 1315 void PartialSerializer::SerializeOutdatedContextsAsFixedArray() { | 1407 void PartialSerializer::SerializeOutdatedContextsAsFixedArray() { |
| 1316 int length = outdated_contexts_.length(); | 1408 int length = outdated_contexts_.length(); |
| 1317 if (length == 0) { | 1409 if (length == 0) { |
| 1318 FixedArray* empty = isolate_->heap()->empty_fixed_array(); | 1410 FixedArray* empty = isolate_->heap()->empty_fixed_array(); |
| 1319 SerializeObject(empty, kPlain, kStartOfObject, 0); | 1411 SerializeObject(empty, kPlain, kStartOfObject, 0); |
| 1320 } else { | 1412 } else { |
| 1321 // Serialize an imaginary fixed array containing outdated contexts. | 1413 // Serialize an imaginary fixed array containing outdated contexts. |
| 1322 int size = FixedArray::SizeFor(length); | 1414 int size = FixedArray::SizeFor(length); |
| 1323 Allocate(NEW_SPACE, size); | 1415 Allocate(NEW_SPACE, size); |
| 1324 sink_->Put(kNewObject + NEW_SPACE, "emulated FixedArray"); | 1416 sink_->Put(kNewObject + NEW_SPACE, "emulated FixedArray"); |
| 1325 sink_->PutInt(size >> kObjectAlignmentBits, "FixedArray size in words"); | 1417 sink_->PutInt(size >> kObjectAlignmentBits, "FixedArray size in words"); |
| 1326 Map* map = isolate_->heap()->fixed_array_map(); | 1418 Map* map = isolate_->heap()->fixed_array_map(); |
| 1327 SerializeObject(map, kPlain, kStartOfObject, 0); | 1419 SerializeObject(map, kPlain, kStartOfObject, 0); |
| 1328 Smi* length_smi = Smi::FromInt(length); | 1420 Smi* length_smi = Smi::FromInt(length); |
| 1329 sink_->Put(kOnePointerRawData, "Smi"); | 1421 sink_->Put(kOnePointerRawData, "Smi"); |
| 1330 for (int i = 0; i < kPointerSize; i++) { | 1422 for (int i = 0; i < kPointerSize; i++) { |
| 1331 sink_->Put(reinterpret_cast<byte*>(&length_smi)[i], "Byte"); | 1423 sink_->Put(reinterpret_cast<byte*>(&length_smi)[i], "Byte"); |
| 1332 } | 1424 } |
| 1333 for (int i = 0; i < length; i++) { | 1425 for (int i = 0; i < length; i++) { |
| 1334 BackReference back_ref = outdated_contexts_[i]; | 1426 Context* context = outdated_contexts_[i]; |
| 1335 DCHECK(BackReferenceIsAlreadyAllocated(back_ref)); | 1427 BackReference back_reference = back_reference_map_.Lookup(context); |
| 1336 sink_->Put(kBackref + back_ref.space(), "BackRef"); | 1428 sink_->Put(kBackref + back_reference.space(), "BackRef"); |
| 1337 sink_->PutInt(back_ref.reference(), "BackRefValue"); | 1429 PutBackReference(context, back_reference); |
| 1338 } | 1430 } |
| 1339 } | 1431 } |
| 1340 } | 1432 } |
| 1341 | 1433 |
| 1342 | 1434 |
| 1343 bool Serializer::ShouldBeSkipped(Object** current) { | 1435 bool Serializer::ShouldBeSkipped(Object** current) { |
| 1344 Object** roots = isolate()->heap()->roots_array_start(); | 1436 Object** roots = isolate()->heap()->roots_array_start(); |
| 1345 return current == &roots[Heap::kStoreBufferTopRootIndex] | 1437 return current == &roots[Heap::kStoreBufferTopRootIndex] |
| 1346 || current == &roots[Heap::kStackLimitRootIndex] | 1438 || current == &roots[Heap::kStackLimitRootIndex] |
| 1347 || current == &roots[Heap::kRealStackLimitRootIndex]; | 1439 || current == &roots[Heap::kRealStackLimitRootIndex]; |
| (...skipping 142 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1490 } | 1582 } |
| 1491 | 1583 |
| 1492 AllocationSpace space = back_reference.space(); | 1584 AllocationSpace space = back_reference.space(); |
| 1493 if (skip == 0) { | 1585 if (skip == 0) { |
| 1494 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRef"); | 1586 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRef"); |
| 1495 } else { | 1587 } else { |
| 1496 sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space, | 1588 sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space, |
| 1497 "BackRefWithSkip"); | 1589 "BackRefWithSkip"); |
| 1498 sink_->PutInt(skip, "BackRefSkipDistance"); | 1590 sink_->PutInt(skip, "BackRefSkipDistance"); |
| 1499 } | 1591 } |
| 1500 DCHECK(BackReferenceIsAlreadyAllocated(back_reference)); | 1592 PutBackReference(obj, back_reference); |
| 1501 sink_->PutInt(back_reference.reference(), "BackRefValue"); | |
| 1502 | |
| 1503 hot_objects_.Add(obj); | |
| 1504 } | 1593 } |
| 1505 return true; | 1594 return true; |
| 1506 } | 1595 } |
| 1507 return false; | 1596 return false; |
| 1508 } | 1597 } |
| 1509 | 1598 |
| 1510 | 1599 |
| 1511 void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code, | 1600 void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code, |
| 1512 WhereToPoint where_to_point, int skip) { | 1601 WhereToPoint where_to_point, int skip) { |
| 1513 DCHECK(!obj->IsJSFunction()); | 1602 DCHECK(!obj->IsJSFunction()); |
| (...skipping 15 matching lines...) Expand all Loading... |
| 1529 | 1618 |
| 1530 FlushSkip(skip); | 1619 FlushSkip(skip); |
| 1531 | 1620 |
| 1532 // Object has not yet been serialized. Serialize it here. | 1621 // Object has not yet been serialized. Serialize it here. |
| 1533 ObjectSerializer object_serializer(this, obj, sink_, how_to_code, | 1622 ObjectSerializer object_serializer(this, obj, sink_, how_to_code, |
| 1534 where_to_point); | 1623 where_to_point); |
| 1535 object_serializer.Serialize(); | 1624 object_serializer.Serialize(); |
| 1536 } | 1625 } |
| 1537 | 1626 |
| 1538 | 1627 |
| 1539 void StartupSerializer::SerializeWeakReferences() { | 1628 void StartupSerializer::SerializeWeakReferencesAndDeferred() { |
| 1540 // This phase comes right after the serialization (of the snapshot). | 1629 // This phase comes right after the serialization (of the snapshot). |
| 1541 // After we have done the partial serialization the partial snapshot cache | 1630 // After we have done the partial serialization the partial snapshot cache |
| 1542 // will contain some references needed to decode the partial snapshot. We | 1631 // will contain some references needed to decode the partial snapshot. We |
| 1543 // add one entry with 'undefined' which is the sentinel that the deserializer | 1632 // add one entry with 'undefined' which is the sentinel that the deserializer |
| 1544 // uses to know it is done deserializing the array. | 1633 // uses to know it is done deserializing the array. |
| 1545 Object* undefined = isolate()->heap()->undefined_value(); | 1634 Object* undefined = isolate()->heap()->undefined_value(); |
| 1546 VisitPointer(&undefined); | 1635 VisitPointer(&undefined); |
| 1547 isolate()->heap()->IterateWeakRoots(this, VISIT_ALL); | 1636 isolate()->heap()->IterateWeakRoots(this, VISIT_ALL); |
| 1637 SerializeDeferredObjects(); |
| 1548 Pad(); | 1638 Pad(); |
| 1549 } | 1639 } |
| 1550 | 1640 |
| 1551 | 1641 |
| 1552 void Serializer::PutRoot(int root_index, | 1642 void Serializer::PutRoot(int root_index, |
| 1553 HeapObject* object, | 1643 HeapObject* object, |
| 1554 SerializerDeserializer::HowToCode how_to_code, | 1644 SerializerDeserializer::HowToCode how_to_code, |
| 1555 SerializerDeserializer::WhereToPoint where_to_point, | 1645 SerializerDeserializer::WhereToPoint where_to_point, |
| 1556 int skip) { | 1646 int skip) { |
| 1557 if (FLAG_trace_serializer) { | 1647 if (FLAG_trace_serializer) { |
| (...skipping 12 matching lines...) Expand all Loading... |
| 1570 sink_->PutInt(skip, "SkipInPutRoot"); | 1660 sink_->PutInt(skip, "SkipInPutRoot"); |
| 1571 } | 1661 } |
| 1572 } else { | 1662 } else { |
| 1573 FlushSkip(skip); | 1663 FlushSkip(skip); |
| 1574 sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization"); | 1664 sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization"); |
| 1575 sink_->PutInt(root_index, "root_index"); | 1665 sink_->PutInt(root_index, "root_index"); |
| 1576 } | 1666 } |
| 1577 } | 1667 } |
| 1578 | 1668 |
| 1579 | 1669 |
| 1670 void Serializer::PutBackReference(HeapObject* object, BackReference reference) { |
| 1671 DCHECK(BackReferenceIsAlreadyAllocated(reference)); |
| 1672 sink_->PutInt(reference.reference(), "BackRefValue"); |
| 1673 hot_objects_.Add(object); |
| 1674 } |
| 1675 |
| 1676 |
| 1580 void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code, | 1677 void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code, |
| 1581 WhereToPoint where_to_point, int skip) { | 1678 WhereToPoint where_to_point, int skip) { |
| 1582 if (obj->IsMap()) { | 1679 if (obj->IsMap()) { |
| 1583 // The code-caches link to context-specific code objects, which | 1680 // The code-caches link to context-specific code objects, which |
| 1584 // the startup and context serializes cannot currently handle. | 1681 // the startup and context serializes cannot currently handle. |
| 1585 DCHECK(Map::cast(obj)->code_cache() == obj->GetHeap()->empty_fixed_array()); | 1682 DCHECK(Map::cast(obj)->code_cache() == obj->GetHeap()->empty_fixed_array()); |
| 1586 } | 1683 } |
| 1587 | 1684 |
| 1588 // Replace typed arrays by undefined. | 1685 // Replace typed arrays by undefined. |
| 1589 if (obj->IsJSTypedArray()) obj = isolate_->heap()->undefined_value(); | 1686 if (obj->IsJSTypedArray()) obj = isolate_->heap()->undefined_value(); |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1623 } | 1720 } |
| 1624 | 1721 |
| 1625 // Object has not yet been serialized. Serialize it here. | 1722 // Object has not yet been serialized. Serialize it here. |
| 1626 ObjectSerializer serializer(this, obj, sink_, how_to_code, where_to_point); | 1723 ObjectSerializer serializer(this, obj, sink_, how_to_code, where_to_point); |
| 1627 serializer.Serialize(); | 1724 serializer.Serialize(); |
| 1628 | 1725 |
| 1629 if (obj->IsContext() && | 1726 if (obj->IsContext() && |
| 1630 Context::cast(obj)->global_object() == global_object_) { | 1727 Context::cast(obj)->global_object() == global_object_) { |
| 1631 // Context refers to the current global object. This reference will | 1728 // Context refers to the current global object. This reference will |
| 1632 // become outdated after deserialization. | 1729 // become outdated after deserialization. |
| 1633 BackReference back_reference = back_reference_map_.Lookup(obj); | 1730 outdated_contexts_.Add(Context::cast(obj)); |
| 1634 DCHECK(back_reference.is_valid()); | |
| 1635 outdated_contexts_.Add(back_reference); | |
| 1636 } | 1731 } |
| 1637 } | 1732 } |
| 1638 | 1733 |
| 1639 | 1734 |
| 1640 void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space, | 1735 void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space, |
| 1641 int size, Map* map) { | 1736 int size, Map* map) { |
| 1642 if (serializer_->code_address_map_) { | 1737 if (serializer_->code_address_map_) { |
| 1643 const char* code_name = | 1738 const char* code_name = |
| 1644 serializer_->code_address_map_->Lookup(object_->address()); | 1739 serializer_->code_address_map_->Lookup(object_->address()); |
| 1645 LOG(serializer_->isolate_, | 1740 LOG(serializer_->isolate_, |
| (...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1746 void Serializer::ObjectSerializer::Serialize() { | 1841 void Serializer::ObjectSerializer::Serialize() { |
| 1747 if (FLAG_trace_serializer) { | 1842 if (FLAG_trace_serializer) { |
| 1748 PrintF(" Encoding heap object: "); | 1843 PrintF(" Encoding heap object: "); |
| 1749 object_->ShortPrint(); | 1844 object_->ShortPrint(); |
| 1750 PrintF("\n"); | 1845 PrintF("\n"); |
| 1751 } | 1846 } |
| 1752 | 1847 |
| 1753 // We cannot serialize typed array objects correctly. | 1848 // We cannot serialize typed array objects correctly. |
| 1754 DCHECK(!object_->IsJSTypedArray()); | 1849 DCHECK(!object_->IsJSTypedArray()); |
| 1755 | 1850 |
| 1851 // We don't expect fillers. |
| 1852 DCHECK(!object_->IsFiller()); |
| 1853 |
| 1756 if (object_->IsPrototypeInfo()) { | 1854 if (object_->IsPrototypeInfo()) { |
| 1757 Object* prototype_users = PrototypeInfo::cast(object_)->prototype_users(); | 1855 Object* prototype_users = PrototypeInfo::cast(object_)->prototype_users(); |
| 1758 if (prototype_users->IsWeakFixedArray()) { | 1856 if (prototype_users->IsWeakFixedArray()) { |
| 1759 WeakFixedArray::cast(prototype_users)->Compact(); | 1857 WeakFixedArray::cast(prototype_users)->Compact(); |
| 1760 } | 1858 } |
| 1761 } | 1859 } |
| 1762 | 1860 |
| 1763 if (object_->IsScript()) { | 1861 if (object_->IsScript()) { |
| 1764 // Clear cached line ends. | 1862 // Clear cached line ends. |
| 1765 Object* undefined = serializer_->isolate()->heap()->undefined_value(); | 1863 Object* undefined = serializer_->isolate()->heap()->undefined_value(); |
| (...skipping 17 matching lines...) Expand all Loading... |
| 1783 int size = object_->Size(); | 1881 int size = object_->Size(); |
| 1784 Map* map = object_->map(); | 1882 Map* map = object_->map(); |
| 1785 AllocationSpace space = | 1883 AllocationSpace space = |
| 1786 MemoryChunk::FromAddress(object_->address())->owner()->identity(); | 1884 MemoryChunk::FromAddress(object_->address())->owner()->identity(); |
| 1787 SerializePrologue(space, size, map); | 1885 SerializePrologue(space, size, map); |
| 1788 | 1886 |
| 1789 // Serialize the rest of the object. | 1887 // Serialize the rest of the object. |
| 1790 CHECK_EQ(0, bytes_processed_so_far_); | 1888 CHECK_EQ(0, bytes_processed_so_far_); |
| 1791 bytes_processed_so_far_ = kPointerSize; | 1889 bytes_processed_so_far_ = kPointerSize; |
| 1792 | 1890 |
| 1891 RecursionScope recursion(serializer_); |
| 1892 // Objects that are immediately post processed during deserialization |
| 1893 // cannot be deferred, since post processing requires the object content. |
| 1894 if (recursion.ExceedsMaximum() && CanBeDeferred(object_)) { |
| 1895 serializer_->QueueDeferredObject(object_); |
| 1896 sink_->Put(kDeferred, "Deferring object content"); |
| 1897 return; |
| 1898 } |
| 1899 |
| 1793 object_->IterateBody(map->instance_type(), size, this); | 1900 object_->IterateBody(map->instance_type(), size, this); |
| 1794 OutputRawData(object_->address() + size); | 1901 OutputRawData(object_->address() + size); |
| 1795 } | 1902 } |
| 1903 |
| 1904 |
| 1905 void Serializer::ObjectSerializer::SerializeDeferred() { |
| 1906 if (FLAG_trace_serializer) { |
| 1907 PrintF(" Encoding deferred heap object: "); |
| 1908 object_->ShortPrint(); |
| 1909 PrintF("\n"); |
| 1910 } |
| 1911 |
| 1912 int size = object_->Size(); |
| 1913 Map* map = object_->map(); |
| 1914 BackReference reference = serializer_->back_reference_map()->Lookup(object_); |
| 1915 |
| 1916 // Serialize the rest of the object. |
| 1917 CHECK_EQ(0, bytes_processed_so_far_); |
| 1918 bytes_processed_so_far_ = kPointerSize; |
| 1919 |
| 1920 sink_->Put(kNewObject + reference.space(), "deferred object"); |
| 1921 serializer_->PutBackReference(object_, reference); |
| 1922 sink_->PutInt(size >> kPointerSizeLog2, "deferred object size"); |
| 1923 |
| 1924 object_->IterateBody(map->instance_type(), size, this); |
| 1925 OutputRawData(object_->address() + size); |
| 1926 } |
| 1796 | 1927 |
| 1797 | 1928 |
| 1798 void Serializer::ObjectSerializer::VisitPointers(Object** start, | 1929 void Serializer::ObjectSerializer::VisitPointers(Object** start, |
| 1799 Object** end) { | 1930 Object** end) { |
| 1800 Object** current = start; | 1931 Object** current = start; |
| 1801 while (current < end) { | 1932 while (current < end) { |
| 1802 while (current < end && (*current)->IsSmi()) current++; | 1933 while (current < end && (*current)->IsSmi()) current++; |
| 1803 if (current < end) OutputRawData(reinterpret_cast<Address>(current)); | 1934 if (current < end) OutputRawData(reinterpret_cast<Address>(current)); |
| 1804 | 1935 |
| 1805 while (current < end && !(*current)->IsSmi()) { | 1936 while (current < end && !(*current)->IsSmi()) { |
| (...skipping 301 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2107 if (script->IsScript()) Script::cast(script)->name()->ShortPrint(); | 2238 if (script->IsScript()) Script::cast(script)->name()->ShortPrint(); |
| 2108 PrintF("]\n"); | 2239 PrintF("]\n"); |
| 2109 } | 2240 } |
| 2110 | 2241 |
| 2111 // Serialize code object. | 2242 // Serialize code object. |
| 2112 SnapshotByteSink sink(info->code()->CodeSize() * 2); | 2243 SnapshotByteSink sink(info->code()->CodeSize() * 2); |
| 2113 CodeSerializer cs(isolate, &sink, *source, info->code()); | 2244 CodeSerializer cs(isolate, &sink, *source, info->code()); |
| 2114 DisallowHeapAllocation no_gc; | 2245 DisallowHeapAllocation no_gc; |
| 2115 Object** location = Handle<Object>::cast(info).location(); | 2246 Object** location = Handle<Object>::cast(info).location(); |
| 2116 cs.VisitPointer(location); | 2247 cs.VisitPointer(location); |
| 2248 cs.SerializeDeferredObjects(); |
| 2117 cs.Pad(); | 2249 cs.Pad(); |
| 2118 | 2250 |
| 2119 SerializedCodeData data(sink.data(), cs); | 2251 SerializedCodeData data(sink.data(), cs); |
| 2120 ScriptData* script_data = data.GetScriptData(); | 2252 ScriptData* script_data = data.GetScriptData(); |
| 2121 | 2253 |
| 2122 if (FLAG_profile_deserialization) { | 2254 if (FLAG_profile_deserialization) { |
| 2123 double ms = timer.Elapsed().InMillisecondsF(); | 2255 double ms = timer.Elapsed().InMillisecondsF(); |
| 2124 int length = script_data->length(); | 2256 int length = script_data->length(); |
| 2125 PrintF("[Serializing to %d bytes took %0.3f ms]\n", length, ms); | 2257 PrintF("[Serializing to %d bytes took %0.3f ms]\n", length, ms); |
| 2126 } | 2258 } |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2185 // We expect no instantiated function objects or contexts. | 2317 // We expect no instantiated function objects or contexts. |
| 2186 CHECK(!obj->IsJSFunction() && !obj->IsContext()); | 2318 CHECK(!obj->IsJSFunction() && !obj->IsContext()); |
| 2187 | 2319 |
| 2188 SerializeGeneric(obj, how_to_code, where_to_point); | 2320 SerializeGeneric(obj, how_to_code, where_to_point); |
| 2189 } | 2321 } |
| 2190 | 2322 |
| 2191 | 2323 |
| 2192 void CodeSerializer::SerializeGeneric(HeapObject* heap_object, | 2324 void CodeSerializer::SerializeGeneric(HeapObject* heap_object, |
| 2193 HowToCode how_to_code, | 2325 HowToCode how_to_code, |
| 2194 WhereToPoint where_to_point) { | 2326 WhereToPoint where_to_point) { |
| 2195 if (heap_object->IsInternalizedString()) num_internalized_strings_++; | |
| 2196 | |
| 2197 // Object has not yet been serialized. Serialize it here. | 2327 // Object has not yet been serialized. Serialize it here. |
| 2198 ObjectSerializer serializer(this, heap_object, sink_, how_to_code, | 2328 ObjectSerializer serializer(this, heap_object, sink_, how_to_code, |
| 2199 where_to_point); | 2329 where_to_point); |
| 2200 serializer.Serialize(); | 2330 serializer.Serialize(); |
| 2201 } | 2331 } |
| 2202 | 2332 |
| 2203 | 2333 |
| 2204 void CodeSerializer::SerializeBuiltin(int builtin_index, HowToCode how_to_code, | 2334 void CodeSerializer::SerializeBuiltin(int builtin_index, HowToCode how_to_code, |
| 2205 WhereToPoint where_to_point) { | 2335 WhereToPoint where_to_point) { |
| 2206 DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) || | 2336 DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) || |
| (...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2298 HandleScope scope(isolate); | 2428 HandleScope scope(isolate); |
| 2299 | 2429 |
| 2300 SmartPointer<SerializedCodeData> scd( | 2430 SmartPointer<SerializedCodeData> scd( |
| 2301 SerializedCodeData::FromCachedData(isolate, cached_data, *source)); | 2431 SerializedCodeData::FromCachedData(isolate, cached_data, *source)); |
| 2302 if (scd.is_empty()) { | 2432 if (scd.is_empty()) { |
| 2303 if (FLAG_profile_deserialization) PrintF("[Cached code failed check]\n"); | 2433 if (FLAG_profile_deserialization) PrintF("[Cached code failed check]\n"); |
| 2304 DCHECK(cached_data->rejected()); | 2434 DCHECK(cached_data->rejected()); |
| 2305 return MaybeHandle<SharedFunctionInfo>(); | 2435 return MaybeHandle<SharedFunctionInfo>(); |
| 2306 } | 2436 } |
| 2307 | 2437 |
| 2308 // Eagerly expand string table to avoid allocations during deserialization. | |
| 2309 StringTable::EnsureCapacityForDeserialization(isolate, | |
| 2310 scd->NumInternalizedStrings()); | |
| 2311 | |
| 2312 // Prepare and register list of attached objects. | 2438 // Prepare and register list of attached objects. |
| 2313 Vector<const uint32_t> code_stub_keys = scd->CodeStubKeys(); | 2439 Vector<const uint32_t> code_stub_keys = scd->CodeStubKeys(); |
| 2314 Vector<Handle<Object> > attached_objects = Vector<Handle<Object> >::New( | 2440 Vector<Handle<Object> > attached_objects = Vector<Handle<Object> >::New( |
| 2315 code_stub_keys.length() + kCodeStubsBaseIndex); | 2441 code_stub_keys.length() + kCodeStubsBaseIndex); |
| 2316 attached_objects[kSourceObjectIndex] = source; | 2442 attached_objects[kSourceObjectIndex] = source; |
| 2317 for (int i = 0; i < code_stub_keys.length(); i++) { | 2443 for (int i = 0; i < code_stub_keys.length(); i++) { |
| 2318 attached_objects[i + kCodeStubsBaseIndex] = | 2444 attached_objects[i + kCodeStubsBaseIndex] = |
| 2319 CodeStub::GetCode(isolate, code_stub_keys[i]).ToHandleChecked(); | 2445 CodeStub::GetCode(isolate, code_stub_keys[i]).ToHandleChecked(); |
| 2320 } | 2446 } |
| 2321 | 2447 |
| (...skipping 143 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2465 // Allocate backing store and create result data. | 2591 // Allocate backing store and create result data. |
| 2466 AllocateData(size); | 2592 AllocateData(size); |
| 2467 | 2593 |
| 2468 // Set header values. | 2594 // Set header values. |
| 2469 SetMagicNumber(cs.isolate()); | 2595 SetMagicNumber(cs.isolate()); |
| 2470 SetHeaderValue(kVersionHashOffset, Version::Hash()); | 2596 SetHeaderValue(kVersionHashOffset, Version::Hash()); |
| 2471 SetHeaderValue(kSourceHashOffset, SourceHash(cs.source())); | 2597 SetHeaderValue(kSourceHashOffset, SourceHash(cs.source())); |
| 2472 SetHeaderValue(kCpuFeaturesOffset, | 2598 SetHeaderValue(kCpuFeaturesOffset, |
| 2473 static_cast<uint32_t>(CpuFeatures::SupportedFeatures())); | 2599 static_cast<uint32_t>(CpuFeatures::SupportedFeatures())); |
| 2474 SetHeaderValue(kFlagHashOffset, FlagList::Hash()); | 2600 SetHeaderValue(kFlagHashOffset, FlagList::Hash()); |
| 2475 SetHeaderValue(kNumInternalizedStringsOffset, cs.num_internalized_strings()); | |
| 2476 SetHeaderValue(kNumReservationsOffset, reservations.length()); | 2601 SetHeaderValue(kNumReservationsOffset, reservations.length()); |
| 2477 SetHeaderValue(kNumCodeStubKeysOffset, num_stub_keys); | 2602 SetHeaderValue(kNumCodeStubKeysOffset, num_stub_keys); |
| 2478 SetHeaderValue(kPayloadLengthOffset, payload.length()); | 2603 SetHeaderValue(kPayloadLengthOffset, payload.length()); |
| 2479 | 2604 |
| 2480 Checksum checksum(payload.ToConstVector()); | 2605 Checksum checksum(payload.ToConstVector()); |
| 2481 SetHeaderValue(kChecksum1Offset, checksum.a()); | 2606 SetHeaderValue(kChecksum1Offset, checksum.a()); |
| 2482 SetHeaderValue(kChecksum2Offset, checksum.b()); | 2607 SetHeaderValue(kChecksum2Offset, checksum.b()); |
| 2483 | 2608 |
| 2484 // Copy reservation chunk sizes. | 2609 // Copy reservation chunk sizes. |
| 2485 CopyBytes(data_ + kHeaderSize, reinterpret_cast<byte*>(reservations.begin()), | 2610 CopyBytes(data_ + kHeaderSize, reinterpret_cast<byte*>(reservations.begin()), |
| (...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2543 int payload_offset = kHeaderSize + reservations_size + code_stubs_size; | 2668 int payload_offset = kHeaderSize + reservations_size + code_stubs_size; |
| 2544 int padded_payload_offset = POINTER_SIZE_ALIGN(payload_offset); | 2669 int padded_payload_offset = POINTER_SIZE_ALIGN(payload_offset); |
| 2545 const byte* payload = data_ + padded_payload_offset; | 2670 const byte* payload = data_ + padded_payload_offset; |
| 2546 DCHECK(IsAligned(reinterpret_cast<intptr_t>(payload), kPointerAlignment)); | 2671 DCHECK(IsAligned(reinterpret_cast<intptr_t>(payload), kPointerAlignment)); |
| 2547 int length = GetHeaderValue(kPayloadLengthOffset); | 2672 int length = GetHeaderValue(kPayloadLengthOffset); |
| 2548 DCHECK_EQ(data_ + size_, payload + length); | 2673 DCHECK_EQ(data_ + size_, payload + length); |
| 2549 return Vector<const byte>(payload, length); | 2674 return Vector<const byte>(payload, length); |
| 2550 } | 2675 } |
| 2551 | 2676 |
| 2552 | 2677 |
| 2553 int SerializedCodeData::NumInternalizedStrings() const { | |
| 2554 return GetHeaderValue(kNumInternalizedStringsOffset); | |
| 2555 } | |
| 2556 | |
| 2557 Vector<const uint32_t> SerializedCodeData::CodeStubKeys() const { | 2678 Vector<const uint32_t> SerializedCodeData::CodeStubKeys() const { |
| 2558 int reservations_size = GetHeaderValue(kNumReservationsOffset) * kInt32Size; | 2679 int reservations_size = GetHeaderValue(kNumReservationsOffset) * kInt32Size; |
| 2559 const byte* start = data_ + kHeaderSize + reservations_size; | 2680 const byte* start = data_ + kHeaderSize + reservations_size; |
| 2560 return Vector<const uint32_t>(reinterpret_cast<const uint32_t*>(start), | 2681 return Vector<const uint32_t>(reinterpret_cast<const uint32_t*>(start), |
| 2561 GetHeaderValue(kNumCodeStubKeysOffset)); | 2682 GetHeaderValue(kNumCodeStubKeysOffset)); |
| 2562 } | 2683 } |
| 2563 | 2684 |
| 2564 | 2685 |
| 2565 SerializedCodeData::SerializedCodeData(ScriptData* data) | 2686 SerializedCodeData::SerializedCodeData(ScriptData* data) |
| 2566 : SerializedData(const_cast<byte*>(data->data()), data->length()) {} | 2687 : SerializedData(const_cast<byte*>(data->data()), data->length()) {} |
| 2567 | 2688 |
| 2568 | 2689 |
| 2569 SerializedCodeData* SerializedCodeData::FromCachedData(Isolate* isolate, | 2690 SerializedCodeData* SerializedCodeData::FromCachedData(Isolate* isolate, |
| 2570 ScriptData* cached_data, | 2691 ScriptData* cached_data, |
| 2571 String* source) { | 2692 String* source) { |
| 2572 DisallowHeapAllocation no_gc; | 2693 DisallowHeapAllocation no_gc; |
| 2573 SerializedCodeData* scd = new SerializedCodeData(cached_data); | 2694 SerializedCodeData* scd = new SerializedCodeData(cached_data); |
| 2574 SanityCheckResult r = scd->SanityCheck(isolate, source); | 2695 SanityCheckResult r = scd->SanityCheck(isolate, source); |
| 2575 if (r == CHECK_SUCCESS) return scd; | 2696 if (r == CHECK_SUCCESS) return scd; |
| 2576 cached_data->Reject(); | 2697 cached_data->Reject(); |
| 2577 source->GetIsolate()->counters()->code_cache_reject_reason()->AddSample(r); | 2698 source->GetIsolate()->counters()->code_cache_reject_reason()->AddSample(r); |
| 2578 delete scd; | 2699 delete scd; |
| 2579 return NULL; | 2700 return NULL; |
| 2580 } | 2701 } |
| 2581 } } // namespace v8::internal | 2702 } } // namespace v8::internal |
| OLD | NEW |