Chromium Code Reviews

Side by Side Diff: src/serialize.cc

Issue 11028027: Revert trunk to bleeding_edge at r12484 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 8 years, 2 months ago
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 581 matching lines...)
592 592
593 593
594 bool Serializer::serialization_enabled_ = false; 594 bool Serializer::serialization_enabled_ = false;
595 bool Serializer::too_late_to_enable_now_ = false; 595 bool Serializer::too_late_to_enable_now_ = false;
596 596
597 597
598 Deserializer::Deserializer(SnapshotByteSource* source) 598 Deserializer::Deserializer(SnapshotByteSource* source)
599 : isolate_(NULL), 599 : isolate_(NULL),
600 source_(source), 600 source_(source),
601 external_reference_decoder_(NULL) { 601 external_reference_decoder_(NULL) {
602 for (int i = 0; i < LAST_SPACE + 1; i++) { 602 }
603 reservations_[i] = kUninitializedReservation; 603
604
605 // This routine both allocates a new object, and also keeps
606 // track of where objects have been allocated so that we can
607 // fix back references when deserializing.
608 Address Deserializer::Allocate(int space_index, Space* space, int size) {
609 Address address;
610 if (!SpaceIsLarge(space_index)) {
611 ASSERT(!SpaceIsPaged(space_index) ||
612 size <= Page::kPageSize - Page::kObjectStartOffset);
613 MaybeObject* maybe_new_allocation;
614 if (space_index == NEW_SPACE) {
615 maybe_new_allocation =
616 reinterpret_cast<NewSpace*>(space)->AllocateRaw(size);
617 } else {
618 maybe_new_allocation =
619 reinterpret_cast<PagedSpace*>(space)->AllocateRaw(size);
620 }
621 ASSERT(!maybe_new_allocation->IsFailure());
622 Object* new_allocation = maybe_new_allocation->ToObjectUnchecked();
623 HeapObject* new_object = HeapObject::cast(new_allocation);
624 address = new_object->address();
625 high_water_[space_index] = address + size;
626 } else {
627 ASSERT(SpaceIsLarge(space_index));
628 LargeObjectSpace* lo_space = reinterpret_cast<LargeObjectSpace*>(space);
629 Object* new_allocation;
630 if (space_index == kLargeData || space_index == kLargeFixedArray) {
631 new_allocation =
632 lo_space->AllocateRaw(size, NOT_EXECUTABLE)->ToObjectUnchecked();
633 } else {
634 ASSERT_EQ(kLargeCode, space_index);
635 new_allocation =
636 lo_space->AllocateRaw(size, EXECUTABLE)->ToObjectUnchecked();
637 }
638 HeapObject* new_object = HeapObject::cast(new_allocation);
639 // Record all large objects in the same space.
640 address = new_object->address();
641 pages_[LO_SPACE].Add(address);
604 } 642 }
643 last_object_address_ = address;
644 return address;
645 }
646
647
648 // This returns the address of an object that has been described in the
649 // snapshot as being offset bytes back in a particular space.
650 HeapObject* Deserializer::GetAddressFromEnd(int space) {
651 int offset = source_->GetInt();
652 ASSERT(!SpaceIsLarge(space));
653 offset <<= kObjectAlignmentBits;
654 return HeapObject::FromAddress(high_water_[space] - offset);
655 }
656
657
658 // This returns the address of an object that has been described in the
659 // snapshot as being offset bytes into a particular space.
660 HeapObject* Deserializer::GetAddressFromStart(int space) {
661 int offset = source_->GetInt();
662 if (SpaceIsLarge(space)) {
663 // Large spaces have one object per 'page'.
664 return HeapObject::FromAddress(pages_[LO_SPACE][offset]);
665 }
666 offset <<= kObjectAlignmentBits;
667 if (space == NEW_SPACE) {
668 // New space has only one space - numbered 0.
669 return HeapObject::FromAddress(pages_[space][0] + offset);
670 }
671 ASSERT(SpaceIsPaged(space));
672 int page_of_pointee = offset >> kPageSizeBits;
673 Address object_address = pages_[space][page_of_pointee] +
674 (offset & Page::kPageAlignmentMask);
675 return HeapObject::FromAddress(object_address);
605 } 676 }
606 677
607 678
608 void Deserializer::Deserialize() { 679 void Deserializer::Deserialize() {
609 isolate_ = Isolate::Current(); 680 isolate_ = Isolate::Current();
610 ASSERT(isolate_ != NULL); 681 ASSERT(isolate_ != NULL);
611 isolate_->heap()->ReserveSpace(reservations_, &high_water_[0]); 682 {
612 // No active threads. 683 // Don't GC while deserializing - just expand the heap.
613 ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse()); 684 AlwaysAllocateScope always_allocate;
614 // No active handles. 685 // Don't use the free lists while deserializing.
615 ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty()); 686 LinearAllocationScope allocate_linearly;
616 ASSERT_EQ(NULL, external_reference_decoder_); 687 // No active threads.
617 external_reference_decoder_ = new ExternalReferenceDecoder(); 688 ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
618 isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG); 689 // No active handles.
619 isolate_->heap()->RepairFreeListsAfterBoot(); 690 ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty());
620 isolate_->heap()->IterateWeakRoots(this, VISIT_ALL); 691 ASSERT_EQ(NULL, external_reference_decoder_);
692 external_reference_decoder_ = new ExternalReferenceDecoder();
693 isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
694 isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
621 695
622 isolate_->heap()->set_native_contexts_list( 696 isolate_->heap()->set_native_contexts_list(
623 isolate_->heap()->undefined_value()); 697 isolate_->heap()->undefined_value());
624 698
625 // Update data pointers to the external strings containing natives sources. 699 // Update data pointers to the external strings containing natives sources.
626 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { 700 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
627 Object* source = isolate_->heap()->natives_source_cache()->get(i); 701 Object* source = isolate_->heap()->natives_source_cache()->get(i);
628 if (!source->IsUndefined()) { 702 if (!source->IsUndefined()) {
629 ExternalAsciiString::cast(source)->update_data_cache(); 703 ExternalAsciiString::cast(source)->update_data_cache();
704 }
630 } 705 }
631 } 706 }
632 707
633 // Issue code events for newly deserialized code objects. 708 // Issue code events for newly deserialized code objects.
634 LOG_CODE_EVENT(isolate_, LogCodeObjects()); 709 LOG_CODE_EVENT(isolate_, LogCodeObjects());
635 LOG_CODE_EVENT(isolate_, LogCompiledFunctions()); 710 LOG_CODE_EVENT(isolate_, LogCompiledFunctions());
636 } 711 }
637 712
638 713
639 void Deserializer::DeserializePartial(Object** root) { 714 void Deserializer::DeserializePartial(Object** root) {
640 isolate_ = Isolate::Current(); 715 isolate_ = Isolate::Current();
641 for (int i = NEW_SPACE; i < kNumberOfSpaces; i++) { 716 // Don't GC while deserializing - just expand the heap.
642 ASSERT(reservations_[i] != kUninitializedReservation); 717 AlwaysAllocateScope always_allocate;
643 } 718 // Don't use the free lists while deserializing.
644 isolate_->heap()->ReserveSpace(reservations_, &high_water_[0]); 719 LinearAllocationScope allocate_linearly;
645 if (external_reference_decoder_ == NULL) { 720 if (external_reference_decoder_ == NULL) {
646 external_reference_decoder_ = new ExternalReferenceDecoder(); 721 external_reference_decoder_ = new ExternalReferenceDecoder();
647 } 722 }
648 723
649 // Keep track of the code space start and end pointers in case new 724 // Keep track of the code space start and end pointers in case new
650 // code objects were unserialized 725 // code objects were unserialized
651 OldSpace* code_space = isolate_->heap()->code_space(); 726 OldSpace* code_space = isolate_->heap()->code_space();
652 Address start_address = code_space->top(); 727 Address start_address = code_space->top();
653 VisitPointer(root); 728 VisitPointer(root);
654 729
(...skipping 21 matching lines...)
676 ReadChunk(start, end, NEW_SPACE, NULL); 751 ReadChunk(start, end, NEW_SPACE, NULL);
677 } 752 }
678 753
679 754
680 // This routine writes the new object into the pointer provided and then 755 // This routine writes the new object into the pointer provided and then
681 // returns true if the new object was in young space and false otherwise. 756 // returns true if the new object was in young space and false otherwise.
682 // The reason for this strange interface is that otherwise the object is 757 // The reason for this strange interface is that otherwise the object is
683 // written very late, which means the FreeSpace map is not set up by the 758 // written very late, which means the FreeSpace map is not set up by the
684 // time we need to use it to mark the space at the end of a page free. 759 // time we need to use it to mark the space at the end of a page free.
685 void Deserializer::ReadObject(int space_number, 760 void Deserializer::ReadObject(int space_number,
761 Space* space,
686 Object** write_back) { 762 Object** write_back) {
687 int size = source_->GetInt() << kObjectAlignmentBits; 763 int size = source_->GetInt() << kObjectAlignmentBits;
688 Address address = Allocate(space_number, size); 764 Address address = Allocate(space_number, space, size);
689 *write_back = HeapObject::FromAddress(address); 765 *write_back = HeapObject::FromAddress(address);
690 Object** current = reinterpret_cast<Object**>(address); 766 Object** current = reinterpret_cast<Object**>(address);
691 Object** limit = current + (size >> kPointerSizeLog2); 767 Object** limit = current + (size >> kPointerSizeLog2);
692 if (FLAG_log_snapshot_positions) { 768 if (FLAG_log_snapshot_positions) {
693 LOG(isolate_, SnapshotPositionEvent(address, source_->position())); 769 LOG(isolate_, SnapshotPositionEvent(address, source_->position()));
694 } 770 }
695 ReadChunk(current, limit, space_number, address); 771 ReadChunk(current, limit, space_number, address);
696 #ifdef DEBUG 772 #ifdef DEBUG
697 bool is_codespace = (space_number == CODE_SPACE); 773 bool is_codespace = (space == HEAP->code_space()) ||
774 ((space == HEAP->lo_space()) && (space_number == kLargeCode));
698 ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace); 775 ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace);
699 #endif 776 #endif
700 } 777 }
701 778
779
780 // This macro is always used with a constant argument so it should all fold
781 // away to almost nothing in the generated code. It might be nicer to do this
782 // with the ternary operator but there are type issues with that.
783 #define ASSIGN_DEST_SPACE(space_number) \
784 Space* dest_space; \
785 if (space_number == NEW_SPACE) { \
786 dest_space = isolate->heap()->new_space(); \
787 } else if (space_number == OLD_POINTER_SPACE) { \
788 dest_space = isolate->heap()->old_pointer_space(); \
789 } else if (space_number == OLD_DATA_SPACE) { \
790 dest_space = isolate->heap()->old_data_space(); \
791 } else if (space_number == CODE_SPACE) { \
792 dest_space = isolate->heap()->code_space(); \
793 } else if (space_number == MAP_SPACE) { \
794 dest_space = isolate->heap()->map_space(); \
795 } else if (space_number == CELL_SPACE) { \
796 dest_space = isolate->heap()->cell_space(); \
797 } else { \
798 ASSERT(space_number >= LO_SPACE); \
799 dest_space = isolate->heap()->lo_space(); \
800 }
801
802
803 static const int kUnknownOffsetFromStart = -1;
804
805
702 void Deserializer::ReadChunk(Object** current, 806 void Deserializer::ReadChunk(Object** current,
703 Object** limit, 807 Object** limit,
704 int source_space, 808 int source_space,
705 Address current_object_address) { 809 Address current_object_address) {
706 Isolate* const isolate = isolate_; 810 Isolate* const isolate = isolate_;
707 // Write barrier support costs around 1% in startup time. In fact there
708 // are no new space objects in current boot snapshots, so it's not needed,
709 // but that may change.
710 bool write_barrier_needed = (current_object_address != NULL && 811 bool write_barrier_needed = (current_object_address != NULL &&
711 source_space != NEW_SPACE && 812 source_space != NEW_SPACE &&
712 source_space != CELL_SPACE && 813 source_space != CELL_SPACE &&
713 source_space != CODE_SPACE && 814 source_space != CODE_SPACE &&
714 source_space != OLD_DATA_SPACE); 815 source_space != OLD_DATA_SPACE);
715 while (current < limit) { 816 while (current < limit) {
716 int data = source_->Get(); 817 int data = source_->Get();
717 switch (data) { 818 switch (data) {
718 #define CASE_STATEMENT(where, how, within, space_number) \ 819 #define CASE_STATEMENT(where, how, within, space_number) \
719 case where + how + within + space_number: \ 820 case where + how + within + space_number: \
720 ASSERT((where & ~kPointedToMask) == 0); \ 821 ASSERT((where & ~kPointedToMask) == 0); \
721 ASSERT((how & ~kHowToCodeMask) == 0); \ 822 ASSERT((how & ~kHowToCodeMask) == 0); \
722 ASSERT((within & ~kWhereToPointMask) == 0); \ 823 ASSERT((within & ~kWhereToPointMask) == 0); \
723 ASSERT((space_number & ~kSpaceMask) == 0); 824 ASSERT((space_number & ~kSpaceMask) == 0);
724 825
725 #define CASE_BODY(where, how, within, space_number_if_any) \ 826 #define CASE_BODY(where, how, within, space_number_if_any, offset_from_start) \
726 { \ 827 { \
727 bool emit_write_barrier = false; \ 828 bool emit_write_barrier = false; \
728 bool current_was_incremented = false; \ 829 bool current_was_incremented = false; \
729 int space_number = space_number_if_any == kAnyOldSpace ? \ 830 int space_number = space_number_if_any == kAnyOldSpace ? \
730 (data & kSpaceMask) : space_number_if_any; \ 831 (data & kSpaceMask) : space_number_if_any; \
731 if (where == kNewObject && how == kPlain && within == kStartOfObject) {\ 832 if (where == kNewObject && how == kPlain && within == kStartOfObject) {\
732 ReadObject(space_number, current); \ 833 ASSIGN_DEST_SPACE(space_number) \
834 ReadObject(space_number, dest_space, current); \
733 emit_write_barrier = (space_number == NEW_SPACE); \ 835 emit_write_barrier = (space_number == NEW_SPACE); \
734 } else { \ 836 } else { \
735 Object* new_object = NULL; /* May not be a real Object pointer. */ \ 837 Object* new_object = NULL; /* May not be a real Object pointer. */ \
736 if (where == kNewObject) { \ 838 if (where == kNewObject) { \
737 ReadObject(space_number, &new_object); \ 839 ASSIGN_DEST_SPACE(space_number) \
840 ReadObject(space_number, dest_space, &new_object); \
738 } else if (where == kRootArray) { \ 841 } else if (where == kRootArray) { \
739 int root_id = source_->GetInt(); \ 842 int root_id = source_->GetInt(); \
740 new_object = isolate->heap()->roots_array_start()[root_id]; \ 843 new_object = isolate->heap()->roots_array_start()[root_id]; \
741 emit_write_barrier = isolate->heap()->InNewSpace(new_object); \ 844 emit_write_barrier = isolate->heap()->InNewSpace(new_object); \
742 } else if (where == kPartialSnapshotCache) { \ 845 } else if (where == kPartialSnapshotCache) { \
743 int cache_index = source_->GetInt(); \ 846 int cache_index = source_->GetInt(); \
744 new_object = isolate->serialize_partial_snapshot_cache() \ 847 new_object = isolate->serialize_partial_snapshot_cache() \
745 [cache_index]; \ 848 [cache_index]; \
746 emit_write_barrier = isolate->heap()->InNewSpace(new_object); \ 849 emit_write_barrier = isolate->heap()->InNewSpace(new_object); \
747 } else if (where == kExternalReference) { \ 850 } else if (where == kExternalReference) { \
748 int skip = source_->GetInt(); \
749 current = reinterpret_cast<Object**>(reinterpret_cast<Address>( \
750 current) + skip); \
751 int reference_id = source_->GetInt(); \ 851 int reference_id = source_->GetInt(); \
752 Address address = external_reference_decoder_-> \ 852 Address address = external_reference_decoder_-> \
753 Decode(reference_id); \ 853 Decode(reference_id); \
754 new_object = reinterpret_cast<Object*>(address); \ 854 new_object = reinterpret_cast<Object*>(address); \
755 } else if (where == kBackref) { \ 855 } else if (where == kBackref) { \
756 emit_write_barrier = (space_number == NEW_SPACE); \ 856 emit_write_barrier = (space_number == NEW_SPACE); \
757 new_object = GetAddressFromEnd(data & kSpaceMask); \ 857 new_object = GetAddressFromEnd(data & kSpaceMask); \
758 } else { \ 858 } else { \
759 ASSERT(where == kBackrefWithSkip); \ 859 ASSERT(where == kFromStart); \
760 int skip = source_->GetInt(); \ 860 if (offset_from_start == kUnknownOffsetFromStart) { \
761 current = reinterpret_cast<Object**>( \ 861 emit_write_barrier = (space_number == NEW_SPACE); \
762 reinterpret_cast<Address>(current) + skip); \ 862 new_object = GetAddressFromStart(data & kSpaceMask); \
763 emit_write_barrier = (space_number == NEW_SPACE); \ 863 } else { \
764 new_object = GetAddressFromEnd(data & kSpaceMask); \ 864 Address object_address = pages_[space_number][0] + \
865 (offset_from_start << kObjectAlignmentBits); \
866 new_object = HeapObject::FromAddress(object_address); \
867 } \
765 } \ 868 } \
766 if (within == kInnerPointer) { \ 869 if (within == kInnerPointer) { \
767 if (space_number != CODE_SPACE || new_object->IsCode()) { \ 870 if (space_number != CODE_SPACE || new_object->IsCode()) { \
768 Code* new_code_object = reinterpret_cast<Code*>(new_object); \ 871 Code* new_code_object = reinterpret_cast<Code*>(new_object); \
769 new_object = reinterpret_cast<Object*>( \ 872 new_object = reinterpret_cast<Object*>( \
770 new_code_object->instruction_start()); \ 873 new_code_object->instruction_start()); \
771 } else { \ 874 } else { \
772 ASSERT(space_number == CODE_SPACE); \ 875 ASSERT(space_number == CODE_SPACE || space_number == kLargeCode);\
773 JSGlobalPropertyCell* cell = \ 876 JSGlobalPropertyCell* cell = \
774 JSGlobalPropertyCell::cast(new_object); \ 877 JSGlobalPropertyCell::cast(new_object); \
775 new_object = reinterpret_cast<Object*>( \ 878 new_object = reinterpret_cast<Object*>( \
776 cell->ValueAddress()); \ 879 cell->ValueAddress()); \
777 } \ 880 } \
778 } \ 881 } \
779 if (how == kFromCode) { \ 882 if (how == kFromCode) { \
780 Address location_of_branch_data = \ 883 Address location_of_branch_data = \
781 reinterpret_cast<Address>(current); \ 884 reinterpret_cast<Address>(current); \
782 Assembler::deserialization_set_special_target_at( \ 885 Assembler::deserialization_set_special_target_at( \
(...skipping 11 matching lines...)
794 isolate->heap()->RecordWrite( \ 897 isolate->heap()->RecordWrite( \
795 current_object_address, \ 898 current_object_address, \
796 static_cast<int>(current_address - current_object_address)); \ 899 static_cast<int>(current_address - current_object_address)); \
797 } \ 900 } \
798 if (!current_was_incremented) { \ 901 if (!current_was_incremented) { \
799 current++; \ 902 current++; \
800 } \ 903 } \
801 break; \ 904 break; \
802 } \ 905 } \
803 906
907 // This generates a case and a body for each space. The large object spaces are
908 // very rare in snapshots so they are grouped in one body.
909 #define ONE_PER_SPACE(where, how, within) \
910 CASE_STATEMENT(where, how, within, NEW_SPACE) \
911 CASE_BODY(where, how, within, NEW_SPACE, kUnknownOffsetFromStart) \
912 CASE_STATEMENT(where, how, within, OLD_DATA_SPACE) \
913 CASE_BODY(where, how, within, OLD_DATA_SPACE, kUnknownOffsetFromStart) \
914 CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE) \
915 CASE_BODY(where, how, within, OLD_POINTER_SPACE, kUnknownOffsetFromStart) \
916 CASE_STATEMENT(where, how, within, CODE_SPACE) \
917 CASE_BODY(where, how, within, CODE_SPACE, kUnknownOffsetFromStart) \
918 CASE_STATEMENT(where, how, within, CELL_SPACE) \
919 CASE_BODY(where, how, within, CELL_SPACE, kUnknownOffsetFromStart) \
920 CASE_STATEMENT(where, how, within, MAP_SPACE) \
921 CASE_BODY(where, how, within, MAP_SPACE, kUnknownOffsetFromStart) \
922 CASE_STATEMENT(where, how, within, kLargeData) \
923 CASE_STATEMENT(where, how, within, kLargeCode) \
924 CASE_STATEMENT(where, how, within, kLargeFixedArray) \
925 CASE_BODY(where, how, within, kAnyOldSpace, kUnknownOffsetFromStart)
926
804 // This generates a case and a body for the new space (which has to do extra 927 // This generates a case and a body for the new space (which has to do extra
805 // write barrier handling) and handles the other spaces with 8 fall-through 928 // write barrier handling) and handles the other spaces with 8 fall-through
806 // cases and one body. 929 // cases and one body.
807 #define ALL_SPACES(where, how, within) \ 930 #define ALL_SPACES(where, how, within) \
808 CASE_STATEMENT(where, how, within, NEW_SPACE) \ 931 CASE_STATEMENT(where, how, within, NEW_SPACE) \
809 CASE_BODY(where, how, within, NEW_SPACE) \ 932 CASE_BODY(where, how, within, NEW_SPACE, kUnknownOffsetFromStart) \
810 CASE_STATEMENT(where, how, within, OLD_DATA_SPACE) \ 933 CASE_STATEMENT(where, how, within, OLD_DATA_SPACE) \
811 CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE) \ 934 CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE) \
812 CASE_STATEMENT(where, how, within, CODE_SPACE) \ 935 CASE_STATEMENT(where, how, within, CODE_SPACE) \
813 CASE_STATEMENT(where, how, within, CELL_SPACE) \ 936 CASE_STATEMENT(where, how, within, CELL_SPACE) \
814 CASE_STATEMENT(where, how, within, MAP_SPACE) \ 937 CASE_STATEMENT(where, how, within, MAP_SPACE) \
815 CASE_BODY(where, how, within, kAnyOldSpace) 938 CASE_STATEMENT(where, how, within, kLargeData) \
939 CASE_STATEMENT(where, how, within, kLargeCode) \
940 CASE_STATEMENT(where, how, within, kLargeFixedArray) \
941 CASE_BODY(where, how, within, kAnyOldSpace, kUnknownOffsetFromStart)
942
943 #define ONE_PER_CODE_SPACE(where, how, within) \
944 CASE_STATEMENT(where, how, within, CODE_SPACE) \
945 CASE_BODY(where, how, within, CODE_SPACE, kUnknownOffsetFromStart) \
946 CASE_STATEMENT(where, how, within, kLargeCode) \
947 CASE_BODY(where, how, within, kLargeCode, kUnknownOffsetFromStart)
816 948
817 #define FOUR_CASES(byte_code) \ 949 #define FOUR_CASES(byte_code) \
818 case byte_code: \ 950 case byte_code: \
819 case byte_code + 1: \ 951 case byte_code + 1: \
820 case byte_code + 2: \ 952 case byte_code + 2: \
821 case byte_code + 3: 953 case byte_code + 3:
822 954
823 #define SIXTEEN_CASES(byte_code) \ 955 #define SIXTEEN_CASES(byte_code) \
824 FOUR_CASES(byte_code) \ 956 FOUR_CASES(byte_code) \
825 FOUR_CASES(byte_code + 4) \ 957 FOUR_CASES(byte_code + 4) \
826 FOUR_CASES(byte_code + 8) \ 958 FOUR_CASES(byte_code + 8) \
827 FOUR_CASES(byte_code + 12) 959 FOUR_CASES(byte_code + 12)
828 960
829 #define COMMON_RAW_LENGTHS(f) \
830 f(1) \
831 f(2) \
832 f(3) \
833 f(4) \
834 f(5) \
835 f(6) \
836 f(7) \
837 f(8) \
838 f(9) \
839 f(10) \
840 f(11) \
841 f(12) \
842 f(13) \
843 f(14) \
844 f(15) \
845 f(16) \
846 f(17) \
847 f(18) \
848 f(19) \
849 f(20) \
850 f(21) \
851 f(22) \
852 f(23) \
853 f(24) \
854 f(25) \
855 f(26) \
856 f(27) \
857 f(28) \
858 f(29) \
859 f(30) \
860 f(31)
861
862 // We generate 15 cases and bodies that process special tags that combine 961 // We generate 15 cases and bodies that process special tags that combine
863 // the raw data tag and the length into one byte. 962 // the raw data tag and the length into one byte.
864 #define RAW_CASE(index) \ 963 #define RAW_CASE(index, size) \
865 case kRawData + index: { \ 964 case kRawData + index: { \
866 byte* raw_data_out = reinterpret_cast<byte*>(current); \ 965 byte* raw_data_out = reinterpret_cast<byte*>(current); \
867 source_->CopyRaw(raw_data_out, index * kPointerSize); \ 966 source_->CopyRaw(raw_data_out, size); \
868 current = \ 967 current = reinterpret_cast<Object**>(raw_data_out + size); \
869 reinterpret_cast<Object**>(raw_data_out + index * kPointerSize); \ 968 break; \
870 break; \
871 } 969 }
872 COMMON_RAW_LENGTHS(RAW_CASE) 970 COMMON_RAW_LENGTHS(RAW_CASE)
873 #undef RAW_CASE 971 #undef RAW_CASE
874 972
875 // Deserialize a chunk of raw data that doesn't have one of the popular 973 // Deserialize a chunk of raw data that doesn't have one of the popular
876 // lengths. 974 // lengths.
877 case kRawData: { 975 case kRawData: {
878 int size = source_->GetInt(); 976 int size = source_->GetInt();
879 byte* raw_data_out = reinterpret_cast<byte*>(current); 977 byte* raw_data_out = reinterpret_cast<byte*>(current);
880 source_->CopyRaw(raw_data_out, size); 978 source_->CopyRaw(raw_data_out, size);
979 current = reinterpret_cast<Object**>(raw_data_out + size);
881 break; 980 break;
882 } 981 }
883 982
884 SIXTEEN_CASES(kRootArrayConstants + kNoSkipDistance) 983 SIXTEEN_CASES(kRootArrayLowConstants)
885 SIXTEEN_CASES(kRootArrayConstants + kNoSkipDistance + 16) { 984 SIXTEEN_CASES(kRootArrayHighConstants) {
886 int root_id = RootArrayConstantFromByteCode(data); 985 int root_id = RootArrayConstantFromByteCode(data);
887 Object* object = isolate->heap()->roots_array_start()[root_id]; 986 Object* object = isolate->heap()->roots_array_start()[root_id];
888 ASSERT(!isolate->heap()->InNewSpace(object)); 987 ASSERT(!isolate->heap()->InNewSpace(object));
889 *current++ = object; 988 *current++ = object;
890 break; 989 break;
891 } 990 }
892 991
893 SIXTEEN_CASES(kRootArrayConstants + kHasSkipDistance)
894 SIXTEEN_CASES(kRootArrayConstants + kHasSkipDistance + 16) {
895 int root_id = RootArrayConstantFromByteCode(data);
896 int skip = source_->GetInt();
897 current = reinterpret_cast<Object**>(
898 reinterpret_cast<intptr_t>(current) + skip);
899 Object* object = isolate->heap()->roots_array_start()[root_id];
900 ASSERT(!isolate->heap()->InNewSpace(object));
901 *current++ = object;
902 break;
903 }
904
905 case kRepeat: { 992 case kRepeat: {
906 int repeats = source_->GetInt(); 993 int repeats = source_->GetInt();
907 Object* object = current[-1]; 994 Object* object = current[-1];
908 ASSERT(!isolate->heap()->InNewSpace(object)); 995 ASSERT(!isolate->heap()->InNewSpace(object));
909 for (int i = 0; i < repeats; i++) current[i] = object; 996 for (int i = 0; i < repeats; i++) current[i] = object;
910 current += repeats; 997 current += repeats;
911 break; 998 break;
912 } 999 }
913 1000
914 STATIC_ASSERT(kRootArrayNumberOfConstantEncodings == 1001 STATIC_ASSERT(kRootArrayNumberOfConstantEncodings ==
915 Heap::kOldSpaceRoots); 1002 Heap::kOldSpaceRoots);
916 STATIC_ASSERT(kMaxRepeats == 13); 1003 STATIC_ASSERT(kMaxRepeats == 12);
917 case kConstantRepeat: 1004 FOUR_CASES(kConstantRepeat)
918 FOUR_CASES(kConstantRepeat + 1) 1005 FOUR_CASES(kConstantRepeat + 4)
919 FOUR_CASES(kConstantRepeat + 5) 1006 FOUR_CASES(kConstantRepeat + 8) {
920 FOUR_CASES(kConstantRepeat + 9) {
921 int repeats = RepeatsForCode(data); 1007 int repeats = RepeatsForCode(data);
922 Object* object = current[-1]; 1008 Object* object = current[-1];
923 ASSERT(!isolate->heap()->InNewSpace(object)); 1009 ASSERT(!isolate->heap()->InNewSpace(object));
924 for (int i = 0; i < repeats; i++) current[i] = object; 1010 for (int i = 0; i < repeats; i++) current[i] = object;
925 current += repeats; 1011 current += repeats;
926 break; 1012 break;
927 } 1013 }
928 1014
929 // Deserialize a new object and write a pointer to it to the current 1015 // Deserialize a new object and write a pointer to it to the current
930 // object. 1016 // object.
931 ALL_SPACES(kNewObject, kPlain, kStartOfObject) 1017 ONE_PER_SPACE(kNewObject, kPlain, kStartOfObject)
932 // Support for direct instruction pointers in functions. It's an inner 1018 // Support for direct instruction pointers in functions. It's an inner
933 // pointer because it points at the entry point, not at the start of the 1019 // pointer because it points at the entry point, not at the start of the
934 // code object. 1020 // code object.
935 CASE_STATEMENT(kNewObject, kPlain, kInnerPointer, CODE_SPACE) 1021 ONE_PER_CODE_SPACE(kNewObject, kPlain, kInnerPointer)
936 CASE_BODY(kNewObject, kPlain, kInnerPointer, CODE_SPACE)
937 // Deserialize a new code object and write a pointer to its first 1022 // Deserialize a new code object and write a pointer to its first
938 // instruction to the current code object. 1023 // instruction to the current code object.
939 ALL_SPACES(kNewObject, kFromCode, kInnerPointer) 1024 ONE_PER_SPACE(kNewObject, kFromCode, kInnerPointer)
940 // Find a recently deserialized object using its offset from the current 1025 // Find a recently deserialized object using its offset from the current
941 // allocation point and write a pointer to it to the current object. 1026 // allocation point and write a pointer to it to the current object.
942 ALL_SPACES(kBackref, kPlain, kStartOfObject) 1027 ALL_SPACES(kBackref, kPlain, kStartOfObject)
943 ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
944 #if V8_TARGET_ARCH_MIPS 1028 #if V8_TARGET_ARCH_MIPS
945 // Deserialize a new object from pointer found in code and write 1029 // Deserialize a new object from pointer found in code and write
946 // a pointer to it to the current object. Required only for MIPS, and 1030 // a pointer to it to the current object. Required only for MIPS, and
947 // omitted on the other architectures because it is fully unrolled and 1031 // omitted on the other architectures because it is fully unrolled and
948 // would cause bloat. 1032 // would cause bloat.
949 ALL_SPACES(kNewObject, kFromCode, kStartOfObject) 1033 ONE_PER_SPACE(kNewObject, kFromCode, kStartOfObject)
950 // Find a recently deserialized code object using its offset from the 1034 // Find a recently deserialized code object using its offset from the
951 // current allocation point and write a pointer to it to the current 1035 // current allocation point and write a pointer to it to the current
952 // object. Required only for MIPS. 1036 // object. Required only for MIPS.
953 ALL_SPACES(kBackref, kFromCode, kStartOfObject) 1037 ALL_SPACES(kBackref, kFromCode, kStartOfObject)
954 ALL_SPACES(kBackrefWithSkip, kFromCode, kStartOfObject) 1038 // Find an already deserialized code object using its offset from
1039 // the start and write a pointer to it to the current object.
1040 // Required only for MIPS.
1041 ALL_SPACES(kFromStart, kFromCode, kStartOfObject)
955 #endif 1042 #endif
956 // Find a recently deserialized code object using its offset from the 1043 // Find a recently deserialized code object using its offset from the
957 // current allocation point and write a pointer to its first instruction 1044 // current allocation point and write a pointer to its first instruction
958 // to the current code object or the instruction pointer in a function 1045 // to the current code object or the instruction pointer in a function
959 // object. 1046 // object.
960 ALL_SPACES(kBackref, kFromCode, kInnerPointer) 1047 ALL_SPACES(kBackref, kFromCode, kInnerPointer)
961 ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
962 ALL_SPACES(kBackref, kPlain, kInnerPointer) 1048 ALL_SPACES(kBackref, kPlain, kInnerPointer)
963 ALL_SPACES(kBackrefWithSkip, kPlain, kInnerPointer) 1049 // Find an already deserialized object using its offset from the start
1050 // and write a pointer to it to the current object.
1051 ALL_SPACES(kFromStart, kPlain, kStartOfObject)
1052 ALL_SPACES(kFromStart, kPlain, kInnerPointer)
1053 // Find an already deserialized code object using its offset from the
1054 // start and write a pointer to its first instruction to the current code
1055 // object.
1056 ALL_SPACES(kFromStart, kFromCode, kInnerPointer)
964 // Find an object in the roots array and write a pointer to it to the 1057 // Find an object in the roots array and write a pointer to it to the
965 // current object. 1058 // current object.
966 CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0) 1059 CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0)
967 CASE_BODY(kRootArray, kPlain, kStartOfObject, 0) 1060 CASE_BODY(kRootArray, kPlain, kStartOfObject, 0, kUnknownOffsetFromStart)
968 // Find an object in the partial snapshots cache and write a pointer to it 1061 // Find an object in the partial snapshots cache and write a pointer to it
969 // to the current object. 1062 // to the current object.
970 CASE_STATEMENT(kPartialSnapshotCache, kPlain, kStartOfObject, 0) 1063 CASE_STATEMENT(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
971 CASE_BODY(kPartialSnapshotCache, 1064 CASE_BODY(kPartialSnapshotCache,
972 kPlain, 1065 kPlain,
973 kStartOfObject, 1066 kStartOfObject,
974 0) 1067 0,
1068 kUnknownOffsetFromStart)
975 // Find an code entry in the partial snapshots cache and 1069 // Find an code entry in the partial snapshots cache and
976 // write a pointer to it to the current object. 1070 // write a pointer to it to the current object.
977 CASE_STATEMENT(kPartialSnapshotCache, kPlain, kInnerPointer, 0) 1071 CASE_STATEMENT(kPartialSnapshotCache, kPlain, kInnerPointer, 0)
978 CASE_BODY(kPartialSnapshotCache, 1072 CASE_BODY(kPartialSnapshotCache,
979 kPlain, 1073 kPlain,
980 kInnerPointer, 1074 kInnerPointer,
981 0) 1075 0,
1076 kUnknownOffsetFromStart)
982 // Find an external reference and write a pointer to it to the current 1077 // Find an external reference and write a pointer to it to the current
983 // object. 1078 // object.
984 CASE_STATEMENT(kExternalReference, kPlain, kStartOfObject, 0) 1079 CASE_STATEMENT(kExternalReference, kPlain, kStartOfObject, 0)
985 CASE_BODY(kExternalReference, 1080 CASE_BODY(kExternalReference,
986 kPlain, 1081 kPlain,
987 kStartOfObject, 1082 kStartOfObject,
988 0) 1083 0,
1084 kUnknownOffsetFromStart)
989 // Find an external reference and write a pointer to it in the current 1085 // Find an external reference and write a pointer to it in the current
990 // code object. 1086 // code object.
991 CASE_STATEMENT(kExternalReference, kFromCode, kStartOfObject, 0) 1087 CASE_STATEMENT(kExternalReference, kFromCode, kStartOfObject, 0)
992 CASE_BODY(kExternalReference, 1088 CASE_BODY(kExternalReference,
993 kFromCode, 1089 kFromCode,
994 kStartOfObject, 1090 kStartOfObject,
995 0) 1091 0,
1092 kUnknownOffsetFromStart)
996 1093
997 #undef CASE_STATEMENT 1094 #undef CASE_STATEMENT
998 #undef CASE_BODY 1095 #undef CASE_BODY
1096 #undef ONE_PER_SPACE
999 #undef ALL_SPACES 1097 #undef ALL_SPACES
1098 #undef ASSIGN_DEST_SPACE
1000 1099
1001 case kSkip: { 1100 case kNewPage: {
1002 int size = source_->GetInt(); 1101 int space = source_->Get();
1003 current = reinterpret_cast<Object**>( 1102 pages_[space].Add(last_object_address_);
1004 reinterpret_cast<intptr_t>(current) + size); 1103 if (space == CODE_SPACE) {
1104 CPU::FlushICache(last_object_address_, Page::kPageSize);
1105 }
1005 break; 1106 break;
1006 } 1107 }
1007 1108
1109 case kSkip: {
1110 current++;
1111 break;
1112 }
1113
1008 case kNativesStringResource: { 1114 case kNativesStringResource: {
1009 int index = source_->Get(); 1115 int index = source_->Get();
1010 Vector<const char> source_vector = Natives::GetRawScriptSource(index); 1116 Vector<const char> source_vector = Natives::GetRawScriptSource(index);
1011 NativesExternalStringResource* resource = 1117 NativesExternalStringResource* resource =
1012 new NativesExternalStringResource(isolate->bootstrapper(), 1118 new NativesExternalStringResource(isolate->bootstrapper(),
1013 source_vector.start(), 1119 source_vector.start(),
1014 source_vector.length()); 1120 source_vector.length());
1015 *current++ = reinterpret_cast<Object*>(resource); 1121 *current++ = reinterpret_cast<Object*>(resource);
1016 break; 1122 break;
1017 } 1123 }
1018 1124
1019 case kSynchronize: { 1125 case kSynchronize: {
1020 // If we get here then that indicates that you have a mismatch between 1126 // If we get here then that indicates that you have a mismatch between
1021 // the number of GC roots when serializing and deserializing. 1127 // the number of GC roots when serializing and deserializing.
1022 UNREACHABLE(); 1128 UNREACHABLE();
1023 } 1129 }
1024 1130
1025 default: 1131 default:
1026 UNREACHABLE(); 1132 UNREACHABLE();
1027 } 1133 }
1028 } 1134 }
1029 ASSERT_EQ(limit, current); 1135 ASSERT_EQ(current, limit);
1030 } 1136 }
1031 1137
1032 1138
1033 void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) { 1139 void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) {
1034 ASSERT(integer < 1 << 22); 1140 const int max_shift = ((kPointerSize * kBitsPerByte) / 7) * 7;
1035 integer <<= 2; 1141 for (int shift = max_shift; shift > 0; shift -= 7) {
1036 int bytes = 1; 1142 if (integer >= static_cast<uintptr_t>(1u) << shift) {
1037 if (integer > 0xff) bytes = 2; 1143 Put((static_cast<int>((integer >> shift)) & 0x7f) | 0x80, "IntPart");
1038 if (integer > 0xffff) bytes = 3; 1144 }
1039 integer |= bytes; 1145 }
1040 Put(static_cast<int>(integer & 0xff), "IntPart1"); 1146 PutSection(static_cast<int>(integer & 0x7f), "IntLastPart");
1041 if (bytes > 1) Put(static_cast<int>((integer >> 8) & 0xff), "IntPart2");
1042 if (bytes > 2) Put(static_cast<int>((integer >> 16) & 0xff), "IntPart3");
1043 } 1147 }
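
For reference, a minimal standalone sketch of the variable-length integer scheme the new PutInt above emits: the value goes out as big-endian 7-bit groups, with the 0x80 continuation bit set on every byte except the last. The decoder shown here is an illustration only and assumes a plain byte buffer; the matching SnapshotByteSource::GetInt is not part of this diff.

#include <cstdint>
#include <vector>

// Encode 'value' the way the new PutInt does, but into a byte vector instead
// of a SnapshotByteSink: leading 7-bit groups carry the 0x80 continuation bit.
void PutIntSketch(std::vector<uint8_t>* out, uintptr_t value) {
  const int kMaxShift = ((sizeof(uintptr_t) * 8) / 7) * 7;
  for (int shift = kMaxShift; shift > 0; shift -= 7) {
    if (value >= (static_cast<uintptr_t>(1) << shift)) {
      out->push_back(static_cast<uint8_t>(((value >> shift) & 0x7f) | 0x80));
    }
  }
  out->push_back(static_cast<uint8_t>(value & 0x7f));  // last byte: high bit clear
}

// Hypothetical decoder: accumulate 7 bits per byte until a byte without the
// continuation bit is read.
uintptr_t GetIntSketch(const std::vector<uint8_t>& in, size_t* pos) {
  uintptr_t answer = 0;
  uint8_t octet;
  do {
    octet = in[(*pos)++];
    answer = (answer << 7) | (octet & 0x7f);
  } while (octet & 0x80);
  return answer;
}

Round-tripping, e.g., 300 produces the two bytes 0x82 0x2c and decodes back to 300.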
1044 1148
1045 1149
1046 Serializer::Serializer(SnapshotByteSink* sink) 1150 Serializer::Serializer(SnapshotByteSink* sink)
1047 : sink_(sink), 1151 : sink_(sink),
1048 current_root_index_(0), 1152 current_root_index_(0),
1049 external_reference_encoder_(new ExternalReferenceEncoder), 1153 external_reference_encoder_(new ExternalReferenceEncoder),
1154 large_object_total_(0),
1050 root_index_wave_front_(0) { 1155 root_index_wave_front_(0) {
1051 isolate_ = Isolate::Current(); 1156 isolate_ = Isolate::Current();
1052 // The serializer is meant to be used only to generate initial heap images 1157 // The serializer is meant to be used only to generate initial heap images
1053 // from a context in which there is only one isolate. 1158 // from a context in which there is only one isolate.
1054 ASSERT(isolate_->IsDefaultIsolate()); 1159 ASSERT(isolate_->IsDefaultIsolate());
1055 for (int i = 0; i <= LAST_SPACE; i++) { 1160 for (int i = 0; i <= LAST_SPACE; i++) {
1056 fullness_[i] = 0; 1161 fullness_[i] = 0;
1057 } 1162 }
1058 } 1163 }
1059 1164
(...skipping 12 matching lines...)
1072 CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles()); 1177 CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
1073 // We don't support serializing installed extensions. 1178 // We don't support serializing installed extensions.
1074 CHECK(!isolate->has_installed_extensions()); 1179 CHECK(!isolate->has_installed_extensions());
1075 1180
1076 HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG); 1181 HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG);
1077 } 1182 }
1078 1183
1079 1184
1080 void PartialSerializer::Serialize(Object** object) { 1185 void PartialSerializer::Serialize(Object** object) {
1081 this->VisitPointer(object); 1186 this->VisitPointer(object);
1082 Pad();
1083 } 1187 }
1084 1188
1085 1189
1086 void Serializer::VisitPointers(Object** start, Object** end) { 1190 void Serializer::VisitPointers(Object** start, Object** end) {
1087 Isolate* isolate = Isolate::Current(); 1191 Isolate* isolate = Isolate::Current();
1088 1192
1089 for (Object** current = start; current < end; current++) { 1193 for (Object** current = start; current < end; current++) {
1090 if (start == isolate->heap()->roots_array_start()) { 1194 if (start == isolate->heap()->roots_array_start()) {
1091 root_index_wave_front_ = 1195 root_index_wave_front_ =
1092 Max(root_index_wave_front_, static_cast<intptr_t>(current - start)); 1196 Max(root_index_wave_front_, static_cast<intptr_t>(current - start));
1093 } 1197 }
1094 if (reinterpret_cast<Address>(current) == 1198 if (reinterpret_cast<Address>(current) ==
1095 isolate->heap()->store_buffer()->TopAddress()) { 1199 isolate->heap()->store_buffer()->TopAddress()) {
1096 sink_->Put(kSkip, "Skip"); 1200 sink_->Put(kSkip, "Skip");
1097 sink_->PutInt(kPointerSize, "SkipOneWord");
1098 } else if ((*current)->IsSmi()) { 1201 } else if ((*current)->IsSmi()) {
1099 sink_->Put(kRawData + 1, "Smi"); 1202 sink_->Put(kRawData, "RawData");
1203 sink_->PutInt(kPointerSize, "length");
1100 for (int i = 0; i < kPointerSize; i++) { 1204 for (int i = 0; i < kPointerSize; i++) {
1101 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte"); 1205 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte");
1102 } 1206 }
1103 } else { 1207 } else {
1104 SerializeObject(*current, kPlain, kStartOfObject, 0); 1208 SerializeObject(*current, kPlain, kStartOfObject);
1105 } 1209 }
1106 } 1210 }
1107 } 1211 }
1108 1212
1109 1213
1110 // This ensures that the partial snapshot cache keeps things alive during GC and 1214 // This ensures that the partial snapshot cache keeps things alive during GC and
1111 // tracks their movement. When it is called during serialization of the startup 1215 // tracks their movement. When it is called during serialization of the startup
1112 // snapshot nothing happens. When the partial (context) snapshot is created, 1216 // snapshot nothing happens. When the partial (context) snapshot is created,
1113 // this array is populated with the pointers that the partial snapshot will 1217 // this array is populated with the pointers that the partial snapshot will
1114 // need. As that happens we emit serialized objects to the startup snapshot 1218 // need. As that happens we emit serialized objects to the startup snapshot
(...skipping 66 matching lines...)
1181 1285
1182 1286
1183 // Encode the location of an already deserialized object in order to write its 1287 // Encode the location of an already deserialized object in order to write its
1184 // location into a later object. We can encode the location as an offset from 1288 // location into a later object. We can encode the location as an offset from
1185 // the start of the deserialized objects or as an offset backwards from the 1289 // the start of the deserialized objects or as an offset backwards from the
1186 // current allocation pointer. 1290 // current allocation pointer.
1187 void Serializer::SerializeReferenceToPreviousObject( 1291 void Serializer::SerializeReferenceToPreviousObject(
1188 int space, 1292 int space,
1189 int address, 1293 int address,
1190 HowToCode how_to_code, 1294 HowToCode how_to_code,
1191 WhereToPoint where_to_point, 1295 WhereToPoint where_to_point) {
1192 int skip) {
1193 int offset = CurrentAllocationAddress(space) - address; 1296 int offset = CurrentAllocationAddress(space) - address;
1194 // Shift out the bits that are always 0. 1297 bool from_start = true;
1195 offset >>= kObjectAlignmentBits; 1298 if (SpaceIsPaged(space)) {
1196 if (skip == 0) { 1299 // For paged space it is simple to encode back from current allocation if
1300 // the object is on the same page as the current allocation pointer.
1301 if ((CurrentAllocationAddress(space) >> kPageSizeBits) ==
1302 (address >> kPageSizeBits)) {
1303 from_start = false;
1304 address = offset;
1305 }
1306 } else if (space == NEW_SPACE) {
1307 // For new space it is always simple to encode back from current allocation.
1308 if (offset < address) {
1309 from_start = false;
1310 address = offset;
1311 }
1312 }
1313 // If we are actually dealing with real offsets (and not a numbering of
1314 // all objects) then we should shift out the bits that are always 0.
1315 if (!SpaceIsLarge(space)) address >>= kObjectAlignmentBits;
1316 if (from_start) {
1317 sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer");
1318 sink_->PutInt(address, "address");
1319 } else {
1197 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer"); 1320 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer");
1198 } else { 1321 sink_->PutInt(address, "address");
1199 sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space,
1200 "BackRefSerWithSkip");
1201 sink_->PutInt(skip, "BackRefSkipDistance");
1202 } 1322 }
1203 sink_->PutInt(offset, "offset");
1204 } 1323 }
1205 1324
1206 1325
1207 void StartupSerializer::SerializeObject( 1326 void StartupSerializer::SerializeObject(
1208 Object* o, 1327 Object* o,
1209 HowToCode how_to_code, 1328 HowToCode how_to_code,
1210 WhereToPoint where_to_point, 1329 WhereToPoint where_to_point) {
1211 int skip) {
1212 CHECK(o->IsHeapObject()); 1330 CHECK(o->IsHeapObject());
1213 HeapObject* heap_object = HeapObject::cast(o); 1331 HeapObject* heap_object = HeapObject::cast(o);
1214 1332
1215 int root_index; 1333 int root_index;
1216 if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) { 1334 if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
1217 PutRoot(root_index, heap_object, how_to_code, where_to_point, skip); 1335 PutRoot(root_index, heap_object, how_to_code, where_to_point);
1218 return; 1336 return;
1219 } 1337 }
1220 1338
1221 if (address_mapper_.IsMapped(heap_object)) { 1339 if (address_mapper_.IsMapped(heap_object)) {
1222 int space = SpaceOfObject(heap_object); 1340 int space = SpaceOfAlreadySerializedObject(heap_object);
1223 int address = address_mapper_.MappedTo(heap_object); 1341 int address = address_mapper_.MappedTo(heap_object);
1224 SerializeReferenceToPreviousObject(space, 1342 SerializeReferenceToPreviousObject(space,
1225 address, 1343 address,
1226 how_to_code, 1344 how_to_code,
1227 where_to_point, 1345 where_to_point);
1228 skip);
1229 } else { 1346 } else {
1230 if (skip != 0) {
1231 sink_->Put(kSkip, "FlushPendingSkip");
1232 sink_->PutInt(skip, "SkipDistance");
1233 }
1234
1235 // Object has not yet been serialized. Serialize it here. 1347 // Object has not yet been serialized. Serialize it here.
1236 ObjectSerializer object_serializer(this, 1348 ObjectSerializer object_serializer(this,
1237 heap_object, 1349 heap_object,
1238 sink_, 1350 sink_,
1239 how_to_code, 1351 how_to_code,
1240 where_to_point); 1352 where_to_point);
1241 object_serializer.Serialize(); 1353 object_serializer.Serialize();
1242 } 1354 }
1243 } 1355 }
1244 1356
1245 1357
1246 void StartupSerializer::SerializeWeakReferences() { 1358 void StartupSerializer::SerializeWeakReferences() {
1247 // This phase comes right after the partial serialization (of the snapshot). 1359 // This phase comes right after the partial serialization (of the snapshot).
1248 // After we have done the partial serialization the partial snapshot cache 1360 // After we have done the partial serialization the partial snapshot cache
1249 // will contain some references needed to decode the partial snapshot. We 1361 // will contain some references needed to decode the partial snapshot. We
1250 // add one entry with 'undefined' which is the sentinel that the deserializer 1362 // add one entry with 'undefined' which is the sentinel that the deserializer
1251 // uses to know it is done deserializing the array. 1363 // uses to know it is done deserializing the array.
1252 Isolate* isolate = Isolate::Current(); 1364 Isolate* isolate = Isolate::Current();
1253 Object* undefined = isolate->heap()->undefined_value(); 1365 Object* undefined = isolate->heap()->undefined_value();
1254 VisitPointer(&undefined); 1366 VisitPointer(&undefined);
1255 HEAP->IterateWeakRoots(this, VISIT_ALL); 1367 HEAP->IterateWeakRoots(this, VISIT_ALL);
1256 Pad();
1257 } 1368 }
1258 1369
1259 1370
1260 void Serializer::PutRoot(int root_index, 1371 void Serializer::PutRoot(int root_index,
1261 HeapObject* object, 1372 HeapObject* object,
1262 SerializerDeserializer::HowToCode how_to_code, 1373 SerializerDeserializer::HowToCode how_to_code,
1263 SerializerDeserializer::WhereToPoint where_to_point, 1374 SerializerDeserializer::WhereToPoint where_to_point) {
1264 int skip) {
1265 if (how_to_code == kPlain && 1375 if (how_to_code == kPlain &&
1266 where_to_point == kStartOfObject && 1376 where_to_point == kStartOfObject &&
1267 root_index < kRootArrayNumberOfConstantEncodings && 1377 root_index < kRootArrayNumberOfConstantEncodings &&
1268 !HEAP->InNewSpace(object)) { 1378 !HEAP->InNewSpace(object)) {
1269 if (skip == 0) { 1379 if (root_index < kRootArrayNumberOfLowConstantEncodings) {
1270 sink_->Put(kRootArrayConstants + kNoSkipDistance + root_index, 1380 sink_->Put(kRootArrayLowConstants + root_index, "RootLoConstant");
1271 "RootConstant");
1272 } else { 1381 } else {
1273 sink_->Put(kRootArrayConstants + kHasSkipDistance + root_index, 1382 sink_->Put(kRootArrayHighConstants + root_index -
1274 "RootConstant"); 1383 kRootArrayNumberOfLowConstantEncodings,
1275 sink_->PutInt(skip, "SkipInPutRoot"); 1384 "RootHiConstant");
1276 } 1385 }
1277 } else { 1386 } else {
1278 if (skip != 0) {
1279 sink_->Put(kSkip, "SkipFromPutRoot");
1280 sink_->PutInt(skip, "SkipFromPutRootDistance");
1281 }
1282 sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization"); 1387 sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
1283 sink_->PutInt(root_index, "root_index"); 1388 sink_->PutInt(root_index, "root_index");
1284 } 1389 }
1285 } 1390 }
1286 1391
1287 1392
1288 void PartialSerializer::SerializeObject( 1393 void PartialSerializer::SerializeObject(
1289 Object* o, 1394 Object* o,
1290 HowToCode how_to_code, 1395 HowToCode how_to_code,
1291 WhereToPoint where_to_point, 1396 WhereToPoint where_to_point) {
1292 int skip) {
1293 CHECK(o->IsHeapObject()); 1397 CHECK(o->IsHeapObject());
1294 HeapObject* heap_object = HeapObject::cast(o); 1398 HeapObject* heap_object = HeapObject::cast(o);
1295 1399
1296 if (heap_object->IsMap()) { 1400 if (heap_object->IsMap()) {
1297 // The code-caches link to context-specific code objects, which 1401 // The code-caches link to context-specific code objects, which
1298 // the startup and context serializes cannot currently handle. 1402 // the startup and context serializes cannot currently handle.
1299 ASSERT(Map::cast(heap_object)->code_cache() == 1403 ASSERT(Map::cast(heap_object)->code_cache() ==
1300 heap_object->GetHeap()->raw_unchecked_empty_fixed_array()); 1404 heap_object->GetHeap()->raw_unchecked_empty_fixed_array());
1301 } 1405 }
1302 1406
1303 int root_index; 1407 int root_index;
1304 if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) { 1408 if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
1305 PutRoot(root_index, heap_object, how_to_code, where_to_point, skip); 1409 PutRoot(root_index, heap_object, how_to_code, where_to_point);
1306 return; 1410 return;
1307 } 1411 }
1308 1412
1309 if (ShouldBeInThePartialSnapshotCache(heap_object)) { 1413 if (ShouldBeInThePartialSnapshotCache(heap_object)) {
1310 if (skip != 0) {
1311 sink_->Put(kSkip, "SkipFromSerializeObject");
1312 sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
1313 }
1314
1315 int cache_index = PartialSnapshotCacheIndex(heap_object); 1414 int cache_index = PartialSnapshotCacheIndex(heap_object);
1316 sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point, 1415 sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point,
1317 "PartialSnapshotCache"); 1416 "PartialSnapshotCache");
1318 sink_->PutInt(cache_index, "partial_snapshot_cache_index"); 1417 sink_->PutInt(cache_index, "partial_snapshot_cache_index");
1319 return; 1418 return;
1320 } 1419 }
1321 1420
1322 // Pointers from the partial snapshot to the objects in the startup snapshot 1421 // Pointers from the partial snapshot to the objects in the startup snapshot
1323 // should go through the root array or through the partial snapshot cache. 1422 // should go through the root array or through the partial snapshot cache.
1324 // If this is not the case you may have to add something to the root array. 1423 // If this is not the case you may have to add something to the root array.
1325 ASSERT(!startup_serializer_->address_mapper()->IsMapped(heap_object)); 1424 ASSERT(!startup_serializer_->address_mapper()->IsMapped(heap_object));
1326 // All the symbols that the partial snapshot needs should be either in the 1425 // All the symbols that the partial snapshot needs should be either in the
1327 // root table or in the partial snapshot cache. 1426 // root table or in the partial snapshot cache.
1328 ASSERT(!heap_object->IsSymbol()); 1427 ASSERT(!heap_object->IsSymbol());
1329 1428
1330 if (address_mapper_.IsMapped(heap_object)) { 1429 if (address_mapper_.IsMapped(heap_object)) {
1331 int space = SpaceOfObject(heap_object); 1430 int space = SpaceOfAlreadySerializedObject(heap_object);
1332 int address = address_mapper_.MappedTo(heap_object); 1431 int address = address_mapper_.MappedTo(heap_object);
1333 SerializeReferenceToPreviousObject(space, 1432 SerializeReferenceToPreviousObject(space,
1334 address, 1433 address,
1335 how_to_code, 1434 how_to_code,
1336 where_to_point, 1435 where_to_point);
1337 skip);
1338 } else { 1436 } else {
1339 if (skip != 0) {
1340 sink_->Put(kSkip, "SkipFromSerializeObject");
1341 sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
1342 }
1343 // Object has not yet been serialized. Serialize it here. 1437 // Object has not yet been serialized. Serialize it here.
1344 ObjectSerializer serializer(this, 1438 ObjectSerializer serializer(this,
1345 heap_object, 1439 heap_object,
1346 sink_, 1440 sink_,
1347 how_to_code, 1441 how_to_code,
1348 where_to_point); 1442 where_to_point);
1349 serializer.Serialize(); 1443 serializer.Serialize();
1350 } 1444 }
1351 } 1445 }
1352 1446
1353 1447
1354 void Serializer::ObjectSerializer::Serialize() { 1448 void Serializer::ObjectSerializer::Serialize() {
1355 int space = Serializer::SpaceOfObject(object_); 1449 int space = Serializer::SpaceOfObject(object_);
1356 int size = object_->Size(); 1450 int size = object_->Size();
1357 1451
1358 sink_->Put(kNewObject + reference_representation_ + space, 1452 sink_->Put(kNewObject + reference_representation_ + space,
1359 "ObjectSerialization"); 1453 "ObjectSerialization");
1360 sink_->PutInt(size >> kObjectAlignmentBits, "Size in words"); 1454 sink_->PutInt(size >> kObjectAlignmentBits, "Size in words");
1361 1455
1362 LOG(i::Isolate::Current(), 1456 LOG(i::Isolate::Current(),
1363 SnapshotPositionEvent(object_->address(), sink_->Position())); 1457 SnapshotPositionEvent(object_->address(), sink_->Position()));
1364 1458
1365 // Mark this object as already serialized. 1459 // Mark this object as already serialized.
1366 int offset = serializer_->Allocate(space, size); 1460 bool start_new_page;
1461 int offset = serializer_->Allocate(space, size, &start_new_page);
1367 serializer_->address_mapper()->AddMapping(object_, offset); 1462 serializer_->address_mapper()->AddMapping(object_, offset);
1463 if (start_new_page) {
1464 sink_->Put(kNewPage, "NewPage");
1465 sink_->PutSection(space, "NewPageSpace");
1466 }
1368 1467
1369 // Serialize the map (first word of the object). 1468 // Serialize the map (first word of the object).
1370 serializer_->SerializeObject(object_->map(), kPlain, kStartOfObject, 0); 1469 serializer_->SerializeObject(object_->map(), kPlain, kStartOfObject);
1371 1470
1372 // Serialize the rest of the object. 1471 // Serialize the rest of the object.
1373 CHECK_EQ(0, bytes_processed_so_far_); 1472 CHECK_EQ(0, bytes_processed_so_far_);
1374 bytes_processed_so_far_ = kPointerSize; 1473 bytes_processed_so_far_ = kPointerSize;
1375 object_->IterateBody(object_->map()->instance_type(), size, this); 1474 object_->IterateBody(object_->map()->instance_type(), size, this);
1376 OutputRawData(object_->address() + size); 1475 OutputRawData(object_->address() + size);
1377 } 1476 }
1378 1477
1379 1478
1380 void Serializer::ObjectSerializer::VisitPointers(Object** start, 1479 void Serializer::ObjectSerializer::VisitPointers(Object** start,
(...skipping 20 matching lines...)
1401 } 1500 }
1402 current += repeat_count; 1501 current += repeat_count;
1403 bytes_processed_so_far_ += repeat_count * kPointerSize; 1502 bytes_processed_so_far_ += repeat_count * kPointerSize;
1404 if (repeat_count > kMaxRepeats) { 1503 if (repeat_count > kMaxRepeats) {
1405 sink_->Put(kRepeat, "SerializeRepeats"); 1504 sink_->Put(kRepeat, "SerializeRepeats");
1406 sink_->PutInt(repeat_count, "SerializeRepeats"); 1505 sink_->PutInt(repeat_count, "SerializeRepeats");
1407 } else { 1506 } else {
1408 sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats"); 1507 sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats");
1409 } 1508 }
1410 } else { 1509 } else {
1411 serializer_->SerializeObject( 1510 serializer_->SerializeObject(current_contents, kPlain, kStartOfObject);
1412 current_contents, kPlain, kStartOfObject, 0);
1413 bytes_processed_so_far_ += kPointerSize; 1511 bytes_processed_so_far_ += kPointerSize;
1414 current++; 1512 current++;
1415 } 1513 }
1416 } 1514 }
1417 } 1515 }
1418 } 1516 }
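Note: only the tail of the run-length scheme is visible above (the run-detection condition is in the elided lines): runs of identical pointer slots are collapsed, short runs into a single dedicated byte via CodeForRepeats and longer runs into kRepeat followed by an explicit count. A rough approximation of that shape; kMaxRepeats and the emitted codes are placeholders for the real definitions in serialize.h:

#include <cstdio>
#include <vector>

const int kMaxRepeats = 12;  // assumed value, not the real constant

// Illustration only: the first occurrence is "serialized" in full, the
// following identical slots are collapsed into one repeat marker.
void EmitSlots(const std::vector<int>& slots) {
  for (size_t i = 0; i < slots.size(); ) {
    printf("serialize value %d\n", slots[i]);
    size_t repeats = 0;
    while (i + 1 + repeats < slots.size() && slots[i + 1 + repeats] == slots[i]) {
      repeats++;
    }
    if (repeats > static_cast<size_t>(kMaxRepeats)) {
      printf("kRepeat, count=%d\n", static_cast<int>(repeats));   // generic code + count
    } else if (repeats > 0) {
      printf("CodeForRepeats(%d)\n", static_cast<int>(repeats));  // one-byte code
    }
    i += 1 + repeats;
  }
}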
1419 1517
1420 1518
1421 void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) { 1519 void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
1422 Object** current = rinfo->target_object_address(); 1520 Object** current = rinfo->target_object_address();
1423 1521
1424 int skip = OutputRawData(rinfo->target_address_address(), 1522 OutputRawData(rinfo->target_address_address());
1425 kCanReturnSkipInsteadOfSkipping);
1426 HowToCode representation = rinfo->IsCodedSpecially() ? kFromCode : kPlain; 1523 HowToCode representation = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
1427 serializer_->SerializeObject(*current, representation, kStartOfObject, skip); 1524 serializer_->SerializeObject(*current, representation, kStartOfObject);
1428 bytes_processed_so_far_ += rinfo->target_address_size(); 1525 bytes_processed_so_far_ += rinfo->target_address_size();
1429 } 1526 }
1430 1527
1431 1528
1432 void Serializer::ObjectSerializer::VisitExternalReferences(Address* start, 1529 void Serializer::ObjectSerializer::VisitExternalReferences(Address* start,
1433 Address* end) { 1530 Address* end) {
1434 Address references_start = reinterpret_cast<Address>(start); 1531 Address references_start = reinterpret_cast<Address>(start);
1435 int skip = OutputRawData(references_start, kCanReturnSkipInsteadOfSkipping); 1532 OutputRawData(references_start);
1436 1533
1437 for (Address* current = start; current < end; current++) { 1534 for (Address* current = start; current < end; current++) {
1438 sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef"); 1535 sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
1439 sink_->PutInt(skip, "SkipB4ExternalRef");
1440 skip = 0;
1441 int reference_id = serializer_->EncodeExternalReference(*current); 1536 int reference_id = serializer_->EncodeExternalReference(*current);
1442 sink_->PutInt(reference_id, "reference id"); 1537 sink_->PutInt(reference_id, "reference id");
1443 } 1538 }
1444 bytes_processed_so_far_ += static_cast<int>((end - start) * kPointerSize); 1539 bytes_processed_so_far_ += static_cast<int>((end - start) * kPointerSize);
1445 } 1540 }
1446 1541
1447 1542
1448 void Serializer::ObjectSerializer::VisitExternalReference(RelocInfo* rinfo) { 1543 void Serializer::ObjectSerializer::VisitExternalReference(RelocInfo* rinfo) {
1449 Address references_start = rinfo->target_address_address(); 1544 Address references_start = rinfo->target_address_address();
1450 int skip = OutputRawData(references_start, kCanReturnSkipInsteadOfSkipping); 1545 OutputRawData(references_start);
1451 1546
1452 Address* current = rinfo->target_reference_address(); 1547 Address* current = rinfo->target_reference_address();
1453 int representation = rinfo->IsCodedSpecially() ? 1548 int representation = rinfo->IsCodedSpecially() ?
1454 kFromCode + kStartOfObject : kPlain + kStartOfObject; 1549 kFromCode + kStartOfObject : kPlain + kStartOfObject;
1455 sink_->Put(kExternalReference + representation, "ExternalRef"); 1550 sink_->Put(kExternalReference + representation, "ExternalRef");
1456 sink_->PutInt(skip, "SkipB4ExternalRef");
1457 int reference_id = serializer_->EncodeExternalReference(*current); 1551 int reference_id = serializer_->EncodeExternalReference(*current);
1458 sink_->PutInt(reference_id, "reference id"); 1552 sink_->PutInt(reference_id, "reference id");
1459 bytes_processed_so_far_ += rinfo->target_address_size(); 1553 bytes_processed_so_far_ += rinfo->target_address_size();
1460 } 1554 }
1461 1555
1462 1556
1463 void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) { 1557 void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) {
1464 Address target_start = rinfo->target_address_address(); 1558 Address target_start = rinfo->target_address_address();
1465 int skip = OutputRawData(target_start, kCanReturnSkipInsteadOfSkipping); 1559 OutputRawData(target_start);
1466 Address target = rinfo->target_address(); 1560 Address target = rinfo->target_address();
1467 uint32_t encoding = serializer_->EncodeExternalReference(target); 1561 uint32_t encoding = serializer_->EncodeExternalReference(target);
1468 CHECK(target == NULL ? encoding == 0 : encoding != 0); 1562 CHECK(target == NULL ? encoding == 0 : encoding != 0);
1469 int representation; 1563 int representation;
1470 // Can't use a ternary operator because of gcc. 1564 // Can't use a ternary operator because of gcc.
1471 if (rinfo->IsCodedSpecially()) { 1565 if (rinfo->IsCodedSpecially()) {
1472 representation = kStartOfObject + kFromCode; 1566 representation = kStartOfObject + kFromCode;
1473 } else { 1567 } else {
1474 representation = kStartOfObject + kPlain; 1568 representation = kStartOfObject + kPlain;
1475 } 1569 }
1476 sink_->Put(kExternalReference + representation, "ExternalReference"); 1570 sink_->Put(kExternalReference + representation, "ExternalReference");
1477 sink_->PutInt(skip, "SkipB4ExternalRef");
1478 sink_->PutInt(encoding, "reference id"); 1571 sink_->PutInt(encoding, "reference id");
1479 bytes_processed_so_far_ += rinfo->target_address_size(); 1572 bytes_processed_so_far_ += rinfo->target_address_size();
1480 } 1573 }
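Note: every raw external address (runtime entries here, external references above) is replaced by an integer id, which is what makes the snapshot position independent; the CHECK above fixes the convention that id 0 corresponds to a NULL target. A toy version of the shared lookup both processes would rebuild, with stand-in names for the real ExternalReferenceTable/encoder machinery:

#include <stdint.h>

// Toy table of well-known addresses. Both the serializing and the
// deserializing process build the same ordered list, so ids stored in the
// snapshot resolve to the right addresses in a fresh process.
static void RuntimeEntryA() {}
static void RuntimeEntryB() {}

typedef void (*KnownReference)();
static const KnownReference kKnownReferences[] = { RuntimeEntryA, RuntimeEntryB };
static const uint32_t kKnownReferenceCount =
    sizeof(kKnownReferences) / sizeof(kKnownReferences[0]);

uint32_t EncodeReference(KnownReference address) {
  for (uint32_t i = 0; i < kKnownReferenceCount; i++) {
    if (kKnownReferences[i] == address) return i + 1;  // id 0 is reserved for NULL
  }
  return 0;  // NULL or unknown target
}

KnownReference DecodeReference(uint32_t id) {
  if (id == 0 || id > kKnownReferenceCount) return 0;
  return kKnownReferences[id - 1];
}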
1481 1574
1482 1575
1483 void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) { 1576 void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) {
1484 CHECK(RelocInfo::IsCodeTarget(rinfo->rmode())); 1577 CHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
1485 Address target_start = rinfo->target_address_address(); 1578 Address target_start = rinfo->target_address_address();
1486 int skip = OutputRawData(target_start, kCanReturnSkipInsteadOfSkipping); 1579 OutputRawData(target_start);
1487 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); 1580 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
1488 serializer_->SerializeObject(target, kFromCode, kInnerPointer, skip); 1581 serializer_->SerializeObject(target, kFromCode, kInnerPointer);
1489 bytes_processed_so_far_ += rinfo->target_address_size(); 1582 bytes_processed_so_far_ += rinfo->target_address_size();
1490 } 1583 }
1491 1584
1492 1585
1493 void Serializer::ObjectSerializer::VisitCodeEntry(Address entry_address) { 1586 void Serializer::ObjectSerializer::VisitCodeEntry(Address entry_address) {
1494 Code* target = Code::cast(Code::GetObjectFromEntryAddress(entry_address)); 1587 Code* target = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
1495 int skip = OutputRawData(entry_address, kCanReturnSkipInsteadOfSkipping); 1588 OutputRawData(entry_address);
1496 serializer_->SerializeObject(target, kPlain, kInnerPointer, skip); 1589 serializer_->SerializeObject(target, kPlain, kInnerPointer);
1497 bytes_processed_so_far_ += kPointerSize; 1590 bytes_processed_so_far_ += kPointerSize;
1498 } 1591 }
1499 1592
1500 1593
1501 void Serializer::ObjectSerializer::VisitGlobalPropertyCell(RelocInfo* rinfo) { 1594 void Serializer::ObjectSerializer::VisitGlobalPropertyCell(RelocInfo* rinfo) {
1502 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL); 1595 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
1503 JSGlobalPropertyCell* cell = 1596 JSGlobalPropertyCell* cell =
1504 JSGlobalPropertyCell::cast(rinfo->target_cell()); 1597 JSGlobalPropertyCell::cast(rinfo->target_cell());
1505 int skip = OutputRawData(rinfo->pc(), kCanReturnSkipInsteadOfSkipping); 1598 OutputRawData(rinfo->pc());
1506 serializer_->SerializeObject(cell, kPlain, kInnerPointer, skip); 1599 serializer_->SerializeObject(cell, kPlain, kInnerPointer);
1507 } 1600 }
1508 1601
1509 1602
1510 void Serializer::ObjectSerializer::VisitExternalAsciiString( 1603 void Serializer::ObjectSerializer::VisitExternalAsciiString(
1511 v8::String::ExternalAsciiStringResource** resource_pointer) { 1604 v8::String::ExternalAsciiStringResource** resource_pointer) {
1512 Address references_start = reinterpret_cast<Address>(resource_pointer); 1605 Address references_start = reinterpret_cast<Address>(resource_pointer);
1513 OutputRawData(references_start); 1606 OutputRawData(references_start);
1514 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { 1607 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
1515 Object* source = HEAP->natives_source_cache()->get(i); 1608 Object* source = HEAP->natives_source_cache()->get(i);
1516 if (!source->IsUndefined()) { 1609 if (!source->IsUndefined()) {
1517 ExternalAsciiString* string = ExternalAsciiString::cast(source); 1610 ExternalAsciiString* string = ExternalAsciiString::cast(source);
1518 typedef v8::String::ExternalAsciiStringResource Resource; 1611 typedef v8::String::ExternalAsciiStringResource Resource;
1519 const Resource* resource = string->resource(); 1612 const Resource* resource = string->resource();
1520 if (resource == *resource_pointer) { 1613 if (resource == *resource_pointer) {
1521 sink_->Put(kNativesStringResource, "NativesStringResource"); 1614 sink_->Put(kNativesStringResource, "NativesStringResource");
1522 sink_->PutSection(i, "NativesStringResourceEnd"); 1615 sink_->PutSection(i, "NativesStringResourceEnd");
1523 bytes_processed_so_far_ += sizeof(resource); 1616 bytes_processed_so_far_ += sizeof(resource);
1524 return; 1617 return;
1525 } 1618 }
1526 } 1619 }
1527 } 1620 }
1528 // One of the strings in the natives cache should match the resource. We 1621 // One of the strings in the natives cache should match the resource. We
1529 // can't serialize any other kinds of external strings. 1622 // can't serialize any other kinds of external strings.
1530 UNREACHABLE(); 1623 UNREACHABLE();
1531 } 1624 }
1532 1625
1533 1626
1534 int Serializer::ObjectSerializer::OutputRawData( 1627 void Serializer::ObjectSerializer::OutputRawData(Address up_to) {
1535 Address up_to, Serializer::ObjectSerializer::ReturnSkip return_skip) {
1536 Address object_start = object_->address(); 1628 Address object_start = object_->address();
1537 Address base = object_start + bytes_processed_so_far_;
1538 int up_to_offset = static_cast<int>(up_to - object_start); 1629 int up_to_offset = static_cast<int>(up_to - object_start);
1539 int to_skip = up_to_offset - bytes_processed_so_far_; 1630 int skipped = up_to_offset - bytes_processed_so_far_;
1540 int bytes_to_output = to_skip;
1541 bytes_processed_so_far_ += to_skip;
1542 // This assert will fail if the reloc info gives us the target_address_address 1631 // This assert will fail if the reloc info gives us the target_address_address
1543 // locations in a non-ascending order. Luckily that doesn't happen. 1632 // locations in a non-ascending order. Luckily that doesn't happen.
1544 ASSERT(to_skip >= 0); 1633 ASSERT(skipped >= 0);
1545 bool outputting_code = false; 1634 if (skipped != 0) {
1546 if (to_skip != 0 && code_object_ && !code_has_been_output_) { 1635 Address base = object_start + bytes_processed_so_far_;
1547 // Output the code all at once and fix later. 1636 #define RAW_CASE(index, length) \
1548 bytes_to_output = object_->Size() + to_skip - bytes_processed_so_far_; 1637 if (skipped == length) { \
1549 outputting_code = true;
1550 code_has_been_output_ = true;
1551 }
1552 if (bytes_to_output != 0 &&
1553 (!code_object_ || outputting_code)) {
1554 #define RAW_CASE(index) \
1555 if (!outputting_code && bytes_to_output == index * kPointerSize && \
1556 index * kPointerSize == to_skip) { \
1557 sink_->PutSection(kRawData + index, "RawDataFixed"); \ 1638 sink_->PutSection(kRawData + index, "RawDataFixed"); \
1558 to_skip = 0; /* This insn already skips. */ \
1559 } else /* NOLINT */ 1639 } else /* NOLINT */
1560 COMMON_RAW_LENGTHS(RAW_CASE) 1640 COMMON_RAW_LENGTHS(RAW_CASE)
1561 #undef RAW_CASE 1641 #undef RAW_CASE
1562 { /* NOLINT */ 1642 { /* NOLINT */
1563 // We always end up here if we are outputting the code of a code object.
1564 sink_->Put(kRawData, "RawData"); 1643 sink_->Put(kRawData, "RawData");
1565 sink_->PutInt(bytes_to_output, "length"); 1644 sink_->PutInt(skipped, "length");
1566 } 1645 }
1567 for (int i = 0; i < bytes_to_output; i++) { 1646 for (int i = 0; i < skipped; i++) {
1568 unsigned int data = base[i]; 1647 unsigned int data = base[i];
1569 sink_->PutSection(data, "Byte"); 1648 sink_->PutSection(data, "Byte");
1570 } 1649 }
1650 bytes_processed_so_far_ += skipped;
1571 } 1651 }
1572 if (to_skip != 0 && return_skip == kIgnoringReturn) {
1573 sink_->Put(kSkip, "Skip");
1574 sink_->PutInt(to_skip, "SkipDistance");
1575 to_skip = 0;
1576 }
1577 return to_skip;
1578 } 1652 }
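Note: both the left- and right-hand versions of OutputRawData share the same fast path: if the gap length matches one of the COMMON_RAW_LENGTHS entries it is emitted as a single kRawData + index byte with the length implied, otherwise kRawData is followed by an explicit length. A sketch of that dispatch; the byte-code value and the length list below are placeholders, the authoritative definitions are in serialize.h:

#include <cstdio>

const int kRawData = 0x30;                                   // placeholder value
const int kCommonRawLengths[] = { 1, 2, 3, 4, 5, 6, 7, 8 };  // in words, assumed

void EmitRawDataHeader(int length_in_bytes, int pointer_size) {
  const int count = sizeof(kCommonRawLengths) / sizeof(kCommonRawLengths[0]);
  for (int i = 0; i < count; i++) {
    if (length_in_bytes == kCommonRawLengths[i] * pointer_size) {
      // One byte encodes both "raw data follows" and its length.
      printf("PutSection(kRawData + %d)\n", kCommonRawLengths[i]);
      return;
    }
  }
  // Fall back to an explicit length operand.
  printf("Put(kRawData)\nPutInt(%d)\n", length_in_bytes);
}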
1579 1653
1580 1654
1581 int Serializer::SpaceOfObject(HeapObject* object) { 1655 int Serializer::SpaceOfObject(HeapObject* object) {
1582 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { 1656 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
1583 AllocationSpace s = static_cast<AllocationSpace>(i); 1657 AllocationSpace s = static_cast<AllocationSpace>(i);
1584 if (HEAP->InSpace(object, s)) { 1658 if (HEAP->InSpace(object, s)) {
1585 ASSERT(i < kNumberOfSpaces); 1659 if (i == LO_SPACE) {
1660 if (object->IsCode()) {
1661 return kLargeCode;
1662 } else if (object->IsFixedArray()) {
1663 return kLargeFixedArray;
1664 } else {
1665 return kLargeData;
1666 }
1667 }
1586 return i; 1668 return i;
1587 } 1669 }
1588 } 1670 }
1671 UNREACHABLE();
1672 return 0;
1673 }
1674
1675
1676 int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) {
1677 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
1678 AllocationSpace s = static_cast<AllocationSpace>(i);
1679 if (HEAP->InSpace(object, s)) {
1680 return i;
1681 }
1682 }
1589 UNREACHABLE(); 1683 UNREACHABLE();
1590 return 0; 1684 return 0;
1591 } 1685 }
1592 1686
1593 1687
1594 int Serializer::Allocate(int space, int size) { 1688 int Serializer::Allocate(int space, int size, bool* new_page) {
1595 CHECK(space >= 0 && space < kNumberOfSpaces); 1689 CHECK(space >= 0 && space < kNumberOfSpaces);
1690 if (SpaceIsLarge(space)) {
1691 // In large object space we merely number the objects instead of trying to
1692 // determine some sort of address.
1693 *new_page = true;
1694 large_object_total_ += size;
1695 return fullness_[LO_SPACE]++;
1696 }
1697 *new_page = false;
1698 if (fullness_[space] == 0) {
1699 *new_page = true;
1700 }
1701 if (SpaceIsPaged(space)) {
1702 // Paged spaces are a little special. We encode their addresses as if the
1703 // pages were all contiguous and each page were filled up in the range
1704 // 0 - Page::kObjectAreaSize. In practice the pages may not be contiguous
1705 // and allocation does not start at offset 0 in the page, but this scheme
1706 // means the deserializer can get the page number quickly by shifting the
1707 // serialized address.
1708 CHECK(IsPowerOf2(Page::kPageSize));
1709 int used_in_this_page = (fullness_[space] & (Page::kPageSize - 1));
1710 CHECK(size <= SpaceAreaSize(space));
1711 if (used_in_this_page + size > SpaceAreaSize(space)) {
1712 *new_page = true;
1713 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize);
1714 }
1715 }
1596 int allocation_address = fullness_[space]; 1716 int allocation_address = fullness_[space];
1597 fullness_[space] = allocation_address + size; 1717 fullness_[space] = allocation_address + size;
1598 return allocation_address; 1718 return allocation_address;
1599 } 1719 }
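Note: the paged-space comment in the right-hand column is the point of this encoding: offsets are handed out as if all pages in a space were contiguous, so the deserializer can recover the page number and the in-page offset with a shift and a mask. A small worked sketch under the assumption of a 1 MB page (the real constant is Page::kPageSize; the CHECK above only requires it to be a power of two):

#include <cstdio>

const int kPageSizeBits = 20;  // assumed: pages of 1 << 20 bytes

void SplitSerializedOffset(int encoded_offset) {
  int page_index = encoded_offset >> kPageSizeBits;
  int offset_in_page = encoded_offset & ((1 << kPageSizeBits) - 1);
  printf("page %d, offset 0x%x\n", page_index, offset_in_page);
}

// For example, SplitSerializedOffset((3 << kPageSizeBits) + 0x40) reports
// page 3, offset 0x40.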
1600 1720
1601 1721
1602 int Serializer::SpaceAreaSize(int space) { 1722 int Serializer::SpaceAreaSize(int space) {
1603 if (space == CODE_SPACE) { 1723 if (space == CODE_SPACE) {
1604 return isolate_->memory_allocator()->CodePageAreaSize(); 1724 return isolate_->memory_allocator()->CodePageAreaSize();
1605 } else { 1725 } else {
1606 return Page::kPageSize - Page::kObjectStartOffset; 1726 return Page::kPageSize - Page::kObjectStartOffset;
1607 } 1727 }
1608 } 1728 }
1609 1729
1610 1730
1611 void Serializer::Pad() {
1612 // The non-branching GetInt will read up to 3 bytes too far, so we need
1613 // to pad the snapshot to make sure we don't read over the end.
1614 for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
1615 sink_->Put(kNop, "Padding");
1616 }
1617 }
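Note: Pad() exists in the left-hand (trunk) version because its GetInt decodes by loading a fixed-width window instead of branching per byte, so the load can reach up to sizeof(int32_t) - 1 = 3 bytes past the last meaningful byte; three trailing nop bytes keep that over-read inside the buffer, and AtEOF() below treats a nop-only tail as end of stream. A toy illustration of the over-read bound; the loader below is a stand-in, not the real SnapshotByteSource::GetInt:

#include <stdint.h>
#include <cstdio>
#include <cstring>

// A reader that always loads a full 4-byte window touches at most 3 bytes
// beyond the byte it actually needs -- hence sizeof(int32_t) - 1 padding bytes.
uint32_t LoadWindow(const uint8_t* data, int position) {
  uint32_t window;
  memcpy(&window, data + position, sizeof(window));  // reads position .. position + 3
  return window;
}

int main() {
  const uint8_t snapshot[] = { 0x2A, 0x00, 0x00, 0x00 };  // 1 payload byte + 3 padding bytes
  printf("%u\n", LoadWindow(snapshot, 0));                // stays inside the buffer
  return 0;
}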
1618
1619
1620 bool SnapshotByteSource::AtEOF() {
1621 if (0u + length_ - position_ > 2 * sizeof(uint32_t)) return false;
1622 for (int x = position_; x < length_; x++) {
1623 if (data_[x] != SerializerDeserializer::nop()) return false;
1624 }
1625 return true;
1626 }
1627
1628 } } // namespace v8::internal 1731 } } // namespace v8::internal