OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 300 matching lines...)
311 "StoreBuffer::StoreBufferOverflow"); | 311 "StoreBuffer::StoreBufferOverflow"); |
312 Add(ExternalReference:: | 312 Add(ExternalReference:: |
313 incremental_evacuation_record_write_function(isolate).address(), | 313 incremental_evacuation_record_write_function(isolate).address(), |
314 RUNTIME_ENTRY, | 314 RUNTIME_ENTRY, |
315 7, | 315 7, |
316 "IncrementalMarking::RecordWrite"); | 316 "IncrementalMarking::RecordWrite"); |
317 | 317 |
318 | 318 |
319 | 319 |
320 // Miscellaneous | 320 // Miscellaneous |
321 Add(ExternalReference::roots_address(isolate).address(), | 321 Add(ExternalReference::roots_array_start(isolate).address(), |
322 UNCLASSIFIED, | 322 UNCLASSIFIED, |
323 3, | 323 3, |
324 "Heap::roots_address()"); | 324 "Heap::roots_array_start()"); |
325 Add(ExternalReference::address_of_stack_limit(isolate).address(), | 325 Add(ExternalReference::address_of_stack_limit(isolate).address(), |
326 UNCLASSIFIED, | 326 UNCLASSIFIED, |
327 4, | 327 4, |
328 "StackGuard::address_of_jslimit()"); | 328 "StackGuard::address_of_jslimit()"); |
329 Add(ExternalReference::address_of_real_stack_limit(isolate).address(), | 329 Add(ExternalReference::address_of_real_stack_limit(isolate).address(), |
330 UNCLASSIFIED, | 330 UNCLASSIFIED, |
331 5, | 331 5, |
332 "StackGuard::address_of_real_jslimit()"); | 332 "StackGuard::address_of_real_jslimit()"); |
333 #ifndef V8_INTERPRETED_REGEXP | 333 #ifndef V8_INTERPRETED_REGEXP |
334 Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(), | 334 Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(), |
(...skipping 442 matching lines...)
777 emit_write_barrier = (space_number == NEW_SPACE && \ | 777 emit_write_barrier = (space_number == NEW_SPACE && \ |
778 source_space != NEW_SPACE && \ | 778 source_space != NEW_SPACE && \ |
779 source_space != CELL_SPACE); \ | 779 source_space != CELL_SPACE); \ |
780 } else { \ | 780 } else { \ |
781 Object* new_object = NULL; /* May not be a real Object pointer. */ \ | 781 Object* new_object = NULL; /* May not be a real Object pointer. */ \ |
782 if (where == kNewObject) { \ | 782 if (where == kNewObject) { \ |
783 ASSIGN_DEST_SPACE(space_number) \ | 783 ASSIGN_DEST_SPACE(space_number) \ |
784 ReadObject(space_number, dest_space, &new_object); \ | 784 ReadObject(space_number, dest_space, &new_object); \ |
785 } else if (where == kRootArray) { \ | 785 } else if (where == kRootArray) { \ |
786 int root_id = source_->GetInt(); \ | 786 int root_id = source_->GetInt(); \ |
787 new_object = isolate->heap()->roots_address()[root_id]; \ | 787 new_object = isolate->heap()->roots_array_start()[root_id]; \ |
788 } else if (where == kPartialSnapshotCache) { \ | 788 } else if (where == kPartialSnapshotCache) { \ |
789 int cache_index = source_->GetInt(); \ | 789 int cache_index = source_->GetInt(); \ |
790 new_object = isolate->serialize_partial_snapshot_cache() \ | 790 new_object = isolate->serialize_partial_snapshot_cache() \ |
791 [cache_index]; \ | 791 [cache_index]; \ |
792 } else if (where == kExternalReference) { \ | 792 } else if (where == kExternalReference) { \ |
793 int reference_id = source_->GetInt(); \ | 793 int reference_id = source_->GetInt(); \ |
794 Address address = external_reference_decoder_-> \ | 794 Address address = external_reference_decoder_-> \ |
795 Decode(reference_id); \ | 795 Decode(reference_id); \ |
796 new_object = reinterpret_cast<Object*>(address); \ | 796 new_object = reinterpret_cast<Object*>(address); \ |
797 } else if (where == kBackref) { \ | 797 } else if (where == kBackref) { \ |
(...skipping 78 matching lines...)
876 CASE_STATEMENT(where, how, within, kLargeCode) \ | 876 CASE_STATEMENT(where, how, within, kLargeCode) \ |
877 CASE_STATEMENT(where, how, within, kLargeFixedArray) \ | 877 CASE_STATEMENT(where, how, within, kLargeFixedArray) \ |
878 CASE_BODY(where, how, within, kAnyOldSpace, kUnknownOffsetFromStart) | 878 CASE_BODY(where, how, within, kAnyOldSpace, kUnknownOffsetFromStart) |
879 | 879 |
880 #define ONE_PER_CODE_SPACE(where, how, within) \ | 880 #define ONE_PER_CODE_SPACE(where, how, within) \ |
881 CASE_STATEMENT(where, how, within, CODE_SPACE) \ | 881 CASE_STATEMENT(where, how, within, CODE_SPACE) \ |
882 CASE_BODY(where, how, within, CODE_SPACE, kUnknownOffsetFromStart) \ | 882 CASE_BODY(where, how, within, CODE_SPACE, kUnknownOffsetFromStart) \ |
883 CASE_STATEMENT(where, how, within, kLargeCode) \ | 883 CASE_STATEMENT(where, how, within, kLargeCode) \ |
884 CASE_BODY(where, how, within, kLargeCode, kUnknownOffsetFromStart) | 884 CASE_BODY(where, how, within, kLargeCode, kUnknownOffsetFromStart) |
885 | 885 |
886 #define EMIT_COMMON_REFERENCE_PATTERNS(pseudo_space_number, \ | 886 #define FOUR_CASES(byte_code) \ |
887 space_number, \ | 887 case byte_code: \ |
888 offset_from_start) \ | 888 case byte_code + 1: \ |
889 CASE_STATEMENT(kFromStart, kPlain, kStartOfObject, pseudo_space_number) \ | 889 case byte_code + 2: \ |
890 CASE_BODY(kFromStart, kPlain, kStartOfObject, space_number, offset_from_start) | 890 case byte_code + 3: |
| 891 |
| 892 #define SIXTEEN_CASES(byte_code) \ |
| 893 FOUR_CASES(byte_code) \ |
| 894 FOUR_CASES(byte_code + 4) \ |
| 895 FOUR_CASES(byte_code + 8) \ |
| 896 FOUR_CASES(byte_code + 12) |
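The FOUR_CASES/SIXTEEN_CASES helpers added here let a contiguous block of byte codes fall through to one case body, with the payload recovered by subtracting the base opcode. A minimal standalone sketch of the same pattern; the constant kExampleLowConstants and the Dispatch function are made-up illustrations, not V8 code:

    #include <cstdio>

    #define FOUR_CASES(byte_code) \
      case byte_code:             \
      case byte_code + 1:         \
      case byte_code + 2:         \
      case byte_code + 3:

    #define SIXTEEN_CASES(byte_code) \
      FOUR_CASES(byte_code)          \
      FOUR_CASES(byte_code + 4)      \
      FOUR_CASES(byte_code + 8)      \
      FOUR_CASES(byte_code + 12)

    // Illustrative base opcode; the real values live in serialize.h.
    static const int kExampleLowConstants = 0x20;

    static void Dispatch(int data) {
      switch (data) {
        SIXTEEN_CASES(kExampleLowConstants) {
          // Opcodes 0x20..0x2f all land here; the payload is data - base.
          printf("constant #%d\n", data - kExampleLowConstants);
          break;
        }
        default:
          printf("plain opcode 0x%x\n", data);
      }
    }

    int main() {
      Dispatch(kExampleLowConstants + 5);  // constant #5
      Dispatch(0x7f);                      // plain opcode 0x7f
      return 0;
    }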
891 | 897 |
892 // We generate 15 cases and bodies that process special tags that combine | 898 // We generate 15 cases and bodies that process special tags that combine |
893 // the raw data tag and the length into one byte. | 899 // the raw data tag and the length into one byte. |
894 #define RAW_CASE(index, size) \ | 900 #define RAW_CASE(index, size) \ |
895 case kRawData + index: { \ | 901 case kRawData + index: { \ |
896 byte* raw_data_out = reinterpret_cast<byte*>(current); \ | 902 byte* raw_data_out = reinterpret_cast<byte*>(current); \ |
897 source_->CopyRaw(raw_data_out, size); \ | 903 source_->CopyRaw(raw_data_out, size); \ |
898 current = reinterpret_cast<Object**>(raw_data_out + size); \ | 904 current = reinterpret_cast<Object**>(raw_data_out + size); \ |
899 break; \ | 905 break; \ |
900 } | 906 } |
901 COMMON_RAW_LENGTHS(RAW_CASE) | 907 COMMON_RAW_LENGTHS(RAW_CASE) |
902 #undef RAW_CASE | 908 #undef RAW_CASE |
903 | 909 |
904 // Deserialize a chunk of raw data that doesn't have one of the popular | 910 // Deserialize a chunk of raw data that doesn't have one of the popular |
905 // lengths. | 911 // lengths. |
906 case kRawData: { | 912 case kRawData: { |
907 int size = source_->GetInt(); | 913 int size = source_->GetInt(); |
908 byte* raw_data_out = reinterpret_cast<byte*>(current); | 914 byte* raw_data_out = reinterpret_cast<byte*>(current); |
909 source_->CopyRaw(raw_data_out, size); | 915 source_->CopyRaw(raw_data_out, size); |
910 current = reinterpret_cast<Object**>(raw_data_out + size); | 916 current = reinterpret_cast<Object**>(raw_data_out + size); |
911 break; | 917 break; |
912 } | 918 } |
913 | 919 |
| 920 SIXTEEN_CASES(kRootArrayLowConstants) |
| 921 SIXTEEN_CASES(kRootArrayHighConstants) { |
| 922 int root_id = RootArrayConstantFromByteCode(data); |
| 923 *current++ = isolate->heap()->roots_array_start()[root_id]; |
| 924 break; |
| 925 } |
| 926 |
| 927 case kRepeat: { |
| 928 int repeats = source_->GetInt(); |
| 929 Object* object = current[-1]; |
| 930 for (int i = 0; i < repeats; i++) current[i] = object; |
| 931 current += repeats; |
| 932 break; |
| 933 } |
| 934 |
| 935 STATIC_ASSERT(kMaxRepeats == 12); |
| 936 FOUR_CASES(kConstantRepeat) |
| 937 FOUR_CASES(kConstantRepeat + 4) |
| 938 FOUR_CASES(kConstantRepeat + 8) { |
| 939 int repeats = RepeatsForCode(data); |
| 940 Object* object = current[-1]; |
| 941 for (int i = 0; i < repeats; i++) current[i] = object; |
| 942 current += repeats; |
| 943 break; |
| 944 } |
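These repeat opcodes come in two flavours: kRepeat followed by an explicit varint count, and (per the STATIC_ASSERT) twelve single-byte codes starting at kConstantRepeat whose count is implied by the opcode and recovered with RepeatsForCode. A hedged sketch of that mapping, assuming the simplest encoding (kConstantRepeat + i stands for i + 1 copies) and an illustrative opcode value; the real definitions are in serialize.h:

    #include <cassert>
    #include <cstdio>

    // Assumed, illustrative opcode value; kMaxRepeats matches the STATIC_ASSERT.
    static const int kConstantRepeat = 0x60;
    static const int kMaxRepeats = 12;

    // Presumed mapping: byte code kConstantRepeat + i means i + 1 copies of the
    // previous word.
    static int CodeForRepeats(int repeats) {
      assert(repeats >= 1 && repeats <= kMaxRepeats);
      return kConstantRepeat + repeats - 1;
    }

    static int RepeatsForCode(int byte_code) {
      assert(byte_code >= kConstantRepeat &&
             byte_code < kConstantRepeat + kMaxRepeats);
      return byte_code - kConstantRepeat + 1;
    }

    int main() {
      // A run of 7 identical pointers costs one byte instead of 7 references.
      int code = CodeForRepeats(7);
      printf("code 0x%x decodes to %d repeats\n", code, RepeatsForCode(code));
      return 0;
    }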
| 945 |
914 // Deserialize a new object and write a pointer to it to the current | 946 // Deserialize a new object and write a pointer to it to the current |
915 // object. | 947 // object. |
916 ONE_PER_SPACE(kNewObject, kPlain, kStartOfObject) | 948 ONE_PER_SPACE(kNewObject, kPlain, kStartOfObject) |
917 // Support for direct instruction pointers in functions | 949 // Support for direct instruction pointers in functions |
918 ONE_PER_CODE_SPACE(kNewObject, kPlain, kFirstInstruction) | 950 ONE_PER_CODE_SPACE(kNewObject, kPlain, kFirstInstruction) |
919 // Deserialize a new code object and write a pointer to its first | 951 // Deserialize a new code object and write a pointer to its first |
920 // instruction to the current code object. | 952 // instruction to the current code object. |
921 ONE_PER_SPACE(kNewObject, kFromCode, kFirstInstruction) | 953 ONE_PER_SPACE(kNewObject, kFromCode, kFirstInstruction) |
922 // Find a recently deserialized object using its offset from the current | 954 // Find a recently deserialized object using its offset from the current |
923 // allocation point and write a pointer to it to the current object. | 955 // allocation point and write a pointer to it to the current object. |
924 ALL_SPACES(kBackref, kPlain, kStartOfObject) | 956 ALL_SPACES(kBackref, kPlain, kStartOfObject) |
925 // Find a recently deserialized code object using its offset from the | 957 // Find a recently deserialized code object using its offset from the |
926 // current allocation point and write a pointer to its first instruction | 958 // current allocation point and write a pointer to its first instruction |
927 // to the current code object or the instruction pointer in a function | 959 // to the current code object or the instruction pointer in a function |
928 // object. | 960 // object. |
929 ALL_SPACES(kBackref, kFromCode, kFirstInstruction) | 961 ALL_SPACES(kBackref, kFromCode, kFirstInstruction) |
930 ALL_SPACES(kBackref, kPlain, kFirstInstruction) | 962 ALL_SPACES(kBackref, kPlain, kFirstInstruction) |
931 // Find an already deserialized object using its offset from the start | 963 // Find an already deserialized object using its offset from the start |
932 // and write a pointer to it to the current object. | 964 // and write a pointer to it to the current object. |
933 ALL_SPACES(kFromStart, kPlain, kStartOfObject) | 965 ALL_SPACES(kFromStart, kPlain, kStartOfObject) |
934 ALL_SPACES(kFromStart, kPlain, kFirstInstruction) | 966 ALL_SPACES(kFromStart, kPlain, kFirstInstruction) |
935 // Find an already deserialized code object using its offset from the | 967 // Find an already deserialized code object using its offset from the |
936 // start and write a pointer to its first instruction to the current code | 968 // start and write a pointer to its first instruction to the current code |
937 // object. | 969 // object. |
938 ALL_SPACES(kFromStart, kFromCode, kFirstInstruction) | 970 ALL_SPACES(kFromStart, kFromCode, kFirstInstruction) |
939 // Find an already deserialized object at one of the predetermined popular | |
940 // offsets from the start and write a pointer to it in the current object. | |
941 COMMON_REFERENCE_PATTERNS(EMIT_COMMON_REFERENCE_PATTERNS) | |
942 // Find an object in the roots array and write a pointer to it to the | 971 // Find an object in the roots array and write a pointer to it to the |
943 // current object. | 972 // current object. |
944 CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0) | 973 CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0) |
945 CASE_BODY(kRootArray, kPlain, kStartOfObject, 0, kUnknownOffsetFromStart) | 974 CASE_BODY(kRootArray, kPlain, kStartOfObject, 0, kUnknownOffsetFromStart) |
946 // Find an object in the partial snapshots cache and write a pointer to it | 975 // Find an object in the partial snapshots cache and write a pointer to it |
947 // to the current object. | 976 // to the current object. |
948 CASE_STATEMENT(kPartialSnapshotCache, kPlain, kStartOfObject, 0) | 977 CASE_STATEMENT(kPartialSnapshotCache, kPlain, kStartOfObject, 0) |
949 CASE_BODY(kPartialSnapshotCache, | 978 CASE_BODY(kPartialSnapshotCache, |
950 kPlain, | 979 kPlain, |
951 kStartOfObject, | 980 kStartOfObject, |
(...skipping 21 matching lines...)
973 CASE_BODY(kExternalReference, | 1002 CASE_BODY(kExternalReference, |
974 kFromCode, | 1003 kFromCode, |
975 kStartOfObject, | 1004 kStartOfObject, |
976 0, | 1005 0, |
977 kUnknownOffsetFromStart) | 1006 kUnknownOffsetFromStart) |
978 | 1007 |
979 #undef CASE_STATEMENT | 1008 #undef CASE_STATEMENT |
980 #undef CASE_BODY | 1009 #undef CASE_BODY |
981 #undef ONE_PER_SPACE | 1010 #undef ONE_PER_SPACE |
982 #undef ALL_SPACES | 1011 #undef ALL_SPACES |
983 #undef EMIT_COMMON_REFERENCE_PATTERNS | |
984 #undef ASSIGN_DEST_SPACE | 1012 #undef ASSIGN_DEST_SPACE |
985 | 1013 |
986 case kNewPage: { | 1014 case kNewPage: { |
987 int space = source_->Get(); | 1015 int space = source_->Get(); |
988 pages_[space].Add(last_object_address_); | 1016 pages_[space].Add(last_object_address_); |
989 if (space == CODE_SPACE) { | 1017 if (space == CODE_SPACE) { |
990 CPU::FlushICache(last_object_address_, Page::kPageSize); | 1018 CPU::FlushICache(last_object_address_, Page::kPageSize); |
991 } | 1019 } |
992 break; | 1020 break; |
993 } | 1021 } |
(...skipping 66 matching lines...)
1060 sink_->PutSection(character, "TagCharacter"); | 1088 sink_->PutSection(character, "TagCharacter"); |
1061 } while (character != 0); | 1089 } while (character != 0); |
1062 } | 1090 } |
1063 | 1091 |
1064 #endif | 1092 #endif |
1065 | 1093 |
1066 Serializer::Serializer(SnapshotByteSink* sink) | 1094 Serializer::Serializer(SnapshotByteSink* sink) |
1067 : sink_(sink), | 1095 : sink_(sink), |
1068 current_root_index_(0), | 1096 current_root_index_(0), |
1069 external_reference_encoder_(new ExternalReferenceEncoder), | 1097 external_reference_encoder_(new ExternalReferenceEncoder), |
1070 large_object_total_(0) { | 1098 large_object_total_(0), |
| 1099 root_index_wave_front_(0) { |
1071 // The serializer is meant to be used only to generate initial heap images | 1100 // The serializer is meant to be used only to generate initial heap images |
1072 // from a context in which there is only one isolate. | 1101 // from a context in which there is only one isolate. |
1073 ASSERT(Isolate::Current()->IsDefaultIsolate()); | 1102 ASSERT(Isolate::Current()->IsDefaultIsolate()); |
1074 for (int i = 0; i <= LAST_SPACE; i++) { | 1103 for (int i = 0; i <= LAST_SPACE; i++) { |
1075 fullness_[i] = 0; | 1104 fullness_[i] = 0; |
1076 } | 1105 } |
1077 } | 1106 } |
1078 | 1107 |
1079 | 1108 |
1080 Serializer::~Serializer() { | 1109 Serializer::~Serializer() { |
(...skipping 36 matching lines...)
1117 } | 1146 } |
1118 isolate->set_serialize_partial_snapshot_cache_length( | 1147 isolate->set_serialize_partial_snapshot_cache_length( |
1119 Isolate::kPartialSnapshotCacheCapacity); | 1148 Isolate::kPartialSnapshotCacheCapacity); |
1120 } | 1149 } |
1121 | 1150 |
1122 | 1151 |
1123 void Serializer::VisitPointers(Object** start, Object** end) { | 1152 void Serializer::VisitPointers(Object** start, Object** end) { |
1124 Isolate* isolate = Isolate::Current(); | 1153 Isolate* isolate = Isolate::Current(); |
1125 | 1154 |
1126 for (Object** current = start; current < end; current++) { | 1155 for (Object** current = start; current < end; current++) { |
| 1156 if (start == isolate->heap()->roots_array_start()) { |
| 1157 root_index_wave_front_ = Max(root_index_wave_front_, current - start); |
| 1158 } |
1127 if (reinterpret_cast<Address>(current) == | 1159 if (reinterpret_cast<Address>(current) == |
1128 isolate->heap()->store_buffer()->TopAddress()) { | 1160 isolate->heap()->store_buffer()->TopAddress()) { |
1129 sink_->Put(kSkip, "Skip"); | 1161 sink_->Put(kSkip, "Skip"); |
1130 } else if ((*current)->IsSmi()) { | 1162 } else if ((*current)->IsSmi()) { |
1131 sink_->Put(kRawData, "RawData"); | 1163 sink_->Put(kRawData, "RawData"); |
1132 sink_->PutInt(kPointerSize, "length"); | 1164 sink_->PutInt(kPointerSize, "length"); |
1133 for (int i = 0; i < kPointerSize; i++) { | 1165 for (int i = 0; i < kPointerSize; i++) { |
1134 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte"); | 1166 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte"); |
1135 } | 1167 } |
1136 } else { | 1168 } else { |
(...skipping 47 matching lines...)
1184 startup_serializer_->VisitPointer( | 1216 startup_serializer_->VisitPointer( |
1185 &isolate->serialize_partial_snapshot_cache()[length]); | 1217 &isolate->serialize_partial_snapshot_cache()[length]); |
1186 // We don't recurse from the startup snapshot generator into the partial | 1218 // We don't recurse from the startup snapshot generator into the partial |
1187 // snapshot generator. | 1219 // snapshot generator. |
1188 ASSERT(length == isolate->serialize_partial_snapshot_cache_length()); | 1220 ASSERT(length == isolate->serialize_partial_snapshot_cache_length()); |
1189 isolate->set_serialize_partial_snapshot_cache_length(length + 1); | 1221 isolate->set_serialize_partial_snapshot_cache_length(length + 1); |
1190 return length; | 1222 return length; |
1191 } | 1223 } |
1192 | 1224 |
1193 | 1225 |
1194 int PartialSerializer::RootIndex(HeapObject* heap_object) { | 1226 int Serializer::RootIndex(HeapObject* heap_object) { |
1195 for (int i = 0; i < Heap::kRootListLength; i++) { | 1227 Heap* heap = HEAP; |
1196 Object* root = HEAP->roots_address()[i]; | 1228 if (heap->InNewSpace(heap_object)) return kInvalidRootIndex; |
1197 if (root == heap_object) return i; | 1229 for (int i = 0; i < root_index_wave_front_; i++) { |
| 1230 Object* root = heap->roots_array_start()[i]; |
| 1231 if (!root->IsSmi() && root == heap_object) return i; |
1198 } | 1232 } |
1199 return kInvalidRootIndex; | 1233 return kInvalidRootIndex; |
1200 } | 1234 } |
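RootIndex now scans only up to root_index_wave_front_, which VisitPointers advances while the root list itself is being visited, so objects are encoded as root-array references only to root slots the deserializer will already have filled in when the reference is read. A toy model of that interaction; ToyRootTracker and Obj are ad hoc names, not V8 types:

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    struct Obj { int id; };  // toy stand-in for a heap object

    class ToyRootTracker {
     public:
      explicit ToyRootTracker(const std::vector<Obj*>& roots)
          : roots_(roots), wave_front_(0) {}

      // Called as the root list is visited slot by slot, the way VisitPointers
      // does when start == roots_array_start().
      void VisitRootSlot(int index) {
        wave_front_ = std::max(wave_front_, static_cast<size_t>(index));
      }

      // Only roots strictly before the wave front may be encoded by index;
      // later roots have not been emitted yet and are serialized normally.
      int RootIndex(Obj* object) const {
        for (size_t i = 0; i < wave_front_; i++) {
          if (roots_[i] == object) return static_cast<int>(i);
        }
        return -1;  // stands in for kInvalidRootIndex
      }

     private:
      const std::vector<Obj*>& roots_;
      size_t wave_front_;
    };

    int main() {
      Obj a{0}, b{1}, c{2};
      std::vector<Obj*> roots = {&a, &b, &c};
      ToyRootTracker tracker(roots);
      tracker.VisitRootSlot(0);
      tracker.VisitRootSlot(1);
      printf("%d %d\n", tracker.RootIndex(&a), tracker.RootIndex(&c));  // 0 -1
      return 0;
    }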
1201 | 1235 |
1202 | 1236 |
1203 // Encode the location of an already deserialized object in order to write its | 1237 // Encode the location of an already deserialized object in order to write its |
1204 // location into a later object. We can encode the location as an offset from | 1238 // location into a later object. We can encode the location as an offset from |
1205 // the start of the deserialized objects or as an offset backwards from the | 1239 // the start of the deserialized objects or as an offset backwards from the |
1206 // current allocation pointer. | 1240 // current allocation pointer. |
1207 void Serializer::SerializeReferenceToPreviousObject( | 1241 void Serializer::SerializeReferenceToPreviousObject( |
(...skipping 15 matching lines...)
1223 // For new space it is always simple to encode back from current allocation. | 1257 // For new space it is always simple to encode back from current allocation. |
1224 if (offset < address) { | 1258 if (offset < address) { |
1225 from_start = false; | 1259 from_start = false; |
1226 address = offset; | 1260 address = offset; |
1227 } | 1261 } |
1228 } | 1262 } |
1229 // If we are actually dealing with real offsets (and not a numbering of | 1263 // If we are actually dealing with real offsets (and not a numbering of |
1230 // all objects) then we should shift out the bits that are always 0. | 1264 // all objects) then we should shift out the bits that are always 0. |
1231 if (!SpaceIsLarge(space)) address >>= kObjectAlignmentBits; | 1265 if (!SpaceIsLarge(space)) address >>= kObjectAlignmentBits; |
1232 if (from_start) { | 1266 if (from_start) { |
1233 #define COMMON_REFS_CASE(pseudo_space, actual_space, offset) \ | 1267 sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer"); |
1234 if (space == actual_space && address == offset && \ | 1268 sink_->PutInt(address, "address"); |
1235 how_to_code == kPlain && where_to_point == kStartOfObject) { \ | |
1236 sink_->Put(kFromStart + how_to_code + where_to_point + \ | |
1237 pseudo_space, "RefSer"); \ | |
1238 } else /* NOLINT */ | |
1239 COMMON_REFERENCE_PATTERNS(COMMON_REFS_CASE) | |
1240 #undef COMMON_REFS_CASE | |
1241 { /* NOLINT */ | |
1242 sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer"); | |
1243 sink_->PutInt(address, "address"); | |
1244 } | |
1245 } else { | 1269 } else { |
1246 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer"); | 1270 sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer"); |
1247 sink_->PutInt(address, "address"); | 1271 sink_->PutInt(address, "address"); |
1248 } | 1272 } |
1249 } | 1273 } |
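As the comment above says, a previously serialized object is referenced either by its offset from the start of its space or by a back-offset from the current allocation point, whichever is smaller, and the always-zero alignment bits are shifted out before the varint is written. A tiny arithmetic illustration; the offsets and the value of kObjectAlignmentBits are made up:

    #include <cstdio>

    // Assumed alignment for the illustration: pointer-size alignment leaves the
    // low bits of every object offset zero, so they can be shifted away.
    static const int kObjectAlignmentBits = 3;

    int main() {
      unsigned chunk_offset = 0x1a40;      // offset of the object from space start
      unsigned allocation_point = 0x1b00;  // current allocation offset in the space
      unsigned back_offset = allocation_point - chunk_offset;  // 0xc0

      // Whichever encoding is smaller wins; both drop the always-zero low bits.
      printf("from start: %#x\n", chunk_offset >> kObjectAlignmentBits);
      printf("back ref:   %#x\n", back_offset >> kObjectAlignmentBits);
      return 0;
    }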
1250 | 1274 |
1251 | 1275 |
1252 void StartupSerializer::SerializeObject( | 1276 void StartupSerializer::SerializeObject( |
1253 Object* o, | 1277 Object* o, |
1254 HowToCode how_to_code, | 1278 HowToCode how_to_code, |
1255 WhereToPoint where_to_point) { | 1279 WhereToPoint where_to_point) { |
1256 CHECK(o->IsHeapObject()); | 1280 CHECK(o->IsHeapObject()); |
1257 HeapObject* heap_object = HeapObject::cast(o); | 1281 HeapObject* heap_object = HeapObject::cast(o); |
1258 | 1282 |
| 1283 int root_index; |
| 1284 if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) { |
| 1285 PutRoot(root_index, heap_object, how_to_code, where_to_point); |
| 1286 return; |
| 1287 } |
| 1288 |
1259 if (address_mapper_.IsMapped(heap_object)) { | 1289 if (address_mapper_.IsMapped(heap_object)) { |
1260 int space = SpaceOfAlreadySerializedObject(heap_object); | 1290 int space = SpaceOfAlreadySerializedObject(heap_object); |
1261 int address = address_mapper_.MappedTo(heap_object); | 1291 int address = address_mapper_.MappedTo(heap_object); |
1262 SerializeReferenceToPreviousObject(space, | 1292 SerializeReferenceToPreviousObject(space, |
1263 address, | 1293 address, |
1264 how_to_code, | 1294 how_to_code, |
1265 where_to_point); | 1295 where_to_point); |
1266 } else { | 1296 } else { |
1267 // Object has not yet been serialized. Serialize it here. | 1297 // Object has not yet been serialized. Serialize it here. |
1268 ObjectSerializer object_serializer(this, | 1298 ObjectSerializer object_serializer(this, |
(...skipping 10 matching lines...)
1279 for (int i = Isolate::Current()->serialize_partial_snapshot_cache_length(); | 1309 for (int i = Isolate::Current()->serialize_partial_snapshot_cache_length(); |
1280 i < Isolate::kPartialSnapshotCacheCapacity; | 1310 i < Isolate::kPartialSnapshotCacheCapacity; |
1281 i++) { | 1311 i++) { |
1282 sink_->Put(kRootArray + kPlain + kStartOfObject, "RootSerialization"); | 1312 sink_->Put(kRootArray + kPlain + kStartOfObject, "RootSerialization"); |
1283 sink_->PutInt(Heap::kUndefinedValueRootIndex, "root_index"); | 1313 sink_->PutInt(Heap::kUndefinedValueRootIndex, "root_index"); |
1284 } | 1314 } |
1285 HEAP->IterateWeakRoots(this, VISIT_ALL); | 1315 HEAP->IterateWeakRoots(this, VISIT_ALL); |
1286 } | 1316 } |
1287 | 1317 |
1288 | 1318 |
| 1319 void Serializer::PutRoot(int root_index, |
| 1320 HeapObject* object, |
| 1321 SerializerDeserializer::HowToCode how_to_code, |
| 1322 SerializerDeserializer::WhereToPoint where_to_point) { |
| 1323 if (how_to_code == kPlain && |
| 1324 where_to_point == kStartOfObject && |
| 1325 root_index < kRootArrayNumberOfConstantEncodings && |
| 1326 !HEAP->InNewSpace(object)) { |
| 1327 if (root_index < kRootArrayNumberOfLowConstantEncodings) { |
| 1328 sink_->Put(kRootArrayLowConstants + root_index, "RootLoConstant"); |
| 1329 } else { |
| 1330 sink_->Put(kRootArrayHighConstants + root_index - |
| 1331 kRootArrayNumberOfLowConstantEncodings, |
| 1332 "RootHiConstant"); |
| 1333 } |
| 1334 } else { |
| 1335 sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization"); |
| 1336 sink_->PutInt(root_index, "root_index"); |
| 1337 } |
| 1338 } |
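PutRoot gives frequently used roots a one-byte encoding: indices in the first block become kRootArrayLowConstants + index, the next block becomes kRootArrayHighConstants + (index - block size), and everything else (large indices, non-plain references, new-space roots) falls back to kRootArray plus a varint index. A sketch of that decision; the block size of 16 follows from the SIXTEEN_CASES handlers in the deserializer, while the opcode byte values here are illustrative only:

    #include <cstdio>

    // Illustrative opcode values; block sizes follow from SIXTEEN_CASES above.
    static const int kRootArray = 0x09;
    static const int kRootArrayLowConstants = 0x20;
    static const int kRootArrayHighConstants = 0x50;
    static const int kRootArrayNumberOfLowConstantEncodings = 16;
    static const int kRootArrayNumberOfConstantEncodings = 32;

    // Mirrors the PutRoot decision: one byte for small root indices referenced
    // plainly from old space, otherwise the generic opcode plus a varint index.
    static void EmitRoot(int root_index, bool plain_start_of_object,
                         bool in_new_space) {
      if (plain_start_of_object &&
          root_index < kRootArrayNumberOfConstantEncodings &&
          !in_new_space) {
        if (root_index < kRootArrayNumberOfLowConstantEncodings) {
          printf("index %d -> 1 byte: 0x%x\n",
                 root_index, kRootArrayLowConstants + root_index);
        } else {
          printf("index %d -> 1 byte: 0x%x\n", root_index,
                 kRootArrayHighConstants + root_index -
                     kRootArrayNumberOfLowConstantEncodings);
        }
      } else {
        printf("index %d -> opcode 0x%x + varint %d\n",
               root_index, kRootArray, root_index);
      }
    }

    int main() {
      EmitRoot(3, true, false);   // low-constant block: single byte
      EmitRoot(20, true, false);  // high-constant block: single byte
      EmitRoot(90, true, false);  // too large for a constant: generic encoding
      return 0;
    }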
| 1339 |
| 1340 |
1289 void PartialSerializer::SerializeObject( | 1341 void PartialSerializer::SerializeObject( |
1290 Object* o, | 1342 Object* o, |
1291 HowToCode how_to_code, | 1343 HowToCode how_to_code, |
1292 WhereToPoint where_to_point) { | 1344 WhereToPoint where_to_point) { |
1293 CHECK(o->IsHeapObject()); | 1345 CHECK(o->IsHeapObject()); |
1294 HeapObject* heap_object = HeapObject::cast(o); | 1346 HeapObject* heap_object = HeapObject::cast(o); |
1295 | 1347 |
1296 int root_index; | 1348 int root_index; |
1297 if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) { | 1349 if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) { |
1298 sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization"); | 1350 PutRoot(root_index, heap_object, how_to_code, where_to_point); |
1299 sink_->PutInt(root_index, "root_index"); | |
1300 return; | 1351 return; |
1301 } | 1352 } |
1302 | 1353 |
1303 if (ShouldBeInThePartialSnapshotCache(heap_object)) { | 1354 if (ShouldBeInThePartialSnapshotCache(heap_object)) { |
1304 int cache_index = PartialSnapshotCacheIndex(heap_object); | 1355 int cache_index = PartialSnapshotCacheIndex(heap_object); |
1305 sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point, | 1356 sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point, |
1306 "PartialSnapshotCache"); | 1357 "PartialSnapshotCache"); |
1307 sink_->PutInt(cache_index, "partial_snapshot_cache_index"); | 1358 sink_->PutInt(cache_index, "partial_snapshot_cache_index"); |
1308 return; | 1359 return; |
1309 } | 1360 } |
(...skipping 57 matching lines...)
1367 | 1418 |
1368 | 1419 |
1369 void Serializer::ObjectSerializer::VisitPointers(Object** start, | 1420 void Serializer::ObjectSerializer::VisitPointers(Object** start, |
1370 Object** end) { | 1421 Object** end) { |
1371 Object** current = start; | 1422 Object** current = start; |
1372 while (current < end) { | 1423 while (current < end) { |
1373 while (current < end && (*current)->IsSmi()) current++; | 1424 while (current < end && (*current)->IsSmi()) current++; |
1374 if (current < end) OutputRawData(reinterpret_cast<Address>(current)); | 1425 if (current < end) OutputRawData(reinterpret_cast<Address>(current)); |
1375 | 1426 |
1376 while (current < end && !(*current)->IsSmi()) { | 1427 while (current < end && !(*current)->IsSmi()) { |
1377 serializer_->SerializeObject(*current, kPlain, kStartOfObject); | 1428 if (current != start && |
1378 bytes_processed_so_far_ += kPointerSize; | 1429 current[0] == current[-1] && |
1379 current++; | 1430 !HEAP->InNewSpace(*current)) { |
| 1431 int repeat_count = 1; |
 | 1432 while (current + repeat_count < end && current[repeat_count] == current[0]) { |
| 1433 repeat_count++; |
| 1434 } |
| 1435 current += repeat_count; |
| 1436 bytes_processed_so_far_ += repeat_count * kPointerSize; |
| 1437 if (repeat_count > kMaxRepeats) { |
| 1438 sink_->Put(kRepeat, "SerializeRepeats"); |
| 1439 sink_->PutInt(repeat_count, "SerializeRepeats"); |
| 1440 } else { |
| 1441 sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats"); |
| 1442 } |
| 1443 } else { |
| 1444 serializer_->SerializeObject(*current, kPlain, kStartOfObject); |
| 1445 bytes_processed_so_far_ += kPointerSize; |
| 1446 current++; |
| 1447 } |
1380 } | 1448 } |
1381 } | 1449 } |
1382 } | 1450 } |
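The rewritten loop collapses a run of identical, non-new-space pointers into a repeat: short runs fit in one constant-repeat byte, longer ones use kRepeat plus a varint count. A standalone sketch of the bounded run scan and the encoding choice over a plain int array; it simplifies by counting the whole run (the real code serializes the first word normally and repeats only the tail), and none of these names are V8's:

    #include <cstdio>

    static const int kMaxRepeats = 12;  // matches the STATIC_ASSERT above

    // Counts how many consecutive slots from i hold the same value, without
    // reading past the end, and reports which encoding would be chosen.
    static int EmitRun(const int* slots, int length, int i) {
      int repeat_count = 1;
      while (i + repeat_count < length && slots[i + repeat_count] == slots[i]) {
        repeat_count++;
      }
      if (repeat_count == 1) {
        printf("slot %d: no run, serialize normally\n", i);
      } else if (repeat_count > kMaxRepeats) {
        printf("slot %d: kRepeat + varint(%d)\n", i, repeat_count);
      } else {
        printf("slot %d: one constant-repeat byte for %d copies\n",
               i, repeat_count);
      }
      return repeat_count;
    }

    int main() {
      int slots[] = {7, 7, 7, 7, 5, 5, 9};
      for (int i = 0; i < 7; i += EmitRun(slots, 7, i)) {}
      return 0;
    }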
1383 | 1451 |
1384 | 1452 |
1385 void Serializer::ObjectSerializer::VisitExternalReferences(Address* start, | 1453 void Serializer::ObjectSerializer::VisitExternalReferences(Address* start, |
1386 Address* end) { | 1454 Address* end) { |
1387 Address references_start = reinterpret_cast<Address>(start); | 1455 Address references_start = reinterpret_cast<Address>(start); |
1388 OutputRawData(references_start); | 1456 OutputRawData(references_start); |
1389 | 1457 |
(...skipping 163 matching lines...)
1553 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); | 1621 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); |
1554 } | 1622 } |
1555 } | 1623 } |
1556 int allocation_address = fullness_[space]; | 1624 int allocation_address = fullness_[space]; |
1557 fullness_[space] = allocation_address + size; | 1625 fullness_[space] = allocation_address + size; |
1558 return allocation_address; | 1626 return allocation_address; |
1559 } | 1627 } |
1560 | 1628 |
1561 | 1629 |
1562 } } // namespace v8::internal | 1630 } } // namespace v8::internal |