| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 529 matching lines...) |
| 540 } else if (space_index == kLargeFixedArray) { | 540 } else if (space_index == kLargeFixedArray) { |
| 541 new_allocation = lo_space->AllocateRawFixedArray(size); | 541 new_allocation = lo_space->AllocateRawFixedArray(size); |
| 542 } else { | 542 } else { |
| 543 ASSERT_EQ(kLargeCode, space_index); | 543 ASSERT_EQ(kLargeCode, space_index); |
| 544 new_allocation = lo_space->AllocateRawCode(size); | 544 new_allocation = lo_space->AllocateRawCode(size); |
| 545 } | 545 } |
| 546 ASSERT(!new_allocation->IsFailure()); | 546 ASSERT(!new_allocation->IsFailure()); |
| 547 HeapObject* new_object = HeapObject::cast(new_allocation); | 547 HeapObject* new_object = HeapObject::cast(new_allocation); |
| 548 // Record all large objects in the same space. | 548 // Record all large objects in the same space. |
| 549 address = new_object->address(); | 549 address = new_object->address(); |
| 550 high_water_[LO_SPACE] = address + size; | 550 pages_[LO_SPACE].Add(address); |
| 551 } | 551 } |
| 552 last_object_address_ = address; | 552 last_object_address_ = address; |
| 553 return address; | 553 return address; |
| 554 } | 554 } |
| 555 | 555 |
| 556 | 556 |
| 557 // This returns the address of an object that has been described in the | 557 // This returns the address of an object that has been described in the |
| 558 // snapshot as being offset bytes back in a particular space. | 558 // snapshot as being offset bytes back in a particular space. |
| 559 HeapObject* Deserializer::GetAddressFromEnd(int space) { | 559 HeapObject* Deserializer::GetAddressFromEnd(int space) { |
| 560 int offset = source_->GetInt(); | 560 int offset = source_->GetInt(); |
| (...skipping 332 matching lines...) |
| 893 character = *tag++; | 893 character = *tag++; |
| 894 sink_->PutSection(character, "TagCharacter"); | 894 sink_->PutSection(character, "TagCharacter"); |
| 895 } while (character != 0); | 895 } while (character != 0); |
| 896 } | 896 } |
| 897 | 897 |
| 898 #endif | 898 #endif |
| 899 | 899 |
| 900 Serializer::Serializer(SnapshotByteSink* sink) | 900 Serializer::Serializer(SnapshotByteSink* sink) |
| 901 : sink_(sink), | 901 : sink_(sink), |
| 902 current_root_index_(0), | 902 current_root_index_(0), |
| 903 external_reference_encoder_(NULL), | 903 external_reference_encoder_(new ExternalReferenceEncoder), |
| 904 large_object_total_(0) { | 904 large_object_total_(0) { |
| 905 for (int i = 0; i <= LAST_SPACE; i++) { | 905 for (int i = 0; i <= LAST_SPACE; i++) { |
| 906 fullness_[i] = 0; | 906 fullness_[i] = 0; |
| 907 } | 907 } |
| 908 } | 908 } |
| 909 | 909 |
| 910 | 910 |
| | 911 Serializer::~Serializer() { |
| | 912 delete external_reference_encoder_; |
| | 913 } |
| | 914 |
| | 915 |
| 911 void StartupSerializer::SerializeStrongReferences() { | 916 void StartupSerializer::SerializeStrongReferences() { |
| 912 // No active threads. | 917 // No active threads. |
| 913 CHECK_EQ(NULL, ThreadState::FirstInUse()); | 918 CHECK_EQ(NULL, ThreadState::FirstInUse()); |
| 914 // No active or weak handles. | 919 // No active or weak handles. |
| 915 CHECK(HandleScopeImplementer::instance()->blocks()->is_empty()); | 920 CHECK(HandleScopeImplementer::instance()->blocks()->is_empty()); |
| 916 CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles()); | 921 CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles()); |
| 917 CHECK_EQ(NULL, external_reference_encoder_); | |
| 918 // We don't support serializing installed extensions. | 922 // We don't support serializing installed extensions. |
| 919 for (RegisteredExtension* ext = RegisteredExtension::first_extension(); | 923 for (RegisteredExtension* ext = RegisteredExtension::first_extension(); |
| 920 ext != NULL; | 924 ext != NULL; |
| 921 ext = ext->next()) { | 925 ext = ext->next()) { |
| 922 CHECK_NE(v8::INSTALLED, ext->state()); | 926 CHECK_NE(v8::INSTALLED, ext->state()); |
| 923 } | 927 } |
| 924 external_reference_encoder_ = new ExternalReferenceEncoder(); | |
| 925 Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG); | 928 Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG); |
| 926 delete external_reference_encoder_; | |
| 927 external_reference_encoder_ = NULL; | |
| 928 } | 929 } |
| 929 | 930 |
| 930 | 931 |
| 931 void PartialSerializer::Serialize(Object** object) { | 932 void PartialSerializer::Serialize(Object** object) { |
| 932 external_reference_encoder_ = new ExternalReferenceEncoder(); | |
| 933 this->VisitPointer(object); | 933 this->VisitPointer(object); |
| 934 | 934 |
| 935 // After we have done the partial serialization the partial snapshot cache | 935 // After we have done the partial serialization the partial snapshot cache |
| 936 // will contain some references needed to decode the partial snapshot. We | 936 // will contain some references needed to decode the partial snapshot. We |
| 937 // fill it up with undefineds so it has a predictable length so the | 937 // fill it up with undefineds so it has a predictable length so the |
| 938 // deserialization code doesn't need to know the length. | 938 // deserialization code doesn't need to know the length. |
| 939 for (int index = partial_snapshot_cache_length_; | 939 for (int index = partial_snapshot_cache_length_; |
| 940 index < kPartialSnapshotCacheCapacity; | 940 index < kPartialSnapshotCacheCapacity; |
| 941 index++) { | 941 index++) { |
| 942 partial_snapshot_cache_[index] = Heap::undefined_value(); | 942 partial_snapshot_cache_[index] = Heap::undefined_value(); |
| 943 startup_serializer_->VisitPointer(&partial_snapshot_cache_[index]); | 943 startup_serializer_->VisitPointer(&partial_snapshot_cache_[index]); |
| 944 } | 944 } |
| 945 partial_snapshot_cache_length_ = kPartialSnapshotCacheCapacity; | 945 partial_snapshot_cache_length_ = kPartialSnapshotCacheCapacity; |
| 946 | |
| 947 delete external_reference_encoder_; | |
| 948 external_reference_encoder_ = NULL; | |
| 949 } | 946 } |
| 950 | 947 |
| 951 | 948 |
| 952 void Serializer::VisitPointers(Object** start, Object** end) { | 949 void Serializer::VisitPointers(Object** start, Object** end) { |
| 953 for (Object** current = start; current < end; current++) { | 950 for (Object** current = start; current < end; current++) { |
| 954 if ((*current)->IsSmi()) { | 951 if ((*current)->IsSmi()) { |
| 955 sink_->Put(RAW_DATA_SERIALIZATION, "RawData"); | 952 sink_->Put(RAW_DATA_SERIALIZATION, "RawData"); |
| 956 sink_->PutInt(kPointerSize, "length"); | 953 sink_->PutInt(kPointerSize, "length"); |
| 957 for (int i = 0; i < kPointerSize; i++) { | 954 for (int i = 0; i < kPointerSize; i++) { |
| 958 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte"); | 955 sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte"); |
| (...skipping 31 matching lines...) |
| 990 void SerializerDeserializer::SetSnapshotCacheSize(int size) { | 987 void SerializerDeserializer::SetSnapshotCacheSize(int size) { |
| 991 partial_snapshot_cache_length_ = size; | 988 partial_snapshot_cache_length_ = size; |
| 992 } | 989 } |
| 993 | 990 |
| 994 | 991 |
| 995 int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) { | 992 int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) { |
| 996 for (int i = 0; i < partial_snapshot_cache_length_; i++) { | 993 for (int i = 0; i < partial_snapshot_cache_length_; i++) { |
| 997 Object* entry = partial_snapshot_cache_[i]; | 994 Object* entry = partial_snapshot_cache_[i]; |
| 998 if (entry == heap_object) return i; | 995 if (entry == heap_object) return i; |
| 999 } | 996 } |
| | 997 |
| 1000 // We didn't find the object in the cache. So we add it to the cache and | 998 // We didn't find the object in the cache. So we add it to the cache and |
| 1001 // then visit the pointer so that it becomes part of the startup snapshot | 999 // then visit the pointer so that it becomes part of the startup snapshot |
| 1002 // and we can refer to it from the partial snapshot. | 1000 // and we can refer to it from the partial snapshot. |
| 1003 int length = partial_snapshot_cache_length_; | 1001 int length = partial_snapshot_cache_length_; |
| 1004 CHECK(length < kPartialSnapshotCacheCapacity); | 1002 CHECK(length < kPartialSnapshotCacheCapacity); |
| 1005 partial_snapshot_cache_[length] = heap_object; | 1003 partial_snapshot_cache_[length] = heap_object; |
| 1006 startup_serializer_->VisitPointer(&partial_snapshot_cache_[length]); | 1004 startup_serializer_->VisitPointer(&partial_snapshot_cache_[length]); |
| 1007 // We don't recurse from the startup snapshot generator into the partial | 1005 // We don't recurse from the startup snapshot generator into the partial |
| 1008 // snapshot generator. | 1006 // snapshot generator. |
| 1009 ASSERT(length == partial_snapshot_cache_length_); | 1007 ASSERT(length == partial_snapshot_cache_length_); |
| (...skipping 350 matching lines...) |
| 1360 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); | 1358 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); |
| 1361 } | 1359 } |
| 1362 } | 1360 } |
| 1363 int allocation_address = fullness_[space]; | 1361 int allocation_address = fullness_[space]; |
| 1364 fullness_[space] = allocation_address + size; | 1362 fullness_[space] = allocation_address + size; |
| 1365 return allocation_address; | 1363 return allocation_address; |
| 1366 } | 1364 } |
| 1367 | 1365 |
| 1368 | 1366 |
| 1369 } } // namespace v8::internal | 1367 } } // namespace v8::internal |
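The substantive change in this hunk is that the ExternalReferenceEncoder, which the startup and partial serializers used to new up and delete around each serialization pass, is now owned by the Serializer itself: the constructor allocates it and the new destructor frees it. Below is a minimal standalone sketch of that ownership pattern, using hypothetical Encoder/Serializer names rather than V8's real classes; it illustrates the pattern only and is not part of the patch.

// Minimal standalone sketch (hypothetical names, not V8's actual classes):
// the encoder is created once in the serializer's constructor and freed in
// its destructor, instead of being newed and deleted around every pass.
#include <cstdio>

class Encoder {
 public:
  Encoder()  { std::printf("encoder created\n"); }
  ~Encoder() { std::printf("encoder destroyed\n"); }
  int Encode(int reference) const { return reference ^ 0x5A; }  // stand-in
};

class Serializer {
 public:
  Serializer() : encoder_(new Encoder) {}   // allocated up front
  ~Serializer() { delete encoder_; }        // released with the serializer
  int Serialize(int reference) { return encoder_->Encode(reference); }

 private:
  Encoder* encoder_;  // owned for the serializer's whole lifetime
};

int main() {
  Serializer serializer;
  // Two passes reuse the same encoder; no per-pass allocation.
  std::printf("%d %d\n", serializer.Serialize(1), serializer.Serialize(2));
  return 0;
}

Holding the encoder for the serializer's lifetime lets both serialization entry points reuse one encoder instance instead of rebuilding it on every pass, which is what the removed per-pass new/delete lines in SerializeStrongReferences() and PartialSerializer::Serialize() used to do.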