| Index: src/serialize.cc
|
| ===================================================================
|
| --- src/serialize.cc (revision 7267)
|
| +++ src/serialize.cc (working copy)
|
| @@ -38,7 +38,6 @@
|
| #include "serialize.h"
|
| #include "stub-cache.h"
|
| #include "v8threads.h"
|
| -#include "top.h"
|
| #include "bootstrapper.h"
|
|
|
| namespace v8 {
|
| @@ -68,9 +67,14 @@
|
| // hashmaps in ExternalReferenceEncoder and ExternalReferenceDecoder.
|
| class ExternalReferenceTable {
|
| public:
|
| - static ExternalReferenceTable* instance() {
|
| - if (!instance_) instance_ = new ExternalReferenceTable();
|
| - return instance_;
|
| + static ExternalReferenceTable* instance(Isolate* isolate) {
|
| + ExternalReferenceTable* external_reference_table =
|
| + isolate->external_reference_table();
|
| + if (external_reference_table == NULL) {
|
| + external_reference_table = new ExternalReferenceTable(isolate);
|
| + isolate->set_external_reference_table(external_reference_table);
|
| + }
|
| + return external_reference_table;
|
| }
|
|
|
| int size() const { return refs_.length(); }
|
| @@ -84,9 +88,9 @@
|
| int max_id(int code) { return max_id_[code]; }
|
|
|
| private:
|
| - static ExternalReferenceTable* instance_;
|
| -
|
| - ExternalReferenceTable() : refs_(64) { PopulateTable(); }
|
| + explicit ExternalReferenceTable(Isolate* isolate) : refs_(64) {
|
| + PopulateTable(isolate);
|
| + }
|
| ~ExternalReferenceTable() { }
|
|
|
| struct ExternalReferenceEntry {
|
| @@ -95,7 +99,7 @@
|
| const char* name;
|
| };
|
|
|
| - void PopulateTable();
|
| + void PopulateTable(Isolate* isolate);
|
|
|
| // For a few types of references, we can get their address from their id.
|
| void AddFromId(TypeCode type, uint16_t id, const char* name);
|
| @@ -108,9 +112,6 @@
|
| };
|
|
|
|
|
| -ExternalReferenceTable* ExternalReferenceTable::instance_ = NULL;
|
| -
|
| -
|
| void ExternalReferenceTable::AddFromId(TypeCode type,
|
| uint16_t id,
|
| const char* name) {
|
| @@ -159,7 +160,7 @@
|
| }
|
|
|
|
|
| -void ExternalReferenceTable::PopulateTable() {
|
| +void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
|
| for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
|
| max_id_[type_code] = 0;
|
| }
|
| @@ -225,19 +226,19 @@
|
|
|
| #ifdef ENABLE_DEBUGGER_SUPPORT
|
| // Debug addresses
|
| - Add(Debug_Address(Debug::k_after_break_target_address).address(),
|
| + Add(Debug_Address(Debug::k_after_break_target_address).address(isolate),
|
| DEBUG_ADDRESS,
|
| Debug::k_after_break_target_address << kDebugIdShift,
|
| "Debug::after_break_target_address()");
|
| - Add(Debug_Address(Debug::k_debug_break_slot_address).address(),
|
| + Add(Debug_Address(Debug::k_debug_break_slot_address).address(isolate),
|
| DEBUG_ADDRESS,
|
| Debug::k_debug_break_slot_address << kDebugIdShift,
|
| "Debug::debug_break_slot_address()");
|
| - Add(Debug_Address(Debug::k_debug_break_return_address).address(),
|
| + Add(Debug_Address(Debug::k_debug_break_return_address).address(isolate),
|
| DEBUG_ADDRESS,
|
| Debug::k_debug_break_return_address << kDebugIdShift,
|
| "Debug::debug_break_return_address()");
|
| - Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(),
|
| + Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(isolate),
|
| DEBUG_ADDRESS,
|
| Debug::k_restarter_frame_function_pointer << kDebugIdShift,
|
| "Debug::restarter_frame_function_pointer_address()");
|
| @@ -245,14 +246,14 @@
|
|
|
| // Stat counters
|
| struct StatsRefTableEntry {
|
| - StatsCounter* counter;
|
| + StatsCounter* (Counters::*counter)();
|
| uint16_t id;
|
| const char* name;
|
| };
|
|
|
| - static const StatsRefTableEntry stats_ref_table[] = {
|
| + const StatsRefTableEntry stats_ref_table[] = {
|
| #define COUNTER_ENTRY(name, caption) \
|
| - { &Counters::name, \
|
| + { &Counters::name, \
|
| Counters::k_##name, \
|
| "Counters::" #name },
|
|
|
| @@ -261,33 +262,28 @@
|
| #undef COUNTER_ENTRY
|
| }; // end of stats_ref_table[].
|
|
|
| + Counters* counters = isolate->counters();
|
| for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) {
|
| - Add(reinterpret_cast<Address>(
|
| - GetInternalPointer(stats_ref_table[i].counter)),
|
| + Add(reinterpret_cast<Address>(GetInternalPointer(
|
| + (counters->*(stats_ref_table[i].counter))())),
|
| STATS_COUNTER,
|
| stats_ref_table[i].id,
|
| stats_ref_table[i].name);
|
| }
|
|
|
| // Top addresses
|
| - const char* top_address_format = "Top::%s";
|
|
|
| const char* AddressNames[] = {
|
| -#define C(name) #name,
|
| - TOP_ADDRESS_LIST(C)
|
| - TOP_ADDRESS_LIST_PROF(C)
|
| +#define C(name) "Isolate::" #name,
|
| + ISOLATE_ADDRESS_LIST(C)
|
| + ISOLATE_ADDRESS_LIST_PROF(C)
|
| NULL
|
| #undef C
|
| };
|
|
|
| - int top_format_length = StrLength(top_address_format) - 2;
|
| - for (uint16_t i = 0; i < Top::k_top_address_count; ++i) {
|
| - const char* address_name = AddressNames[i];
|
| - Vector<char> name =
|
| - Vector<char>::New(top_format_length + StrLength(address_name) + 1);
|
| - const char* chars = name.start();
|
| - OS::SNPrintF(name, top_address_format, address_name);
|
| - Add(Top::get_address_from_id((Top::AddressId)i), TOP_ADDRESS, i, chars);
|
| + for (uint16_t i = 0; i < Isolate::k_isolate_address_count; ++i) {
|
| + Add(isolate->get_address_from_id((Isolate::AddressId)i),
|
| + TOP_ADDRESS, i, AddressNames[i]);
|
| }
|
|
|
| // Accessors
|
| @@ -300,20 +296,22 @@
|
| ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION)
|
| #undef ACCESSOR_DESCRIPTOR_DECLARATION
|
|
|
| + StubCache* stub_cache = isolate->stub_cache();
|
| +
|
| // Stub cache tables
|
| - Add(SCTableReference::keyReference(StubCache::kPrimary).address(),
|
| + Add(stub_cache->key_reference(StubCache::kPrimary).address(),
|
| STUB_CACHE_TABLE,
|
| 1,
|
| "StubCache::primary_->key");
|
| - Add(SCTableReference::valueReference(StubCache::kPrimary).address(),
|
| + Add(stub_cache->value_reference(StubCache::kPrimary).address(),
|
| STUB_CACHE_TABLE,
|
| 2,
|
| "StubCache::primary_->value");
|
| - Add(SCTableReference::keyReference(StubCache::kSecondary).address(),
|
| + Add(stub_cache->key_reference(StubCache::kSecondary).address(),
|
| STUB_CACHE_TABLE,
|
| 3,
|
| "StubCache::secondary_->key");
|
| - Add(SCTableReference::valueReference(StubCache::kSecondary).address(),
|
| + Add(stub_cache->value_reference(StubCache::kSecondary).address(),
|
| STUB_CACHE_TABLE,
|
| 4,
|
| "StubCache::secondary_->value");
|
| @@ -327,12 +325,10 @@
|
| RUNTIME_ENTRY,
|
| 2,
|
| "V8::FillHeapNumberWithRandom");
|
| -
|
| Add(ExternalReference::random_uint32_function().address(),
|
| RUNTIME_ENTRY,
|
| 3,
|
| "V8::Random");
|
| -
|
| Add(ExternalReference::delete_handle_scope_extensions().address(),
|
| RUNTIME_ENTRY,
|
| 4,
|
| @@ -486,33 +482,38 @@
|
| UNCLASSIFIED,
|
| 36,
|
| "LDoubleConstant::one_half");
|
| + Add(ExternalReference::isolate_address().address(),
|
| + UNCLASSIFIED,
|
| + 37,
|
| + "isolate");
|
| Add(ExternalReference::address_of_minus_zero().address(),
|
| UNCLASSIFIED,
|
| - 37,
|
| + 38,
|
| "LDoubleConstant::minus_zero");
|
| Add(ExternalReference::address_of_negative_infinity().address(),
|
| UNCLASSIFIED,
|
| - 38,
|
| + 39,
|
| "LDoubleConstant::negative_infinity");
|
| Add(ExternalReference::power_double_double_function().address(),
|
| UNCLASSIFIED,
|
| - 39,
|
| + 40,
|
| "power_double_double_function");
|
| Add(ExternalReference::power_double_int_function().address(),
|
| UNCLASSIFIED,
|
| - 40,
|
| + 41,
|
| "power_double_int_function");
|
| Add(ExternalReference::arguments_marker_location().address(),
|
| UNCLASSIFIED,
|
| - 41,
|
| + 42,
|
| "Factory::arguments_marker().location()");
|
| }
|
|
|
|
|
| ExternalReferenceEncoder::ExternalReferenceEncoder()
|
| - : encodings_(Match) {
|
| + : encodings_(Match),
|
| + isolate_(Isolate::Current()) {
|
| ExternalReferenceTable* external_references =
|
| - ExternalReferenceTable::instance();
|
| + ExternalReferenceTable::instance(isolate_);
|
| for (int i = 0; i < external_references->size(); ++i) {
|
| Put(external_references->address(i), i);
|
| }
|
| @@ -522,20 +523,22 @@
|
| uint32_t ExternalReferenceEncoder::Encode(Address key) const {
|
| int index = IndexOf(key);
|
| ASSERT(key == NULL || index >= 0);
|
| - return index >=0 ? ExternalReferenceTable::instance()->code(index) : 0;
|
| +  return index >= 0 ?
|
| + ExternalReferenceTable::instance(isolate_)->code(index) : 0;
|
| }
|
|
|
|
|
| const char* ExternalReferenceEncoder::NameOfAddress(Address key) const {
|
| int index = IndexOf(key);
|
| - return index >=0 ? ExternalReferenceTable::instance()->name(index) : NULL;
|
| + return index >= 0 ?
|
| + ExternalReferenceTable::instance(isolate_)->name(index) : NULL;
|
| }
|
|
|
|
|
| int ExternalReferenceEncoder::IndexOf(Address key) const {
|
| if (key == NULL) return -1;
|
| HashMap::Entry* entry =
|
| - const_cast<HashMap &>(encodings_).Lookup(key, Hash(key), false);
|
| + const_cast<HashMap&>(encodings_).Lookup(key, Hash(key), false);
|
| return entry == NULL
|
| ? -1
|
| : static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
|
| @@ -549,9 +552,10 @@
|
|
|
|
|
| ExternalReferenceDecoder::ExternalReferenceDecoder()
|
| - : encodings_(NewArray<Address*>(kTypeCodeCount)) {
|
| + : encodings_(NewArray<Address*>(kTypeCodeCount)),
|
| + isolate_(Isolate::Current()) {
|
| ExternalReferenceTable* external_references =
|
| - ExternalReferenceTable::instance();
|
| + ExternalReferenceTable::instance(isolate_);
|
| for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
|
| int max = external_references->max_id(type) + 1;
|
| encodings_[type] = NewArray<Address>(max + 1);
|
| @@ -572,10 +576,12 @@
|
|
|
| bool Serializer::serialization_enabled_ = false;
|
| bool Serializer::too_late_to_enable_now_ = false;
|
| -ExternalReferenceDecoder* Deserializer::external_reference_decoder_ = NULL;
|
|
|
|
|
| -Deserializer::Deserializer(SnapshotByteSource* source) : source_(source) {
|
| +Deserializer::Deserializer(SnapshotByteSource* source)
|
| + : isolate_(NULL),
|
| + source_(source),
|
| + external_reference_decoder_(NULL) {
|
| }
|
|
|
|
|
| @@ -654,27 +660,31 @@
|
|
|
|
|
| void Deserializer::Deserialize() {
|
| + isolate_ = Isolate::Current();
|
| // Don't GC while deserializing - just expand the heap.
|
| AlwaysAllocateScope always_allocate;
|
| // Don't use the free lists while deserializing.
|
| LinearAllocationScope allocate_linearly;
|
| // No active threads.
|
| - ASSERT_EQ(NULL, ThreadState::FirstInUse());
|
| + ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
|
| // No active handles.
|
| - ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty());
|
| + ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty());
|
| // Make sure the entire partial snapshot cache is traversed, filling it with
|
| // valid object pointers.
|
| - partial_snapshot_cache_length_ = kPartialSnapshotCacheCapacity;
|
| + isolate_->set_serialize_partial_snapshot_cache_length(
|
| + Isolate::kPartialSnapshotCacheCapacity);
|
| ASSERT_EQ(NULL, external_reference_decoder_);
|
| external_reference_decoder_ = new ExternalReferenceDecoder();
|
| - Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG);
|
| - Heap::IterateWeakRoots(this, VISIT_ALL);
|
| + isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
|
| + isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
|
|
|
| - Heap::set_global_contexts_list(Heap::undefined_value());
|
| + isolate_->heap()->set_global_contexts_list(
|
| + isolate_->heap()->undefined_value());
|
| }
|
|
|
|
|
| void Deserializer::DeserializePartial(Object** root) {
|
| + isolate_ = Isolate::Current();
|
| // Don't GC while deserializing - just expand the heap.
|
| AlwaysAllocateScope always_allocate;
|
| // Don't use the free lists while deserializing.
|
| @@ -688,7 +698,7 @@
|
|
|
| Deserializer::~Deserializer() {
|
| ASSERT(source_->AtEOF());
|
| - if (external_reference_decoder_ != NULL) {
|
| + if (external_reference_decoder_) {
|
| delete external_reference_decoder_;
|
| external_reference_decoder_ = NULL;
|
| }
|
| @@ -719,12 +729,12 @@
|
| Object** current = reinterpret_cast<Object**>(address);
|
| Object** limit = current + (size >> kPointerSizeLog2);
|
| if (FLAG_log_snapshot_positions) {
|
| - LOG(SnapshotPositionEvent(address, source_->position()));
|
| + LOG(isolate_, SnapshotPositionEvent(address, source_->position()));
|
| }
|
| ReadChunk(current, limit, space_number, address);
|
| #ifdef DEBUG
|
| - bool is_codespace = (space == Heap::code_space()) ||
|
| - ((space == Heap::lo_space()) && (space_number == kLargeCode));
|
| + bool is_codespace = (space == HEAP->code_space()) ||
|
| + ((space == HEAP->lo_space()) && (space_number == kLargeCode));
|
| ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace);
|
| #endif
|
| }
|
| @@ -736,20 +746,20 @@
|
| #define ASSIGN_DEST_SPACE(space_number) \
|
| Space* dest_space; \
|
| if (space_number == NEW_SPACE) { \
|
| - dest_space = Heap::new_space(); \
|
| + dest_space = isolate->heap()->new_space(); \
|
| } else if (space_number == OLD_POINTER_SPACE) { \
|
| - dest_space = Heap::old_pointer_space(); \
|
| + dest_space = isolate->heap()->old_pointer_space(); \
|
| } else if (space_number == OLD_DATA_SPACE) { \
|
| - dest_space = Heap::old_data_space(); \
|
| + dest_space = isolate->heap()->old_data_space(); \
|
| } else if (space_number == CODE_SPACE) { \
|
| - dest_space = Heap::code_space(); \
|
| + dest_space = isolate->heap()->code_space(); \
|
| } else if (space_number == MAP_SPACE) { \
|
| - dest_space = Heap::map_space(); \
|
| + dest_space = isolate->heap()->map_space(); \
|
| } else if (space_number == CELL_SPACE) { \
|
| - dest_space = Heap::cell_space(); \
|
| + dest_space = isolate->heap()->cell_space(); \
|
| } else { \
|
| ASSERT(space_number >= LO_SPACE); \
|
| - dest_space = Heap::lo_space(); \
|
| + dest_space = isolate->heap()->lo_space(); \
|
| }
|
|
|
|
|
| @@ -760,6 +770,7 @@
|
| Object** limit,
|
| int source_space,
|
| Address address) {
|
| + Isolate* const isolate = isolate_;
|
| while (current < limit) {
|
| int data = source_->Get();
|
| switch (data) {
|
| @@ -788,14 +799,15 @@
|
| ReadObject(space_number, dest_space, &new_object); \
|
| } else if (where == kRootArray) { \
|
| int root_id = source_->GetInt(); \
|
| - new_object = Heap::roots_address()[root_id]; \
|
| + new_object = isolate->heap()->roots_address()[root_id]; \
|
| } else if (where == kPartialSnapshotCache) { \
|
| int cache_index = source_->GetInt(); \
|
| - new_object = partial_snapshot_cache_[cache_index]; \
|
| + new_object = isolate->serialize_partial_snapshot_cache() \
|
| + [cache_index]; \
|
| } else if (where == kExternalReference) { \
|
| int reference_id = source_->GetInt(); \
|
| - Address address = \
|
| - external_reference_decoder_->Decode(reference_id); \
|
| + Address address = external_reference_decoder_-> \
|
| + Decode(reference_id); \
|
| new_object = reinterpret_cast<Object*>(address); \
|
| } else if (where == kBackref) { \
|
| emit_write_barrier = \
|
| @@ -833,7 +845,7 @@
|
| } \
|
| } \
|
| if (emit_write_barrier) { \
|
| - Heap::RecordWrite(address, static_cast<int>( \
|
| + isolate->heap()->RecordWrite(address, static_cast<int>( \
|
| reinterpret_cast<Address>(current) - address)); \
|
| } \
|
| if (!current_was_incremented) { \
|
| @@ -997,7 +1009,8 @@
|
| int index = source_->Get();
|
| Vector<const char> source_vector = Natives::GetScriptSource(index);
|
| NativesExternalStringResource* resource =
|
| - new NativesExternalStringResource(source_vector.start());
|
| + new NativesExternalStringResource(
|
| + isolate->bootstrapper(), source_vector.start());
|
| *current++ = reinterpret_cast<Object*>(resource);
|
| break;
|
| }
|
| @@ -1062,6 +1075,9 @@
|
| current_root_index_(0),
|
| external_reference_encoder_(new ExternalReferenceEncoder),
|
| large_object_total_(0) {
|
| + // The serializer is meant to be used only to generate initial heap images
|
| + // from a context in which there is only one isolate.
|
| + ASSERT(Isolate::Current()->IsDefaultIsolate());
|
| for (int i = 0; i <= LAST_SPACE; i++) {
|
| fullness_[i] = 0;
|
| }
|
| @@ -1074,35 +1090,40 @@
|
|
|
|
|
| void StartupSerializer::SerializeStrongReferences() {
|
| + Isolate* isolate = Isolate::Current();
|
| // No active threads.
|
| - CHECK_EQ(NULL, ThreadState::FirstInUse());
|
| + CHECK_EQ(NULL, Isolate::Current()->thread_manager()->FirstThreadStateInUse());
|
| // No active or weak handles.
|
| - CHECK(HandleScopeImplementer::instance()->blocks()->is_empty());
|
| - CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles());
|
| + CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
|
| + CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
|
| // We don't support serializing installed extensions.
|
| - for (RegisteredExtension* ext = RegisteredExtension::first_extension();
|
| + for (RegisteredExtension* ext = v8::RegisteredExtension::first_extension();
|
| ext != NULL;
|
| ext = ext->next()) {
|
| CHECK_NE(v8::INSTALLED, ext->state());
|
| }
|
| - Heap::IterateStrongRoots(this, VISIT_ONLY_STRONG);
|
| + HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG);
|
| }
|
|
|
|
|
| void PartialSerializer::Serialize(Object** object) {
|
| this->VisitPointer(object);
|
| + Isolate* isolate = Isolate::Current();
|
|
|
| // After we have done the partial serialization the partial snapshot cache
|
| // will contain some references needed to decode the partial snapshot. We
|
| // fill it up with undefineds so it has a predictable length so the
|
| // deserialization code doesn't need to know the length.
|
| - for (int index = partial_snapshot_cache_length_;
|
| - index < kPartialSnapshotCacheCapacity;
|
| + for (int index = isolate->serialize_partial_snapshot_cache_length();
|
| + index < Isolate::kPartialSnapshotCacheCapacity;
|
| index++) {
|
| - partial_snapshot_cache_[index] = Heap::undefined_value();
|
| - startup_serializer_->VisitPointer(&partial_snapshot_cache_[index]);
|
| + isolate->serialize_partial_snapshot_cache()[index] =
|
| + isolate->heap()->undefined_value();
|
| + startup_serializer_->VisitPointer(
|
| + &isolate->serialize_partial_snapshot_cache()[index]);
|
| }
|
| - partial_snapshot_cache_length_ = kPartialSnapshotCacheCapacity;
|
| + isolate->set_serialize_partial_snapshot_cache_length(
|
| + Isolate::kPartialSnapshotCacheCapacity);
|
| }
|
|
|
|
|
| @@ -1121,11 +1142,6 @@
|
| }
|
|
|
|
|
| -Object* SerializerDeserializer::partial_snapshot_cache_[
|
| - kPartialSnapshotCacheCapacity];
|
| -int SerializerDeserializer::partial_snapshot_cache_length_ = 0;
|
| -
|
| -
|
| // This ensures that the partial snapshot cache keeps things alive during GC and
|
| // tracks their movement. When it is called during serialization of the startup
|
| // snapshot the partial snapshot is empty, so nothing happens. When the partial
|
| @@ -1135,9 +1151,11 @@
|
| // deserialization we therefore need to visit the cache array. This fills it up
|
| // with pointers to deserialized objects.
|
| void SerializerDeserializer::Iterate(ObjectVisitor* visitor) {
|
| + Isolate* isolate = Isolate::Current();
|
| visitor->VisitPointers(
|
| - &partial_snapshot_cache_[0],
|
| - &partial_snapshot_cache_[partial_snapshot_cache_length_]);
|
| + isolate->serialize_partial_snapshot_cache(),
|
| + &isolate->serialize_partial_snapshot_cache()[
|
| + isolate->serialize_partial_snapshot_cache_length()]);
|
| }
|
|
|
|
|
| @@ -1145,33 +1163,39 @@
|
| // the root iteration code (above) will iterate over array elements, writing the
|
| // references to deserialized objects in them.
|
| void SerializerDeserializer::SetSnapshotCacheSize(int size) {
|
| - partial_snapshot_cache_length_ = size;
|
| + Isolate::Current()->set_serialize_partial_snapshot_cache_length(size);
|
| }
|
|
|
|
|
| int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
|
| - for (int i = 0; i < partial_snapshot_cache_length_; i++) {
|
| - Object* entry = partial_snapshot_cache_[i];
|
| + Isolate* isolate = Isolate::Current();
|
| +
|
| + for (int i = 0;
|
| + i < isolate->serialize_partial_snapshot_cache_length();
|
| + i++) {
|
| + Object* entry = isolate->serialize_partial_snapshot_cache()[i];
|
| if (entry == heap_object) return i;
|
| }
|
|
|
| // We didn't find the object in the cache. So we add it to the cache and
|
| // then visit the pointer so that it becomes part of the startup snapshot
|
| // and we can refer to it from the partial snapshot.
|
| - int length = partial_snapshot_cache_length_;
|
| - CHECK(length < kPartialSnapshotCacheCapacity);
|
| - partial_snapshot_cache_[length] = heap_object;
|
| - startup_serializer_->VisitPointer(&partial_snapshot_cache_[length]);
|
| + int length = isolate->serialize_partial_snapshot_cache_length();
|
| + CHECK(length < Isolate::kPartialSnapshotCacheCapacity);
|
| + isolate->serialize_partial_snapshot_cache()[length] = heap_object;
|
| + startup_serializer_->VisitPointer(
|
| + &isolate->serialize_partial_snapshot_cache()[length]);
|
| // We don't recurse from the startup snapshot generator into the partial
|
| // snapshot generator.
|
| - ASSERT(length == partial_snapshot_cache_length_);
|
| - return partial_snapshot_cache_length_++;
|
| + ASSERT(length == isolate->serialize_partial_snapshot_cache_length());
|
| + isolate->set_serialize_partial_snapshot_cache_length(length + 1);
|
| + return length;
|
| }
|
|
|
|
|
| int PartialSerializer::RootIndex(HeapObject* heap_object) {
|
| for (int i = 0; i < Heap::kRootListLength; i++) {
|
| - Object* root = Heap::roots_address()[i];
|
| + Object* root = HEAP->roots_address()[i];
|
| if (root == heap_object) return i;
|
| }
|
| return kInvalidRootIndex;
|
| @@ -1254,13 +1278,13 @@
|
|
|
|
|
| void StartupSerializer::SerializeWeakReferences() {
|
| - for (int i = partial_snapshot_cache_length_;
|
| - i < kPartialSnapshotCacheCapacity;
|
| + for (int i = Isolate::Current()->serialize_partial_snapshot_cache_length();
|
| + i < Isolate::kPartialSnapshotCacheCapacity;
|
| i++) {
|
| sink_->Put(kRootArray + kPlain + kStartOfObject, "RootSerialization");
|
| sink_->PutInt(Heap::kUndefinedValueRootIndex, "root_index");
|
| }
|
| - Heap::IterateWeakRoots(this, VISIT_ALL);
|
| + HEAP->IterateWeakRoots(this, VISIT_ALL);
|
| }
|
|
|
|
|
| @@ -1321,7 +1345,8 @@
|
| "ObjectSerialization");
|
| sink_->PutInt(size >> kObjectAlignmentBits, "Size in words");
|
|
|
| - LOG(SnapshotPositionEvent(object_->address(), sink_->Position()));
|
| + LOG(i::Isolate::Current(),
|
| + SnapshotPositionEvent(object_->address(), sink_->Position()));
|
|
|
| // Mark this object as already serialized.
|
| bool start_new_page;
|
| @@ -1422,7 +1447,7 @@
|
| Address references_start = reinterpret_cast<Address>(resource_pointer);
|
| OutputRawData(references_start);
|
| for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
|
| - Object* source = Heap::natives_source_cache()->get(i);
|
| + Object* source = HEAP->natives_source_cache()->get(i);
|
| if (!source->IsUndefined()) {
|
| ExternalAsciiString* string = ExternalAsciiString::cast(source);
|
| typedef v8::String::ExternalAsciiStringResource Resource;
|
| @@ -1472,7 +1497,7 @@
|
| int Serializer::SpaceOfObject(HeapObject* object) {
|
| for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
|
| AllocationSpace s = static_cast<AllocationSpace>(i);
|
| - if (Heap::InSpace(object, s)) {
|
| + if (HEAP->InSpace(object, s)) {
|
| if (i == LO_SPACE) {
|
| if (object->IsCode()) {
|
| return kLargeCode;
|
| @@ -1493,7 +1518,7 @@
|
| int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) {
|
| for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
|
| AllocationSpace s = static_cast<AllocationSpace>(i);
|
| - if (Heap::InSpace(object, s)) {
|
| + if (HEAP->InSpace(object, s)) {
|
| return i;
|
| }
|
| }
|
|
|