Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file | |
| 2 // for details. All rights reserved. Use of this source code is governed by a | |
| 3 // BSD-style license that can be found in the LICENSE file. | |
| 4 | |
| 5 #include "vm/clustered_snapshot.h" | |
| 6 | |
| 7 #include "platform/assert.h" | |
| 8 #include "vm/bootstrap.h" | |
| 9 #include "vm/class_finalizer.h" | |
| 10 #include "vm/dart.h" | |
| 11 #include "vm/dart_entry.h" | |
| 12 #include "vm/exceptions.h" | |
| 13 #include "vm/heap.h" | |
| 14 #include "vm/lockers.h" | |
| 15 #include "vm/longjump.h" | |
| 16 #include "vm/native_entry.h" | |
| 17 #include "vm/object.h" | |
| 18 #include "vm/object_store.h" | |
| 19 #include "vm/stub_code.h" | |
| 20 #include "vm/symbols.h" | |
| 21 #include "vm/timeline.h" | |
| 22 #include "vm/version.h" | |
| 23 | |
| 24 namespace dart { | |
| 25 | |
| 26 static RawObject* AllocateUninitialized(PageSpace* old_space, intptr_t size) { | |
| 27 ASSERT(Utils::IsAligned(size, kObjectAlignment)); | |
| 28 uword address = old_space->TryAllocateDataBumpLocked(size, | |
| 29 PageSpace::kForceGrowth); | |
| 30 if (address == 0) { | |
| 31 FATAL("Out of memory"); | |
| 32 } | |
| 33 return reinterpret_cast<RawObject*>(address + kHeapObjectTag); | |
| 34 } | |
| 35 | |
| 36 | |
| 37 void Deserializer::InitializeHeader(RawObject* raw, | |
| 38 intptr_t class_id, | |
| 39 intptr_t size, | |
| 40 bool is_vm_isolate, | |
| 41 bool is_canonical) { | |
| 42 ASSERT(Utils::IsAligned(size, kObjectAlignment)); | |
| 43 uword tags = 0; | |
| 44 tags = RawObject::ClassIdTag::update(class_id, tags); | |
| 45 tags = RawObject::SizeTag::update(size, tags); | |
| 46 tags = RawObject::VMHeapObjectTag::update(is_vm_isolate, tags); | |
| 47 tags = RawObject::CanonicalObjectTag::update(is_canonical, tags); | |
| 48 raw->ptr()->tags_ = tags; | |
| 49 } | |
| 50 | |
| 51 | |
| 52 class ClassSerializationCluster : public SerializationCluster { | |
| 53 public: | |
| 54 ClassSerializationCluster() { } | |
|
siva
2016/06/30 00:06:08
Maybe
ClassSerializationCluster() ::
predefined
rmacnak
2016/06/30 01:39:21
For a full snapshot we actually should expect the
| |
| 55 virtual ~ClassSerializationCluster() { } | |
| 56 | |
| 57 void Trace(Serializer* s, RawObject* object) { | |
| 58 RawClass* cls = Class::RawCast(object); | |
| 59 intptr_t class_id = cls->ptr()->id_; | |
| 60 | |
| 61 if (class_id < kNumPredefinedCids) { | |
| 62 // These classes are allocated by Object::Init or Object::InitOnce, so the | |
| 63 // deserializer must find them in the class table instead of allocating | |
| 64 // them. | |
| 65 predefined_.Add(cls); | |
| 66 } else { | |
| 67 objects_.Add(cls); | |
| 68 } | |
| 69 | |
| 70 RawObject** from = cls->from(); | |
| 71 RawObject** to = cls->to_snapshot(s->kind()); | |
| 72 for (RawObject** p = from; p <= to; p++) { | |
| 73 s->Push(*p); | |
| 74 } | |
| 75 } | |
| 76 | |
| 77 void WriteAlloc(Serializer* s) { | |
| 78 s->WriteCid(kClassCid); | |
| 79 intptr_t count = predefined_.length(); | |
| 80 s->Write<intptr_t>(count); | |
| 81 for (intptr_t i = 0; i < count; i++) { | |
| 82 RawClass* cls = predefined_[i]; | |
| 83 intptr_t class_id = cls->ptr()->id_; | |
| 84 s->Write<intptr_t>(class_id); | |
| 85 s->AssignRef(cls); | |
| 86 } | |
| 87 count = objects_.length(); | |
| 88 s->Write<intptr_t>(count); | |
| 89 for (intptr_t i = 0; i < count; i++) { | |
| 90 RawClass* cls = objects_[i]; | |
| 91 s->AssignRef(cls); | |
| 92 } | |
| 93 } | |
| 94 | |
| 95 void WriteFill(Serializer* s) { | |
| 96 #define WRITE_CLASS() \ | |
| 97 RawObject** from = cls->from(); \ | |
| 98 RawObject** to = cls->to_snapshot(kind); \ | |
| 99 for (RawObject** p = from; p <= to; p++) { \ | |
| 100 s->WriteRef(*p); \ | |
| 101 } \ | |
| 102 intptr_t class_id = cls->ptr()->id_; \ | |
| 103 s->WriteCid(class_id); \ | |
| 104 s->Write<int32_t>(cls->ptr()->instance_size_in_words_); \ | |
| 105 s->Write<int32_t>(cls->ptr()->next_field_offset_in_words_); \ | |
| 106 s->Write<int32_t>(cls->ptr()->type_arguments_field_offset_in_words_); \ | |
| 107 s->Write<uint16_t>(cls->ptr()->num_type_arguments_); \ | |
| 108 s->Write<uint16_t>(cls->ptr()->num_own_type_arguments_); \ | |
| 109 s->Write<uint16_t>(cls->ptr()->num_native_fields_); \ | |
| 110 s->WriteTokenPosition(cls->ptr()->token_pos_); \ | |
| 111 s->Write<uint16_t>(cls->ptr()->state_bits_); \ | |
|
siva
2016/06/30 00:06:08
Why not make this a static helper function and cal
rmacnak
2016/06/30 01:39:21
Done.
| |
| 112 | |
| 113 Snapshot::Kind kind = s->kind(); | |
| 114 intptr_t count = predefined_.length(); | |
| 115 for (intptr_t i = 0; i < count; i++) { | |
| 116 RawClass* cls = predefined_[i]; | |
| 117 WRITE_CLASS() | |
| 118 } | |
| 119 count = objects_.length(); | |
| 120 for (intptr_t i = 0; i < count; i++) { | |
| 121 RawClass* cls = objects_[i]; | |
| 122 WRITE_CLASS() | |
| 123 } | |
| 124 #undef WRITE_CLASS | |
| 125 } | |
| 126 | |
| 127 private: | |
| 128 GrowableArray<RawClass*> predefined_; | |
| 129 GrowableArray<RawClass*> objects_; | |
| 130 }; | |
| 131 | |
| 132 | |
| 133 class ClassDeserializationCluster : public DeserializationCluster { | |
| 134 public: | |
| 135 ClassDeserializationCluster() { } | |
| 136 virtual ~ClassDeserializationCluster() { } | |
| 137 | |
| 138 void ReadAlloc(Deserializer* d) { | |
| 139 predefined_start_index_ = d->next_index(); | |
| 140 PageSpace* old_space = d->heap()->old_space(); | |
| 141 intptr_t count = d->Read<intptr_t>(); | |
| 142 ClassTable* table = d->isolate()->class_table(); | |
| 143 for (intptr_t i = 0; i < count; i++) { | |
| 144 intptr_t class_id = d->Read<intptr_t>(); | |
| 145 ASSERT(table->HasValidClassAt(class_id)); | |
| 146 RawClass* cls = table->At(class_id); | |
| 147 ASSERT(cls != NULL); | |
| 148 d->AssignRef(cls); | |
| 149 } | |
| 150 predefined_stop_index_ = d->next_index(); | |
| 151 | |
| 152 start_index_ = d->next_index(); | |
| 153 count = d->Read<intptr_t>(); | |
| 154 for (intptr_t i = 0; i < count; i++) { | |
| 155 d->AssignRef(AllocateUninitialized(old_space, | |
| 156 Class::InstanceSize())); | |
| 157 } | |
| 158 stop_index_ = d->next_index(); | |
| 159 } | |
| 160 | |
| 161 void ReadFill(Deserializer* d) { | |
| 162 Snapshot::Kind kind = d->kind(); | |
| 163 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 164 ClassTable* table = d->isolate()->class_table(); | |
| 165 | |
| 166 for (intptr_t id = predefined_start_index_; | |
| 167 id < predefined_stop_index_; | |
| 168 id++) { | |
| 169 RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id)); | |
| 170 RawObject** from = cls->from(); | |
| 171 RawObject** to_snapshot = cls->to_snapshot(kind); | |
| 172 RawObject** to = cls->to(); | |
| 173 for (RawObject** p = from; p <= to_snapshot; p++) { | |
| 174 *p = d->ReadRef(); | |
| 175 } | |
| 176 for (RawObject** p = to_snapshot + 1; p <= to; p++) { | |
| 177 *p = Object::null(); | |
| 178 } | |
|
siva
2016/06/30 00:06:07
Predefined classes are allocated in Object::Init u
rmacnak
2016/06/30 01:39:21
True, removed.
| |
| 179 | |
| 180 intptr_t class_id = d->ReadCid(); | |
| 181 cls->ptr()->id_ = class_id; | |
| 182 cls->ptr()->instance_size_in_words_ = d->Read<int32_t>(); | |
| 183 cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>(); | |
| 184 cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>(); | |
| 185 cls->ptr()->num_type_arguments_ = d->Read<uint16_t>(); | |
| 186 cls->ptr()->num_own_type_arguments_ = d->Read<uint16_t>(); | |
| 187 cls->ptr()->num_native_fields_ = d->Read<uint16_t>(); | |
| 188 cls->ptr()->token_pos_ = d->ReadTokenPosition(); | |
| 189 cls->ptr()->state_bits_ = d->Read<uint16_t>(); | |
|
siva
2016/06/30 00:06:08
This piece of code is repeated why not make a help
| |
| 190 } | |
| 191 | |
| 192 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 193 RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id)); | |
| 194 Deserializer::InitializeHeader(cls, kClassCid, Class::InstanceSize(), | |
| 195 is_vm_object); | |
| 196 RawObject** from = cls->from(); | |
| 197 RawObject** to_snapshot = cls->to_snapshot(kind); | |
| 198 RawObject** to = cls->to(); | |
| 199 for (RawObject** p = from; p <= to_snapshot; p++) { | |
| 200 *p = d->ReadRef(); | |
| 201 } | |
| 202 for (RawObject** p = to_snapshot + 1; p <= to; p++) { | |
| 203 *p = Object::null(); | |
| 204 } | |
| 205 | |
| 206 intptr_t class_id = d->ReadCid(); | |
| 207 | |
| 208 ASSERT(class_id >= kNumPredefinedCids); | |
| 209 Instance fake; | |
| 210 cls->ptr()->handle_vtable_ = fake.vtable(); | |
| 211 | |
| 212 cls->ptr()->id_ = class_id; | |
| 213 cls->ptr()->instance_size_in_words_ = d->Read<int32_t>(); | |
| 214 cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>(); | |
| 215 cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>(); | |
| 216 cls->ptr()->num_type_arguments_ = d->Read<uint16_t>(); | |
| 217 cls->ptr()->num_own_type_arguments_ = d->Read<uint16_t>(); | |
| 218 cls->ptr()->num_native_fields_ = d->Read<uint16_t>(); | |
| 219 cls->ptr()->token_pos_ = d->ReadTokenPosition(); | |
| 220 cls->ptr()->state_bits_ = d->Read<uint16_t>(); | |
| 221 | |
| 222 table->AllocateIndex(class_id); | |
| 223 table->SetAt(class_id, cls); | |
| 224 } | |
| 225 } | |
| 226 | |
| 227 void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { | |
| 228 NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(), | |
| 229 Timeline::GetIsolateStream(), "PostLoadClass")); | |
| 230 | |
| 231 Class& cls = Class::Handle(zone); | |
| 232 for (intptr_t i = predefined_start_index_; | |
| 233 i < predefined_stop_index_; | |
| 234 i++) { | |
| 235 cls ^= refs.At(i); | |
| 236 cls.RehashConstants(zone); | |
| 237 } | |
| 238 for (intptr_t i = start_index_; i < stop_index_; i++) { | |
| 239 cls ^= refs.At(i); | |
| 240 cls.RehashConstants(zone); | |
| 241 } | |
| 242 } | |
| 243 | |
| 244 private: | |
| 245 intptr_t predefined_start_index_; | |
| 246 intptr_t predefined_stop_index_; | |
| 247 }; | |
| 248 | |
| 249 class UnresolvedClassSerializationCluster : public SerializationCluster { | |
| 250 public: | |
| 251 UnresolvedClassSerializationCluster() { } | |
| 252 virtual ~UnresolvedClassSerializationCluster() { } | |
| 253 | |
| 254 void Trace(Serializer* s, RawObject* object) { | |
| 255 RawUnresolvedClass* cls = UnresolvedClass::RawCast(object); | |
| 256 objects_.Add(cls); | |
| 257 | |
| 258 RawObject** from = cls->from(); | |
| 259 RawObject** to = cls->to(); | |
| 260 for (RawObject** p = from; p <= to; p++) { | |
| 261 s->Push(*p); | |
| 262 } | |
| 263 } | |
| 264 | |
| 265 void WriteAlloc(Serializer* s) { | |
| 266 s->WriteCid(kUnresolvedClassCid); | |
| 267 intptr_t count = objects_.length(); | |
| 268 s->Write<intptr_t>(count); | |
| 269 for (intptr_t i = 0; i < count; i++) { | |
| 270 RawUnresolvedClass* cls = objects_[i]; | |
| 271 s->AssignRef(cls); | |
| 272 } | |
| 273 } | |
| 274 | |
| 275 void WriteFill(Serializer* s) { | |
| 276 intptr_t count = objects_.length(); | |
| 277 s->Write<intptr_t>(count); | |
| 278 for (intptr_t i = 0; i < count; i++) { | |
| 279 RawUnresolvedClass* cls = objects_[i]; | |
| 280 RawObject** from = cls->from(); | |
| 281 RawObject** to = cls->to(); | |
| 282 for (RawObject** p = from; p <= to; p++) { | |
| 283 s->WriteRef(*p); | |
| 284 } | |
| 285 s->WriteTokenPosition(cls->ptr()->token_pos_); | |
| 286 } | |
| 287 } | |
| 288 | |
| 289 private: | |
| 290 GrowableArray<RawUnresolvedClass*> objects_; | |
| 291 }; | |
| 292 | |
| 293 class UnresolvedClassDeserializationCluster : public DeserializationCluster { | |
| 294 public: | |
| 295 UnresolvedClassDeserializationCluster() { } | |
| 296 virtual ~UnresolvedClassDeserializationCluster() { } | |
| 297 | |
| 298 void ReadAlloc(Deserializer* d) { | |
| 299 start_index_ = d->next_index(); | |
| 300 PageSpace* old_space = d->heap()->old_space(); | |
| 301 intptr_t count = d->Read<intptr_t>(); | |
| 302 for (intptr_t i = 0; i < count; i++) { | |
| 303 d->AssignRef(AllocateUninitialized(old_space, | |
| 304 UnresolvedClass::InstanceSize())); | |
| 305 } | |
| 306 stop_index_ = d->next_index(); | |
| 307 } | |
| 308 | |
| 309 void ReadFill(Deserializer* d) { | |
| 310 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 311 | |
| 312 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 313 RawUnresolvedClass* cls = | |
| 314 reinterpret_cast<RawUnresolvedClass*>(d->Ref(id)); | |
| 315 Deserializer::InitializeHeader(cls, kUnresolvedClassCid, | |
| 316 UnresolvedClass::InstanceSize(), | |
| 317 is_vm_object); | |
| 318 RawObject** from = cls->from(); | |
| 319 RawObject** to = cls->to(); | |
| 320 for (RawObject** p = from; p <= to; p++) { | |
| 321 *p = d->ReadRef(); | |
| 322 } | |
| 323 cls->ptr()->token_pos_ = d->ReadTokenPosition(); | |
| 324 } | |
| 325 } | |
| 326 }; | |
| 327 | |
| 328 class TypeArgumentsSerializationCluster : public SerializationCluster { | |
| 329 public: | |
| 330 TypeArgumentsSerializationCluster() { } | |
| 331 virtual ~TypeArgumentsSerializationCluster() { } | |
| 332 | |
| 333 void Trace(Serializer* s, RawObject* object) { | |
| 334 RawTypeArguments* type_args = TypeArguments::RawCast(object); | |
| 335 objects_.Add(type_args); | |
| 336 | |
| 337 s->Push(type_args->ptr()->instantiations_); | |
| 338 intptr_t length = Smi::Value(type_args->ptr()->length_); | |
| 339 for (intptr_t i = 0; i < length; i++) { | |
| 340 s->Push(type_args->ptr()->types()[i]); | |
| 341 } | |
| 342 } | |
| 343 | |
| 344 void WriteAlloc(Serializer* s) { | |
| 345 s->WriteCid(kTypeArgumentsCid); | |
| 346 intptr_t count = objects_.length(); | |
| 347 s->Write<intptr_t>(count); | |
| 348 for (intptr_t i = 0; i < count; i++) { | |
| 349 RawTypeArguments* type_args = objects_[i]; | |
| 350 intptr_t length = Smi::Value(type_args->ptr()->length_); | |
| 351 s->Write<intptr_t>(length); | |
| 352 s->AssignRef(type_args); | |
| 353 } | |
| 354 } | |
| 355 | |
| 356 void WriteFill(Serializer* s) { | |
| 357 intptr_t count = objects_.length(); | |
| 358 for (intptr_t i = 0; i < count; i++) { | |
| 359 RawTypeArguments* type_args = objects_[i]; | |
| 360 intptr_t length = Smi::Value(type_args->ptr()->length_); | |
| 361 s->Write<intptr_t>(length); | |
| 362 s->Write<bool>(type_args->IsCanonical()); | |
| 363 intptr_t hash = Smi::Value(type_args->ptr()->hash_); | |
| 364 s->Write<int32_t>(hash); | |
| 365 s->WriteRef(type_args->ptr()->instantiations_); | |
| 366 for (intptr_t j = 0; j < length; j++) { | |
| 367 s->WriteRef(type_args->ptr()->types()[j]); | |
| 368 } | |
| 369 } | |
| 370 } | |
| 371 | |
| 372 private: | |
| 373 GrowableArray<RawTypeArguments*> objects_; | |
| 374 }; | |
| 375 | |
| 376 | |
| 377 class TypeArgumentsDeserializationCluster : public DeserializationCluster { | |
| 378 public: | |
| 379 TypeArgumentsDeserializationCluster() { } | |
| 380 virtual ~TypeArgumentsDeserializationCluster() { } | |
| 381 | |
| 382 void ReadAlloc(Deserializer* d) { | |
| 383 start_index_ = d->next_index(); | |
| 384 PageSpace* old_space = d->heap()->old_space(); | |
| 385 intptr_t count = d->Read<intptr_t>(); | |
| 386 for (intptr_t i = 0; i < count; i++) { | |
| 387 intptr_t length = d->Read<intptr_t>(); | |
| 388 d->AssignRef(AllocateUninitialized(old_space, | |
| 389 TypeArguments::InstanceSize(length))); | |
| 390 } | |
| 391 stop_index_ = d->next_index(); | |
| 392 } | |
| 393 | |
| 394 void ReadFill(Deserializer* d) { | |
| 395 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 396 | |
| 397 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 398 RawTypeArguments* type_args = | |
| 399 reinterpret_cast<RawTypeArguments*>(d->Ref(id)); | |
| 400 intptr_t length = d->Read<intptr_t>(); | |
| 401 bool is_canonical = d->Read<bool>(); | |
| 402 Deserializer::InitializeHeader(type_args, kTypeArgumentsCid, | |
| 403 TypeArguments::InstanceSize(length), | |
| 404 is_vm_object, is_canonical); | |
| 405 type_args->ptr()->length_ = Smi::New(length); | |
| 406 type_args->ptr()->hash_ = Smi::New(d->Read<int32_t>()); | |
| 407 type_args->ptr()->instantiations_ = | |
| 408 reinterpret_cast<RawArray*>(d->ReadRef()); | |
| 409 for (intptr_t j = 0; j < length; j++) { | |
| 410 type_args->ptr()->types()[j] = | |
| 411 reinterpret_cast<RawAbstractType*>(d->ReadRef()); | |
| 412 } | |
| 413 } | |
| 414 } | |
| 415 }; | |
| 416 | |
| 417 | |
| 418 class PatchClassSerializationCluster : public SerializationCluster { | |
| 419 public: | |
| 420 PatchClassSerializationCluster() { } | |
| 421 virtual ~PatchClassSerializationCluster() { } | |
| 422 | |
| 423 void Trace(Serializer* s, RawObject* object) { | |
| 424 RawPatchClass* cls = PatchClass::RawCast(object); | |
| 425 objects_.Add(cls); | |
| 426 | |
| 427 RawObject** from = cls->from(); | |
| 428 RawObject** to = cls->to(); | |
| 429 for (RawObject** p = from; p <= to; p++) { | |
| 430 s->Push(*p); | |
| 431 } | |
| 432 } | |
| 433 | |
| 434 void WriteAlloc(Serializer* s) { | |
| 435 s->WriteCid(kPatchClassCid); | |
| 436 intptr_t count = objects_.length(); | |
| 437 s->Write<intptr_t>(count); | |
| 438 for (intptr_t i = 0; i < count; i++) { | |
| 439 RawPatchClass* cls = objects_[i]; | |
| 440 s->AssignRef(cls); | |
| 441 } | |
| 442 } | |
|
siva
2016/06/30 00:06:08
The WriteAlloc function seems similar in each of t
| |
| 443 | |
| 444 void WriteFill(Serializer* s) { | |
| 445 intptr_t count = objects_.length(); | |
| 446 for (intptr_t i = 0; i < count; i++) { | |
| 447 RawPatchClass* cls = objects_[i]; | |
| 448 RawObject** from = cls->from(); | |
| 449 RawObject** to = cls->to(); | |
| 450 for (RawObject** p = from; p <= to; p++) { | |
| 451 s->WriteRef(*p); | |
| 452 } | |
| 453 } | |
| 454 } | |
| 455 | |
| 456 private: | |
| 457 GrowableArray<RawPatchClass*> objects_; | |
| 458 }; | |
| 459 | |
| 460 class PatchClassDeserializationCluster : public DeserializationCluster { | |
| 461 public: | |
| 462 PatchClassDeserializationCluster() { } | |
| 463 virtual ~PatchClassDeserializationCluster() { } | |
| 464 | |
| 465 void ReadAlloc(Deserializer* d) { | |
| 466 start_index_ = d->next_index(); | |
| 467 PageSpace* old_space = d->heap()->old_space(); | |
| 468 intptr_t count = d->Read<intptr_t>(); | |
| 469 for (intptr_t i = 0; i < count; i++) { | |
| 470 d->AssignRef(AllocateUninitialized(old_space, | |
| 471 PatchClass::InstanceSize())); | |
| 472 } | |
| 473 stop_index_ = d->next_index(); | |
| 474 } | |
|
siva
2016/06/30 00:06:07
The ReadAlloc function seems similar in each of th
| |
| 475 | |
| 476 void ReadFill(Deserializer* d) { | |
| 477 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 478 | |
| 479 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 480 RawPatchClass* cls = reinterpret_cast<RawPatchClass*>(d->Ref(id)); | |
| 481 Deserializer::InitializeHeader(cls, kPatchClassCid, | |
| 482 PatchClass::InstanceSize(), is_vm_object); | |
| 483 RawObject** from = cls->from(); | |
| 484 RawObject** to = cls->to(); | |
| 485 for (RawObject** p = from; p <= to; p++) { | |
| 486 *p = d->ReadRef(); | |
| 487 } | |
| 488 } | |
| 489 } | |
| 490 }; | |
| 491 | |
| 492 class FunctionSerializationCluster : public SerializationCluster { | |
| 493 public: | |
| 494 FunctionSerializationCluster() { } | |
| 495 virtual ~FunctionSerializationCluster() { } | |
| 496 | |
| 497 void Trace(Serializer* s, RawObject* object) { | |
| 498 RawFunction* func = Function::RawCast(object); | |
| 499 objects_.Add(func); | |
| 500 | |
| 501 RawObject** from = func->from(); | |
| 502 RawObject** to = func->to_snapshot(); | |
| 503 for (RawObject** p = from; p <= to; p++) { | |
| 504 s->Push(*p); | |
| 505 } | |
| 506 if (s->kind() == Snapshot::kAppNoJIT) { | |
| 507 s->Push(func->ptr()->code_); | |
| 508 } else if (s->kind() == Snapshot::kAppWithJIT) { | |
| 509 s->Push(func->ptr()->unoptimized_code_); | |
| 510 s->Push(func->ptr()->ic_data_array_); | |
| 511 } | |
| 512 } | |
| 513 | |
| 514 void WriteAlloc(Serializer* s) { | |
| 515 s->WriteCid(kFunctionCid); | |
| 516 intptr_t count = objects_.length(); | |
| 517 s->Write<intptr_t>(count); | |
| 518 for (intptr_t i = 0; i < count; i++) { | |
| 519 RawFunction* func = objects_[i]; | |
| 520 s->AssignRef(func); | |
| 521 } | |
| 522 } | |
| 523 | |
| 524 void WriteFill(Serializer* s) { | |
| 525 Snapshot::Kind kind = s->kind(); | |
| 526 intptr_t count = objects_.length(); | |
| 527 for (intptr_t i = 0; i < count; i++) { | |
| 528 RawFunction* func = objects_[i]; | |
| 529 RawObject** from = func->from(); | |
| 530 RawObject** to = func->to_snapshot(); | |
| 531 for (RawObject** p = from; p <= to; p++) { | |
| 532 s->WriteRef(*p); | |
| 533 } | |
| 534 if (kind == Snapshot::kAppNoJIT) { | |
| 535 s->WriteRef(func->ptr()->code_); | |
| 536 } else if (s->kind() == Snapshot::kAppWithJIT) { | |
| 537 s->WriteRef(func->ptr()->unoptimized_code_); | |
| 538 s->WriteRef(func->ptr()->ic_data_array_); | |
| 539 } | |
| 540 | |
| 541 s->WriteTokenPosition(func->ptr()->token_pos_); | |
| 542 s->WriteTokenPosition(func->ptr()->end_token_pos_); | |
| 543 s->Write<int16_t>(func->ptr()->num_fixed_parameters_); | |
| 544 s->Write<int16_t>(func->ptr()->num_optional_parameters_); | |
| 545 s->Write<uint32_t>(func->ptr()->kind_tag_); | |
| 546 if (kind == Snapshot::kAppNoJIT) { | |
| 547 // Omit fields used to support de/reoptimization. | |
| 548 } else { | |
| 549 bool is_optimized = Code::IsOptimized(func->ptr()->code_); | |
| 550 if (is_optimized) { | |
| 551 s->Write<int32_t>(FLAG_optimization_counter_threshold); | |
| 552 } else { | |
| 553 s->Write<int32_t>(0); | |
| 554 } | |
| 555 s->Write<int8_t>(func->ptr()->deoptimization_counter_); | |
| 556 s->Write<uint16_t>(func->ptr()->optimized_instruction_count_); | |
| 557 s->Write<uint16_t>(func->ptr()->optimized_call_site_count_); | |
| 558 } | |
| 559 } | |
| 560 } | |
| 561 | |
| 562 private: | |
| 563 GrowableArray<RawFunction*> objects_; | |
| 564 }; | |
| 565 | |
| 566 class FunctionDeserializationCluster : public DeserializationCluster { | |
| 567 public: | |
| 568 FunctionDeserializationCluster() { } | |
| 569 virtual ~FunctionDeserializationCluster() { } | |
| 570 | |
| 571 void ReadAlloc(Deserializer* d) { | |
| 572 start_index_ = d->next_index(); | |
| 573 PageSpace* old_space = d->heap()->old_space(); | |
| 574 intptr_t count = d->Read<intptr_t>(); | |
| 575 for (intptr_t i = 0; i < count; i++) { | |
| 576 d->AssignRef(AllocateUninitialized(old_space, | |
| 577 Function::InstanceSize())); | |
| 578 } | |
| 579 stop_index_ = d->next_index(); | |
| 580 } | |
| 581 | |
| 582 void ReadFill(Deserializer* d) { | |
| 583 Snapshot::Kind kind = d->kind(); | |
| 584 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 585 | |
| 586 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 587 RawFunction* func = reinterpret_cast<RawFunction*>(d->Ref(id)); | |
| 588 Deserializer::InitializeHeader(func, kFunctionCid, | |
| 589 Function::InstanceSize(), is_vm_object); | |
| 590 RawObject** from = func->from(); | |
| 591 RawObject** to_snapshot = func->to_snapshot(); | |
| 592 RawObject** to = func->to(); | |
| 593 for (RawObject** p = from; p <= to_snapshot; p++) { | |
| 594 *p = d->ReadRef(); | |
| 595 } | |
| 596 for (RawObject** p = to_snapshot + 1; p <= to; p++) { | |
| 597 *p = Object::null(); | |
| 598 } | |
| 599 if (kind == Snapshot::kAppNoJIT) { | |
| 600 func->ptr()->code_ = reinterpret_cast<RawCode*>(d->ReadRef()); | |
| 601 } else if (kind == Snapshot::kAppWithJIT) { | |
| 602 func->ptr()->unoptimized_code_ = | |
| 603 reinterpret_cast<RawCode*>(d->ReadRef()); | |
| 604 func->ptr()->ic_data_array_ = reinterpret_cast<RawArray*>(d->ReadRef()); | |
| 605 } | |
| 606 | |
| 607 #if defined(DEBUG) | |
| 608 func->ptr()->entry_point_ = 0; | |
| 609 #endif | |
| 610 | |
| 611 func->ptr()->token_pos_ = d->ReadTokenPosition(); | |
| 612 func->ptr()->end_token_pos_ = d->ReadTokenPosition(); | |
| 613 func->ptr()->num_fixed_parameters_ = d->Read<int16_t>(); | |
| 614 func->ptr()->num_optional_parameters_ = d->Read<int16_t>(); | |
| 615 func->ptr()->kind_tag_ = d->Read<uint32_t>(); | |
| 616 if (kind == Snapshot::kAppNoJIT) { | |
| 617 // Omit fields used to support de/reoptimization. | |
| 618 } else { | |
| 619 func->ptr()->usage_counter_ = d->Read<int32_t>(); | |
| 620 func->ptr()->deoptimization_counter_ = d->Read<int8_t>(); | |
| 621 func->ptr()->optimized_instruction_count_ = d->Read<uint16_t>(); | |
| 622 func->ptr()->optimized_call_site_count_ = d->Read<uint16_t>(); | |
| 623 } | |
| 624 } | |
| 625 } | |
| 626 | |
| 627 void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { | |
| 628 NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(), | |
| 629 Timeline::GetIsolateStream(), "PostLoadFunction")); | |
| 630 | |
| 631 if (kind == Snapshot::kAppNoJIT) { | |
| 632 Function& func = Function::Handle(zone); | |
| 633 for (intptr_t i = start_index_; i < stop_index_; i++) { | |
| 634 func ^= refs.At(i); | |
| 635 ASSERT(func.raw()->ptr()->code_->IsCode()); | |
| 636 uword entry_point = func.raw()->ptr()->code_->ptr()->entry_point_; | |
| 637 ASSERT(entry_point != 0); | |
| 638 func.raw()->ptr()->entry_point_ = entry_point; | |
| 639 } | |
| 640 } else if (kind == Snapshot::kAppWithJIT) { | |
| 641 Function& func = Function::Handle(zone); | |
| 642 Code& code = Code::Handle(zone); | |
| 643 for (intptr_t i = start_index_; i < stop_index_; i++) { | |
| 644 func ^= refs.At(i); | |
| 645 code ^= func.unoptimized_code(); | |
| 646 if (!code.IsNull()) { | |
| 647 func.SetInstructions(code); | |
| 648 func.set_was_compiled(true); | |
| 649 } else { | |
| 650 func.ClearCode(); | |
| 651 func.set_was_compiled(false); | |
| 652 } | |
| 653 } | |
| 654 } else { | |
| 655 Function& func = Function::Handle(zone); | |
| 656 for (intptr_t i = start_index_; i < stop_index_; i++) { | |
| 657 func ^= refs.At(i); | |
| 658 func.ClearICDataArray(); | |
| 659 func.ClearCode(); | |
| 660 func.set_was_compiled(false); | |
| 661 } | |
| 662 } | |
| 663 } | |
| 664 }; | |
| 665 | |
| 666 class ClosureDataSerializationCluster : public SerializationCluster { | |
| 667 public: | |
| 668 ClosureDataSerializationCluster() { } | |
| 669 virtual ~ClosureDataSerializationCluster() { } | |
| 670 | |
| 671 void Trace(Serializer* s, RawObject* object) { | |
| 672 RawClosureData* data = ClosureData::RawCast(object); | |
| 673 objects_.Add(data); | |
| 674 | |
| 675 RawObject** from = data->from(); | |
| 676 RawObject** to = data->to(); | |
| 677 for (RawObject** p = from; p <= to; p++) { | |
| 678 s->Push(*p); | |
| 679 } | |
| 680 } | |
| 681 | |
| 682 void WriteAlloc(Serializer* s) { | |
| 683 s->WriteCid(kClosureDataCid); | |
| 684 intptr_t count = objects_.length(); | |
| 685 s->Write<intptr_t>(count); | |
| 686 for (intptr_t i = 0; i < count; i++) { | |
| 687 RawClosureData* data = objects_[i]; | |
| 688 s->AssignRef(data); | |
| 689 } | |
| 690 } | |
| 691 | |
| 692 void WriteFill(Serializer* s) { | |
| 693 intptr_t count = objects_.length(); | |
| 694 for (intptr_t i = 0; i < count; i++) { | |
| 695 RawClosureData* data = objects_[i]; | |
| 696 RawObject** from = data->from(); | |
| 697 RawObject** to = data->to(); | |
| 698 for (RawObject** p = from; p <= to; p++) { | |
| 699 s->WriteRef(*p); | |
| 700 } | |
| 701 } | |
| 702 } | |
| 703 | |
| 704 private: | |
| 705 GrowableArray<RawClosureData*> objects_; | |
| 706 }; | |
| 707 | |
| 708 class ClosureDataDeserializationCluster : public DeserializationCluster { | |
| 709 public: | |
| 710 ClosureDataDeserializationCluster() { } | |
| 711 virtual ~ClosureDataDeserializationCluster() { } | |
| 712 | |
| 713 void ReadAlloc(Deserializer* d) { | |
| 714 start_index_ = d->next_index(); | |
| 715 PageSpace* old_space = d->heap()->old_space(); | |
| 716 intptr_t count = d->Read<intptr_t>(); | |
| 717 for (intptr_t i = 0; i < count; i++) { | |
| 718 d->AssignRef(AllocateUninitialized(old_space, | |
| 719 ClosureData::InstanceSize())); | |
| 720 } | |
| 721 stop_index_ = d->next_index(); | |
| 722 } | |
| 723 | |
| 724 void ReadFill(Deserializer* d) { | |
| 725 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 726 | |
| 727 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 728 RawClosureData* data = reinterpret_cast<RawClosureData*>(d->Ref(id)); | |
| 729 Deserializer::InitializeHeader(data, kClosureDataCid, | |
| 730 ClosureData::InstanceSize(), is_vm_object); | |
| 731 RawObject** from = data->from(); | |
| 732 RawObject** to = data->to(); | |
| 733 for (RawObject** p = from; p <= to; p++) { | |
| 734 *p = d->ReadRef(); | |
| 735 } | |
| 736 } | |
| 737 } | |
| 738 }; | |
| 739 | |
| 740 class RedirectionDataSerializationCluster : public SerializationCluster { | |
| 741 public: | |
| 742 RedirectionDataSerializationCluster() { } | |
| 743 virtual ~RedirectionDataSerializationCluster() { } | |
| 744 | |
| 745 void Trace(Serializer* s, RawObject* object) { | |
| 746 RawRedirectionData* data = RedirectionData::RawCast(object); | |
| 747 objects_.Add(data); | |
| 748 | |
| 749 RawObject** from = data->from(); | |
| 750 RawObject** to = data->to(); | |
| 751 for (RawObject** p = from; p <= to; p++) { | |
| 752 s->Push(*p); | |
| 753 } | |
| 754 } | |
| 755 | |
| 756 void WriteAlloc(Serializer* s) { | |
| 757 s->WriteCid(kRedirectionDataCid); | |
| 758 intptr_t count = objects_.length(); | |
| 759 s->Write<intptr_t>(count); | |
| 760 for (intptr_t i = 0; i < count; i++) { | |
| 761 RawRedirectionData* data = objects_[i]; | |
| 762 s->AssignRef(data); | |
| 763 } | |
| 764 } | |
| 765 | |
| 766 void WriteFill(Serializer* s) { | |
| 767 intptr_t count = objects_.length(); | |
| 768 for (intptr_t i = 0; i < count; i++) { | |
| 769 RawRedirectionData* data = objects_[i]; | |
| 770 RawObject** from = data->from(); | |
| 771 RawObject** to = data->to(); | |
| 772 for (RawObject** p = from; p <= to; p++) { | |
| 773 s->WriteRef(*p); | |
| 774 } | |
| 775 } | |
| 776 } | |
| 777 | |
| 778 private: | |
| 779 GrowableArray<RawRedirectionData*> objects_; | |
| 780 }; | |
| 781 | |
| 782 class RedirectionDataDeserializationCluster : public DeserializationCluster { | |
| 783 public: | |
| 784 RedirectionDataDeserializationCluster() { } | |
| 785 virtual ~RedirectionDataDeserializationCluster() { } | |
| 786 | |
| 787 void ReadAlloc(Deserializer* d) { | |
| 788 start_index_ = d->next_index(); | |
| 789 PageSpace* old_space = d->heap()->old_space(); | |
| 790 intptr_t count = d->Read<intptr_t>(); | |
| 791 for (intptr_t i = 0; i < count; i++) { | |
| 792 d->AssignRef(AllocateUninitialized(old_space, | |
| 793 RedirectionData::InstanceSize())); | |
| 794 } | |
| 795 stop_index_ = d->next_index(); | |
| 796 } | |
| 797 | |
| 798 void ReadFill(Deserializer* d) { | |
| 799 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 800 | |
| 801 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 802 RawRedirectionData* data = | |
| 803 reinterpret_cast<RawRedirectionData*>(d->Ref(id)); | |
| 804 Deserializer::InitializeHeader(data, kRedirectionDataCid, | |
| 805 RedirectionData::InstanceSize(), | |
| 806 is_vm_object); | |
| 807 RawObject** from = data->from(); | |
| 808 RawObject** to = data->to(); | |
| 809 for (RawObject** p = from; p <= to; p++) { | |
| 810 *p = d->ReadRef(); | |
| 811 } | |
| 812 } | |
| 813 } | |
| 814 }; | |
| 815 | |
| 816 class FieldSerializationCluster : public SerializationCluster { | |
| 817 public: | |
| 818 FieldSerializationCluster() { } | |
| 819 virtual ~FieldSerializationCluster() { } | |
| 820 | |
| 821 void Trace(Serializer* s, RawObject* object) { | |
| 822 RawField* field = Field::RawCast(object); | |
| 823 objects_.Add(field); | |
| 824 | |
| 825 Snapshot::Kind kind = s->kind(); | |
| 826 | |
| 827 s->Push(field->ptr()->name_); | |
| 828 s->Push(field->ptr()->owner_); | |
| 829 s->Push(field->ptr()->type_); | |
| 830 // Write out the initial static value or field offset. | |
| 831 if (Field::StaticBit::decode(field->ptr()->kind_bits_)) { | |
| 832 if (kind == Snapshot::kAppNoJIT) { | |
| 833 // For precompiled static fields, the value was already reset and | |
| 834 // initializer_ now contains a Function. | |
| 835 s->Push(field->ptr()->value_.static_value_); | |
| 836 } else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) { | |
| 837 // Do not reset const fields. | |
| 838 s->Push(field->ptr()->value_.static_value_); | |
| 839 } else { | |
| 840 // Otherwise, for static fields we write out the initial static value. | |
| 841 s->Push(field->ptr()->initializer_.saved_value_); | |
| 842 } | |
| 843 } else { | |
| 844 s->Push(field->ptr()->value_.offset_); | |
| 845 } | |
| 846 // Write out the initializer function or saved initial value. | |
| 847 if (kind == Snapshot::kAppNoJIT) { | |
| 848 s->Push(field->ptr()->initializer_.precompiled_); | |
| 849 } else { | |
| 850 s->Push(field->ptr()->initializer_.saved_value_); | |
| 851 } | |
| 852 if (kind != Snapshot::kAppNoJIT) { | |
| 853 // Write out the guarded list length. | |
| 854 s->Push(field->ptr()->guarded_list_length_); | |
| 855 } | |
| 856 } | |
| 857 | |
| 858 void WriteAlloc(Serializer* s) { | |
| 859 s->WriteCid(kFieldCid); | |
| 860 intptr_t count = objects_.length(); | |
| 861 s->Write<intptr_t>(count); | |
| 862 for (intptr_t i = 0; i < count; i++) { | |
| 863 RawField* field = objects_[i]; | |
| 864 s->AssignRef(field); | |
| 865 } | |
| 866 } | |
| 867 | |
| 868 void WriteFill(Serializer* s) { | |
| 869 Snapshot::Kind kind = s->kind(); | |
| 870 intptr_t count = objects_.length(); | |
| 871 for (intptr_t i = 0; i < count; i++) { | |
| 872 RawField* field = objects_[i]; | |
| 873 | |
| 874 s->WriteRef(field->ptr()->name_); | |
| 875 s->WriteRef(field->ptr()->owner_); | |
| 876 s->WriteRef(field->ptr()->type_); | |
| 877 // Write out the initial static value or field offset. | |
| 878 if (Field::StaticBit::decode(field->ptr()->kind_bits_)) { | |
| 879 if (kind == Snapshot::kAppNoJIT) { | |
| 880 // For precompiled static fields, the value was already reset and | |
| 881 // initializer_ now contains a Function. | |
| 882 s->WriteRef(field->ptr()->value_.static_value_); | |
| 883 } else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) { | |
| 884 // Do not reset const fields. | |
| 885 s->WriteRef(field->ptr()->value_.static_value_); | |
| 886 } else { | |
| 887 // Otherwise, for static fields we write out the initial static value. | |
| 888 s->WriteRef(field->ptr()->initializer_.saved_value_); | |
| 889 } | |
| 890 } else { | |
| 891 s->WriteRef(field->ptr()->value_.offset_); | |
| 892 } | |
| 893 // Write out the initializer function or saved initial value. | |
| 894 if (kind == Snapshot::kAppNoJIT) { | |
| 895 s->WriteRef(field->ptr()->initializer_.precompiled_); | |
| 896 } else { | |
| 897 s->WriteRef(field->ptr()->initializer_.saved_value_); | |
| 898 } | |
| 899 if (kind != Snapshot::kAppNoJIT) { | |
| 900 // Write out the guarded list length. | |
| 901 s->WriteRef(field->ptr()->guarded_list_length_); | |
| 902 } | |
| 903 | |
| 904 if (kind != Snapshot::kAppNoJIT) { | |
| 905 s->WriteTokenPosition(field->ptr()->token_pos_); | |
| 906 s->WriteCid(field->ptr()->guarded_cid_); | |
| 907 s->WriteCid(field->ptr()->is_nullable_); | |
| 908 } | |
| 909 s->Write<uint8_t>(field->ptr()->kind_bits_); | |
| 910 } | |
| 911 } | |
| 912 | |
| 913 private: | |
| 914 GrowableArray<RawField*> objects_; | |
| 915 }; | |
|
siva
2016/06/30 00:06:08
two blank lines between all these classes in this file, please.
rmacnak
2016/06/30 01:39:21
Done.
| |
| 916 | |
// Recreates Field objects from a clustered snapshot.
class FieldDeserializationCluster : public DeserializationCluster {
 public:
  FieldDeserializationCluster() { }
  virtual ~FieldDeserializationCluster() { }

  // Allocates one uninitialized Field per serialized object and registers
  // each allocation in the deserializer's ref table.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space, Field::InstanceSize()));
    }
    stop_index_ = d->next_index();
  }

  // Initializes each field's header, reads back the pointer fields that the
  // serializer included for this snapshot kind, nulls the rest, then reads
  // the trailing scalar state. Must mirror the write order in
  // FieldSerializationCluster::WriteFill.
  void ReadFill(Deserializer* d) {
    Snapshot::Kind kind = d->kind();
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawField* field = reinterpret_cast<RawField*>(d->Ref(id));
      Deserializer::InitializeHeader(field, kFieldCid,
                                     Field::InstanceSize(), is_vm_object);
      RawObject** from = field->from();
      RawObject** to_snapshot = field->to_snapshot(kind);
      RawObject** to = field->to();
      // Pointer fields present in this snapshot kind come from the stream.
      for (RawObject** p = from; p <= to_snapshot; p++) {
        *p = d->ReadRef();
      }
      // Pointer fields beyond the snapshot cutoff are cleared.
      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
        *p = Object::null();
      }

      // Precompiled (kAppNoJIT) snapshots omit token position and guard
      // state, so only read them back for other kinds.
      if (kind != Snapshot::kAppNoJIT) {
        field->ptr()->token_pos_ = d->ReadTokenPosition();
        field->ptr()->guarded_cid_ = d->ReadCid();
        field->ptr()->is_nullable_ = d->ReadCid();
      }
      field->ptr()->kind_bits_ = d->Read<uint8_t>();
    }
  }

  // After all refs resolve: with field guards disabled, widen every guard
  // so it accepts any value; otherwise recompute the in-object offset used
  // for guarded list-length checks.
  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
        Timeline::GetIsolateStream(), "PostLoadField"));

    Field& field = Field::Handle(zone);
    if (!FLAG_use_field_guards) {
      for (intptr_t i = start_index_; i < stop_index_; i++) {
        field ^= refs.At(i);
        field.set_guarded_cid(kDynamicCid);
        field.set_is_nullable(true);
        field.set_guarded_list_length(Field::kNoFixedLength);
        field.set_guarded_list_length_in_object_offset(
            Field::kUnknownLengthOffset);
      }
    } else {
      for (intptr_t i = start_index_; i < stop_index_; i++) {
        field ^= refs.At(i);
        field.InitializeGuardedListLengthInObjectOffset();
      }
    }
  }
};
| 981 | |
| 982 class LiteralTokenSerializationCluster : public SerializationCluster { | |
| 983 public: | |
| 984 LiteralTokenSerializationCluster() { } | |
| 985 virtual ~LiteralTokenSerializationCluster() { } | |
| 986 | |
| 987 void Trace(Serializer* s, RawObject* object) { | |
| 988 RawLiteralToken* token = LiteralToken::RawCast(object); | |
| 989 objects_.Add(token); | |
| 990 | |
| 991 RawObject** from = token->from(); | |
| 992 RawObject** to = token->to(); | |
| 993 for (RawObject** p = from; p <= to; p++) { | |
| 994 s->Push(*p); | |
| 995 } | |
| 996 } | |
| 997 | |
| 998 void WriteAlloc(Serializer* s) { | |
| 999 s->WriteCid(kLiteralTokenCid); | |
| 1000 intptr_t count = objects_.length(); | |
| 1001 s->Write<intptr_t>(count); | |
| 1002 for (intptr_t i = 0; i < count; i++) { | |
| 1003 RawLiteralToken* token = objects_[i]; | |
| 1004 s->AssignRef(token); | |
| 1005 } | |
| 1006 } | |
| 1007 | |
| 1008 void WriteFill(Serializer* s) { | |
| 1009 intptr_t count = objects_.length(); | |
| 1010 for (intptr_t i = 0; i < count; i++) { | |
| 1011 RawLiteralToken* token = objects_[i]; | |
| 1012 RawObject** from = token->from(); | |
| 1013 RawObject** to = token->to(); | |
| 1014 for (RawObject** p = from; p <= to; p++) { | |
| 1015 s->WriteRef(*p); | |
| 1016 } | |
| 1017 s->Write<int32_t>(token->ptr()->kind_); | |
| 1018 } | |
| 1019 } | |
| 1020 | |
| 1021 private: | |
| 1022 GrowableArray<RawLiteralToken*> objects_; | |
| 1023 }; | |
| 1024 | |
| 1025 class LiteralTokenDeserializationCluster : public DeserializationCluster { | |
| 1026 public: | |
| 1027 LiteralTokenDeserializationCluster() { } | |
| 1028 virtual ~LiteralTokenDeserializationCluster() { } | |
| 1029 | |
| 1030 void ReadAlloc(Deserializer* d) { | |
| 1031 start_index_ = d->next_index(); | |
| 1032 PageSpace* old_space = d->heap()->old_space(); | |
| 1033 intptr_t count = d->Read<intptr_t>(); | |
| 1034 for (intptr_t i = 0; i < count; i++) { | |
| 1035 d->AssignRef(AllocateUninitialized(old_space, | |
| 1036 LiteralToken::InstanceSize())); | |
| 1037 } | |
| 1038 stop_index_ = d->next_index(); | |
| 1039 } | |
| 1040 | |
| 1041 void ReadFill(Deserializer* d) { | |
| 1042 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 1043 | |
| 1044 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 1045 RawLiteralToken* token = reinterpret_cast<RawLiteralToken*>(d->Ref(id)); | |
| 1046 Deserializer::InitializeHeader(token, kLiteralTokenCid, | |
| 1047 LiteralToken::InstanceSize(), | |
| 1048 is_vm_object); | |
| 1049 RawObject** from = token->from(); | |
| 1050 RawObject** to = token->to(); | |
| 1051 for (RawObject** p = from; p <= to; p++) { | |
| 1052 *p = d->ReadRef(); | |
| 1053 } | |
| 1054 token->ptr()->kind_ = static_cast<Token::Kind>(d->Read<int32_t>()); | |
| 1055 } | |
| 1056 } | |
| 1057 }; | |
| 1058 | |
| 1059 class TokenStreamSerializationCluster : public SerializationCluster { | |
| 1060 public: | |
| 1061 TokenStreamSerializationCluster() { } | |
| 1062 virtual ~TokenStreamSerializationCluster() { } | |
| 1063 | |
| 1064 void Trace(Serializer* s, RawObject* object) { | |
| 1065 RawTokenStream* stream = TokenStream::RawCast(object); | |
| 1066 objects_.Add(stream); | |
| 1067 | |
| 1068 RawObject** from = stream->from(); | |
| 1069 RawObject** to = stream->to(); | |
| 1070 for (RawObject** p = from; p <= to; p++) { | |
| 1071 s->Push(*p); | |
| 1072 } | |
| 1073 } | |
| 1074 | |
| 1075 void WriteAlloc(Serializer* s) { | |
| 1076 s->WriteCid(kTokenStreamCid); | |
| 1077 intptr_t count = objects_.length(); | |
| 1078 s->Write<intptr_t>(count); | |
| 1079 for (intptr_t i = 0; i < count; i++) { | |
| 1080 RawTokenStream* stream = objects_[i]; | |
| 1081 s->AssignRef(stream); | |
| 1082 } | |
| 1083 } | |
| 1084 | |
| 1085 void WriteFill(Serializer* s) { | |
| 1086 intptr_t count = objects_.length(); | |
| 1087 for (intptr_t i = 0; i < count; i++) { | |
| 1088 RawTokenStream* stream = objects_[i]; | |
| 1089 RawObject** from = stream->from(); | |
| 1090 RawObject** to = stream->to(); | |
| 1091 for (RawObject** p = from; p <= to; p++) { | |
| 1092 s->WriteRef(*p); | |
| 1093 } | |
| 1094 } | |
| 1095 } | |
| 1096 | |
| 1097 private: | |
| 1098 GrowableArray<RawTokenStream*> objects_; | |
| 1099 }; | |
| 1100 | |
| 1101 class TokenStreamDeserializationCluster : public DeserializationCluster { | |
| 1102 public: | |
| 1103 TokenStreamDeserializationCluster() { } | |
| 1104 virtual ~TokenStreamDeserializationCluster() { } | |
| 1105 | |
| 1106 void ReadAlloc(Deserializer* d) { | |
| 1107 start_index_ = d->next_index(); | |
| 1108 PageSpace* old_space = d->heap()->old_space(); | |
| 1109 intptr_t count = d->Read<intptr_t>(); | |
| 1110 for (intptr_t i = 0; i < count; i++) { | |
| 1111 d->AssignRef(AllocateUninitialized(old_space, | |
| 1112 TokenStream::InstanceSize())); | |
| 1113 } | |
| 1114 stop_index_ = d->next_index(); | |
| 1115 } | |
| 1116 | |
| 1117 void ReadFill(Deserializer* d) { | |
| 1118 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 1119 | |
| 1120 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 1121 RawTokenStream* stream = reinterpret_cast<RawTokenStream*>(d->Ref(id)); | |
| 1122 Deserializer::InitializeHeader(stream, kTokenStreamCid, | |
| 1123 TokenStream::InstanceSize(), is_vm_object); | |
| 1124 RawObject** from = stream->from(); | |
| 1125 RawObject** to = stream->to(); | |
| 1126 for (RawObject** p = from; p <= to; p++) { | |
| 1127 *p = d->ReadRef(); | |
| 1128 } | |
| 1129 } | |
| 1130 } | |
| 1131 }; | |
| 1132 | |
| 1133 class ScriptSerializationCluster : public SerializationCluster { | |
| 1134 public: | |
| 1135 ScriptSerializationCluster() { } | |
| 1136 virtual ~ScriptSerializationCluster() { } | |
| 1137 | |
| 1138 void Trace(Serializer* s, RawObject* object) { | |
| 1139 RawScript* script = Script::RawCast(object); | |
| 1140 objects_.Add(script); | |
| 1141 | |
| 1142 RawObject** from = script->from(); | |
| 1143 RawObject** to = script->to_snapshot(s->kind()); | |
| 1144 for (RawObject** p = from; p <= to; p++) { | |
| 1145 s->Push(*p); | |
| 1146 } | |
| 1147 } | |
| 1148 | |
| 1149 void WriteAlloc(Serializer* s) { | |
| 1150 s->WriteCid(kScriptCid); | |
| 1151 intptr_t count = objects_.length(); | |
| 1152 s->Write<intptr_t>(count); | |
| 1153 for (intptr_t i = 0; i < count; i++) { | |
| 1154 RawScript* script = objects_[i]; | |
| 1155 s->AssignRef(script); | |
| 1156 } | |
| 1157 } | |
| 1158 | |
| 1159 void WriteFill(Serializer* s) { | |
| 1160 Snapshot::Kind kind = s->kind(); | |
| 1161 intptr_t count = objects_.length(); | |
| 1162 for (intptr_t i = 0; i < count; i++) { | |
| 1163 RawScript* script = objects_[i]; | |
| 1164 RawObject** from = script->from(); | |
| 1165 RawObject** to = script->to_snapshot(kind); | |
| 1166 for (RawObject** p = from; p <= to; p++) { | |
| 1167 s->WriteRef(*p); | |
| 1168 } | |
| 1169 | |
| 1170 s->Write<int32_t>(script->ptr()->line_offset_); | |
| 1171 s->Write<int32_t>(script->ptr()->col_offset_); | |
| 1172 s->Write<int8_t>(script->ptr()->kind_); | |
| 1173 } | |
| 1174 } | |
| 1175 | |
| 1176 private: | |
| 1177 GrowableArray<RawScript*> objects_; | |
| 1178 }; | |
| 1179 | |
| 1180 | |
| 1181 class ScriptDeserializationCluster : public DeserializationCluster { | |
| 1182 public: | |
| 1183 ScriptDeserializationCluster() { } | |
| 1184 virtual ~ScriptDeserializationCluster() { } | |
| 1185 | |
| 1186 void ReadAlloc(Deserializer* d) { | |
| 1187 start_index_ = d->next_index(); | |
| 1188 PageSpace* old_space = d->heap()->old_space(); | |
| 1189 intptr_t count = d->Read<intptr_t>(); | |
| 1190 for (intptr_t i = 0; i < count; i++) { | |
| 1191 d->AssignRef(AllocateUninitialized(old_space, Script::InstanceSize())); | |
| 1192 } | |
| 1193 stop_index_ = d->next_index(); | |
| 1194 } | |
| 1195 | |
| 1196 void ReadFill(Deserializer* d) { | |
| 1197 Snapshot::Kind kind = d->kind(); | |
| 1198 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 1199 | |
| 1200 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 1201 RawScript* script = reinterpret_cast<RawScript*>(d->Ref(id)); | |
| 1202 Deserializer::InitializeHeader(script, kScriptCid, | |
| 1203 Script::InstanceSize(), is_vm_object); | |
| 1204 RawObject** from = script->from(); | |
| 1205 RawObject** to_snapshot = script->to_snapshot(kind); | |
| 1206 RawObject** to = script->to(); | |
| 1207 for (RawObject** p = from; p <= to_snapshot; p++) { | |
| 1208 *p = d->ReadRef(); | |
| 1209 } | |
| 1210 for (RawObject** p = to_snapshot + 1; p <= to; p++) { | |
| 1211 *p = Object::null(); | |
| 1212 } | |
| 1213 | |
| 1214 script->ptr()->line_offset_ = d->Read<int32_t>(); | |
| 1215 script->ptr()->col_offset_ = d->Read<int32_t>(); | |
| 1216 script->ptr()->kind_ = d->Read<int8_t>(); | |
| 1217 script->ptr()->load_timestamp_ = 0; | |
| 1218 } | |
| 1219 } | |
| 1220 }; | |
| 1221 | |
| 1222 | |
| 1223 class LibrarySerializationCluster : public SerializationCluster { | |
| 1224 public: | |
| 1225 LibrarySerializationCluster() { } | |
| 1226 virtual ~LibrarySerializationCluster() { } | |
| 1227 | |
| 1228 void Trace(Serializer* s, RawObject* object) { | |
| 1229 RawLibrary* lib = Library::RawCast(object); | |
| 1230 objects_.Add(lib); | |
| 1231 | |
| 1232 RawObject** from = lib->from(); | |
| 1233 RawObject** to = lib->to_snapshot(); | |
| 1234 for (RawObject** p = from; p <= to; p++) { | |
| 1235 s->Push(*p); | |
| 1236 } | |
| 1237 } | |
| 1238 | |
| 1239 void WriteAlloc(Serializer* s) { | |
| 1240 s->WriteCid(kLibraryCid); | |
| 1241 intptr_t count = objects_.length(); | |
| 1242 s->Write<intptr_t>(count); | |
| 1243 for (intptr_t i = 0; i < count; i++) { | |
| 1244 RawLibrary* lib = objects_[i]; | |
| 1245 s->AssignRef(lib); | |
| 1246 } | |
| 1247 } | |
| 1248 | |
| 1249 void WriteFill(Serializer* s) { | |
| 1250 intptr_t count = objects_.length(); | |
| 1251 for (intptr_t i = 0; i < count; i++) { | |
| 1252 RawLibrary* lib = objects_[i]; | |
| 1253 RawObject** from = lib->from(); | |
| 1254 RawObject** to = lib->to_snapshot(); | |
| 1255 for (RawObject** p = from; p <= to; p++) { | |
| 1256 s->WriteRef(*p); | |
| 1257 } | |
| 1258 | |
| 1259 s->Write<int32_t>(lib->ptr()->index_); | |
| 1260 s->Write<uint16_t>(lib->ptr()->num_imports_); | |
| 1261 s->Write<int8_t>(lib->ptr()->load_state_); | |
| 1262 s->Write<bool>(lib->ptr()->corelib_imported_); | |
| 1263 s->Write<bool>(lib->ptr()->is_dart_scheme_); | |
| 1264 s->Write<bool>(lib->ptr()->debuggable_); | |
| 1265 } | |
| 1266 } | |
| 1267 | |
| 1268 private: | |
| 1269 GrowableArray<RawLibrary*> objects_; | |
| 1270 }; | |
| 1271 | |
// Recreates Library objects from a clustered snapshot.
class LibraryDeserializationCluster : public DeserializationCluster {
 public:
  LibraryDeserializationCluster() { }
  virtual ~LibraryDeserializationCluster() { }

  // Allocates one uninitialized Library per serialized object and registers
  // each allocation in the deserializer's ref table.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space, Library::InstanceSize()));
    }
    stop_index_ = d->next_index();
  }

  // Initializes headers, reads back the snapshot-visible pointer fields,
  // nulls the rest, and restores scalar state. Native resolvers are runtime
  // state and are reset; a deserialized library is by definition part of a
  // full snapshot.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawLibrary* lib = reinterpret_cast<RawLibrary*>(d->Ref(id));
      Deserializer::InitializeHeader(lib, kLibraryCid,
                                     Library::InstanceSize(), is_vm_object);
      RawObject** from = lib->from();
      RawObject** to_snapshot = lib->to_snapshot();
      RawObject** to = lib->to();
      // Pointer fields included in the snapshot come from the stream.
      for (RawObject** p = from; p <= to_snapshot; p++) {
        *p = d->ReadRef();
      }
      // Pointer fields beyond the snapshot cutoff are cleared.
      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
        *p = Object::null();
      }

      lib->ptr()->native_entry_resolver_ = NULL;
      lib->ptr()->native_entry_symbol_resolver_ = NULL;
      lib->ptr()->index_ = d->Read<int32_t>();
      lib->ptr()->num_imports_ = d->Read<uint16_t>();
      lib->ptr()->load_state_ = d->Read<int8_t>();
      lib->ptr()->corelib_imported_ = d->Read<bool>();
      lib->ptr()->is_dart_scheme_ = d->Read<bool>();
      lib->ptr()->debuggable_ = d->Read<bool>();
      lib->ptr()->is_in_fullsnapshot_ = true;
    }
  }

  // After all refs resolve, rebuild each library's resolved-names cache,
  // which is not serialized.
  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
    // TODO(rmacnak): This is surprisingly slow, roughly 20% of deserialization
    // time for the JIT. Maybe make the lookups happy with a null?

    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
        Timeline::GetIsolateStream(), "PostLoadLibrary"));

    Library& lib = Library::Handle(zone);
    for (intptr_t i = start_index_; i < stop_index_; i++) {
      lib ^= refs.At(i);
      const intptr_t kInitialNameCacheSize = 64;
      lib.InitResolvedNamesCache(kInitialNameCacheSize);
    }
  }
};
| 1331 | |
| 1332 class NamespaceSerializationCluster : public SerializationCluster { | |
| 1333 public: | |
| 1334 NamespaceSerializationCluster() { } | |
| 1335 virtual ~NamespaceSerializationCluster() { } | |
| 1336 | |
| 1337 void Trace(Serializer* s, RawObject* object) { | |
| 1338 RawNamespace* ns = Namespace::RawCast(object); | |
| 1339 objects_.Add(ns); | |
| 1340 | |
| 1341 RawObject** from = ns->from(); | |
| 1342 RawObject** to = ns->to(); | |
| 1343 for (RawObject** p = from; p <= to; p++) { | |
| 1344 s->Push(*p); | |
| 1345 } | |
| 1346 } | |
| 1347 | |
| 1348 void WriteAlloc(Serializer* s) { | |
| 1349 s->WriteCid(kNamespaceCid); | |
| 1350 intptr_t count = objects_.length(); | |
| 1351 s->Write<intptr_t>(count); | |
| 1352 for (intptr_t i = 0; i < count; i++) { | |
| 1353 RawNamespace* ns = objects_[i]; | |
| 1354 s->AssignRef(ns); | |
| 1355 } | |
| 1356 } | |
| 1357 | |
| 1358 void WriteFill(Serializer* s) { | |
| 1359 intptr_t count = objects_.length(); | |
| 1360 for (intptr_t i = 0; i < count; i++) { | |
| 1361 RawNamespace* ns = objects_[i]; | |
| 1362 RawObject** from = ns->from(); | |
| 1363 RawObject** to = ns->to(); | |
| 1364 for (RawObject** p = from; p <= to; p++) { | |
| 1365 s->WriteRef(*p); | |
| 1366 } | |
| 1367 } | |
| 1368 } | |
| 1369 | |
| 1370 private: | |
| 1371 GrowableArray<RawNamespace*> objects_; | |
| 1372 }; | |
| 1373 | |
| 1374 class NamespaceDeserializationCluster : public DeserializationCluster { | |
| 1375 public: | |
| 1376 NamespaceDeserializationCluster() { } | |
| 1377 virtual ~NamespaceDeserializationCluster() { } | |
| 1378 | |
| 1379 void ReadAlloc(Deserializer* d) { | |
| 1380 start_index_ = d->next_index(); | |
| 1381 PageSpace* old_space = d->heap()->old_space(); | |
| 1382 intptr_t count = d->Read<intptr_t>(); | |
| 1383 for (intptr_t i = 0; i < count; i++) { | |
| 1384 d->AssignRef(AllocateUninitialized(old_space, Namespace::InstanceSize())); | |
| 1385 } | |
| 1386 stop_index_ = d->next_index(); | |
| 1387 } | |
| 1388 | |
| 1389 void ReadFill(Deserializer* d) { | |
| 1390 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 1391 | |
| 1392 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 1393 RawNamespace* ns = reinterpret_cast<RawNamespace*>(d->Ref(id)); | |
| 1394 Deserializer::InitializeHeader(ns, kNamespaceCid, | |
| 1395 Namespace::InstanceSize(), is_vm_object); | |
| 1396 RawObject** from = ns->from(); | |
| 1397 RawObject** to = ns->to(); | |
| 1398 for (RawObject** p = from; p <= to; p++) { | |
| 1399 *p = d->ReadRef(); | |
| 1400 } | |
| 1401 } | |
| 1402 } | |
| 1403 }; | |
| 1404 | |
// Serializes Code objects into a cluster. Instructions are not written
// inline; each Code records an offset into the shared instructions image.
class CodeSerializationCluster : public SerializationCluster {
 public:
  CodeSerializationCluster() { }
  virtual ~CodeSerializationCluster() { }

  // Records the code object and pushes the metadata objects that WriteFill
  // will reference. Instructions are handled separately via GetTextOffset.
  void Trace(Serializer* s, RawObject* object) {
    RawCode* code = Code::RawCast(object);
    objects_.Add(code);

    s->Push(code->ptr()->object_pool_);
    s->Push(code->ptr()->owner_);
    s->Push(code->ptr()->exception_handlers_);
    s->Push(code->ptr()->pc_descriptors_);
    s->Push(code->ptr()->stackmaps_);
  }

  // Writes the cluster cid and object count, assigning one ref id per code.
  void WriteAlloc(Serializer* s) {
    s->WriteCid(kCodeCid);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawCode* code = objects_[i];
      s->AssignRef(code);
    }
  }

  // Writes each code's instructions offset, metadata refs, and state bits.
  // Rejects code that cannot be safely shared across a snapshot boundary.
  void WriteFill(Serializer* s) {
    Snapshot::Kind kind = s->kind();
    intptr_t count = objects_.length();
    for (intptr_t i = 0; i < count; i++) {
      RawCode* code = objects_[i];

      // Embedded pointers would need relocation at load time, which the
      // clustered format does not support.
      intptr_t pointer_offsets_length =
          Code::PtrOffBits::decode(code->ptr()->state_bits_);
      if (pointer_offsets_length != 0) {
        FATAL("Cannot serialize code with embedded pointers");
      }
      if (kind == Snapshot::kAppNoJIT) {
        // No disabled code in precompilation.
        ASSERT(code->ptr()->instructions_ == code->ptr()->active_instructions_);
      } else {
        ASSERT(kind == Snapshot::kAppWithJIT);
        // We never include optimized code in JIT precompilation. Deoptimization
        // requires code patching and we cannot patch code that is shared
        // between isolates and should not mutate memory allocated by the
        // embedder.
        // NOTE(review): this decodes PtrOffBits -- the pointer-offsets count
        // already checked above -- not an "optimized" bit; presumably a
        // copy-paste error and an optimized-bit decode was intended. Confirm
        // against RawCode's state_bits_ layout.
        bool is_optimized = Code::PtrOffBits::decode(code->ptr()->state_bits_);
        if (is_optimized) {
          FATAL("Cannot include optimized code in a JIT snapshot");
        }
      }

      // Instructions live in the text image; record only the offset.
      RawInstructions* instr = code->ptr()->instructions_;
      int32_t text_offset = s->GetTextOffset(instr, code);
      s->Write<int32_t>(text_offset);

      s->WriteRef(code->ptr()->object_pool_);
      s->WriteRef(code->ptr()->owner_);
      s->WriteRef(code->ptr()->exception_handlers_);
      s->WriteRef(code->ptr()->pc_descriptors_);
      s->WriteRef(code->ptr()->stackmaps_);

      s->Write<int32_t>(code->ptr()->state_bits_);
    }
  }

 private:
  GrowableArray<RawCode*> objects_;
};
| 1474 | |
// Recreates Code objects from a clustered snapshot, binding each to its
// instructions in the shared text image.
class CodeDeserializationCluster : public DeserializationCluster {
 public:
  CodeDeserializationCluster() { }
  virtual ~CodeDeserializationCluster() { }

  // Allocates one uninitialized Code per serialized object. Size 0: the
  // serialized form carries no embedded pointer-offset array.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space, Code::InstanceSize(0)));
    }
    stop_index_ = d->next_index();
  }

  // Initializes each code's header, resolves its instructions from the text
  // offset, reads back the serialized metadata refs, and resets all
  // JIT-only state that is not part of the snapshot.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawCode* code = reinterpret_cast<RawCode*>(d->Ref(id));
      Deserializer::InitializeHeader(code, kCodeCid,
                                     Code::InstanceSize(0), is_vm_object);

      // Re-tag the untagged instructions address from the text image.
      int32_t text_offset = d->Read<int32_t>();
      RawInstructions* instr = reinterpret_cast<RawInstructions*>(
          d->GetInstructionsAt(text_offset) + kHeapObjectTag);
      uword entry_point = Instructions::EntryPoint(instr);

      code->ptr()->entry_point_ = entry_point;
      code->ptr()->active_instructions_ = instr;
      code->ptr()->instructions_ = instr;
      // Refs in the same order CodeSerializationCluster::WriteFill wrote them.
      code->ptr()->object_pool_ =
          reinterpret_cast<RawObjectPool*>(d->ReadRef());
      code->ptr()->owner_ = d->ReadRef();
      code->ptr()->exception_handlers_ =
          reinterpret_cast<RawExceptionHandlers*>(d->ReadRef());
      code->ptr()->pc_descriptors_ =
          reinterpret_cast<RawPcDescriptors*>(d->ReadRef());
      code->ptr()->stackmaps_ =
          reinterpret_cast<RawArray*>(d->ReadRef());

      // Not serialized: cleared to their empty/null defaults.
      code->ptr()->deopt_info_array_ = Array::null();
      code->ptr()->static_calls_target_table_ = Array::null();
      code->ptr()->var_descriptors_ = LocalVarDescriptors::null();
      code->ptr()->inlined_metadata_ = Array::null();
      code->ptr()->code_source_map_ = CodeSourceMap::null();
      code->ptr()->comments_ = Array::null();
      code->ptr()->return_address_metadata_ = Object::null();

      code->ptr()->compile_timestamp_ = 0;
      code->ptr()->state_bits_ = d->Read<int32_t>();
      code->ptr()->lazy_deopt_pc_offset_ = -1;
    }
  }
};
| 1530 | |
| 1531 class ObjectPoolSerializationCluster : public SerializationCluster { | |
| 1532 public: | |
| 1533 ObjectPoolSerializationCluster() { } | |
| 1534 virtual ~ObjectPoolSerializationCluster() { } | |
| 1535 | |
| 1536 void Trace(Serializer* s, RawObject* object) { | |
| 1537 RawObjectPool* pool = ObjectPool::RawCast(object); | |
| 1538 objects_.Add(pool); | |
| 1539 | |
| 1540 intptr_t length = pool->ptr()->length_; | |
| 1541 RawTypedData* info_array = pool->ptr()->info_array_; | |
| 1542 | |
| 1543 for (intptr_t i = 0; i < length; i++) { | |
| 1544 ObjectPool::EntryType entry_type = | |
| 1545 static_cast<ObjectPool::EntryType>(info_array->ptr()->data()[i]); | |
| 1546 if (entry_type == ObjectPool::kTaggedObject) { | |
| 1547 s->Push(pool->ptr()->data()[i].raw_obj_); | |
| 1548 } | |
| 1549 } | |
| 1550 | |
| 1551 // We are going to allocate the object pool and its info array together, | |
| 1552 // so steal a slot in the refs array to hold it between alloc and fill. | |
| 1553 // s->NoteUntracedRef(); | |
|
siva
2016/06/30 00:06:08
Code commented out, can it be deleted?
rmacnak
2016/06/30 01:39:21
-> TODO
| |
| 1554 } | |
| 1555 | |
| 1556 void WriteAlloc(Serializer* s) { | |
| 1557 s->WriteCid(kObjectPoolCid); | |
| 1558 intptr_t count = objects_.length(); | |
| 1559 s->Write<intptr_t>(count); | |
| 1560 for (intptr_t i = 0; i < count; i++) { | |
| 1561 RawObjectPool* pool = objects_[i]; | |
| 1562 /// RawTypedData* info_array = pool->ptr()->info_array_; | |
| 1563 intptr_t length = pool->ptr()->length_; | |
| 1564 s->Write<intptr_t>(length); | |
| 1565 /// s->AssignRefNotTraced(info_array); | |
| 1566 s->AssignRef(pool); | |
| 1567 } | |
| 1568 } | |
| 1569 | |
| 1570 void WriteFill(Serializer* s) { | |
| 1571 intptr_t count = objects_.length(); | |
| 1572 for (intptr_t i = 0; i < count; i++) { | |
| 1573 RawObjectPool* pool = objects_[i]; | |
| 1574 RawTypedData* info_array = pool->ptr()->info_array_; | |
| 1575 intptr_t length = pool->ptr()->length_; | |
| 1576 s->Write<intptr_t>(length); | |
| 1577 for (intptr_t j = 0; j < length; j++) { | |
| 1578 ObjectPool::EntryType entry_type = | |
| 1579 static_cast<ObjectPool::EntryType>(info_array->ptr()->data()[j]); | |
| 1580 s->Write<int8_t>(entry_type); | |
| 1581 RawObjectPool::Entry& entry = pool->ptr()->data()[j]; | |
| 1582 switch (entry_type) { | |
| 1583 case ObjectPool::kTaggedObject: { | |
| 1584 #if !defined(TARGET_ARCH_DBC) | |
| 1585 if (entry.raw_obj_ == | |
| 1586 StubCode::CallNativeCFunction_entry()->code()) { | |
| 1587 // Natives can run while precompiling, becoming linked and | |
| 1588 // switching their stub. Reset to the initial stub used for | |
| 1589 // lazy-linking. | |
| 1590 s->WriteRef(StubCode::CallBootstrapCFunction_entry()->code()); | |
| 1591 break; | |
| 1592 } | |
| 1593 #endif | |
| 1594 s->WriteRef(entry.raw_obj_); | |
| 1595 break; | |
| 1596 } | |
| 1597 case ObjectPool::kImmediate: { | |
| 1598 s->Write<intptr_t>(entry.raw_value_); | |
| 1599 break; | |
| 1600 } | |
| 1601 case ObjectPool::kNativeEntry: { | |
| 1602 // Write nothing. Will initialize with the lazy link entry. | |
| 1603 #if defined(TARGET_ARCH_DBC) | |
| 1604 UNREACHABLE(); // DBC does not support lazy native call linking. | |
| 1605 #endif | |
| 1606 break; | |
| 1607 } | |
| 1608 default: | |
| 1609 UNREACHABLE(); | |
| 1610 } | |
| 1611 } | |
| 1612 } | |
| 1613 } | |
| 1614 | |
| 1615 private: | |
| 1616 GrowableArray<RawObjectPool*> objects_; | |
| 1617 }; | |
| 1618 | |
| 1619 | |
| 1620 class ObjectPoolDeserializationCluster : public DeserializationCluster { | |
| 1621 public: | |
| 1622 ObjectPoolDeserializationCluster() { } | |
| 1623 virtual ~ObjectPoolDeserializationCluster() { } | |
| 1624 | |
| 1625 void ReadAlloc(Deserializer* d) { | |
| 1626 start_index_ = d->next_index(); | |
| 1627 PageSpace* old_space = d->heap()->old_space(); | |
| 1628 intptr_t count = d->Read<intptr_t>(); | |
| 1629 for (intptr_t i = 0; i < count; i++) { | |
| 1630 intptr_t length = d->Read<intptr_t>(); | |
| 1631 /// d->AssignRef(AllocateUninitialized(old_space, | |
| 1632 /// TypedData::InstanceSize(length))); | |
| 1633 d->AssignRef(AllocateUninitialized(old_space, | |
| 1634 ObjectPool::InstanceSize(length))); | |
| 1635 } | |
| 1636 stop_index_ = d->next_index(); | |
| 1637 } | |
| 1638 | |
| 1639 void ReadFill(Deserializer* d) { | |
| 1640 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 1641 PageSpace* old_space = d->heap()->old_space(); | |
| 1642 for (intptr_t id = start_index_; id < stop_index_; id += 1) { | |
| 1643 intptr_t length = d->Read<intptr_t>(); | |
| 1644 /// RawTypedData* info_array = | |
| 1645 /// reinterpret_cast<RawTypedData*>(d->Ref(id)); | |
| 1646 RawTypedData* info_array = reinterpret_cast<RawTypedData*>( | |
| 1647 AllocateUninitialized(old_space, TypedData::InstanceSize(length))); | |
| 1648 Deserializer::InitializeHeader(info_array, kTypedDataUint8ArrayCid, | |
| 1649 TypedData::InstanceSize(length), | |
| 1650 is_vm_object); | |
| 1651 info_array->ptr()->length_ = Smi::New(length); | |
| 1652 RawObjectPool* pool = reinterpret_cast<RawObjectPool*>(d->Ref(id + 0)); | |
| 1653 Deserializer::InitializeHeader(pool, kObjectPoolCid, | |
| 1654 ObjectPool::InstanceSize(length), | |
| 1655 is_vm_object); | |
| 1656 pool->ptr()->length_ = length; | |
| 1657 pool->ptr()->info_array_ = info_array; | |
| 1658 for (intptr_t j = 0; j < length; j++) { | |
| 1659 ObjectPool::EntryType entry_type = | |
| 1660 static_cast<ObjectPool::EntryType>(d->Read<int8_t>()); | |
| 1661 info_array->ptr()->data()[j] = entry_type; | |
| 1662 RawObjectPool::Entry& entry = pool->ptr()->data()[j]; | |
| 1663 switch (entry_type) { | |
| 1664 case ObjectPool::kTaggedObject: | |
| 1665 entry.raw_obj_ = d->ReadRef(); | |
| 1666 break; | |
| 1667 case ObjectPool::kImmediate: | |
| 1668 entry.raw_value_ = d->Read<intptr_t>(); | |
| 1669 break; | |
| 1670 case ObjectPool::kNativeEntry: { | |
| 1671 #if !defined(TARGET_ARCH_DBC) | |
| 1672 // Read nothing. Initialize with the lazy link entry. | |
| 1673 uword new_entry = NativeEntry::LinkNativeCallEntry(); | |
| 1674 entry.raw_value_ = static_cast<intptr_t>(new_entry); | |
| 1675 #else | |
| 1676 UNREACHABLE(); // DBC does not support lazy native call linking. | |
| 1677 #endif | |
| 1678 break; | |
| 1679 } | |
| 1680 default: | |
| 1681 UNREACHABLE(); | |
| 1682 } | |
| 1683 } | |
| 1684 } | |
| 1685 } | |
| 1686 }; | |
| 1687 | |
| 1688 | |
// PcDescriptor, Stackmap, OneByteString, TwoByteString
//
// Serializes objects that live in the snapshot's read-only data section:
// only a 32-bit offset into that section is written per object, not the
// object contents (hence the no-op WriteFill).
class RODataSerializationCluster : public SerializationCluster {
 public:
  explicit RODataSerializationCluster(intptr_t cid) : cid_(cid) { }
  virtual ~RODataSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    objects_.Add(object);

    // A string's hash must already be computed when we write it because it
    // will be loaded into read-only memory.  Note this mutates the traced
    // object (fills in a lazily-computed hash) as a side effect.
    if (cid_ == kOneByteStringCid) {
      RawOneByteString* str = static_cast<RawOneByteString*>(object);
      if (str->ptr()->hash_ == Smi::New(0)) {
        intptr_t hash = String::Hash(str->ptr()->data(),
                                     Smi::Value(str->ptr()->length_));
        str->ptr()->hash_ = Smi::New(hash);
      }
      ASSERT(str->ptr()->hash_ != Smi::New(0));
    } else if (cid_ == kTwoByteStringCid) {
      RawTwoByteString* str = static_cast<RawTwoByteString*>(object);
      if (str->ptr()->hash_ == Smi::New(0)) {
        // NOTE(review): length is scaled by 2 here — presumably this
        // String::Hash overload takes a byte length for two-byte data;
        // confirm against String::Hash's declaration.
        intptr_t hash = String::Hash(str->ptr()->data(),
                                     Smi::Value(str->ptr()->length_) * 2);
        str->ptr()->hash_ = Smi::New(hash);
      }
      ASSERT(str->ptr()->hash_ != Smi::New(0));
    }
  }

  void WriteAlloc(Serializer* s) {
    s->WriteCid(cid_);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawObject* object = objects_[i];
      // Each object is identified solely by its offset in the RO section.
      int32_t rodata_offset = s->GetRODataOffset(object);
      s->Write<int32_t>(rodata_offset);
      s->AssignRef(object);
    }
  }

  void WriteFill(Serializer* s) {
    // No-op: contents are in the read-only data section, not the stream.
  }

 private:
  const intptr_t cid_;  // Class id this cluster serializes.
  GrowableArray<RawObject*> objects_;
};
| 1739 | |
| 1740 | |
| 1741 class RODataDeserializationCluster : public DeserializationCluster { | |
| 1742 public: | |
| 1743 RODataDeserializationCluster() { } | |
| 1744 virtual ~RODataDeserializationCluster() { } | |
| 1745 | |
| 1746 void ReadAlloc(Deserializer* d) { | |
| 1747 intptr_t count = d->Read<intptr_t>(); | |
| 1748 for (intptr_t i = 0; i < count; i++) { | |
| 1749 int32_t rodata_offset = d->Read<int32_t>(); | |
| 1750 d->AssignRef(d->GetObjectAt(rodata_offset)); | |
| 1751 } | |
| 1752 } | |
| 1753 | |
| 1754 void ReadFill(Deserializer* d) { | |
| 1755 // No-op. | |
| 1756 } | |
| 1757 }; | |
| 1758 | |
| 1759 | |
| 1760 class LocalVarDescriptorsSerializationCluster : public SerializationCluster { | |
| 1761 public: | |
| 1762 LocalVarDescriptorsSerializationCluster() { } | |
| 1763 virtual ~LocalVarDescriptorsSerializationCluster() { } | |
| 1764 | |
| 1765 void Trace(Serializer* s, RawObject* object) { UNIMPLEMENTED(); } | |
| 1766 void WriteAlloc(Serializer* s) {} | |
| 1767 void WriteFill(Serializer* s) {} | |
| 1768 | |
| 1769 private: | |
| 1770 GrowableArray<RawClass*> objects_; | |
| 1771 }; | |
| 1772 | |
| 1773 | |
// Serializes ExceptionHandlers: one ref (the handled-types array) plus the
// handler-info records copied as raw bytes.
class ExceptionHandlersSerializationCluster : public SerializationCluster {
 public:
  ExceptionHandlersSerializationCluster() { }
  virtual ~ExceptionHandlersSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    RawExceptionHandlers* handlers = ExceptionHandlers::RawCast(object);
    objects_.Add(handlers);

    // The handled-types array is the only outgoing reference.
    s->Push(handlers->ptr()->handled_types_data_);
  }

  void WriteAlloc(Serializer* s) {
    s->WriteCid(kExceptionHandlersCid);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawExceptionHandlers* handlers = objects_[i];
      // Length is written in the alloc phase so the deserializer can size
      // the allocation before any contents are read.
      intptr_t length = handlers->ptr()->num_entries_;
      s->Write<intptr_t>(length);
      s->AssignRef(handlers);
    }
  }

  void WriteFill(Serializer* s) {
    intptr_t count = objects_.length();
    for (intptr_t i = 0; i < count; i++) {
      RawExceptionHandlers* handlers = objects_[i];
      // Length is repeated in the fill phase so ReadFill is self-contained.
      intptr_t length = handlers->ptr()->num_entries_;
      s->Write<intptr_t>(length);
      s->WriteRef(handlers->ptr()->handled_types_data_);

      // HandlerInfo records are plain data; copy them byte-for-byte.
      uint8_t* data = reinterpret_cast<uint8_t*>(handlers->ptr()->data());
      intptr_t length_in_bytes =
          length * sizeof(RawExceptionHandlers::HandlerInfo);
      s->WriteBytes(data, length_in_bytes);
    }
  }

 private:
  GrowableArray<RawExceptionHandlers*> objects_;
};
| 1816 | |
// Counterpart of ExceptionHandlersSerializationCluster.
class ExceptionHandlersDeserializationCluster : public DeserializationCluster {
 public:
  ExceptionHandlersDeserializationCluster() { }
  virtual ~ExceptionHandlersDeserializationCluster() { }

  // Allocates uninitialized storage sized from the lengths in the stream.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      intptr_t length = d->Read<intptr_t>();
      d->AssignRef(AllocateUninitialized(old_space,
                                         ExceptionHandlers::InstanceSize(length)));
    }
    stop_index_ = d->next_index();
  }

  // Fills headers, the handled-types ref, and the raw HandlerInfo bytes.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawExceptionHandlers* handlers =
          reinterpret_cast<RawExceptionHandlers*>(d->Ref(id));
      intptr_t length = d->Read<intptr_t>();
      Deserializer::InitializeHeader(handlers, kExceptionHandlersCid,
                                     ExceptionHandlers::InstanceSize(length),
                                     is_vm_object);
      handlers->ptr()->num_entries_ = length;
      handlers->ptr()->handled_types_data_ =
          reinterpret_cast<RawArray*>(d->ReadRef());

      // HandlerInfo records were written as raw bytes; read them back as-is.
      uint8_t* data = reinterpret_cast<uint8_t*>(handlers->ptr()->data());
      intptr_t length_in_bytes =
          length * sizeof(RawExceptionHandlers::HandlerInfo);
      d->ReadBytes(data, length_in_bytes);
    }
  }
};
| 1855 | |
| 1856 class ContextSerializationCluster : public SerializationCluster { | |
| 1857 public: | |
| 1858 ContextSerializationCluster() { } | |
| 1859 virtual ~ContextSerializationCluster() { } | |
| 1860 | |
| 1861 void Trace(Serializer* s, RawObject* object) { | |
| 1862 RawContext* context = Context::RawCast(object); | |
| 1863 objects_.Add(context); | |
| 1864 | |
| 1865 s->Push(context->ptr()->parent_); | |
| 1866 intptr_t length = context->ptr()->num_variables_; | |
| 1867 for (intptr_t i = 0; i < length; i++) { | |
| 1868 s->Push(context->ptr()->data()[i]); | |
| 1869 } | |
| 1870 } | |
| 1871 | |
| 1872 void WriteAlloc(Serializer* s) { | |
| 1873 s->WriteCid(kContextCid); | |
| 1874 intptr_t count = objects_.length(); | |
| 1875 s->Write<intptr_t>(count); | |
| 1876 for (intptr_t i = 0; i < count; i++) { | |
| 1877 RawContext* context = objects_[i]; | |
| 1878 intptr_t length = context->ptr()->num_variables_; | |
| 1879 s->Write<intptr_t>(length); | |
| 1880 s->AssignRef(context); | |
| 1881 } | |
| 1882 } | |
| 1883 | |
| 1884 void WriteFill(Serializer* s) { | |
| 1885 intptr_t count = objects_.length(); | |
| 1886 for (intptr_t i = 0; i < count; i++) { | |
| 1887 RawContext* context = objects_[i]; | |
| 1888 intptr_t length = context->ptr()->num_variables_; | |
| 1889 s->Write<intptr_t>(length); | |
| 1890 s->WriteRef(context->ptr()->parent_); | |
| 1891 for (intptr_t j = 0; j < length; j++) { | |
| 1892 s->WriteRef(context->ptr()->data()[j]); | |
| 1893 } | |
| 1894 } | |
| 1895 } | |
| 1896 | |
| 1897 private: | |
| 1898 GrowableArray<RawContext*> objects_; | |
| 1899 }; | |
| 1900 | |
// Counterpart of ContextSerializationCluster.
class ContextDeserializationCluster : public DeserializationCluster {
 public:
  ContextDeserializationCluster() { }
  virtual ~ContextDeserializationCluster() { }

  // Allocates uninitialized storage sized from the lengths in the stream.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      intptr_t length = d->Read<intptr_t>();
      d->AssignRef(AllocateUninitialized(old_space,
                                         Context::InstanceSize(length)));
    }
    stop_index_ = d->next_index();
  }

  // Fills each context: header, length, parent ref, then the slot refs.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawContext* context = reinterpret_cast<RawContext*>(d->Ref(id));
      intptr_t length = d->Read<intptr_t>();
      Deserializer::InitializeHeader(context, kContextCid,
                                     Context::InstanceSize(length),
                                     is_vm_object);
      context->ptr()->num_variables_ = length;
      context->ptr()->parent_ = reinterpret_cast<RawContext*>(d->ReadRef());
      for (intptr_t j = 0; j < length; j++) {
        context->ptr()->data()[j] = d->ReadRef();
      }
    }
  }
};
| 1935 | |
// Serializes ContextScope objects: the full from()/to(length) pointer span
// plus the non-pointer is_implicit_ flag.
class ContextScopeSerializationCluster : public SerializationCluster {
 public:
  ContextScopeSerializationCluster() { }
  virtual ~ContextScopeSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    RawContextScope* scope = ContextScope::RawCast(object);
    objects_.Add(scope);

    // Push every object field in the variable-length pointer span.
    intptr_t length = scope->ptr()->num_variables_;
    RawObject** from = scope->from();
    RawObject** to = scope->to(length);
    for (RawObject** p = from; p <= to; p++) {
      s->Push(*p);
    }
  }

  void WriteAlloc(Serializer* s) {
    s->WriteCid(kContextScopeCid);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawContextScope* scope = objects_[i];
      // Length first, so the reader can size the allocation.
      intptr_t length = scope->ptr()->num_variables_;
      s->Write<intptr_t>(length);
      s->AssignRef(scope);
    }
  }

  void WriteFill(Serializer* s) {
    intptr_t count = objects_.length();
    for (intptr_t i = 0; i < count; i++) {
      RawContextScope* scope = objects_[i];
      intptr_t length = scope->ptr()->num_variables_;
      s->Write<intptr_t>(length);
      // is_implicit_ is not an object reference, so it is written inline.
      s->Write<bool>(scope->ptr()->is_implicit_);
      RawObject** from = scope->from();
      RawObject** to = scope->to(length);
      for (RawObject** p = from; p <= to; p++) {
        s->WriteRef(*p);
      }
    }
  }

 private:
  GrowableArray<RawContextScope*> objects_;
};
| 1983 | |
// Counterpart of ContextScopeSerializationCluster.
class ContextScopeDeserializationCluster : public DeserializationCluster {
 public:
  ContextScopeDeserializationCluster() { }
  virtual ~ContextScopeDeserializationCluster() { }

  // Allocates uninitialized storage sized from the lengths in the stream.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      intptr_t length = d->Read<intptr_t>();
      d->AssignRef(AllocateUninitialized(old_space,
                                         ContextScope::InstanceSize(length)));
    }
    stop_index_ = d->next_index();
  }

  // Fills the header, flag, and the variable-length pointer span; read
  // order must match ContextScopeSerializationCluster::WriteFill exactly.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawContextScope* scope = reinterpret_cast<RawContextScope*>(d->Ref(id));
      intptr_t length = d->Read<intptr_t>();
      Deserializer::InitializeHeader(scope, kContextScopeCid,
                                     ContextScope::InstanceSize(length),
                                     is_vm_object);
      scope->ptr()->num_variables_ = length;
      scope->ptr()->is_implicit_ = d->Read<bool>();
      RawObject** from = scope->from();
      RawObject** to = scope->to(length);
      for (RawObject** p = from; p <= to; p++) {
        *p = d->ReadRef();
      }
    }
  }
};
| 2020 | |
| 2021 | |
// Serializes ICData objects: the snapshot-relevant pointer span (which
// varies with snapshot kind), then the deopt id, state bits, and the
// optional debugging tag.
class ICDataSerializationCluster : public SerializationCluster {
 public:
  ICDataSerializationCluster() { }
  virtual ~ICDataSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    RawICData* ic = ICData::RawCast(object);
    objects_.Add(ic);

    // to_snapshot(kind) limits the span to fields kept in this snapshot.
    RawObject** from = ic->from();
    RawObject** to = ic->to_snapshot(s->kind());
    for (RawObject** p = from; p <= to; p++) {
      s->Push(*p);
    }
  }

  void WriteAlloc(Serializer* s) {
    s->WriteCid(kICDataCid);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawICData* ic = objects_[i];
      s->AssignRef(ic);
    }
  }

  void WriteFill(Serializer* s) {
    Snapshot::Kind kind = s->kind();
    intptr_t count = objects_.length();
    for (intptr_t i = 0; i < count; i++) {
      RawICData* ic = objects_[i];
      RawObject** from = ic->from();
      RawObject** to = ic->to_snapshot(kind);
      for (RawObject** p = from; p <= to; p++) {
        s->WriteRef(*p);
      }
      s->Write<int32_t>(ic->ptr()->deopt_id_);
      // Note: state bits are written unsigned; the reader must match.
      s->Write<uint32_t>(ic->ptr()->state_bits_);
#if defined(TAG_IC_DATA)
      s->Write<intptr_t>(ic->ptr()->tag_);
#endif
    }
  }

 private:
  GrowableArray<RawICData*> objects_;
};
| 2069 | |
| 2070 class ICDataDeserializationCluster : public DeserializationCluster { | |
| 2071 public: | |
| 2072 ICDataDeserializationCluster() { } | |
| 2073 virtual ~ICDataDeserializationCluster() { } | |
| 2074 | |
| 2075 void ReadAlloc(Deserializer* d) { | |
| 2076 start_index_ = d->next_index(); | |
| 2077 PageSpace* old_space = d->heap()->old_space(); | |
| 2078 intptr_t count = d->Read<intptr_t>(); | |
| 2079 for (intptr_t i = 0; i < count; i++) { | |
| 2080 d->AssignRef(AllocateUninitialized(old_space, ICData::InstanceSize())); | |
| 2081 } | |
| 2082 stop_index_ = d->next_index(); | |
| 2083 } | |
| 2084 | |
| 2085 void ReadFill(Deserializer* d) { | |
| 2086 Snapshot::Kind kind = d->kind(); | |
| 2087 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 2088 | |
| 2089 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 2090 RawICData* ic = reinterpret_cast<RawICData*>(d->Ref(id)); | |
| 2091 Deserializer::InitializeHeader(ic, kICDataCid, | |
| 2092 ICData::InstanceSize(), is_vm_object); | |
| 2093 RawObject** from = ic->from(); | |
| 2094 RawObject** to_snapshot = ic->to_snapshot(kind); | |
| 2095 RawObject** to = ic->to(); | |
| 2096 for (RawObject** p = from; p <= to_snapshot; p++) { | |
| 2097 *p = d->ReadRef(); | |
| 2098 } | |
| 2099 for (RawObject** p = to_snapshot + 1; p <= to; p++) { | |
| 2100 *p = Object::null(); | |
| 2101 } | |
| 2102 ic->ptr()->deopt_id_ = d->Read<int32_t>(); | |
| 2103 ic->ptr()->state_bits_ = d->Read<int32_t>(); | |
| 2104 #if defined(TAG_IC_DATA) | |
| 2105 ic->ptr()->tag_ = d->Read<intptr_t>(); | |
| 2106 #endif | |
| 2107 } | |
| 2108 } | |
| 2109 | |
| 2110 void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { | |
| 2111 NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(), | |
| 2112 Timeline::GetIsolateStream(), "PostLoadICData")); | |
| 2113 | |
| 2114 if (kind == Snapshot::kAppNoJIT) { | |
| 2115 ICData& ic = ICData::Handle(zone); | |
| 2116 Object& funcOrCode = Object::Handle(zone); | |
| 2117 Code& code = Code::Handle(zone); | |
| 2118 Smi& entry_point = Smi::Handle(zone); | |
| 2119 for (intptr_t i = start_index_; i < stop_index_; i++) { | |
| 2120 ic ^= refs.At(i); | |
| 2121 for (intptr_t j = 0; j < ic.NumberOfChecks(); j++) { | |
| 2122 funcOrCode = ic.GetTargetOrCodeAt(j); | |
| 2123 if (funcOrCode.IsCode()) { | |
| 2124 code ^= funcOrCode.raw(); | |
| 2125 entry_point = Smi::FromAlignedAddress(code.EntryPoint()); | |
| 2126 ic.SetEntryPointAt(j, entry_point); | |
| 2127 } | |
| 2128 } | |
| 2129 } | |
| 2130 } | |
| 2131 } | |
| 2132 }; | |
| 2133 | |
| 2134 class MegamorphicCacheSerializationCluster : public SerializationCluster { | |
| 2135 public: | |
| 2136 MegamorphicCacheSerializationCluster() { } | |
| 2137 virtual ~MegamorphicCacheSerializationCluster() { } | |
| 2138 | |
| 2139 void Trace(Serializer* s, RawObject* object) { | |
| 2140 RawMegamorphicCache* cache = MegamorphicCache::RawCast(object); | |
| 2141 objects_.Add(cache); | |
| 2142 | |
| 2143 RawObject** from = cache->from(); | |
| 2144 RawObject** to = cache->to(); | |
| 2145 for (RawObject** p = from; p <= to; p++) { | |
| 2146 s->Push(*p); | |
| 2147 } | |
| 2148 } | |
| 2149 | |
| 2150 void WriteAlloc(Serializer* s) { | |
| 2151 s->WriteCid(kMegamorphicCacheCid); | |
| 2152 intptr_t count = objects_.length(); | |
| 2153 s->Write<intptr_t>(count); | |
| 2154 for (intptr_t i = 0; i < count; i++) { | |
| 2155 RawMegamorphicCache* cache = objects_[i]; | |
| 2156 s->AssignRef(cache); | |
| 2157 } | |
| 2158 } | |
| 2159 | |
| 2160 void WriteFill(Serializer* s) { | |
| 2161 intptr_t count = objects_.length(); | |
| 2162 for (intptr_t i = 0; i < count; i++) { | |
| 2163 RawMegamorphicCache* cache = objects_[i]; | |
| 2164 RawObject** from = cache->from(); | |
| 2165 RawObject** to = cache->to(); | |
| 2166 for (RawObject** p = from; p <= to; p++) { | |
| 2167 s->WriteRef(*p); | |
| 2168 } | |
| 2169 s->Write<int32_t>(cache->ptr()->filled_entry_count_); | |
| 2170 } | |
| 2171 } | |
| 2172 | |
| 2173 private: | |
| 2174 GrowableArray<RawMegamorphicCache*> objects_; | |
| 2175 }; | |
| 2176 | |
| 2177 | |
// Counterpart of MegamorphicCacheSerializationCluster.
class MegamorphicCacheDeserializationCluster : public DeserializationCluster {
 public:
  MegamorphicCacheDeserializationCluster() { }
  virtual ~MegamorphicCacheDeserializationCluster() { }

  // MegamorphicCache has a fixed size, so no per-object length is read.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space,
                                         MegamorphicCache::InstanceSize()));
    }
    stop_index_ = d->next_index();
  }

  // Fills the header, pointer span, and the filled-entry counter.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawMegamorphicCache* cache =
          reinterpret_cast<RawMegamorphicCache*>(d->Ref(id));
      Deserializer::InitializeHeader(cache, kMegamorphicCacheCid,
                                     MegamorphicCache::InstanceSize(),
                                     is_vm_object);
      RawObject** from = cache->from();
      RawObject** to = cache->to();
      for (RawObject** p = from; p <= to; p++) {
        *p = d->ReadRef();
      }
      cache->ptr()->filled_entry_count_ = d->Read<int32_t>();
    }
  }
};
| 2212 | |
| 2213 | |
| 2214 class SubtypeTestCacheSerializationCluster : public SerializationCluster { | |
| 2215 public: | |
| 2216 SubtypeTestCacheSerializationCluster() { } | |
| 2217 virtual ~SubtypeTestCacheSerializationCluster() { } | |
| 2218 | |
| 2219 void Trace(Serializer* s, RawObject* object) { | |
| 2220 RawSubtypeTestCache* cache = SubtypeTestCache::RawCast(object); | |
| 2221 objects_.Add(cache); | |
| 2222 s->Push(cache->ptr()->cache_); | |
| 2223 } | |
| 2224 | |
| 2225 void WriteAlloc(Serializer* s) { | |
| 2226 s->WriteCid(kSubtypeTestCacheCid); | |
| 2227 intptr_t count = objects_.length(); | |
| 2228 s->Write<intptr_t>(count); | |
| 2229 for (intptr_t i = 0; i < count; i++) { | |
| 2230 RawSubtypeTestCache* cache = objects_[i]; | |
| 2231 s->AssignRef(cache); | |
| 2232 } | |
| 2233 } | |
| 2234 | |
| 2235 void WriteFill(Serializer* s) { | |
| 2236 intptr_t count = objects_.length(); | |
| 2237 for (intptr_t i = 0; i < count; i++) { | |
| 2238 RawSubtypeTestCache* cache = objects_[i]; | |
| 2239 s->WriteRef(cache->ptr()->cache_); | |
| 2240 } | |
| 2241 } | |
| 2242 | |
| 2243 private: | |
| 2244 GrowableArray<RawSubtypeTestCache*> objects_; | |
| 2245 }; | |
| 2246 | |
// Counterpart of SubtypeTestCacheSerializationCluster.
class SubtypeTestCacheDeserializationCluster : public DeserializationCluster {
 public:
  SubtypeTestCacheDeserializationCluster() { }
  virtual ~SubtypeTestCacheDeserializationCluster() { }

  // SubtypeTestCache has a fixed size, so no per-object length is read.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space,
                                         SubtypeTestCache::InstanceSize()));
    }
    stop_index_ = d->next_index();
  }

  // Fills the header and the single backing-array reference.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawSubtypeTestCache* cache =
          reinterpret_cast<RawSubtypeTestCache*>(d->Ref(id));
      Deserializer::InitializeHeader(cache, kSubtypeTestCacheCid,
                                     SubtypeTestCache::InstanceSize(),
                                     is_vm_object);
      cache->ptr()->cache_ = reinterpret_cast<RawArray*>(d->ReadRef());
    }
  }
};
| 2276 | |
// Serializes LanguageError objects: the pointer span plus three inline
// scalar fields (token position, report flag, error kind).
class LanguageErrorSerializationCluster : public SerializationCluster {
 public:
  LanguageErrorSerializationCluster() { }
  virtual ~LanguageErrorSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    RawLanguageError* error = LanguageError::RawCast(object);
    objects_.Add(error);

    RawObject** from = error->from();
    RawObject** to = error->to();
    for (RawObject** p = from; p <= to; p++) {
      s->Push(*p);
    }
  }

  void WriteAlloc(Serializer* s) {
    s->WriteCid(kLanguageErrorCid);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawLanguageError* error = objects_[i];
      s->AssignRef(error);
    }
  }

  void WriteFill(Serializer* s) {
    intptr_t count = objects_.length();
    for (intptr_t i = 0; i < count; i++) {
      RawLanguageError* error = objects_[i];
      RawObject** from = error->from();
      RawObject** to = error->to();
      for (RawObject** p = from; p <= to; p++) {
        s->WriteRef(*p);
      }
      // Scalar fields follow the refs; read order must mirror this.
      s->WriteTokenPosition(error->ptr()->token_pos_);
      s->Write<bool>(error->ptr()->report_after_token_);
      s->Write<int8_t>(error->ptr()->kind_);
    }
  }

 private:
  GrowableArray<RawLanguageError*> objects_;
};
| 2321 | |
// Counterpart of LanguageErrorSerializationCluster.
class LanguageErrorDeserializationCluster : public DeserializationCluster {
 public:
  LanguageErrorDeserializationCluster() { }
  virtual ~LanguageErrorDeserializationCluster() { }

  // LanguageError has a fixed size, so no per-object length is read.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space,
                                         LanguageError::InstanceSize()));
    }
    stop_index_ = d->next_index();
  }

  // Fills the header, pointer span, and the three scalar fields, in the
  // exact order the serializer wrote them.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawLanguageError* error = reinterpret_cast<RawLanguageError*>(d->Ref(id));
      Deserializer::InitializeHeader(error, kLanguageErrorCid,
                                     LanguageError::InstanceSize(),
                                     is_vm_object);
      RawObject** from = error->from();
      RawObject** to = error->to();
      for (RawObject** p = from; p <= to; p++) {
        *p = d->ReadRef();
      }
      error->ptr()->token_pos_ = d->ReadTokenPosition();
      error->ptr()->report_after_token_ = d->Read<bool>();
      error->ptr()->kind_ = d->Read<int8_t>();
    }
  }
};
| 2357 | |
| 2358 class UnhandledExceptionSerializationCluster : public SerializationCluster { | |
| 2359 public: | |
| 2360 UnhandledExceptionSerializationCluster() { } | |
| 2361 virtual ~UnhandledExceptionSerializationCluster() { } | |
| 2362 | |
| 2363 void Trace(Serializer* s, RawObject* object) { | |
| 2364 RawUnhandledException* exception = UnhandledException::RawCast(object); | |
| 2365 objects_.Add(exception); | |
| 2366 | |
| 2367 RawObject** from = exception->from(); | |
| 2368 RawObject** to = exception->to(); | |
| 2369 for (RawObject** p = from; p <= to; p++) { | |
| 2370 s->Push(*p); | |
| 2371 } | |
| 2372 } | |
| 2373 | |
| 2374 void WriteAlloc(Serializer* s) { | |
| 2375 s->WriteCid(kUnhandledExceptionCid); | |
| 2376 intptr_t count = objects_.length(); | |
| 2377 s->Write<intptr_t>(count); | |
| 2378 for (intptr_t i = 0; i < count; i++) { | |
| 2379 RawUnhandledException* exception = objects_[i]; | |
| 2380 s->AssignRef(exception); | |
| 2381 } | |
| 2382 } | |
| 2383 | |
| 2384 void WriteFill(Serializer* s) { | |
| 2385 intptr_t count = objects_.length(); | |
| 2386 for (intptr_t i = 0; i < count; i++) { | |
| 2387 RawUnhandledException* exception = objects_[i]; | |
| 2388 RawObject** from = exception->from(); | |
| 2389 RawObject** to = exception->to(); | |
| 2390 for (RawObject** p = from; p <= to; p++) { | |
| 2391 s->WriteRef(*p); | |
| 2392 } | |
| 2393 } | |
| 2394 } | |
| 2395 | |
| 2396 private: | |
| 2397 GrowableArray<RawUnhandledException*> objects_; | |
| 2398 }; | |
| 2399 | |
// Counterpart of UnhandledExceptionSerializationCluster.
class UnhandledExceptionDeserializationCluster : public DeserializationCluster {
 public:
  UnhandledExceptionDeserializationCluster() { }
  virtual ~UnhandledExceptionDeserializationCluster() { }

  // UnhandledException has a fixed size, so no per-object length is read.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space,
                                         UnhandledException::InstanceSize()));
    }
    stop_index_ = d->next_index();
  }

  // Fills the header and the pointer span; no scalar payload follows.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawUnhandledException* exception =
          reinterpret_cast<RawUnhandledException*>(d->Ref(id));
      Deserializer::InitializeHeader(exception, kUnhandledExceptionCid,
                                     UnhandledException::InstanceSize(),
                                     is_vm_object);
      RawObject** from = exception->from();
      RawObject** to = exception->to();
      for (RawObject** p = from; p <= to; p++) {
        *p = d->ReadRef();
      }
    }
  }
};
| 2433 | |
// Serializes plain Dart instances of one class id by walking their object
// fields word-by-word from Instance::NextFieldOffset() up to the class's
// next_field_offset. The layout constants are captured from the class
// table at construction so the reader can reproduce the allocation size.
class InstanceSerializationCluster : public SerializationCluster {
 public:
  explicit InstanceSerializationCluster(intptr_t cid) : cid_(cid) {
    RawClass* cls = Isolate::Current()->class_table()->At(cid);
    // Convert the word-based field boundary to a byte offset.
    next_field_offset_ =
        cls->ptr()->next_field_offset_in_words_ << kWordSizeLog2;
    instance_size_in_words_ = cls->ptr()->instance_size_in_words_;
    ASSERT(next_field_offset_ > 0);
    ASSERT(instance_size_in_words_ > 0);
  }
  virtual ~InstanceSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    RawInstance* instance = Instance::RawCast(object);
    objects_.Add(instance);

    // Walk each pointer-sized field slot; this assumes every slot in the
    // range holds a tagged object pointer (no unboxed fields).
    intptr_t offset = Instance::NextFieldOffset();
    while (offset < next_field_offset_) {
      RawObject* raw_obj = *reinterpret_cast<RawObject**>(
          reinterpret_cast<uword>(instance->ptr()) + offset);
      s->Push(raw_obj);
      offset += kWordSize;
    }
  }

  void WriteAlloc(Serializer* s) {
    s->Write<intptr_t>(cid_);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);

    // Layout constants let the deserializer size allocations without
    // consulting a (possibly not-yet-loaded) class.
    s->Write<intptr_t>(next_field_offset_);
    s->Write<intptr_t>(instance_size_in_words_);

    for (intptr_t i = 0; i < count; i++) {
      RawInstance* instance = objects_[i];
      s->AssignRef(instance);
    }
  }

  void WriteFill(Serializer* s) {
    intptr_t count = objects_.length();
    for (intptr_t i = 0; i < count; i++) {
      RawInstance* instance = objects_[i];
      // Canonical bit is per-instance, written before the field refs.
      s->Write<bool>(instance->IsCanonical());
      intptr_t offset = Instance::NextFieldOffset();
      while (offset < next_field_offset_) {
        RawObject* raw_obj = *reinterpret_cast<RawObject**>(
            reinterpret_cast<uword>(instance->ptr()) + offset);
        s->WriteRef(raw_obj);
        offset += kWordSize;
      }
    }
  }

 private:
  const intptr_t cid_;             // Class id this cluster serializes.
  intptr_t next_field_offset_;     // Byte offset just past the last field.
  intptr_t instance_size_in_words_;
  GrowableArray<RawInstance*> objects_;
};
| 2494 | |
| 2495 class InstanceDeserializationCluster : public DeserializationCluster { | |
| 2496 public: | |
| 2497 explicit InstanceDeserializationCluster(intptr_t cid) : cid_(cid) { } | |
| 2498 virtual ~InstanceDeserializationCluster() { } | |
| 2499 | |
| 2500 void ReadAlloc(Deserializer* d) { | |
| 2501 start_index_ = d->next_index(); | |
| 2502 PageSpace* old_space = d->heap()->old_space(); | |
| 2503 intptr_t count = d->Read<intptr_t>(); | |
| 2504 next_field_offset_ = d->Read<intptr_t>(); | |
| 2505 instance_size_in_words_ = d->Read<intptr_t>(); | |
| 2506 intptr_t instance_size = | |
| 2507 Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize); | |
| 2508 for (intptr_t i = 0; i < count; i++) { | |
| 2509 d->AssignRef(AllocateUninitialized(old_space, instance_size)); | |
| 2510 } | |
| 2511 stop_index_ = d->next_index(); | |
| 2512 } | |
| 2513 | |
| 2514 void ReadFill(Deserializer* d) { | |
| 2515 intptr_t instance_size = | |
| 2516 Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize); | |
| 2517 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 2518 | |
| 2519 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 2520 RawInstance* instance = reinterpret_cast<RawInstance*>(d->Ref(id)); | |
| 2521 bool is_canonical = d->Read<bool>(); | |
| 2522 Deserializer::InitializeHeader(instance, cid_, | |
| 2523 instance_size, | |
| 2524 is_vm_object, is_canonical); | |
| 2525 intptr_t offset = Instance::NextFieldOffset(); | |
| 2526 while (offset < next_field_offset_) { | |
| 2527 RawObject** p = reinterpret_cast<RawObject**>( | |
| 2528 reinterpret_cast<uword>(instance->ptr()) + offset); | |
| 2529 *p = d->ReadRef(); | |
| 2530 offset += kWordSize; | |
| 2531 } | |
| 2532 if (offset < instance_size) { | |
| 2533 RawObject** p = reinterpret_cast<RawObject**>( | |
| 2534 reinterpret_cast<uword>(instance->ptr()) + offset); | |
| 2535 *p = Object::null(); | |
| 2536 offset += kWordSize; | |
| 2537 } | |
| 2538 ASSERT(offset == instance_size); | |
| 2539 } | |
| 2540 } | |
| 2541 | |
| 2542 private: | |
| 2543 const intptr_t cid_; | |
| 2544 intptr_t next_field_offset_; | |
| 2545 intptr_t instance_size_in_words_; | |
| 2546 }; | |
| 2547 | |
| 2548 class LibraryPrefixSerializationCluster : public SerializationCluster { | |
| 2549 public: | |
| 2550 LibraryPrefixSerializationCluster() { } | |
| 2551 virtual ~LibraryPrefixSerializationCluster() { } | |
| 2552 | |
| 2553 void Trace(Serializer* s, RawObject* object) { | |
| 2554 RawLibraryPrefix* prefix = LibraryPrefix::RawCast(object); | |
| 2555 objects_.Add(prefix); | |
| 2556 | |
| 2557 RawObject** from = prefix->from(); | |
| 2558 RawObject** to = prefix->to(); | |
| 2559 for (RawObject** p = from; p <= to; p++) { | |
| 2560 s->Push(*p); | |
| 2561 } | |
| 2562 } | |
| 2563 | |
| 2564 void WriteAlloc(Serializer* s) { | |
| 2565 s->WriteCid(kLibraryPrefixCid); | |
| 2566 intptr_t count = objects_.length(); | |
| 2567 s->Write<intptr_t>(count); | |
| 2568 for (intptr_t i = 0; i < count; i++) { | |
| 2569 RawLibraryPrefix* prefix = objects_[i]; | |
| 2570 s->AssignRef(prefix); | |
| 2571 } | |
| 2572 } | |
| 2573 | |
| 2574 void WriteFill(Serializer* s) { | |
| 2575 intptr_t count = objects_.length(); | |
| 2576 for (intptr_t i = 0; i < count; i++) { | |
| 2577 RawLibraryPrefix* prefix = objects_[i]; | |
| 2578 RawObject** from = prefix->from(); | |
| 2579 RawObject** to = prefix->to(); | |
| 2580 for (RawObject** p = from; p <= to; p++) { | |
| 2581 s->WriteRef(*p); | |
| 2582 } | |
| 2583 s->Write<uint16_t>(prefix->ptr()->num_imports_); | |
| 2584 s->Write<bool>(prefix->ptr()->is_deferred_load_); | |
| 2585 s->Write<bool>(prefix->ptr()->is_loaded_); | |
| 2586 } | |
| 2587 } | |
| 2588 | |
| 2589 private: | |
| 2590 GrowableArray<RawLibraryPrefix*> objects_; | |
| 2591 }; | |
| 2592 | |
| 2593 class LibraryPrefixDeserializationCluster : public DeserializationCluster { | |
| 2594 public: | |
| 2595 LibraryPrefixDeserializationCluster() { } | |
| 2596 virtual ~LibraryPrefixDeserializationCluster() { } | |
| 2597 | |
| 2598 void ReadAlloc(Deserializer* d) { | |
| 2599 start_index_ = d->next_index(); | |
| 2600 PageSpace* old_space = d->heap()->old_space(); | |
| 2601 intptr_t count = d->Read<intptr_t>(); | |
| 2602 for (intptr_t i = 0; i < count; i++) { | |
| 2603 d->AssignRef(AllocateUninitialized(old_space, | |
| 2604 LibraryPrefix::InstanceSize())); | |
| 2605 } | |
| 2606 stop_index_ = d->next_index(); | |
| 2607 } | |
| 2608 | |
| 2609 void ReadFill(Deserializer* d) { | |
| 2610 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 2611 | |
| 2612 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 2613 RawLibraryPrefix* prefix = | |
| 2614 reinterpret_cast<RawLibraryPrefix*>(d->Ref(id)); | |
| 2615 Deserializer::InitializeHeader(prefix, kLibraryPrefixCid, | |
| 2616 LibraryPrefix::InstanceSize(), | |
| 2617 is_vm_object); | |
| 2618 RawObject** from = prefix->from(); | |
| 2619 RawObject** to = prefix->to(); | |
| 2620 for (RawObject** p = from; p <= to; p++) { | |
| 2621 *p = d->ReadRef(); | |
| 2622 } | |
| 2623 prefix->ptr()->num_imports_ = d->Read<uint16_t>(); | |
| 2624 prefix->ptr()->is_deferred_load_ = d->Read<bool>(); | |
| 2625 prefix->ptr()->is_loaded_ = d->Read<bool>(); | |
| 2626 } | |
| 2627 } | |
| 2628 }; | |
| 2629 | |
| 2630 class TypeSerializationCluster : public SerializationCluster { | |
| 2631 public: | |
| 2632 TypeSerializationCluster() { } | |
| 2633 virtual ~TypeSerializationCluster() { } | |
| 2634 | |
| 2635 void Trace(Serializer* s, RawObject* object) { | |
| 2636 RawType* type = Type::RawCast(object); | |
| 2637 if (type->IsCanonical()) { | |
| 2638 canonical_objects_.Add(type); | |
| 2639 } else { | |
| 2640 objects_.Add(type); | |
| 2641 } | |
| 2642 | |
| 2643 RawObject** from = type->from(); | |
| 2644 RawObject** to = type->to(); | |
| 2645 for (RawObject** p = from; p <= to; p++) { | |
| 2646 s->Push(*p); | |
| 2647 } | |
| 2648 | |
| 2649 RawSmi* raw_type_class_id = Smi::RawCast(type->ptr()->type_class_id_); | |
| 2650 RawClass* type_class = | |
| 2651 s->isolate()->class_table()->At(Smi::Value(raw_type_class_id)); | |
| 2652 s->Push(type_class); | |
| 2653 } | |
| 2654 | |
| 2655 void WriteAlloc(Serializer* s) { | |
| 2656 s->WriteCid(kTypeCid); | |
| 2657 intptr_t count = canonical_objects_.length(); | |
| 2658 s->Write<intptr_t>(count); | |
| 2659 for (intptr_t i = 0; i < count; i++) { | |
| 2660 RawType* type = canonical_objects_[i]; | |
| 2661 s->AssignRef(type); | |
| 2662 } | |
| 2663 count = objects_.length(); | |
| 2664 s->Write<intptr_t>(count); | |
| 2665 for (intptr_t i = 0; i < count; i++) { | |
| 2666 RawType* type = objects_[i]; | |
| 2667 s->AssignRef(type); | |
| 2668 } | |
| 2669 } | |
| 2670 | |
| 2671 void WriteFill(Serializer* s) { | |
| 2672 intptr_t count = canonical_objects_.length(); | |
| 2673 for (intptr_t i = 0; i < count; i++) { | |
| 2674 RawType* type = canonical_objects_[i]; | |
| 2675 RawObject** from = type->from(); | |
| 2676 RawObject** to = type->to(); | |
| 2677 for (RawObject** p = from; p <= to; p++) { | |
| 2678 s->WriteRef(*p); | |
| 2679 } | |
| 2680 s->WriteTokenPosition(type->ptr()->token_pos_); | |
| 2681 s->Write<int8_t>(type->ptr()->type_state_); | |
| 2682 } | |
| 2683 count = objects_.length(); | |
| 2684 for (intptr_t i = 0; i < count; i++) { | |
| 2685 RawType* type = objects_[i]; | |
| 2686 RawObject** from = type->from(); | |
| 2687 RawObject** to = type->to(); | |
| 2688 for (RawObject** p = from; p <= to; p++) { | |
| 2689 s->WriteRef(*p); | |
| 2690 } | |
| 2691 s->WriteTokenPosition(type->ptr()->token_pos_); | |
| 2692 s->Write<int8_t>(type->ptr()->type_state_); | |
| 2693 } | |
| 2694 } | |
| 2695 | |
| 2696 private: | |
| 2697 GrowableArray<RawType*> canonical_objects_; | |
| 2698 GrowableArray<RawType*> objects_; | |
| 2699 }; | |
| 2700 | |
| 2701 class TypeDeserializationCluster : public DeserializationCluster { | |
| 2702 public: | |
| 2703 TypeDeserializationCluster() { } | |
| 2704 virtual ~TypeDeserializationCluster() { } | |
| 2705 | |
| 2706 void ReadAlloc(Deserializer* d) { | |
| 2707 canonical_start_index_ = d->next_index(); | |
| 2708 PageSpace* old_space = d->heap()->old_space(); | |
| 2709 intptr_t count = d->Read<intptr_t>(); | |
| 2710 for (intptr_t i = 0; i < count; i++) { | |
| 2711 d->AssignRef(AllocateUninitialized(old_space, Type::InstanceSize())); | |
| 2712 } | |
| 2713 canonical_stop_index_ = d->next_index(); | |
| 2714 | |
| 2715 start_index_ = d->next_index(); | |
| 2716 count = d->Read<intptr_t>(); | |
| 2717 for (intptr_t i = 0; i < count; i++) { | |
| 2718 d->AssignRef(AllocateUninitialized(old_space, Type::InstanceSize())); | |
| 2719 } | |
| 2720 stop_index_ = d->next_index(); | |
| 2721 } | |
| 2722 | |
| 2723 void ReadFill(Deserializer* d) { | |
| 2724 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 2725 | |
| 2726 for (intptr_t id = canonical_start_index_; | |
| 2727 id < canonical_stop_index_; | |
| 2728 id++) { | |
| 2729 RawType* type = reinterpret_cast<RawType*>(d->Ref(id)); | |
| 2730 Deserializer::InitializeHeader(type, kTypeCid, | |
| 2731 Type::InstanceSize(), is_vm_object, true); | |
| 2732 RawObject** from = type->from(); | |
| 2733 RawObject** to = type->to(); | |
| 2734 for (RawObject** p = from; p <= to; p++) { | |
| 2735 *p = d->ReadRef(); | |
| 2736 } | |
| 2737 type->ptr()->token_pos_ = d->ReadTokenPosition(); | |
| 2738 type->ptr()->type_state_ = d->Read<int8_t>(); | |
| 2739 } | |
| 2740 | |
| 2741 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 2742 RawType* type = reinterpret_cast<RawType*>(d->Ref(id)); | |
| 2743 Deserializer::InitializeHeader(type, kTypeCid, | |
| 2744 Type::InstanceSize(), is_vm_object); | |
| 2745 RawObject** from = type->from(); | |
| 2746 RawObject** to = type->to(); | |
| 2747 for (RawObject** p = from; p <= to; p++) { | |
| 2748 *p = d->ReadRef(); | |
| 2749 } | |
| 2750 type->ptr()->token_pos_ = d->ReadTokenPosition(); | |
| 2751 type->ptr()->type_state_ = d->Read<int8_t>(); | |
| 2752 } | |
| 2753 } | |
| 2754 | |
| 2755 private: | |
| 2756 intptr_t canonical_start_index_; | |
| 2757 intptr_t canonical_stop_index_; | |
| 2758 }; | |
| 2759 | |
| 2760 class TypeRefSerializationCluster : public SerializationCluster { | |
| 2761 public: | |
| 2762 TypeRefSerializationCluster() { } | |
| 2763 virtual ~TypeRefSerializationCluster() { } | |
| 2764 | |
| 2765 void Trace(Serializer* s, RawObject* object) { | |
| 2766 RawTypeRef* type = TypeRef::RawCast(object); | |
| 2767 objects_.Add(type); | |
| 2768 | |
| 2769 RawObject** from = type->from(); | |
| 2770 RawObject** to = type->to(); | |
| 2771 for (RawObject** p = from; p <= to; p++) { | |
| 2772 s->Push(*p); | |
| 2773 } | |
| 2774 } | |
| 2775 | |
| 2776 void WriteAlloc(Serializer* s) { | |
| 2777 s->WriteCid(kTypeRefCid); | |
| 2778 intptr_t count = objects_.length(); | |
| 2779 s->Write<intptr_t>(count); | |
| 2780 for (intptr_t i = 0; i < count; i++) { | |
| 2781 RawTypeRef* type = objects_[i]; | |
| 2782 s->AssignRef(type); | |
| 2783 } | |
| 2784 } | |
| 2785 | |
| 2786 void WriteFill(Serializer* s) { | |
| 2787 intptr_t count = objects_.length(); | |
| 2788 for (intptr_t i = 0; i < count; i++) { | |
| 2789 RawTypeRef* type = objects_[i]; | |
| 2790 RawObject** from = type->from(); | |
| 2791 RawObject** to = type->to(); | |
| 2792 for (RawObject** p = from; p <= to; p++) { | |
| 2793 s->WriteRef(*p); | |
| 2794 } | |
| 2795 } | |
| 2796 } | |
| 2797 | |
| 2798 private: | |
| 2799 GrowableArray<RawTypeRef*> objects_; | |
| 2800 }; | |
| 2801 | |
| 2802 class TypeRefDeserializationCluster : public DeserializationCluster { | |
| 2803 public: | |
| 2804 TypeRefDeserializationCluster() { } | |
| 2805 virtual ~TypeRefDeserializationCluster() { } | |
| 2806 | |
| 2807 void ReadAlloc(Deserializer* d) { | |
| 2808 start_index_ = d->next_index(); | |
| 2809 PageSpace* old_space = d->heap()->old_space(); | |
| 2810 intptr_t count = d->Read<intptr_t>(); | |
| 2811 for (intptr_t i = 0; i < count; i++) { | |
| 2812 d->AssignRef(AllocateUninitialized(old_space, TypeRef::InstanceSize())); | |
| 2813 } | |
| 2814 stop_index_ = d->next_index(); | |
| 2815 } | |
| 2816 | |
| 2817 void ReadFill(Deserializer* d) { | |
| 2818 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 2819 | |
| 2820 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 2821 RawTypeRef* type = reinterpret_cast<RawTypeRef*>(d->Ref(id)); | |
| 2822 Deserializer::InitializeHeader(type, kTypeRefCid, | |
| 2823 TypeRef::InstanceSize(), is_vm_object); | |
| 2824 RawObject** from = type->from(); | |
| 2825 RawObject** to = type->to(); | |
| 2826 for (RawObject** p = from; p <= to; p++) { | |
| 2827 *p = d->ReadRef(); | |
| 2828 } | |
| 2829 } | |
| 2830 } | |
| 2831 }; | |
| 2832 | |
| 2833 class TypeParameterSerializationCluster : public SerializationCluster { | |
| 2834 public: | |
| 2835 TypeParameterSerializationCluster() { } | |
| 2836 virtual ~TypeParameterSerializationCluster() { } | |
| 2837 | |
| 2838 void Trace(Serializer* s, RawObject* object) { | |
| 2839 RawTypeParameter* type = TypeParameter::RawCast(object); | |
| 2840 objects_.Add(type); | |
| 2841 ASSERT(!type->IsCanonical()); | |
| 2842 | |
| 2843 RawObject** from = type->from(); | |
| 2844 RawObject** to = type->to(); | |
| 2845 for (RawObject** p = from; p <= to; p++) { | |
| 2846 s->Push(*p); | |
| 2847 } | |
| 2848 } | |
| 2849 | |
| 2850 void WriteAlloc(Serializer* s) { | |
| 2851 s->WriteCid(kTypeParameterCid); | |
| 2852 intptr_t count = objects_.length(); | |
| 2853 s->Write<intptr_t>(count); | |
| 2854 for (intptr_t i = 0; i < count; i++) { | |
| 2855 RawTypeParameter* type = objects_[i]; | |
| 2856 s->AssignRef(type); | |
| 2857 } | |
| 2858 } | |
| 2859 | |
| 2860 void WriteFill(Serializer* s) { | |
| 2861 intptr_t count = objects_.length(); | |
| 2862 for (intptr_t i = 0; i < count; i++) { | |
| 2863 RawTypeParameter* type = objects_[i]; | |
| 2864 RawObject** from = type->from(); | |
| 2865 RawObject** to = type->to(); | |
| 2866 for (RawObject** p = from; p <= to; p++) { | |
| 2867 s->WriteRef(*p); | |
| 2868 } | |
| 2869 s->Write<intptr_t>(type->ptr()->parameterized_class_id_); | |
| 2870 s->WriteTokenPosition(type->ptr()->token_pos_); | |
| 2871 s->Write<int16_t>(type->ptr()->index_); | |
| 2872 s->Write<int8_t>(type->ptr()->type_state_); | |
| 2873 } | |
| 2874 } | |
| 2875 | |
| 2876 private: | |
| 2877 GrowableArray<RawTypeParameter*> objects_; | |
| 2878 }; | |
| 2879 | |
| 2880 | |
| 2881 class TypeParameterDeserializationCluster : public DeserializationCluster { | |
| 2882 public: | |
| 2883 TypeParameterDeserializationCluster() { } | |
| 2884 virtual ~TypeParameterDeserializationCluster() { } | |
| 2885 | |
| 2886 void ReadAlloc(Deserializer* d) { | |
| 2887 start_index_ = d->next_index(); | |
| 2888 PageSpace* old_space = d->heap()->old_space(); | |
| 2889 intptr_t count = d->Read<intptr_t>(); | |
| 2890 for (intptr_t i = 0; i < count; i++) { | |
| 2891 d->AssignRef(AllocateUninitialized(old_space, | |
| 2892 TypeParameter::InstanceSize())); | |
| 2893 } | |
| 2894 stop_index_ = d->next_index(); | |
| 2895 } | |
| 2896 | |
| 2897 void ReadFill(Deserializer* d) { | |
| 2898 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 2899 | |
| 2900 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 2901 RawTypeParameter* type = reinterpret_cast<RawTypeParameter*>(d->Ref(id)); | |
| 2902 Deserializer::InitializeHeader(type, kTypeParameterCid, | |
| 2903 TypeParameter::InstanceSize(), | |
| 2904 is_vm_object); | |
| 2905 RawObject** from = type->from(); | |
| 2906 RawObject** to = type->to(); | |
| 2907 for (RawObject** p = from; p <= to; p++) { | |
| 2908 *p = d->ReadRef(); | |
| 2909 } | |
| 2910 type->ptr()->parameterized_class_id_ = d->Read<intptr_t>(); | |
| 2911 type->ptr()->token_pos_ = d->ReadTokenPosition(); | |
| 2912 type->ptr()->index_ = d->Read<int16_t>(); | |
| 2913 type->ptr()->type_state_ = d->Read<int8_t>(); | |
| 2914 } | |
| 2915 } | |
| 2916 }; | |
| 2917 | |
| 2918 class BoundedTypeSerializationCluster : public SerializationCluster { | |
| 2919 public: | |
| 2920 BoundedTypeSerializationCluster() { } | |
| 2921 virtual ~BoundedTypeSerializationCluster() { } | |
| 2922 | |
| 2923 void Trace(Serializer* s, RawObject* object) { | |
| 2924 RawBoundedType* type = BoundedType::RawCast(object); | |
| 2925 objects_.Add(type); | |
| 2926 | |
| 2927 RawObject** from = type->from(); | |
| 2928 RawObject** to = type->to(); | |
| 2929 for (RawObject** p = from; p <= to; p++) { | |
| 2930 s->Push(*p); | |
| 2931 } | |
| 2932 } | |
| 2933 | |
| 2934 void WriteAlloc(Serializer* s) { | |
| 2935 s->WriteCid(kBoundedTypeCid); | |
| 2936 intptr_t count = objects_.length(); | |
| 2937 s->Write<intptr_t>(count); | |
| 2938 for (intptr_t i = 0; i < count; i++) { | |
| 2939 RawBoundedType* type = objects_[i]; | |
| 2940 s->AssignRef(type); | |
| 2941 } | |
| 2942 } | |
| 2943 | |
| 2944 void WriteFill(Serializer* s) { | |
| 2945 intptr_t count = objects_.length(); | |
| 2946 for (intptr_t i = 0; i < count; i++) { | |
| 2947 RawBoundedType* type = objects_[i]; | |
| 2948 RawObject** from = type->from(); | |
| 2949 RawObject** to = type->to(); | |
| 2950 for (RawObject** p = from; p <= to; p++) { | |
| 2951 s->WriteRef(*p); | |
| 2952 } | |
| 2953 } | |
| 2954 } | |
| 2955 | |
| 2956 private: | |
| 2957 GrowableArray<RawBoundedType*> objects_; | |
| 2958 }; | |
| 2959 | |
| 2960 class BoundedTypeDeserializationCluster : public DeserializationCluster { | |
| 2961 public: | |
| 2962 BoundedTypeDeserializationCluster() { } | |
| 2963 virtual ~BoundedTypeDeserializationCluster() { } | |
| 2964 | |
| 2965 void ReadAlloc(Deserializer* d) { | |
| 2966 start_index_ = d->next_index(); | |
| 2967 PageSpace* old_space = d->heap()->old_space(); | |
| 2968 intptr_t count = d->Read<intptr_t>(); | |
| 2969 for (intptr_t i = 0; i < count; i++) { | |
| 2970 d->AssignRef(AllocateUninitialized(old_space, | |
| 2971 BoundedType::InstanceSize())); | |
| 2972 } | |
| 2973 stop_index_ = d->next_index(); | |
| 2974 } | |
| 2975 | |
| 2976 void ReadFill(Deserializer* d) { | |
| 2977 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 2978 | |
| 2979 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 2980 RawBoundedType* type = reinterpret_cast<RawBoundedType*>(d->Ref(id)); | |
| 2981 Deserializer::InitializeHeader(type, kBoundedTypeCid, | |
| 2982 BoundedType::InstanceSize(), is_vm_object); | |
| 2983 RawObject** from = type->from(); | |
| 2984 RawObject** to = type->to(); | |
| 2985 for (RawObject** p = from; p <= to; p++) { | |
| 2986 *p = d->ReadRef(); | |
| 2987 } | |
| 2988 } | |
| 2989 } | |
| 2990 }; | |
| 2991 | |
| 2992 class ClosureSerializationCluster : public SerializationCluster { | |
| 2993 public: | |
| 2994 ClosureSerializationCluster() { } | |
| 2995 virtual ~ClosureSerializationCluster() { } | |
| 2996 | |
| 2997 void Trace(Serializer* s, RawObject* object) { | |
| 2998 RawClosure* closure = Closure::RawCast(object); | |
| 2999 objects_.Add(closure); | |
| 3000 | |
| 3001 RawObject** from = closure->from(); | |
| 3002 RawObject** to = closure->to(); | |
| 3003 for (RawObject** p = from; p <= to; p++) { | |
| 3004 s->Push(*p); | |
| 3005 } | |
| 3006 } | |
| 3007 | |
| 3008 void WriteAlloc(Serializer* s) { | |
| 3009 s->WriteCid(kClosureCid); | |
| 3010 intptr_t count = objects_.length(); | |
| 3011 s->Write<intptr_t>(count); | |
| 3012 for (intptr_t i = 0; i < count; i++) { | |
| 3013 RawClosure* closure = objects_[i]; | |
| 3014 s->AssignRef(closure); | |
| 3015 } | |
| 3016 } | |
| 3017 | |
| 3018 void WriteFill(Serializer* s) { | |
| 3019 intptr_t count = objects_.length(); | |
| 3020 for (intptr_t i = 0; i < count; i++) { | |
| 3021 RawClosure* closure = objects_[i]; | |
| 3022 s->Write<bool>(closure->IsCanonical()); | |
| 3023 RawObject** from = closure->from(); | |
| 3024 RawObject** to = closure->to(); | |
| 3025 for (RawObject** p = from; p <= to; p++) { | |
| 3026 s->WriteRef(*p); | |
| 3027 } | |
| 3028 } | |
| 3029 } | |
| 3030 | |
| 3031 private: | |
| 3032 GrowableArray<RawClosure*> objects_; | |
| 3033 }; | |
| 3034 | |
| 3035 | |
| 3036 class ClosureDeserializationCluster : public DeserializationCluster { | |
| 3037 public: | |
| 3038 ClosureDeserializationCluster() { } | |
| 3039 virtual ~ClosureDeserializationCluster() { } | |
| 3040 | |
| 3041 void ReadAlloc(Deserializer* d) { | |
| 3042 start_index_ = d->next_index(); | |
| 3043 PageSpace* old_space = d->heap()->old_space(); | |
| 3044 intptr_t count = d->Read<intptr_t>(); | |
| 3045 for (intptr_t i = 0; i < count; i++) { | |
| 3046 d->AssignRef(AllocateUninitialized(old_space, Closure::InstanceSize())); | |
| 3047 } | |
| 3048 stop_index_ = d->next_index(); | |
| 3049 } | |
| 3050 | |
| 3051 void ReadFill(Deserializer* d) { | |
| 3052 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 3053 | |
| 3054 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 3055 RawClosure* closure = reinterpret_cast<RawClosure*>(d->Ref(id)); | |
| 3056 bool is_canonical = d->Read<bool>(); | |
| 3057 Deserializer::InitializeHeader(closure, kClosureCid, | |
| 3058 Closure::InstanceSize(), | |
| 3059 is_vm_object, is_canonical); | |
| 3060 RawObject** from = closure->from(); | |
| 3061 RawObject** to = closure->to(); | |
| 3062 for (RawObject** p = from; p <= to; p++) { | |
| 3063 *p = d->ReadRef(); | |
| 3064 } | |
| 3065 } | |
| 3066 } | |
| 3067 }; | |
| 3068 | |
| 3069 | |
| 3070 class MintSerializationCluster : public SerializationCluster { | |
| 3071 public: | |
| 3072 MintSerializationCluster() { } | |
| 3073 virtual ~MintSerializationCluster() { } | |
| 3074 | |
| 3075 void Trace(Serializer* s, RawObject* object) { | |
| 3076 RawMint* mint = Mint::RawCast(object); | |
| 3077 objects_.Add(mint); | |
| 3078 } | |
| 3079 | |
| 3080 void WriteAlloc(Serializer* s) { | |
| 3081 s->WriteCid(kMintCid); | |
| 3082 intptr_t count = objects_.length(); | |
| 3083 s->Write<intptr_t>(count); | |
| 3084 for (intptr_t i = 0; i < count; i++) { | |
| 3085 RawMint* mint = objects_[i]; | |
| 3086 s->AssignRef(mint); | |
| 3087 } | |
| 3088 } | |
| 3089 | |
| 3090 void WriteFill(Serializer* s) { | |
| 3091 intptr_t count = objects_.length(); | |
| 3092 for (intptr_t i = 0; i < count; i++) { | |
| 3093 RawMint* mint = objects_[i]; | |
| 3094 s->Write<bool>(mint->IsCanonical()); | |
| 3095 s->Write<int64_t>(mint->ptr()->value_); | |
| 3096 } | |
| 3097 } | |
| 3098 | |
| 3099 private: | |
| 3100 GrowableArray<RawMint*> objects_; | |
| 3101 }; | |
| 3102 | |
| 3103 class MintDeserializationCluster : public DeserializationCluster { | |
| 3104 public: | |
| 3105 MintDeserializationCluster() { } | |
| 3106 virtual ~MintDeserializationCluster() { } | |
| 3107 | |
| 3108 void ReadAlloc(Deserializer* d) { | |
| 3109 start_index_ = d->next_index(); | |
| 3110 PageSpace* old_space = d->heap()->old_space(); | |
| 3111 intptr_t count = d->Read<intptr_t>(); | |
| 3112 for (intptr_t i = 0; i < count; i++) { | |
| 3113 d->AssignRef(AllocateUninitialized(old_space, Mint::InstanceSize())); | |
| 3114 } | |
| 3115 stop_index_ = d->next_index(); | |
| 3116 } | |
| 3117 | |
| 3118 void ReadFill(Deserializer* d) { | |
| 3119 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 3120 | |
| 3121 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 3122 RawMint* mint = reinterpret_cast<RawMint*>(d->Ref(id)); | |
| 3123 bool is_canonical = d->Read<bool>(); | |
| 3124 Deserializer::InitializeHeader(mint, kMintCid, | |
| 3125 Mint::InstanceSize(), | |
| 3126 is_vm_object, is_canonical); | |
| 3127 mint->ptr()->value_ = d->Read<int64_t>(); | |
| 3128 } | |
| 3129 } | |
| 3130 }; | |
| 3131 | |
| 3132 class BigintSerializationCluster : public SerializationCluster { | |
| 3133 public: | |
| 3134 BigintSerializationCluster() { } | |
| 3135 virtual ~BigintSerializationCluster() { } | |
| 3136 | |
| 3137 void Trace(Serializer* s, RawObject* object) { | |
| 3138 RawBigint* bigint = Bigint::RawCast(object); | |
| 3139 objects_.Add(bigint); | |
| 3140 | |
| 3141 RawObject** from = bigint->from(); | |
| 3142 RawObject** to = bigint->to(); | |
| 3143 for (RawObject** p = from; p <= to; p++) { | |
| 3144 s->Push(*p); | |
| 3145 } | |
| 3146 } | |
| 3147 | |
| 3148 void WriteAlloc(Serializer* s) { | |
| 3149 s->WriteCid(kBigintCid); | |
| 3150 intptr_t count = objects_.length(); | |
| 3151 s->Write<intptr_t>(count); | |
| 3152 for (intptr_t i = 0; i < count; i++) { | |
| 3153 RawBigint* bigint = objects_[i]; | |
| 3154 s->AssignRef(bigint); | |
| 3155 } | |
| 3156 } | |
| 3157 | |
| 3158 void WriteFill(Serializer* s) { | |
| 3159 intptr_t count = objects_.length(); | |
| 3160 for (intptr_t i = 0; i < count; i++) { | |
| 3161 RawBigint* bigint = objects_[i]; | |
| 3162 s->Write<bool>(bigint->IsCanonical()); | |
| 3163 RawObject** from = bigint->from(); | |
| 3164 RawObject** to = bigint->to(); | |
| 3165 for (RawObject** p = from; p <= to; p++) { | |
| 3166 s->WriteRef(*p); | |
| 3167 } | |
| 3168 } | |
| 3169 } | |
| 3170 | |
| 3171 private: | |
| 3172 GrowableArray<RawBigint*> objects_; | |
| 3173 }; | |
| 3174 | |
| 3175 class BigintDeserializationCluster : public DeserializationCluster { | |
| 3176 public: | |
| 3177 BigintDeserializationCluster() { } | |
| 3178 virtual ~BigintDeserializationCluster() { } | |
| 3179 | |
| 3180 void ReadAlloc(Deserializer* d) { | |
| 3181 start_index_ = d->next_index(); | |
| 3182 PageSpace* old_space = d->heap()->old_space(); | |
| 3183 intptr_t count = d->Read<intptr_t>(); | |
| 3184 for (intptr_t i = 0; i < count; i++) { | |
| 3185 d->AssignRef(AllocateUninitialized(old_space, Bigint::InstanceSize())); | |
| 3186 } | |
| 3187 stop_index_ = d->next_index(); | |
| 3188 } | |
| 3189 | |
| 3190 void ReadFill(Deserializer* d) { | |
| 3191 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 3192 | |
| 3193 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 3194 RawBigint* bigint = reinterpret_cast<RawBigint*>(d->Ref(id)); | |
| 3195 bool is_canonical = d->Read<bool>(); | |
| 3196 Deserializer::InitializeHeader(bigint, kBigintCid, | |
| 3197 Bigint::InstanceSize(), | |
| 3198 is_vm_object, is_canonical); | |
| 3199 RawObject** from = bigint->from(); | |
| 3200 RawObject** to = bigint->to(); | |
| 3201 for (RawObject** p = from; p <= to; p++) { | |
| 3202 *p = d->ReadRef(); | |
| 3203 } | |
| 3204 } | |
| 3205 } | |
| 3206 }; | |
| 3207 | |
| 3208 class DoubleSerializationCluster : public SerializationCluster { | |
| 3209 public: | |
| 3210 DoubleSerializationCluster() { } | |
| 3211 virtual ~DoubleSerializationCluster() { } | |
| 3212 | |
| 3213 void Trace(Serializer* s, RawObject* object) { | |
| 3214 RawDouble* dbl = Double::RawCast(object); | |
| 3215 objects_.Add(dbl); | |
| 3216 } | |
| 3217 | |
| 3218 void WriteAlloc(Serializer* s) { | |
| 3219 s->WriteCid(kDoubleCid); | |
| 3220 intptr_t count = objects_.length(); | |
| 3221 s->Write<intptr_t>(count); | |
| 3222 for (intptr_t i = 0; i < count; i++) { | |
| 3223 RawDouble* dbl = objects_[i]; | |
| 3224 s->AssignRef(dbl); | |
| 3225 } | |
| 3226 } | |
| 3227 | |
| 3228 void WriteFill(Serializer* s) { | |
| 3229 intptr_t count = objects_.length(); | |
| 3230 for (intptr_t i = 0; i < count; i++) { | |
| 3231 RawDouble* dbl = objects_[i]; | |
| 3232 s->Write<bool>(dbl->IsCanonical()); | |
| 3233 s->Write<double>(dbl->ptr()->value_); | |
| 3234 } | |
| 3235 } | |
| 3236 | |
| 3237 private: | |
| 3238 GrowableArray<RawDouble*> objects_; | |
| 3239 }; | |
| 3240 | |
| 3241 class DoubleDeserializationCluster : public DeserializationCluster { | |
| 3242 public: | |
| 3243 DoubleDeserializationCluster() { } | |
| 3244 virtual ~DoubleDeserializationCluster() { } | |
| 3245 | |
| 3246 void ReadAlloc(Deserializer* d) { | |
| 3247 start_index_ = d->next_index(); | |
| 3248 PageSpace* old_space = d->heap()->old_space(); | |
| 3249 intptr_t count = d->Read<intptr_t>(); | |
| 3250 for (intptr_t i = 0; i < count; i++) { | |
| 3251 d->AssignRef(AllocateUninitialized(old_space, Double::InstanceSize())); | |
| 3252 } | |
| 3253 stop_index_ = d->next_index(); | |
| 3254 } | |
| 3255 | |
| 3256 void ReadFill(Deserializer* d) { | |
| 3257 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 3258 | |
| 3259 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 3260 RawDouble* dbl = reinterpret_cast<RawDouble*>(d->Ref(id)); | |
| 3261 bool is_canonical = d->Read<bool>(); | |
| 3262 Deserializer::InitializeHeader(dbl, kDoubleCid, | |
| 3263 Double::InstanceSize(), | |
| 3264 is_vm_object, is_canonical); | |
| 3265 dbl->ptr()->value_ = d->Read<double>(); | |
| 3266 } | |
| 3267 } | |
| 3268 }; | |
| 3269 | |
| 3270 | |
| 3271 class GrowableObjectArraySerializationCluster : public SerializationCluster { | |
| 3272 public: | |
| 3273 GrowableObjectArraySerializationCluster() { } | |
| 3274 virtual ~GrowableObjectArraySerializationCluster() { } | |
| 3275 | |
| 3276 void Trace(Serializer* s, RawObject* object) { | |
| 3277 RawGrowableObjectArray* array = GrowableObjectArray::RawCast(object); | |
| 3278 objects_.Add(array); | |
| 3279 | |
| 3280 RawObject** from = array->from(); | |
| 3281 RawObject** to = array->to(); | |
| 3282 for (RawObject** p = from; p <= to; p++) { | |
| 3283 s->Push(*p); | |
| 3284 } | |
| 3285 } | |
| 3286 | |
| 3287 void WriteAlloc(Serializer* s) { | |
| 3288 s->WriteCid(kGrowableObjectArrayCid); | |
| 3289 intptr_t count = objects_.length(); | |
| 3290 s->Write<intptr_t>(count); | |
| 3291 for (intptr_t i = 0; i < count; i++) { | |
| 3292 RawGrowableObjectArray* array = objects_[i]; | |
| 3293 s->AssignRef(array); | |
| 3294 } | |
| 3295 } | |
| 3296 | |
| 3297 void WriteFill(Serializer* s) { | |
| 3298 intptr_t count = objects_.length(); | |
| 3299 for (intptr_t i = 0; i < count; i++) { | |
| 3300 RawGrowableObjectArray* array = objects_[i]; | |
| 3301 s->Write<bool>(array->IsCanonical()); | |
| 3302 RawObject** from = array->from(); | |
| 3303 RawObject** to = array->to(); | |
| 3304 for (RawObject** p = from; p <= to; p++) { | |
| 3305 s->WriteRef(*p); | |
| 3306 } | |
| 3307 } | |
| 3308 } | |
| 3309 | |
| 3310 private: | |
| 3311 GrowableArray<RawGrowableObjectArray*> objects_; | |
| 3312 }; | |
| 3313 | |
// Deserializes the growable-object-array cluster: all instances are
// allocated in one pass (ReadAlloc) and their fields filled in a second
// pass (ReadFill), mirroring GrowableObjectArraySerializationCluster.
class GrowableObjectArrayDeserializationCluster
    : public DeserializationCluster {
 public:
  GrowableObjectArrayDeserializationCluster() { }
  virtual ~GrowableObjectArrayDeserializationCluster() { }

  // Allocates one uninitialized old-space object per serialized array and
  // registers a ref for it so forward references can resolve before fill.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space,
                                         GrowableObjectArray::InstanceSize()));
    }
    stop_index_ = d->next_index();
  }

  // Initializes each object's header and reads back its pointer fields in
  // the same from()..to() order they were written.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawGrowableObjectArray* list =
          reinterpret_cast<RawGrowableObjectArray*>(d->Ref(id));
      bool is_canonical = d->Read<bool>();
      Deserializer::InitializeHeader(list, kGrowableObjectArrayCid,
                                     GrowableObjectArray::InstanceSize(),
                                     is_vm_object, is_canonical);
      RawObject** from = list->from();
      RawObject** to = list->to();
      for (RawObject** p = from; p <= to; p++) {
        *p = d->ReadRef();
      }
    }
  }
};
| 3349 | |
// Serializes internal typed-data objects of a single class id (one cluster
// instance per cid, e.g. kTypedDataUint8ArrayCid).
class TypedDataSerializationCluster : public SerializationCluster {
 public:
  explicit TypedDataSerializationCluster(intptr_t cid) : cid_(cid) { }
  virtual ~TypedDataSerializationCluster() { }

  // Typed data holds no object pointers, so nothing is pushed; the
  // instance is just collected for the write passes.
  void Trace(Serializer* s, RawObject* object) {
    RawTypedData* data = TypedData::RawCast(object);
    objects_.Add(data);
  }

  // Phase one: cid, count, and each element count so the deserializer can
  // size its allocations.
  void WriteAlloc(Serializer* s) {
    s->Write<intptr_t>(cid_);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawTypedData* data = objects_[i];
      intptr_t length = Smi::Value(data->ptr()->length_);
      s->Write<intptr_t>(length);
      s->AssignRef(data);
    }
  }

  // Phase two: the length is written again in the fill section, followed by
  // the canonical bit and the raw byte payload (length * element size).
  void WriteFill(Serializer* s) {
    intptr_t count = objects_.length();
    intptr_t element_size = TypedData::ElementSizeInBytes(cid_);
    for (intptr_t i = 0; i < count; i++) {
      RawTypedData* data = objects_[i];
      intptr_t length = Smi::Value(data->ptr()->length_);
      s->Write<intptr_t>(length);
      s->Write<bool>(data->IsCanonical());
      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data());
      s->WriteBytes(cdata, length * element_size);
    }
  }

 private:
  const intptr_t cid_;
  GrowableArray<RawTypedData*> objects_;
};
| 3389 | |
| 3390 | |
// Deserializes one internal typed-data cluster; mirrors
// TypedDataSerializationCluster read-for-write.
class TypedDataDeserializationCluster : public DeserializationCluster {
 public:
  explicit TypedDataDeserializationCluster(intptr_t cid) : cid_(cid) { }
  virtual ~TypedDataDeserializationCluster() { }

  // Pre-allocates each instance using the per-object element count written
  // during WriteAlloc.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    intptr_t element_size = TypedData::ElementSizeInBytes(cid_);
    for (intptr_t i = 0; i < count; i++) {
      intptr_t length = d->Read<intptr_t>();
      d->AssignRef(AllocateUninitialized(old_space,
          TypedData::InstanceSize(length * element_size)));
    }
    stop_index_ = d->next_index();
  }

  // Fills in the header, length and raw byte payload of each instance.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();
    intptr_t element_size = TypedData::ElementSizeInBytes(cid_);

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawTypedData* data = reinterpret_cast<RawTypedData*>(d->Ref(id));
      intptr_t length = d->Read<intptr_t>();
      bool is_canonical = d->Read<bool>();
      intptr_t length_in_bytes = length * element_size;
      Deserializer::InitializeHeader(data, cid_,
                                     TypedData::InstanceSize(length_in_bytes),
                                     is_vm_object, is_canonical);
      data->ptr()->length_ = Smi::New(length);
      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data());
      d->ReadBytes(cdata, length_in_bytes);
    }
  }

 private:
  const intptr_t cid_;
};
| 3430 | |
| 3431 | |
// Serializes external typed-data objects (data lives outside the Dart heap)
// of a single class id.
class ExternalTypedDataSerializationCluster : public SerializationCluster {
 public:
  explicit ExternalTypedDataSerializationCluster(intptr_t cid) : cid_(cid) { }
  virtual ~ExternalTypedDataSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    RawExternalTypedData* data = ExternalTypedData::RawCast(object);
    objects_.Add(data);
    // External typed data is never canonicalized, so no canonical bit is
    // written for this cluster (unlike internal typed data).
    ASSERT(!data->IsCanonical());
  }

  // Phase one: cid and count only; the instance size is fixed, so no
  // per-object length is needed for allocation.
  void WriteAlloc(Serializer* s) {
    s->Write<intptr_t>(cid_);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawExternalTypedData* data = objects_[i];
      s->AssignRef(data);
    }
  }

  // Phase two: element count followed by the raw byte payload.
  void WriteFill(Serializer* s) {
    intptr_t count = objects_.length();
    intptr_t element_size = ExternalTypedData::ElementSizeInBytes(cid_);
    for (intptr_t i = 0; i < count; i++) {
      RawExternalTypedData* data = objects_[i];
      intptr_t length = Smi::Value(data->ptr()->length_);
      s->Write<intptr_t>(length);
      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data_);
      s->WriteBytes(cdata, length * element_size);
    }
  }

 private:
  const intptr_t cid_;
  GrowableArray<RawExternalTypedData*> objects_;
};
| 3469 | |
| 3470 | |
// Deserializes one external typed-data cluster. Note that the payload is
// NOT copied: each object's data_ pointer aliases the snapshot buffer
// directly, so the buffer must outlive these objects (NOTE(review): confirm
// the buffer lifetime guarantee at the Deserializer's call sites).
class ExternalTypedDataDeserializationCluster : public DeserializationCluster {
 public:
  explicit ExternalTypedDataDeserializationCluster(intptr_t cid) : cid_(cid) { }
  virtual ~ExternalTypedDataDeserializationCluster() { }

  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space,
                                         ExternalTypedData::InstanceSize()));
    }
    stop_index_ = d->next_index();
  }

  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();
    intptr_t element_size = ExternalTypedData::ElementSizeInBytes(cid_);

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawExternalTypedData* data =
          reinterpret_cast<RawExternalTypedData*>(d->Ref(id));
      intptr_t length = d->Read<intptr_t>();
      // No canonical bit in the stream (see the ASSERT in the serializer);
      // InitializeHeader's canonical flag takes its default here.
      Deserializer::InitializeHeader(data, cid_,
                                     ExternalTypedData::InstanceSize(),
                                     is_vm_object);
      data->ptr()->length_ = Smi::New(length);
      // Point at the payload in place and skip over it in the stream.
      data->ptr()->data_ = const_cast<uint8_t*>(d->CurrentBufferAddress());
      d->Advance(length * element_size);
    }
  }

 private:
  const intptr_t cid_;
};
| 3507 | |
| 3508 class StacktraceSerializationCluster : public SerializationCluster { | |
| 3509 public: | |
| 3510 StacktraceSerializationCluster() { } | |
| 3511 virtual ~StacktraceSerializationCluster() { } | |
| 3512 | |
| 3513 void Trace(Serializer* s, RawObject* object) { | |
| 3514 RawStacktrace* trace = Stacktrace::RawCast(object); | |
| 3515 objects_.Add(trace); | |
| 3516 | |
| 3517 RawObject** from = trace->from(); | |
| 3518 RawObject** to = trace->to(); | |
| 3519 for (RawObject** p = from; p <= to; p++) { | |
| 3520 s->Push(*p); | |
| 3521 } | |
| 3522 } | |
| 3523 | |
| 3524 void WriteAlloc(Serializer* s) { | |
| 3525 s->WriteCid(kStacktraceCid); | |
| 3526 intptr_t count = objects_.length(); | |
| 3527 s->Write<intptr_t>(count); | |
| 3528 for (intptr_t i = 0; i < count; i++) { | |
| 3529 RawStacktrace* trace = objects_[i]; | |
| 3530 s->AssignRef(trace); | |
| 3531 } | |
| 3532 } | |
| 3533 | |
| 3534 void WriteFill(Serializer* s) { | |
| 3535 intptr_t count = objects_.length(); | |
| 3536 for (intptr_t i = 0; i < count; i++) { | |
| 3537 RawStacktrace* trace = objects_[i]; | |
| 3538 RawObject** from = trace->from(); | |
| 3539 RawObject** to = trace->to(); | |
| 3540 for (RawObject** p = from; p <= to; p++) { | |
| 3541 s->WriteRef(*p); | |
| 3542 } | |
| 3543 } | |
| 3544 } | |
| 3545 | |
| 3546 private: | |
| 3547 GrowableArray<RawStacktrace*> objects_; | |
| 3548 }; | |
| 3549 | |
// Deserializes the Stacktrace cluster; mirrors
// StacktraceSerializationCluster.
class StacktraceDeserializationCluster : public DeserializationCluster {
 public:
  StacktraceDeserializationCluster() { }
  virtual ~StacktraceDeserializationCluster() { }

  // Fixed-size instances: allocate one per serialized object.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space,
                                         Stacktrace::InstanceSize()));
    }
    stop_index_ = d->next_index();
  }

  // No canonical bit in this cluster's stream; InitializeHeader's canonical
  // flag takes its default.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawStacktrace* trace = reinterpret_cast<RawStacktrace*>(d->Ref(id));
      Deserializer::InitializeHeader(trace, kStacktraceCid,
                                     Stacktrace::InstanceSize(), is_vm_object);
      RawObject** from = trace->from();
      RawObject** to = trace->to();
      for (RawObject** p = from; p <= to; p++) {
        *p = d->ReadRef();
      }
    }
  }
};
| 3581 | |
// Serializes RegExp instances: pointer fields as refs, followed by the two
// non-pointer fields (register count and type flags).
class RegExpSerializationCluster : public SerializationCluster {
 public:
  RegExpSerializationCluster() { }
  virtual ~RegExpSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    RawRegExp* regexp = RegExp::RawCast(object);
    objects_.Add(regexp);

    RawObject** from = regexp->from();
    RawObject** to = regexp->to();
    for (RawObject** p = from; p <= to; p++) {
      s->Push(*p);
    }
  }

  void WriteAlloc(Serializer* s) {
    s->WriteCid(kRegExpCid);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawRegExp* regexp = objects_[i];
      s->AssignRef(regexp);
    }
  }

  void WriteFill(Serializer* s) {
    intptr_t count = objects_.length();
    for (intptr_t i = 0; i < count; i++) {
      RawRegExp* regexp = objects_[i];
      RawObject** from = regexp->from();
      RawObject** to = regexp->to();
      for (RawObject** p = from; p <= to; p++) {
        s->WriteRef(*p);
      }

      // Non-pointer payload; the deserializer reads these back with the
      // same widths (intptr_t, then int8_t).
      s->Write<intptr_t>(regexp->ptr()->num_registers_);
      s->Write<int8_t>(regexp->ptr()->type_flags_);
    }
  }

 private:
  GrowableArray<RawRegExp*> objects_;
};
| 3626 | |
// Deserializes the RegExp cluster; mirrors RegExpSerializationCluster,
// including the trailing non-pointer fields.
class RegExpDeserializationCluster : public DeserializationCluster {
 public:
  RegExpDeserializationCluster() { }
  virtual ~RegExpDeserializationCluster() { }

  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      d->AssignRef(AllocateUninitialized(old_space,
                                         RegExp::InstanceSize()));
    }
    stop_index_ = d->next_index();
  }

  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawRegExp* regexp = reinterpret_cast<RawRegExp*>(d->Ref(id));
      Deserializer::InitializeHeader(regexp, kRegExpCid,
                                     RegExp::InstanceSize(), is_vm_object);
      RawObject** from = regexp->from();
      RawObject** to = regexp->to();
      for (RawObject** p = from; p <= to; p++) {
        *p = d->ReadRef();
      }

      // Widths match the serializer's Write<intptr_t>/Write<int8_t>.
      regexp->ptr()->num_registers_ = d->Read<intptr_t>();
      regexp->ptr()->type_flags_ = d->Read<int8_t>();
    }
  }
};
| 3661 | |
// Serializes LinkedHashMap instances. The backing data array is not
// serialized verbatim: deleted pairs are filtered out here, and the map is
// rebuilt in compact form by the deserialization cluster.
class LinkedHashMapSerializationCluster : public SerializationCluster {
 public:
  LinkedHashMapSerializationCluster() { }
  virtual ~LinkedHashMapSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    RawLinkedHashMap* map = LinkedHashMap::RawCast(object);
    objects_.Add(map);

    s->Push(map->ptr()->type_arguments_);

    intptr_t used_data = Smi::Value(map->ptr()->used_data_);
    RawArray* data_array = map->ptr()->data_;
    RawObject** data_elements = data_array->ptr()->data();
    // data_ is an alternating key/value list. A deleted pair is marked by
    // storing the backing array itself in the key slot; skip those.
    for (intptr_t i = 0; i < used_data; i += 2) {
      RawObject* key = data_elements[i];
      if (key != data_array) {
        RawObject* value = data_elements[i + 1];
        s->Push(key);
        s->Push(value);
      }
    }
  }

  void WriteAlloc(Serializer* s) {
    s->WriteCid(kLinkedHashMapCid);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawLinkedHashMap* map = objects_[i];
      s->AssignRef(map);
    }
  }

  void WriteFill(Serializer* s) {
    intptr_t count = objects_.length();
    for (intptr_t i = 0; i < count; i++) {
      RawLinkedHashMap* map = objects_[i];
      s->Write<bool>(map->IsCanonical());

      s->WriteRef(map->ptr()->type_arguments_);

      const intptr_t used_data = Smi::Value(map->ptr()->used_data_);
      ASSERT((used_data & 1) == 0);  // Keys + values, so must be even.
      const intptr_t deleted_keys = Smi::Value(map->ptr()->deleted_keys_);

      // Write out the number of (not deleted) key/value pairs that will
      // follow.
      s->Write<intptr_t>((used_data >> 1) - deleted_keys);

      RawArray* data_array = map->ptr()->data_;
      RawObject** data_elements = data_array->ptr()->data();
      // Same deleted-pair sentinel check as in Trace above.
      for (intptr_t i = 0; i < used_data; i += 2) {
        RawObject* key = data_elements[i];
        if (key != data_array) {
          RawObject* value = data_elements[i + 1];
          s->WriteRef(key);
          s->WriteRef(value);
        }
      }
    }
  }

 private:
  GrowableArray<RawLinkedHashMap*> objects_;
};
| 3727 | |
| 3728 class LinkedHashMapDeserializationCluster : public DeserializationCluster { | |
| 3729 public: | |
| 3730 LinkedHashMapDeserializationCluster() { } | |
| 3731 virtual ~LinkedHashMapDeserializationCluster() { } | |
| 3732 | |
| 3733 void ReadAlloc(Deserializer* d) { | |
| 3734 start_index_ = d->next_index(); | |
| 3735 PageSpace* old_space = d->heap()->old_space(); | |
| 3736 intptr_t count = d->Read<intptr_t>(); | |
| 3737 for (intptr_t i = 0; i < count; i++) { | |
| 3738 d->AssignRef(AllocateUninitialized(old_space, | |
| 3739 LinkedHashMap::InstanceSize())); | |
| 3740 } | |
| 3741 stop_index_ = d->next_index(); | |
| 3742 } | |
| 3743 | |
| 3744 void ReadFill(Deserializer* d) { | |
| 3745 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 3746 PageSpace* old_space = d->heap()->old_space(); | |
| 3747 | |
| 3748 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 3749 RawLinkedHashMap* map = reinterpret_cast<RawLinkedHashMap*>(d->Ref(id)); | |
| 3750 bool is_canonical = d->Read<bool>(); | |
| 3751 Deserializer::InitializeHeader(map, kLinkedHashMapCid, | |
| 3752 LinkedHashMap::InstanceSize(), | |
| 3753 is_vm_object, is_canonical); | |
| 3754 | |
| 3755 map->ptr()->type_arguments_ = | |
| 3756 reinterpret_cast<RawTypeArguments*>(d->ReadRef()); | |
| 3757 | |
| 3758 // TODO(rmacnak): Reserve ref ids and co-allocate in ReadAlloc. | |
| 3759 intptr_t pairs = d->Read<intptr_t>(); | |
| 3760 intptr_t used_data = pairs << 1; | |
| 3761 intptr_t data_size = Utils::Maximum( | |
| 3762 Utils::RoundUpToPowerOfTwo(used_data), | |
| 3763 static_cast<uintptr_t>(LinkedHashMap::kInitialIndexSize)); | |
| 3764 | |
| 3765 RawArray* data = reinterpret_cast<RawArray*>( | |
| 3766 AllocateUninitialized(old_space, Array::InstanceSize(data_size))); | |
| 3767 data->ptr()->type_arguments_ = TypeArguments::null(); | |
| 3768 data->ptr()->length_ = Smi::New(data_size); | |
| 3769 intptr_t i; | |
| 3770 for (i = 0; i < used_data; i++) { | |
| 3771 data->ptr()->data()[i] = d->ReadRef(); | |
| 3772 } | |
| 3773 for (; i < data_size; i++) { | |
| 3774 data->ptr()->data()[i] = Object::null(); | |
| 3775 } | |
| 3776 | |
| 3777 map->ptr()->index_ = TypedData::null(); | |
| 3778 map->ptr()->hash_mask_ = Smi::New(0); | |
| 3779 map->ptr()->data_ = data; | |
| 3780 map->ptr()->used_data_ = Smi::New(used_data); | |
| 3781 map->ptr()->deleted_keys_ = Smi::New(0); | |
| 3782 } | |
| 3783 } | |
| 3784 }; | |
| 3785 | |
| 3786 class ArraySerializationCluster : public SerializationCluster { | |
| 3787 public: | |
| 3788 explicit ArraySerializationCluster(intptr_t cid) : cid_(cid) { } | |
| 3789 virtual ~ArraySerializationCluster() { } | |
| 3790 | |
| 3791 void Trace(Serializer* s, RawObject* object) { | |
| 3792 RawArray* array = Array::RawCast(object); | |
| 3793 objects_.Add(array); | |
| 3794 | |
| 3795 s->Push(array->ptr()->type_arguments_); | |
| 3796 intptr_t length = Smi::Value(array->ptr()->length_); | |
| 3797 for (intptr_t i = 0; i < length; i++) { | |
| 3798 s->Push(array->ptr()->data()[i]); | |
| 3799 } | |
| 3800 } | |
| 3801 | |
| 3802 void WriteAlloc(Serializer* s) { | |
| 3803 s->WriteCid(cid_); | |
| 3804 intptr_t count = objects_.length(); | |
| 3805 s->Write<intptr_t>(count); | |
| 3806 for (intptr_t i = 0; i < count; i++) { | |
| 3807 RawArray* array = objects_[i]; | |
| 3808 intptr_t length = Smi::Value(array->ptr()->length_); | |
| 3809 s->Write<intptr_t>(length); | |
| 3810 s->AssignRef(array); | |
| 3811 } | |
| 3812 } | |
| 3813 | |
| 3814 void WriteFill(Serializer* s) { | |
| 3815 intptr_t count = objects_.length(); | |
| 3816 for (intptr_t i = 0; i < count; i++) { | |
| 3817 RawArray* array = objects_[i]; | |
| 3818 intptr_t length = Smi::Value(array->ptr()->length_); | |
| 3819 s->Write<intptr_t>(length); | |
| 3820 s->Write<bool>(array->IsCanonical()); | |
| 3821 s->WriteRef(array->ptr()->type_arguments_); | |
| 3822 for (intptr_t j = 0; j < length; j++) { | |
| 3823 s->WriteRef(array->ptr()->data()[j]); | |
| 3824 } | |
| 3825 } | |
| 3826 } | |
| 3827 | |
| 3828 private: | |
| 3829 intptr_t cid_; | |
| 3830 GrowableArray<RawArray*> objects_; | |
| 3831 }; | |
| 3832 | |
// Deserializes one fixed-length-array cluster (kArrayCid or
// kImmutableArrayCid); mirrors ArraySerializationCluster.
class ArrayDeserializationCluster : public DeserializationCluster {
 public:
  explicit ArrayDeserializationCluster(intptr_t cid) : cid_(cid) { }
  virtual ~ArrayDeserializationCluster() { }

  // Pre-allocates each array using the per-object length written during
  // WriteAlloc.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      intptr_t length = d->Read<intptr_t>();
      d->AssignRef(AllocateUninitialized(old_space,
                                         Array::InstanceSize(length)));
    }
    stop_index_ = d->next_index();
  }

  // Fills in the header, type arguments, length and element refs.
  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawArray* array = reinterpret_cast<RawArray*>(d->Ref(id));
      intptr_t length = d->Read<intptr_t>();
      bool is_canonical = d->Read<bool>();
      Deserializer::InitializeHeader(array, cid_,
                                     Array::InstanceSize(length),
                                     is_vm_object, is_canonical);
      array->ptr()->type_arguments_ =
          reinterpret_cast<RawTypeArguments*>(d->ReadRef());
      array->ptr()->length_ = Smi::New(length);
      for (intptr_t j = 0; j < length; j++) {
        array->ptr()->data()[j] = d->ReadRef();
      }
    }
  }

 private:
  const intptr_t cid_;
};
| 3872 | |
// Serializes mutable-heap one-byte strings. (When the snapshot includes
// code, one-byte strings go through RODataSerializationCluster instead --
// see Serializer::NewClusterForClass.)
class OneByteStringSerializationCluster : public SerializationCluster {
 public:
  OneByteStringSerializationCluster() { }
  virtual ~OneByteStringSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    RawOneByteString* str = reinterpret_cast<RawOneByteString*>(object);
    objects_.Add(str);
  }

  // Phase one: cid, count, and each string's length for allocation sizing.
  void WriteAlloc(Serializer* s) {
    s->WriteCid(kOneByteStringCid);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawOneByteString* str = objects_[i];
      intptr_t length = Smi::Value(str->ptr()->length_);
      s->Write<intptr_t>(length);
      s->AssignRef(str);
    }
  }

  // Phase two: length (again), canonical bit, the cached hash written as a
  // 32-bit value, then the raw character bytes.
  void WriteFill(Serializer* s) {
    intptr_t count = objects_.length();
    for (intptr_t i = 0; i < count; i++) {
      RawOneByteString* str = objects_[i];
      intptr_t length = Smi::Value(str->ptr()->length_);
      s->Write<intptr_t>(length);
      s->Write<bool>(str->IsCanonical());
      intptr_t hash = Smi::Value(str->ptr()->hash_);
      s->Write<int32_t>(hash);
      s->WriteBytes(str->ptr()->data(), length);
    }
  }

 private:
  GrowableArray<RawOneByteString*> objects_;
};
| 3911 | |
| 3912 class OneByteStringDeserializationCluster : public DeserializationCluster { | |
| 3913 public: | |
| 3914 OneByteStringDeserializationCluster() { } | |
| 3915 virtual ~OneByteStringDeserializationCluster() { } | |
| 3916 | |
| 3917 void ReadAlloc(Deserializer* d) { | |
| 3918 start_index_ = d->next_index(); | |
| 3919 PageSpace* old_space = d->heap()->old_space(); | |
| 3920 intptr_t count = d->Read<intptr_t>(); | |
| 3921 for (intptr_t i = 0; i < count; i++) { | |
| 3922 intptr_t length = d->Read<intptr_t>(); | |
| 3923 d->AssignRef(AllocateUninitialized(old_space, | |
| 3924 OneByteString::InstanceSize(length))); | |
| 3925 } | |
| 3926 stop_index_ = d->next_index(); | |
| 3927 } | |
| 3928 | |
| 3929 void ReadFill(Deserializer* d) { | |
| 3930 bool is_vm_object = d->isolate() == Dart::vm_isolate(); | |
| 3931 | |
| 3932 for (intptr_t id = start_index_; id < stop_index_; id++) { | |
| 3933 RawOneByteString* str = reinterpret_cast<RawOneByteString*>(d->Ref(id)); | |
| 3934 intptr_t length = d->Read<intptr_t>(); | |
| 3935 bool is_canonical = d->Read<bool>(); | |
| 3936 Deserializer::InitializeHeader(str, kOneByteStringCid, | |
| 3937 OneByteString::InstanceSize(length), | |
| 3938 is_vm_object, is_canonical); | |
| 3939 str->ptr()->length_ = Smi::New(length); | |
| 3940 str->ptr()->hash_ = Smi::New(d->Read<intptr_t>()); | |
| 3941 for (intptr_t j = 0; j < length; j++) { | |
| 3942 str->ptr()->data()[j] = d->Read<uint8_t>(); | |
| 3943 } | |
| 3944 } | |
| 3945 } | |
| 3946 }; | |
| 3947 | |
// Serializes mutable-heap two-byte strings. (When the snapshot includes
// code, two-byte strings go through RODataSerializationCluster instead --
// see Serializer::NewClusterForClass.)
class TwoByteStringSerializationCluster : public SerializationCluster {
 public:
  TwoByteStringSerializationCluster() { }
  virtual ~TwoByteStringSerializationCluster() { }

  void Trace(Serializer* s, RawObject* object) {
    RawTwoByteString* str = reinterpret_cast<RawTwoByteString*>(object);
    objects_.Add(str);
  }

  // Phase one: cid, count, and each string's length for allocation sizing.
  void WriteAlloc(Serializer* s) {
    s->WriteCid(kTwoByteStringCid);
    intptr_t count = objects_.length();
    s->Write<intptr_t>(count);
    for (intptr_t i = 0; i < count; i++) {
      RawTwoByteString* str = objects_[i];
      intptr_t length = Smi::Value(str->ptr()->length_);
      s->Write<intptr_t>(length);
      s->AssignRef(str);
    }
  }

  // Phase two: length (again), canonical bit, the cached hash as a 32-bit
  // value, then the raw payload (two bytes per character).
  void WriteFill(Serializer* s) {
    intptr_t count = objects_.length();
    for (intptr_t i = 0; i < count; i++) {
      RawTwoByteString* str = objects_[i];
      intptr_t length = Smi::Value(str->ptr()->length_);
      s->Write<intptr_t>(length);
      s->Write<bool>(str->IsCanonical());
      intptr_t hash = Smi::Value(str->ptr()->hash_);
      s->Write<int32_t>(hash);
      s->WriteBytes(reinterpret_cast<uint8_t*>(str->ptr()->data()), length * 2);
    }
  }

 private:
  GrowableArray<RawTwoByteString*> objects_;
};
| 3986 | |
// Deserializes the two-byte-string cluster; mirrors
// TwoByteStringSerializationCluster (hash read as int32_t, payload read
// with ReadBytes -- matching the widths used by the writer).
class TwoByteStringDeserializationCluster : public DeserializationCluster {
 public:
  TwoByteStringDeserializationCluster() { }
  virtual ~TwoByteStringDeserializationCluster() { }

  // Pre-allocates each string using the per-object length written during
  // WriteAlloc.
  void ReadAlloc(Deserializer* d) {
    start_index_ = d->next_index();
    PageSpace* old_space = d->heap()->old_space();
    intptr_t count = d->Read<intptr_t>();
    for (intptr_t i = 0; i < count; i++) {
      intptr_t length = d->Read<intptr_t>();
      d->AssignRef(AllocateUninitialized(old_space,
                                         TwoByteString::InstanceSize(length)));
    }
    stop_index_ = d->next_index();
  }

  void ReadFill(Deserializer* d) {
    bool is_vm_object = d->isolate() == Dart::vm_isolate();

    for (intptr_t id = start_index_; id < stop_index_; id++) {
      RawTwoByteString* str =
          reinterpret_cast<RawTwoByteString*>(d->Ref(id));
      intptr_t length = d->Read<intptr_t>();
      bool is_canonical = d->Read<bool>();
      Deserializer::InitializeHeader(str, kTwoByteStringCid,
                                     TwoByteString::InstanceSize(length),
                                     is_vm_object, is_canonical);
      str->ptr()->length_ = Smi::New(length);
      str->ptr()->hash_ = Smi::New(d->Read<int32_t>());
      uint8_t* cdata = reinterpret_cast<uint8_t*>(str->ptr()->data());
      d->ReadBytes(cdata, length * 2);
    }
  }
};
| 4022 | |
| 4023 | |
// Sets up a serializer for the given snapshot kind. Ref ids start at 1.
// The per-cid cluster table is sized by the isolate's current class table
// and starts out all-NULL: clusters are created lazily by Trace() /
// NewClusterForClass() only for classes that actually have instances in
// the object graph being serialized.
Serializer::Serializer(Thread* thread,
                       Snapshot::Kind kind,
                       uint8_t** buffer,
                       ReAlloc alloc,
                       intptr_t initial_size,
                       InstructionsWriter* instructions_writer)
    : StackResource(thread),
      heap_(thread->isolate()->heap()),
      zone_(thread->zone()),
      kind_(kind),
      stream_(buffer, alloc, initial_size),
      instructions_writer_(instructions_writer),
      clusters_by_cid_(NULL),
      stack_(),
      num_cids_(0),
      num_base_objects_(0),
      num_written_objects_(0),
      next_ref_index_(1) {
  num_cids_ = thread->isolate()->class_table()->NumCids();
  clusters_by_cid_ = new SerializationCluster*[num_cids_];
  for (intptr_t i = 0; i < num_cids_; i++) {
    clusters_by_cid_[i] = NULL;
  }
}
| 4048 | |
| 4049 | |
// Only the cluster pointer array is owned here; the clusters themselves
// are zone-allocated (new (Z) in NewClusterForClass) and die with the zone.
Serializer::~Serializer() {
  delete[] clusters_by_cid_;
}
| 4053 | |
| 4054 | |
// Creates the serialization cluster responsible for instances of 'cid'.
// Called lazily from Trace() the first time an instance of that class is
// encountered, so clusters exist only for classes that occur in the graph.
SerializationCluster* Serializer::NewClusterForClass(intptr_t cid) {
  Zone* Z = zone_;
  // User-defined classes, plain instances, and typed-data views are
  // serialized field-by-field as generic instances. Their class must be in
  // the snapshot too, so push it now.
  if ((cid > kNumPredefinedCids) ||
      (cid == kInstanceCid) ||
      RawObject::IsTypedDataViewClassId(cid)) {
    Push(isolate()->class_table()->At(cid));
    return new (Z) InstanceSerializationCluster(cid);
  }
  if (RawObject::IsExternalTypedDataClassId(cid)) {
    return new (Z) ExternalTypedDataSerializationCluster(cid);
  }
  if (RawObject::IsTypedDataClassId(cid)) {
    return new (Z) TypedDataSerializationCluster(cid);
  }

  switch (cid) {
    case kClassCid: return new (Z) ClassSerializationCluster();
    case kUnresolvedClassCid:
      return new (Z) UnresolvedClassSerializationCluster();
    case kTypeArgumentsCid: return new (Z) TypeArgumentsSerializationCluster();
    case kPatchClassCid: return new (Z) PatchClassSerializationCluster();
    case kFunctionCid: return new (Z) FunctionSerializationCluster();
    case kClosureDataCid: return new (Z) ClosureDataSerializationCluster();
    case kRedirectionDataCid:
      return new (Z) RedirectionDataSerializationCluster();
    case kFieldCid: return new (Z) FieldSerializationCluster();
    case kLiteralTokenCid: return new (Z) LiteralTokenSerializationCluster();
    case kTokenStreamCid: return new (Z) TokenStreamSerializationCluster();
    case kScriptCid: return new (Z) ScriptSerializationCluster();
    case kLibraryCid: return new (Z) LibrarySerializationCluster();
    case kNamespaceCid: return new (Z) NamespaceSerializationCluster();
    case kCodeCid: return new (Z) CodeSerializationCluster();
    case kObjectPoolCid: return new (Z) ObjectPoolSerializationCluster();
    case kPcDescriptorsCid:
      return new (Z) RODataSerializationCluster(kPcDescriptorsCid);
    case kStackmapCid:
      return new (Z) RODataSerializationCluster(kStackmapCid);
    case kExceptionHandlersCid:
      return new (Z) ExceptionHandlersSerializationCluster();
    case kContextCid: return new (Z) ContextSerializationCluster();
    case kContextScopeCid: return new (Z) ContextScopeSerializationCluster();
    case kICDataCid: return new (Z) ICDataSerializationCluster();
    case kMegamorphicCacheCid:
      return new (Z) MegamorphicCacheSerializationCluster();
    case kSubtypeTestCacheCid:
      return new (Z) SubtypeTestCacheSerializationCluster();
    case kLanguageErrorCid:
      return new (Z) LanguageErrorSerializationCluster();
    case kUnhandledExceptionCid:
      return new (Z) UnhandledExceptionSerializationCluster();
    case kLibraryPrefixCid: return new (Z) LibraryPrefixSerializationCluster();
    case kTypeCid: return new (Z) TypeSerializationCluster();
    case kTypeRefCid: return new (Z) TypeRefSerializationCluster();
    case kTypeParameterCid: return new (Z) TypeParameterSerializationCluster();
    case kBoundedTypeCid: return new (Z) BoundedTypeSerializationCluster();
    case kClosureCid: return new (Z) ClosureSerializationCluster();
    case kMintCid: return new (Z) MintSerializationCluster();
    case kBigintCid: return new (Z) BigintSerializationCluster();
    case kDoubleCid: return new (Z) DoubleSerializationCluster();
    case kGrowableObjectArrayCid:
      return new (Z) GrowableObjectArraySerializationCluster();
    case kStacktraceCid: return new (Z) StacktraceSerializationCluster();
    case kRegExpCid: return new (Z) RegExpSerializationCluster();
    case kLinkedHashMapCid: return new (Z) LinkedHashMapSerializationCluster();
    case kArrayCid:
      return new (Z) ArraySerializationCluster(kArrayCid);
    case kImmutableArrayCid:
      return new (Z) ArraySerializationCluster(kImmutableArrayCid);
    // Strings are emitted into the read-only data section when the snapshot
    // also carries code; otherwise they are serialized normally.
    case kOneByteStringCid: {
      if (Snapshot::IncludesCode(kind_)) {
        return new (Z) RODataSerializationCluster(kOneByteStringCid);
      } else {
        return new (Z) OneByteStringSerializationCluster();
      }
    }
    case kTwoByteStringCid: {
      if (Snapshot::IncludesCode(kind_)) {
        return new (Z) RODataSerializationCluster(kTwoByteStringCid);
      } else {
        return new (Z) TwoByteStringSerializationCluster();
      }
    }
    default: break;
  }

  // Reaching here means an object of a cid with no cluster support leaked
  // into the snapshot graph.
  FATAL1("No cluster defined for cid %" Pd, cid);
  return NULL;
}
| 4143 | |
| 4144 | |
| 4145 void Serializer::Trace(RawObject* object) { | |
| 4146 intptr_t cid; | |
| 4147 if (!object->IsHeapObject()) { | |
| 4148 cid = kSmiCid; | |
| 4149 } else { | |
| 4150 cid = object->GetClassId(); | |
| 4151 } | |
| 4152 | |
| 4153 SerializationCluster* cluster = clusters_by_cid_[cid]; | |
| 4154 if (cluster == NULL) { | |
| 4155 cluster = NewClusterForClass(cid); | |
| 4156 clusters_by_cid_[cid] = cluster; | |
| 4157 } | |
| 4158 ASSERT(cluster != NULL); | |
| 4159 cluster->Trace(this, object); | |
| 4160 } | |
| 4161 | |
| 4162 | |
// Writes the VM version string and the feature string at the front of the
// snapshot so Deserializer::VerifyVersionAndFeatures can reject snapshots
// produced by an incompatible VM.
void Serializer::WriteVersionAndFeatures() {
  const char* expected_version = Version::SnapshotString();
  ASSERT(expected_version != NULL);
  const intptr_t version_len = strlen(expected_version);
  // The version is written WITHOUT a terminating NUL; the reader compares
  // exactly version_len bytes (see VerifyVersionAndFeatures).
  WriteBytes(reinterpret_cast<const uint8_t*>(expected_version), version_len);

  const char* expected_features = Dart::FeaturesString(kind_);
  ASSERT(expected_features != NULL);
  const intptr_t features_len = strlen(expected_features);
  // The feature string IS NUL-terminated in the stream (features_len + 1)
  // so the reader can locate its end.
  WriteBytes(reinterpret_cast<const uint8_t*>(expected_features),
             features_len + 1);
  // Dart::FeaturesString returns a malloc'd string; this function owns it.
  free(const_cast<char*>(expected_features));
}
| 4176 | |
| 4177 | |
#if defined(DEBUG)
// Marker value written between snapshot sections in debug builds so that the
// deserializer can assert the reader and writer stay in sync.
static const intptr_t kSectionMarker = 0xABAB;
#endif
| 4181 | |
// Drains the trace stack, then writes the snapshot body: object/cluster
// counts, each cluster's allocation section, then each cluster's fill
// section. The deserializer replays these sections in the same order.
void Serializer::Serialize() {
  // Tracing may push more objects; loop until the worklist is empty.
  while (stack_.length() > 0) {
    Trace(stack_.RemoveLast());
  }

  intptr_t num_clusters = 0;
  for (intptr_t cid = 1; cid < num_cids_; cid++) {
    SerializationCluster* cluster = clusters_by_cid_[cid];
    if (cluster != NULL) {
      num_clusters++;
    }
  }

  intptr_t num_objects = num_base_objects_ + num_written_objects_;

  Write<int32_t>(num_objects);
  Write<int32_t>(num_clusters);

  // Alloc sections: each cluster assigns ref indices to its objects.
  for (intptr_t cid = 1; cid < num_cids_; cid++) {
    SerializationCluster* cluster = clusters_by_cid_[cid];
    if (cluster != NULL) {
      cluster->WriteAlloc(this);
#if defined(DEBUG)
      // Debug cross-check: the reader asserts it reaches the same ref index.
      Write<intptr_t>(next_ref_index_);
#endif
    }
  }

  // We should have assigned a ref to every object we pushed.
  ASSERT((next_ref_index_ - 1) == num_objects);

  // Fill sections: each cluster writes its objects' contents.
  for (intptr_t cid = 1; cid < num_cids_; cid++) {
    SerializationCluster* cluster = clusters_by_cid_[cid];
    if (cluster != NULL) {
      cluster->WriteFill(this);
#if defined(DEBUG)
      Write<intptr_t>(kSectionMarker);
#endif
    }
  }
}
| 4223 | |
| 4224 | |
// Registers the objects created by Object::InitOnce as base objects so they
// are referenced by index rather than written into the snapshot. The order
// here must match Deserializer::AddVMIsolateBaseObjects exactly, since refs
// are matched purely by position.
void Serializer::AddVMIsolateBaseObjects() {
  // These objects are always allocated by Object::InitOnce, so they are not
  // written into the snapshot.

  AddBaseObject(Object::null());
  AddBaseObject(Object::sentinel().raw());
  AddBaseObject(Object::transition_sentinel().raw());
  AddBaseObject(Object::empty_array().raw());
  AddBaseObject(Object::zero_array().raw());
  AddBaseObject(Object::dynamic_type().raw());
  AddBaseObject(Object::void_type().raw());
  AddBaseObject(Bool::True().raw());
  AddBaseObject(Bool::False().raw());
  AddBaseObject(Object::extractor_parameter_types().raw());
  AddBaseObject(Object::extractor_parameter_names().raw());
  AddBaseObject(Object::empty_context_scope().raw());
  AddBaseObject(Object::empty_descriptors().raw());
  AddBaseObject(Object::empty_var_descriptors().raw());
  AddBaseObject(Object::empty_exception_handlers().raw());

  for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) {
    AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i]);
  }
  for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) {
    AddBaseObject(ICData::cached_icdata_arrays_[i]);
  }

  ClassTable* table = isolate()->class_table();
  for (intptr_t cid = kClassCid; cid <= kUnwindErrorCid; cid++) {
    // Error has no class object.
    if (cid != kErrorCid) {
      ASSERT(table->HasValidClassAt(cid));
      AddBaseObject(table->At(cid));
    }
  }
  AddBaseObject(table->At(kDynamicCid));
  AddBaseObject(table->At(kVoidCid));
}
| 4263 | |
| 4264 | |
// Writes the VM isolate snapshot: base objects, then the object graph
// reachable from the roots (symbol table, scripts, and stubs when code is
// included). Returns the number of refs assigned, which becomes the base
// object count for the subsequent isolate snapshot.
intptr_t Serializer::WriteVMSnapshot(const Array& symbols,
                                     const Array& scripts) {
  NoSafepointScope no_safepoint;

  AddVMIsolateBaseObjects();

  // Push roots.
  Push(symbols.raw());
  Push(scripts.raw());
  if (Snapshot::IncludesCode(kind_)) {
    StubCode::Push(this);
  }

  Serialize();

  // Write roots.
  WriteRef(symbols.raw());
  WriteRef(scripts.raw());
  if (Snapshot::IncludesCode(kind_)) {
    StubCode::WriteRef(this);
  }

#if defined(DEBUG)
  Write<intptr_t>(kSectionMarker);
#endif

  // Note we are not clearing the object id table. The full ref table
  // of the vm isolate snapshot serves as the base objects for the
  // regular isolate snapshot.

  // Return the number of objects, -1 accounts for unused ref 0.
  return next_ref_index_ - 1;
}
| 4298 | |
| 4299 | |
// Writes the (regular) isolate snapshot. |num_base_objects| is the ref count
// carried over from WriteVMSnapshot; when it is 0, the base objects are
// recovered from the saved VM isolate object table instead.
void Serializer::WriteFullSnapshot(intptr_t num_base_objects,
                                   ObjectStore* object_store) {
  NoSafepointScope no_safepoint;

  if (num_base_objects == 0) {
    // Unit tests not writing a new vm isolate.
    const Array& base_objects = Object::vm_isolate_snapshot_object_table();
    // Start at 1: ref 0 is illegal (see the matching skip in the reader).
    for (intptr_t i = 1; i < base_objects.Length(); i++) {
      AddBaseObject(base_objects.At(i));
    }
  } else {
    // Base objects carried over from WriteVMIsolateSnapshot.
    num_base_objects_ += num_base_objects;
    next_ref_index_ += num_base_objects;
  }

  // Push roots: every object-store field included in this snapshot kind.
  RawObject** from = object_store->from();
  RawObject** to = object_store->to_snapshot(kind_);
  for (RawObject** p = from; p <= to; p++) {
    Push(*p);
  }

  Serialize();

  // Write roots.
  for (RawObject** p = from; p <= to; p++) {
    WriteRef(*p);
  }

#if defined(DEBUG)
  Write<intptr_t>(kSectionMarker);
#endif

  heap_->ResetObjectIdTable();
}
| 4336 | |
| 4337 | |
// Sets up a deserializer over |buffer|. |instructions_buffer| (and its
// companion |data_buffer|) are required for snapshot kinds that include
// code; when present, an InstructionsReader is created to resolve
// instruction references.
Deserializer::Deserializer(Thread* thread,
                           Snapshot::Kind kind,
                           const uint8_t* buffer,
                           intptr_t size,
                           const uint8_t* instructions_buffer,
                           const uint8_t* data_buffer)
    : StackResource(thread),
      heap_(thread->isolate()->heap()),
      zone_(thread->zone()),
      kind_(kind),
      stream_(buffer, size),
      instructions_reader_(NULL),
      refs_(NULL),
      next_ref_index_(1),  // Ref 0 is unused/illegal.
      clusters_(NULL) {
  if (Snapshot::IncludesCode(kind)) {
    ASSERT(instructions_buffer != NULL);
  }
  if (instructions_buffer != NULL) {
    instructions_reader_ =
        new (zone_) InstructionsReader(instructions_buffer, data_buffer);
  }
}
| 4361 | |
| 4362 | |
Deserializer::~Deserializer() {
  // The cluster objects themselves are zone-allocated; only the array of
  // pointers (allocated with new[] in Prepare) needs explicit deletion.
  delete[] clusters_;
}
| 4366 | |
| 4367 | |
// Reads a class id from the stream and returns the matching deserialization
// cluster. The mapping must mirror Serializer::NewClusterForClass: for each
// cid the reader must construct the cluster type whose wire format the
// writer emitted (e.g. strings become RO-data when code is included).
DeserializationCluster* Deserializer::ReadCluster() {
  intptr_t cid = ReadCid();

  Zone* Z = zone_;
  // User-defined classes, plain instances, and typed-data views all use the
  // generic instance cluster.
  if ((cid > kNumPredefinedCids) ||
      (cid == kInstanceCid) ||
      RawObject::IsTypedDataViewClassId(cid)) {
    return new (Z) InstanceDeserializationCluster(cid);
  }
  if (RawObject::IsExternalTypedDataClassId(cid)) {
    return new (Z) ExternalTypedDataDeserializationCluster(cid);
  }
  if (RawObject::IsTypedDataClassId(cid)) {
    return new (Z) TypedDataDeserializationCluster(cid);
  }

  switch (cid) {
    case kClassCid: return new (Z) ClassDeserializationCluster();
    case kUnresolvedClassCid:
      return new (Z) UnresolvedClassDeserializationCluster();
    case kTypeArgumentsCid:
      return new (Z) TypeArgumentsDeserializationCluster();
    case kPatchClassCid: return new (Z) PatchClassDeserializationCluster();
    case kFunctionCid: return new (Z) FunctionDeserializationCluster();
    case kClosureDataCid: return new (Z) ClosureDataDeserializationCluster();
    case kRedirectionDataCid:
      return new (Z) RedirectionDataDeserializationCluster();
    case kFieldCid: return new (Z) FieldDeserializationCluster();
    case kLiteralTokenCid: return new (Z) LiteralTokenDeserializationCluster();
    case kTokenStreamCid: return new (Z) TokenStreamDeserializationCluster();
    case kScriptCid: return new (Z) ScriptDeserializationCluster();
    case kLibraryCid: return new (Z) LibraryDeserializationCluster();
    case kNamespaceCid: return new (Z) NamespaceDeserializationCluster();
    case kCodeCid: return new (Z) CodeDeserializationCluster();
    case kObjectPoolCid: return new (Z) ObjectPoolDeserializationCluster();
    case kPcDescriptorsCid:
    case kStackmapCid:
      return new (Z) RODataDeserializationCluster();
    case kExceptionHandlersCid:
      return new (Z) ExceptionHandlersDeserializationCluster();
    case kContextCid: return new (Z) ContextDeserializationCluster();
    case kContextScopeCid: return new (Z) ContextScopeDeserializationCluster();
    case kICDataCid: return new (Z) ICDataDeserializationCluster();
    case kMegamorphicCacheCid:
      return new (Z) MegamorphicCacheDeserializationCluster();
    case kSubtypeTestCacheCid:
      return new (Z) SubtypeTestCacheDeserializationCluster();
    case kLanguageErrorCid:
      return new (Z) LanguageErrorDeserializationCluster();
    case kUnhandledExceptionCid:
      return new (Z) UnhandledExceptionDeserializationCluster();
    case kLibraryPrefixCid:
      return new (Z) LibraryPrefixDeserializationCluster();
    case kTypeCid: return new (Z) TypeDeserializationCluster();
    case kTypeRefCid: return new (Z) TypeRefDeserializationCluster();
    case kTypeParameterCid:
      return new (Z) TypeParameterDeserializationCluster();
    case kBoundedTypeCid: return new (Z) BoundedTypeDeserializationCluster();
    case kClosureCid: return new (Z) ClosureDeserializationCluster();
    case kMintCid: return new (Z) MintDeserializationCluster();
    case kBigintCid: return new (Z) BigintDeserializationCluster();
    case kDoubleCid: return new (Z) DoubleDeserializationCluster();
    case kGrowableObjectArrayCid:
      return new (Z) GrowableObjectArrayDeserializationCluster();
    case kStacktraceCid: return new (Z) StacktraceDeserializationCluster();
    case kRegExpCid: return new (Z) RegExpDeserializationCluster();
    case kLinkedHashMapCid:
      return new (Z) LinkedHashMapDeserializationCluster();
    case kArrayCid:
      return new (Z) ArrayDeserializationCluster(kArrayCid);
    case kImmutableArrayCid:
      return new (Z) ArrayDeserializationCluster(kImmutableArrayCid);
    case kOneByteStringCid: {
      // Strings are serialized as read-only data when code is included;
      // match the writer's choice.
      if (Snapshot::IncludesCode(kind_)) {
        return new (Z) RODataDeserializationCluster();
      } else {
        return new (Z) OneByteStringDeserializationCluster();
      }
    }
    case kTwoByteStringCid: {
      if (Snapshot::IncludesCode(kind_)) {
        return new (Z) RODataDeserializationCluster();
      } else {
        return new (Z) TwoByteStringDeserializationCluster();
      }
    }
    default: break;
  }
  FATAL1("No cluster defined for cid %" Pd, cid);
  return NULL;
}
| 4459 | |
| 4460 | |
// Checks the version and feature strings at the front of the snapshot
// against this VM's values. Returns ApiError::null() on success, or an
// ApiError describing the mismatch. On success the stream is advanced past
// both strings (the feature string's trailing NUL included).
RawApiError* Deserializer::VerifyVersionAndFeatures() {
  // If the version string doesn't match, return an error.
  // Note: New things are allocated only if we're going to return an error.

  const char* expected_version = Version::SnapshotString();
  ASSERT(expected_version != NULL);
  const intptr_t version_len = strlen(expected_version);
  if (PendingBytes() < version_len) {
    const intptr_t kMessageBufferSize = 128;
    char message_buffer[kMessageBufferSize];
    OS::SNPrint(message_buffer,
                kMessageBufferSize,
                "No full snapshot version found, expected '%s'",
                expected_version);
    // This can also fail while bringing up the VM isolate, so make sure to
    // allocate the error message in old space.
    const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
    return ApiError::New(msg, Heap::kOld);
  }

  // The version in the stream is not NUL-terminated; compare exactly
  // version_len bytes in place.
  const char* version = reinterpret_cast<const char*>(CurrentBufferAddress());
  ASSERT(version != NULL);
  if (strncmp(version, expected_version, version_len)) {
    const intptr_t kMessageBufferSize = 256;
    char message_buffer[kMessageBufferSize];
    char* actual_version = OS::StrNDup(version, version_len);
    OS::SNPrint(message_buffer,
                kMessageBufferSize,
                "Wrong %s snapshot version, expected '%s' found '%s'",
                (Snapshot::IsFull(kind_)) ? "full" : "script",
                expected_version,
                actual_version);
    free(actual_version);
    // This can also fail while bringing up the VM isolate, so make sure to
    // allocate the error message in old space.
    const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
    return ApiError::New(msg, Heap::kOld);
  }
  Advance(version_len);

  // FeaturesString is malloc'd; it must be freed on every path below.
  const char* expected_features = Dart::FeaturesString(kind_);
  ASSERT(expected_features != NULL);
  const intptr_t expected_len = strlen(expected_features);

  const char* features = reinterpret_cast<const char*>(CurrentBufferAddress());
  ASSERT(features != NULL);
  intptr_t buffer_len = OS::StrNLen(features, PendingBytes());
  if ((buffer_len != expected_len) ||
      strncmp(features, expected_features, expected_len)) {
    const intptr_t kMessageBufferSize = 256;
    char message_buffer[kMessageBufferSize];
    // Truncate the reported actual features to keep the message bounded.
    char* actual_features = OS::StrNDup(features, buffer_len < 128 ? buffer_len
                                                                   : 128);
    OS::SNPrint(message_buffer,
                kMessageBufferSize,
                "Wrong features in snapshot, expected '%s' found '%s'",
                expected_features,
                actual_features);
    free(const_cast<char*>(expected_features));
    free(actual_features);
    // This can also fail while bringing up the VM isolate, so make sure to
    // allocate the error message in old space.
    const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
    return ApiError::New(msg, Heap::kOld);
  }
  free(const_cast<char*>(expected_features));
  // +1 skips the feature string's NUL terminator written by the serializer.
  Advance(expected_len + 1);
  return ApiError::null();
}
| 4530 | |
| 4531 | |
// Reads the object and cluster counts and allocates the cluster array and
// the ref table. refs_ gets num_objects_ + 1 slots because ref index 0 is
// unused.
void Deserializer::Prepare() {
  num_objects_ = Read<int32_t>();
  num_clusters_ = Read<int32_t>();

  clusters_ = new DeserializationCluster*[num_clusters_];
  refs_ = Array::New(num_objects_ + 1, Heap::kOld);
}
| 4539 | |
| 4540 | |
// Reads the snapshot body written by Serializer::Serialize: one alloc pass
// that materializes every object and fills the ref table, then one fill pass
// that populates object contents.
void Deserializer::Deserialize() {
  // TODO(rmacnak): Verify num of base objects.

  {
    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
        Timeline::GetIsolateStream(), "ReadAlloc"));
    for (intptr_t i = 0; i < num_clusters_; i++) {
      clusters_[i] = ReadCluster();
      clusters_[i]->ReadAlloc(this);
#if defined(DEBUG)
      // Debug cross-check against the ref index the serializer recorded.
      intptr_t serializers_next_ref_index_ = Read<intptr_t>();
      ASSERT(serializers_next_ref_index_ == next_ref_index_);
#endif
    }
  }

  // We should have completely filled the ref array.
  ASSERT((next_ref_index_ - 1) == num_objects_);

  {
    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
        Timeline::GetIsolateStream(), "ReadFill"));
    for (intptr_t i = 0; i < num_clusters_; i++) {
      clusters_[i]->ReadFill(this);
#if defined(DEBUG)
      intptr_t section_marker = Read<intptr_t>();
      ASSERT(section_marker == kSectionMarker);
#endif
    }
  }
}
| 4572 | |
// RAII helper that holds the old-space data lock for the duration of a
// scope, so bump allocation during deserialization is exclusive.
class HeapLocker : public StackResource {
 public:
  HeapLocker(Thread* thread, PageSpace* page_space)
      : StackResource(thread), page_space_(page_space) {
    page_space_->AcquireDataLock();
  }
  ~HeapLocker() {
    page_space_->ReleaseDataLock();
  }

 private:
  PageSpace* page_space_;
};
| 4586 | |
| 4587 | |
// Registers the objects created by Object::InitOnce as base objects. The
// order must match Serializer::AddVMIsolateBaseObjects exactly — refs are
// matched by position only.
void Deserializer::AddVMIsolateBaseObjects() {
  // These objects are always allocated by Object::InitOnce, so they are not
  // written into the snapshot.

  AddBaseObject(Object::null());
  AddBaseObject(Object::sentinel().raw());
  AddBaseObject(Object::transition_sentinel().raw());
  AddBaseObject(Object::empty_array().raw());
  AddBaseObject(Object::zero_array().raw());
  AddBaseObject(Object::dynamic_type().raw());
  AddBaseObject(Object::void_type().raw());
  AddBaseObject(Bool::True().raw());
  AddBaseObject(Bool::False().raw());
  AddBaseObject(Object::extractor_parameter_types().raw());
  AddBaseObject(Object::extractor_parameter_names().raw());
  AddBaseObject(Object::empty_context_scope().raw());
  AddBaseObject(Object::empty_descriptors().raw());
  AddBaseObject(Object::empty_var_descriptors().raw());
  AddBaseObject(Object::empty_exception_handlers().raw());

  for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) {
    AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i]);
  }
  for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) {
    AddBaseObject(ICData::cached_icdata_arrays_[i]);
  }

  ClassTable* table = isolate()->class_table();
  for (intptr_t cid = kClassCid; cid <= kUnwindErrorCid; cid++) {
    // Error has no class object.
    if (cid != kErrorCid) {
      ASSERT(table->HasValidClassAt(cid));
      AddBaseObject(table->At(cid));
    }
  }
  AddBaseObject(table->At(kDynamicCid));
  AddBaseObject(table->At(kVoidCid));
}
| 4626 | |
| 4627 | |
// Reads the VM isolate snapshot: base objects, object graph, and the roots
// (symbol table, script list, stubs when code is included). Afterwards the
// full ref table is saved as the VM isolate object table so it can serve as
// the base objects for regular isolate snapshots.
void Deserializer::ReadVMSnapshot() {
  Array& symbol_table = Array::Handle(zone_);
  Array& refs = Array::Handle(zone_);
  Prepare();

  {
    NoSafepointScope no_safepoint;
    // NOTE(review): this lock was carried over from the old full-snapshot
    // reading code; no other thread should be allocating in this heap during
    // startup, so it may be unnecessary — confirm before removing.
    HeapLocker hl(thread(), heap_->old_space());

    AddVMIsolateBaseObjects();

    Deserialize();

    // Read roots.
    symbol_table ^= ReadRef();
    isolate()->object_store()->set_symbol_table(symbol_table);
    ReadRef();  // Script list.
    if (Snapshot::IncludesCode(kind_)) {
      StubCode::ReadRef(this);
    }

#if defined(DEBUG)
    intptr_t section_marker = Read<intptr_t>();
    ASSERT(section_marker == kSectionMarker);
#endif

    // Take ownership of the ref table before leaving the no-safepoint scope.
    refs = refs_;
    refs_ = NULL;
  }

  Symbols::InitOnceFromSnapshot(isolate());

  Object::set_vm_isolate_snapshot_object_table(refs);

#if defined(DEBUG)
  isolate()->ValidateClassTable();
#endif
}
| 4666 | |
// Reads a regular isolate snapshot into |object_store|: seeds the base
// objects from the VM isolate's saved object table, reads the object graph,
// restores the object-store roots, then runs each cluster's post-load hook
// and installs the bootstrap native resolver.
void Deserializer::ReadFullSnapshot(ObjectStore* object_store) {
  Array& refs = Array::Handle();
  Prepare();

  {
    NoSafepointScope no_safepoint;
    HeapLocker hl(thread(), heap_->old_space());

    // N.B.: Skipping index 0 because ref 0 is illegal.
    const Array& base_objects = Object::vm_isolate_snapshot_object_table();
    for (intptr_t i = 1; i < base_objects.Length(); i++) {
      AddBaseObject(base_objects.At(i));
    }

    Deserialize();

    // Read roots: every object-store field included in this snapshot kind.
    RawObject** from = object_store->from();
    RawObject** to = object_store->to_snapshot(kind_);
    for (RawObject** p = from; p <= to; p++) {
      *p = ReadRef();
    }

#if defined(DEBUG)
    intptr_t section_marker = Read<intptr_t>();
    ASSERT(section_marker == kSectionMarker);
#endif

    refs = refs_;
    refs_ = NULL;
  }

#if defined(DEBUG)
  Isolate* isolate = thread()->isolate();
  isolate->ValidateClassTable();
  isolate->heap()->Verify();
#endif

  {
    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
        Timeline::GetIsolateStream(), "PostLoad"));
    for (intptr_t i = 0; i < num_clusters_; i++) {
      clusters_[i]->PostLoad(refs, kind_, zone_);
    }
  }

  // Setup native resolver for bootstrap impl.
  Bootstrap::SetupNativeResolver();
}
| 4716 | |
| 4717 | |
| 4718 // An object visitor which will iterate over all the script objects in the heap | |
| 4719 // and either count them or collect them into an array. This is used during | |
| 4720 // full snapshot generation of the VM isolate to write out all script | |
| 4721 // objects and their accompanying token streams. | |
| 4722 class ScriptVisitor : public ObjectVisitor { | |
| 4723 public: | |
| 4724 explicit ScriptVisitor(Thread* thread) : | |
| 4725 objHandle_(Object::Handle(thread->zone())), | |
| 4726 count_(0), | |
| 4727 scripts_(NULL) {} | |
| 4728 | |
| 4729 ScriptVisitor(Thread* thread, const Array* scripts) : | |
| 4730 objHandle_(Object::Handle(thread->zone())), | |
| 4731 count_(0), | |
| 4732 scripts_(scripts) {} | |
| 4733 | |
| 4734 void VisitObject(RawObject* obj) { | |
| 4735 if (obj->IsScript()) { | |
| 4736 if (scripts_ != NULL) { | |
| 4737 objHandle_ = obj; | |
| 4738 scripts_->SetAt(count_, objHandle_); | |
| 4739 } | |
| 4740 count_ += 1; | |
| 4741 } | |
| 4742 } | |
| 4743 | |
| 4744 intptr_t count() const { return count_; } | |
| 4745 | |
| 4746 private: | |
| 4747 Object& objHandle_; | |
| 4748 intptr_t count_; | |
| 4749 const Array* scripts_; | |
| 4750 }; | |
| 4751 | |
| 4752 | |
| 4753 | |
| 4754 FullSnapshotWriter::FullSnapshotWriter(Snapshot::Kind kind, | |
| 4755 uint8_t** vm_isolate_snapshot_buffer, | |
| 4756 uint8_t** isolate_snapshot_buffer, | |
| 4757 ReAlloc alloc, | |
| 4758 InstructionsWriter* instructions_writer) | |
| 4759 : thread_(Thread::Current()), | |
| 4760 kind_(kind), | |
| 4761 vm_isolate_snapshot_buffer_(vm_isolate_snapshot_buffer), | |
| 4762 isolate_snapshot_buffer_(isolate_snapshot_buffer), | |
| 4763 alloc_(alloc), | |
| 4764 vm_isolate_snapshot_size_(0), | |
| 4765 isolate_snapshot_size_(0), | |
| 4766 instructions_writer_(instructions_writer), | |
| 4767 scripts_(Array::Handle(zone())), | |
| 4768 saved_symbol_table_(Array::Handle(zone())), | |
| 4769 new_vm_symbol_table_(Array::Handle(zone())) { | |
| 4770 ASSERT(isolate_snapshot_buffer_ != NULL); | |
| 4771 ASSERT(alloc_ != NULL); | |
| 4772 ASSERT(isolate() != NULL); | |
| 4773 ASSERT(ClassFinalizer::AllClassesFinalized()); | |
| 4774 ASSERT(isolate() != NULL); | |
| 4775 ASSERT(heap() != NULL); | |
| 4776 ObjectStore* object_store = isolate()->object_store(); | |
| 4777 ASSERT(object_store != NULL); | |
| 4778 | |
| 4779 #if defined(DEBUG) | |
| 4780 // Ensure the class table is valid. | |
| 4781 isolate()->ValidateClassTable(); | |
| 4782 #endif | |
| 4783 // Can't have any mutation happening while we're serializing. | |
| 4784 ASSERT(isolate()->background_compiler() == NULL); | |
| 4785 | |
| 4786 if (vm_isolate_snapshot_buffer != NULL) { | |
| 4787 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), | |
| 4788 Timeline::GetIsolateStream(), "PrepareNewVMIsolate")); | |
| 4789 | |
| 4790 // Collect all the script objects and their accompanying token stream | |
| 4791 // objects into an array so that we can write it out as part of the VM | |
| 4792 // isolate snapshot. We first count the number of script objects, allocate | |
| 4793 // an array and then fill it up with the script objects. | |
| 4794 ScriptVisitor scripts_counter(thread()); | |
| 4795 heap()->IterateOldObjects(&scripts_counter); | |
| 4796 Dart::vm_isolate()->heap()->IterateOldObjects(&scripts_counter); | |
| 4797 intptr_t count = scripts_counter.count(); | |
| 4798 scripts_ = Array::New(count, Heap::kOld); | |
| 4799 ScriptVisitor script_visitor(thread(), &scripts_); | |
| 4800 heap()->IterateOldObjects(&script_visitor); | |
| 4801 Dart::vm_isolate()->heap()->IterateOldObjects(&script_visitor); | |
| 4802 ASSERT(script_visitor.count() == count); | |
| 4803 | |
| 4804 // Tuck away the current symbol table. | |
| 4805 saved_symbol_table_ = object_store->symbol_table(); | |
| 4806 | |
| 4807 // Create a unified symbol table that will be written as the vm isolate's | |
| 4808 // symbol table. | |
| 4809 new_vm_symbol_table_ = Symbols::UnifiedSymbolTable(); | |
| 4810 | |
| 4811 // Create an empty symbol table that will be written as the isolate's symbol | |
| 4812 // table. | |
| 4813 Symbols::SetupSymbolTable(isolate()); | |
| 4814 } else { | |
| 4815 // Reuse the current vm isolate. | |
|
siva
2016/06/30 00:06:08
Why have this empty block here?
rmacnak
2016/06/30 01:39:21
Scoping the comment.
| |
| 4816 } | |
| 4817 } | |
| 4818 | |
FullSnapshotWriter::~FullSnapshotWriter() {
  // We may run Dart code afterwards, restore the symbol table if needed.
  // (It was tucked away in the constructor when a new VM isolate snapshot
  // was being prepared.)
  if (!saved_symbol_table_.IsNull()) {
    isolate()->object_store()->set_symbol_table(saved_symbol_table_);
    saved_symbol_table_ = Array::null();
  }
  new_vm_symbol_table_ = Array::null();
  scripts_ = Array::null();
}
| 4828 | |
| 4829 | |
// Writes the VM isolate snapshot into vm_isolate_snapshot_buffer_ and
// returns the number of objects written, which the isolate snapshot uses as
// its base-object count.
intptr_t FullSnapshotWriter::WriteVmIsolateSnapshot() {
  NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
      Timeline::GetIsolateStream(), "WriteVmIsolateSnapshot"));

  ASSERT(vm_isolate_snapshot_buffer_ != NULL);
  Serializer serializer(thread(),
                        kind_,
                        vm_isolate_snapshot_buffer_,
                        alloc_,
                        kInitialSize,
                        instructions_writer_);

  serializer.ReserveHeader();
  serializer.WriteVersionAndFeatures();
  /*
   * Now Write out the following
   * - the symbol table
   * - all the scripts and token streams for these scripts
   * - the stub code (precompiled snapshots only)
   **/
  intptr_t num_objects = serializer.WriteVMSnapshot(new_vm_symbol_table_,
                                                    scripts_);
  serializer.FillHeader(serializer.kind());

  vm_isolate_snapshot_size_ = serializer.bytes_written();
  return num_objects;
}
| 4857 | |
| 4858 | |
// Writes the regular isolate snapshot into isolate_snapshot_buffer_.
// |num_base_objects| is the ref count produced by WriteVmIsolateSnapshot,
// or 0 when reusing the current VM isolate.
void FullSnapshotWriter::WriteIsolateFullSnapshot(
    intptr_t num_base_objects) {
  NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
      Timeline::GetIsolateStream(), "WriteIsolateFullSnapshot"));

  Serializer serializer(thread(),
                        kind_,
                        isolate_snapshot_buffer_,
                        alloc_,
                        kInitialSize,
                        instructions_writer_);
  ObjectStore* object_store = isolate()->object_store();
  ASSERT(object_store != NULL);

  serializer.ReserveHeader();
  serializer.WriteVersionAndFeatures();
  serializer.WriteFullSnapshot(num_base_objects, object_store);
  serializer.FillHeader(serializer.kind());

  isolate_snapshot_size_ = serializer.bytes_written();
}
| 4880 | |
| 4881 | |
// Top-level entry point: writes the VM isolate snapshot (if requested), then
// the isolate snapshot, then the instructions blob for code-bearing
// snapshots, printing size statistics for the latter.
void FullSnapshotWriter::WriteFullSnapshot() {
  intptr_t num_base_objects;
  if (vm_isolate_snapshot_buffer() != NULL) {
    num_base_objects = WriteVmIsolateSnapshot();
    ASSERT(num_base_objects != 0);
  } else {
    // Reusing the current VM isolate: base objects come from its saved
    // object table inside Serializer::WriteFullSnapshot.
    num_base_objects = 0;
  }

  WriteIsolateFullSnapshot(num_base_objects);

  if (Snapshot::IncludesCode(kind_)) {
    instructions_writer_->Write();

    OS::Print("VMIsolate(CodeSize): %" Pd "\n", VmIsolateSnapshotSize());
    OS::Print("Isolate(CodeSize): %" Pd "\n", IsolateSnapshotSize());
    OS::Print("Instructions(CodeSize): %" Pd "\n",
              instructions_writer_->binary_size());
    intptr_t total = VmIsolateSnapshotSize() +
                     IsolateSnapshotSize() +
                     instructions_writer_->binary_size();
    OS::Print("Total(CodeSize): %" Pd "\n", total);
  }
}
| 4906 | |
| 4907 | |
// Reads a regular isolate snapshot into the current isolate's object store.
// Returns ApiError::null() on success, or an ApiError on version/feature
// mismatch.
RawApiError* IsolateSnapshotReader::ReadFullSnapshot() {
  Deserializer deserializer(thread_,
                            kind_,
                            buffer_,
                            size_,
                            instructions_buffer_,
                            data_buffer_);

  RawApiError* error = deserializer.VerifyVersionAndFeatures();
  if (error != ApiError::null()) {
    return error;
  }

  deserializer.ReadFullSnapshot(thread_->isolate()->object_store());

  return ApiError::null();
}
| 4925 | |
| 4926 | |
// Reads the VM isolate snapshot and records the instructions/data buffers
// globally. Returns ApiError::null() on success, or an ApiError on
// version/feature mismatch.
RawApiError* VmIsolateSnapshotReader::ReadVmIsolateSnapshot() {
  Deserializer deserializer(thread_,
                            kind_,
                            buffer_,
                            size_,
                            instructions_buffer_,
                            data_buffer_);

  RawApiError* error = deserializer.VerifyVersionAndFeatures();
  if (error != ApiError::null()) {
    return error;
  }

  deserializer.ReadVMSnapshot();

  Dart::set_instructions_snapshot_buffer(instructions_buffer_);
  Dart::set_data_snapshot_buffer(data_buffer_);

  return ApiError::null();
}
| 4947 | |
| 4948 } // namespace dart | |
| OLD | NEW |