OLD | NEW |
(Empty) | |
| 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. |
| 4 |
| 5 #include "vm/clustered_snapshot.h" |
| 6 |
| 7 #include "platform/assert.h" |
| 8 #include "vm/bootstrap.h" |
| 9 #include "vm/class_finalizer.h" |
| 10 #include "vm/dart.h" |
| 11 #include "vm/dart_entry.h" |
| 12 #include "vm/exceptions.h" |
| 13 #include "vm/heap.h" |
| 14 #include "vm/lockers.h" |
| 15 #include "vm/longjump.h" |
| 16 #include "vm/native_entry.h" |
| 17 #include "vm/object.h" |
| 18 #include "vm/object_store.h" |
| 19 #include "vm/stub_code.h" |
| 20 #include "vm/symbols.h" |
| 21 #include "vm/timeline.h" |
| 22 #include "vm/version.h" |
| 23 |
| 24 namespace dart { |
| 25 |
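| // Allocates uninitialized old-space memory for a deserialized object, |
| // growing the heap if needed and aborting on allocation failure. The |
| // caller is responsible for initializing the object's header and fields. |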
| 26 static RawObject* AllocateUninitialized(PageSpace* old_space, intptr_t size) { |
| 27 ASSERT(Utils::IsAligned(size, kObjectAlignment)); |
| 28 uword address = old_space->TryAllocateDataBumpLocked(size, |
| 29 PageSpace::kForceGrowth); |
| 30 if (address == 0) { |
| 31 FATAL("Out of memory"); |
| 32 } |
| 33 return reinterpret_cast<RawObject*>(address + kHeapObjectTag); |
| 34 } |
| 35 |
| 36 |
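| // Writes the tag word of a freshly allocated object: class id, size, and |
| // the VM-heap and canonical bits. |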
| 37 void Deserializer::InitializeHeader(RawObject* raw, |
| 38 intptr_t class_id, |
| 39 intptr_t size, |
| 40 bool is_vm_isolate, |
| 41 bool is_canonical) { |
| 42 ASSERT(Utils::IsAligned(size, kObjectAlignment)); |
| 43 uword tags = 0; |
| 44 tags = RawObject::ClassIdTag::update(class_id, tags); |
| 45 tags = RawObject::SizeTag::update(size, tags); |
| 46 tags = RawObject::VMHeapObjectTag::update(is_vm_isolate, tags); |
| 47 tags = RawObject::CanonicalObjectTag::update(is_canonical, tags); |
| 48 raw->ptr()->tags_ = tags; |
| 49 } |
| 50 |
| 51 |
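| // Cluster for Class objects. Classes with predefined cids are not |
| // allocated by the deserializer; they are looked up by id in the class |
| // table. All other classes are allocated and registered when read. |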
| 52 class ClassSerializationCluster : public SerializationCluster { |
| 53 public: |
| 54 ClassSerializationCluster() { } |
| 55 virtual ~ClassSerializationCluster() { } |
| 56 |
| 57 void Trace(Serializer* s, RawObject* object) { |
| 58 RawClass* cls = Class::RawCast(object); |
| 59 intptr_t class_id = cls->ptr()->id_; |
| 60 |
| 61 if (class_id < kNumPredefinedCids) { |
| 62 // These classes are allocated by Object::Init or Object::InitOnce, so the |
| 63 // deserializer must find them in the class table instead of allocating |
| 64 // them. |
| 65 predefined_.Add(cls); |
| 66 } else { |
| 67 objects_.Add(cls); |
| 68 } |
| 69 |
| 70 RawObject** from = cls->from(); |
| 71 RawObject** to = cls->to_snapshot(s->kind()); |
| 72 for (RawObject** p = from; p <= to; p++) { |
| 73 s->Push(*p); |
| 74 } |
| 75 } |
| 76 |
| 77 void WriteAlloc(Serializer* s) { |
| 78 s->WriteCid(kClassCid); |
| 79 intptr_t count = predefined_.length(); |
| 80 s->Write<intptr_t>(count); |
| 81 for (intptr_t i = 0; i < count; i++) { |
| 82 RawClass* cls = predefined_[i]; |
| 83 intptr_t class_id = cls->ptr()->id_; |
| 84 s->Write<intptr_t>(class_id); |
| 85 s->AssignRef(cls); |
| 86 } |
| 87 count = objects_.length(); |
| 88 s->Write<intptr_t>(count); |
| 89 for (intptr_t i = 0; i < count; i++) { |
| 90 RawClass* cls = objects_[i]; |
| 91 s->AssignRef(cls); |
| 92 } |
| 93 } |
| 94 |
| 95 void WriteFill(Serializer* s) { |
| 96 #define WRITE_CLASS() \ |
| 97 RawObject** from = cls->from(); \ |
| 98 RawObject** to = cls->to_snapshot(kind); \ |
| 99 for (RawObject** p = from; p <= to; p++) { \ |
| 100 s->WriteRef(*p); \ |
| 101 } \ |
| 102 intptr_t class_id = cls->ptr()->id_; \ |
| 103 s->WriteCid(class_id); \ |
| 104 s->Write<int32_t>(cls->ptr()->instance_size_in_words_); \ |
| 105 s->Write<int32_t>(cls->ptr()->next_field_offset_in_words_); \ |
| 106 s->Write<int32_t>(cls->ptr()->type_arguments_field_offset_in_words_); \ |
| 107 s->Write<uint16_t>(cls->ptr()->num_type_arguments_); \ |
| 108 s->Write<uint16_t>(cls->ptr()->num_own_type_arguments_); \ |
| 109 s->Write<uint16_t>(cls->ptr()->num_native_fields_); \ |
| 110 s->WriteTokenPosition(cls->ptr()->token_pos_); \ |
| 111 s->Write<uint16_t>(cls->ptr()->state_bits_); \ |
| 112 |
| 113 Snapshot::Kind kind = s->kind(); |
| 114 intptr_t count = predefined_.length(); |
| 115 for (intptr_t i = 0; i < count; i++) { |
| 116 RawClass* cls = predefined_[i]; |
| 117 WRITE_CLASS() |
| 118 } |
| 119 count = objects_.length(); |
| 120 for (intptr_t i = 0; i < count; i++) { |
| 121 RawClass* cls = objects_[i]; |
| 122 WRITE_CLASS() |
| 123 } |
| 124 #undef WRITE_CLASS |
| 125 } |
| 126 |
| 127 private: |
| 128 GrowableArray<RawClass*> predefined_; |
| 129 GrowableArray<RawClass*> objects_; |
| 130 }; |
| 131 |
| 132 |
| 133 class ClassDeserializationCluster : public DeserializationCluster { |
| 134 public: |
| 135 ClassDeserializationCluster() { } |
| 136 virtual ~ClassDeserializationCluster() { } |
| 137 |
| 138 void ReadAlloc(Deserializer* d) { |
| 139 predefined_start_index_ = d->next_index(); |
| 140 PageSpace* old_space = d->heap()->old_space(); |
| 141 intptr_t count = d->Read<intptr_t>(); |
| 142 ClassTable* table = d->isolate()->class_table(); |
| 143 for (intptr_t i = 0; i < count; i++) { |
| 144 intptr_t class_id = d->Read<intptr_t>(); |
| 145 ASSERT(table->HasValidClassAt(class_id)); |
| 146 RawClass* cls = table->At(class_id); |
| 147 ASSERT(cls != NULL); |
| 148 d->AssignRef(cls); |
| 149 } |
| 150 predefined_stop_index_ = d->next_index(); |
| 151 |
| 152 start_index_ = d->next_index(); |
| 153 count = d->Read<intptr_t>(); |
| 154 for (intptr_t i = 0; i < count; i++) { |
| 155 d->AssignRef(AllocateUninitialized(old_space, |
| 156 Class::InstanceSize())); |
| 157 } |
| 158 stop_index_ = d->next_index(); |
| 159 } |
| 160 |
| 161 void ReadFill(Deserializer* d) { |
| 162 Snapshot::Kind kind = d->kind(); |
| 163 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 164 ClassTable* table = d->isolate()->class_table(); |
| 165 |
| 166 for (intptr_t id = predefined_start_index_; |
| 167 id < predefined_stop_index_; |
| 168 id++) { |
| 169 RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id)); |
| 170 RawObject** from = cls->from(); |
| 171 RawObject** to_snapshot = cls->to_snapshot(kind); |
| 172 RawObject** to = cls->to(); |
| 173 for (RawObject** p = from; p <= to_snapshot; p++) { |
| 174 *p = d->ReadRef(); |
| 175 } |
| 176 for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| 177 *p = Object::null(); |
| 178 } |
| 179 |
| 180 intptr_t class_id = d->ReadCid(); |
| 181 cls->ptr()->id_ = class_id; |
| 182 cls->ptr()->instance_size_in_words_ = d->Read<int32_t>(); |
| 183 cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>(); |
| 184 cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>(); |
| 185 cls->ptr()->num_type_arguments_ = d->Read<uint16_t>(); |
| 186 cls->ptr()->num_own_type_arguments_ = d->Read<uint16_t>(); |
| 187 cls->ptr()->num_native_fields_ = d->Read<uint16_t>(); |
| 188 cls->ptr()->token_pos_ = d->ReadTokenPosition(); |
| 189 cls->ptr()->state_bits_ = d->Read<uint16_t>(); |
| 190 } |
| 191 |
| 192 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 193 RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id)); |
| 194 Deserializer::InitializeHeader(cls, kClassCid, Class::InstanceSize(), |
| 195 is_vm_object); |
| 196 RawObject** from = cls->from(); |
| 197 RawObject** to_snapshot = cls->to_snapshot(kind); |
| 198 RawObject** to = cls->to(); |
| 199 for (RawObject** p = from; p <= to_snapshot; p++) { |
| 200 *p = d->ReadRef(); |
| 201 } |
| 202 for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| 203 *p = Object::null(); |
| 204 } |
| 205 |
| 206 intptr_t class_id = d->ReadCid(); |
| 207 |
| 208 ASSERT(class_id >= kNumPredefinedCids); |
| 209 Instance fake; |
| 210 cls->ptr()->handle_vtable_ = fake.vtable(); |
| 211 |
| 212 cls->ptr()->id_ = class_id; |
| 213 cls->ptr()->instance_size_in_words_ = d->Read<int32_t>(); |
| 214 cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>(); |
| 215 cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>(); |
| 216 cls->ptr()->num_type_arguments_ = d->Read<uint16_t>(); |
| 217 cls->ptr()->num_own_type_arguments_ = d->Read<uint16_t>(); |
| 218 cls->ptr()->num_native_fields_ = d->Read<uint16_t>(); |
| 219 cls->ptr()->token_pos_ = d->ReadTokenPosition(); |
| 220 cls->ptr()->state_bits_ = d->Read<uint16_t>(); |
| 221 table->RegisterAt(class_id, cls); |
| 222 } |
| 223 } |
| 224 |
| 225 void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { |
| 226 NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(), |
| 227 Timeline::GetIsolateStream(), "PostLoadClass")); |
| 228 |
| 229 Class& cls = Class::Handle(zone); |
| 230 for (intptr_t i = predefined_start_index_; |
| 231 i < predefined_stop_index_; |
| 232 i++) { |
| 233 cls ^= refs.At(i); |
| 234 cls.RehashConstants(zone); |
| 235 } |
| 236 for (intptr_t i = start_index_; i < stop_index_; i++) { |
| 237 cls ^= refs.At(i); |
| 238 cls.RehashConstants(zone); |
| 239 } |
| 240 } |
| 241 |
| 242 private: |
| 243 intptr_t predefined_start_index_; |
| 244 intptr_t predefined_stop_index_; |
| 245 }; |
| 246 |
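| // Cluster for UnresolvedClass objects: pointer fields followed by the |
| // token position. |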
| 247 class UnresolvedClassSerializationCluster : public SerializationCluster { |
| 248 public: |
| 249 UnresolvedClassSerializationCluster() { } |
| 250 virtual ~UnresolvedClassSerializationCluster() { } |
| 251 |
| 252 void Trace(Serializer* s, RawObject* object) { |
| 253 RawUnresolvedClass* cls = UnresolvedClass::RawCast(object); |
| 254 objects_.Add(cls); |
| 255 |
| 256 RawObject** from = cls->from(); |
| 257 RawObject** to = cls->to(); |
| 258 for (RawObject** p = from; p <= to; p++) { |
| 259 s->Push(*p); |
| 260 } |
| 261 } |
| 262 |
| 263 void WriteAlloc(Serializer* s) { |
| 264 s->WriteCid(kUnresolvedClassCid); |
| 265 intptr_t count = objects_.length(); |
| 266 s->Write<intptr_t>(count); |
| 267 for (intptr_t i = 0; i < count; i++) { |
| 268 RawUnresolvedClass* cls = objects_[i]; |
| 269 s->AssignRef(cls); |
| 270 } |
| 271 } |
| 272 |
| 273 void WriteFill(Serializer* s) { |
| 274 intptr_t count = objects_.length(); |
| 275 s->Write<intptr_t>(count); |
| 276 for (intptr_t i = 0; i < count; i++) { |
| 277 RawUnresolvedClass* cls = objects_[i]; |
| 278 RawObject** from = cls->from(); |
| 279 RawObject** to = cls->to(); |
| 280 for (RawObject** p = from; p <= to; p++) { |
| 281 s->WriteRef(*p); |
| 282 } |
| 283 s->WriteTokenPosition(cls->ptr()->token_pos_); |
| 284 } |
| 285 } |
| 286 |
| 287 private: |
| 288 GrowableArray<RawUnresolvedClass*> objects_; |
| 289 }; |
| 290 |
| 291 class UnresolvedClassDeserializationCluster : public DeserializationCluster { |
| 292 public: |
| 293 UnresolvedClassDeserializationCluster() { } |
| 294 virtual ~UnresolvedClassDeserializationCluster() { } |
| 295 |
| 296 void ReadAlloc(Deserializer* d) { |
| 297 start_index_ = d->next_index(); |
| 298 PageSpace* old_space = d->heap()->old_space(); |
| 299 intptr_t count = d->Read<intptr_t>(); |
| 300 for (intptr_t i = 0; i < count; i++) { |
| 301 d->AssignRef(AllocateUninitialized(old_space, |
| 302 UnresolvedClass::InstanceSize())); |
| 303 } |
| 304 stop_index_ = d->next_index(); |
| 305 } |
| 306 |
| 307 void ReadFill(Deserializer* d) { |
| 308 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 309 |
| 310 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 311 RawUnresolvedClass* cls = |
| 312 reinterpret_cast<RawUnresolvedClass*>(d->Ref(id)); |
| 313 Deserializer::InitializeHeader(cls, kUnresolvedClassCid, |
| 314 UnresolvedClass::InstanceSize(), |
| 315 is_vm_object); |
| 316 RawObject** from = cls->from(); |
| 317 RawObject** to = cls->to(); |
| 318 for (RawObject** p = from; p <= to; p++) { |
| 319 *p = d->ReadRef(); |
| 320 } |
| 321 cls->ptr()->token_pos_ = d->ReadTokenPosition(); |
| 322 } |
| 323 } |
| 324 }; |
| 325 |
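| // Cluster for TypeArguments. These are variable-length, so each object's |
| // length is written in the alloc section to size its allocation. |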
| 326 class TypeArgumentsSerializationCluster : public SerializationCluster { |
| 327 public: |
| 328 TypeArgumentsSerializationCluster() { } |
| 329 virtual ~TypeArgumentsSerializationCluster() { } |
| 330 |
| 331 void Trace(Serializer* s, RawObject* object) { |
| 332 RawTypeArguments* type_args = TypeArguments::RawCast(object); |
| 333 objects_.Add(type_args); |
| 334 |
| 335 s->Push(type_args->ptr()->instantiations_); |
| 336 intptr_t length = Smi::Value(type_args->ptr()->length_); |
| 337 for (intptr_t i = 0; i < length; i++) { |
| 338 s->Push(type_args->ptr()->types()[i]); |
| 339 } |
| 340 } |
| 341 |
| 342 void WriteAlloc(Serializer* s) { |
| 343 s->WriteCid(kTypeArgumentsCid); |
| 344 intptr_t count = objects_.length(); |
| 345 s->Write<intptr_t>(count); |
| 346 for (intptr_t i = 0; i < count; i++) { |
| 347 RawTypeArguments* type_args = objects_[i]; |
| 348 intptr_t length = Smi::Value(type_args->ptr()->length_); |
| 349 s->Write<intptr_t>(length); |
| 350 s->AssignRef(type_args); |
| 351 } |
| 352 } |
| 353 |
| 354 void WriteFill(Serializer* s) { |
| 355 intptr_t count = objects_.length(); |
| 356 for (intptr_t i = 0; i < count; i++) { |
| 357 RawTypeArguments* type_args = objects_[i]; |
| 358 intptr_t length = Smi::Value(type_args->ptr()->length_); |
| 359 s->Write<intptr_t>(length); |
| 360 s->Write<bool>(type_args->IsCanonical()); |
| 361 intptr_t hash = Smi::Value(type_args->ptr()->hash_); |
| 362 s->Write<int32_t>(hash); |
| 363 s->WriteRef(type_args->ptr()->instantiations_); |
| 364 for (intptr_t j = 0; j < length; j++) { |
| 365 s->WriteRef(type_args->ptr()->types()[j]); |
| 366 } |
| 367 } |
| 368 } |
| 369 |
| 370 private: |
| 371 GrowableArray<RawTypeArguments*> objects_; |
| 372 }; |
| 373 |
| 374 |
| 375 class TypeArgumentsDeserializationCluster : public DeserializationCluster { |
| 376 public: |
| 377 TypeArgumentsDeserializationCluster() { } |
| 378 virtual ~TypeArgumentsDeserializationCluster() { } |
| 379 |
| 380 void ReadAlloc(Deserializer* d) { |
| 381 start_index_ = d->next_index(); |
| 382 PageSpace* old_space = d->heap()->old_space(); |
| 383 intptr_t count = d->Read<intptr_t>(); |
| 384 for (intptr_t i = 0; i < count; i++) { |
| 385 intptr_t length = d->Read<intptr_t>(); |
| 386 d->AssignRef(AllocateUninitialized(old_space, |
| 387 TypeArguments::InstanceSize(length))); |
| 388 } |
| 389 stop_index_ = d->next_index(); |
| 390 } |
| 391 |
| 392 void ReadFill(Deserializer* d) { |
| 393 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 394 |
| 395 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 396 RawTypeArguments* type_args = |
| 397 reinterpret_cast<RawTypeArguments*>(d->Ref(id)); |
| 398 intptr_t length = d->Read<intptr_t>(); |
| 399 bool is_canonical = d->Read<bool>(); |
| 400 Deserializer::InitializeHeader(type_args, kTypeArgumentsCid, |
| 401 TypeArguments::InstanceSize(length), |
| 402 is_vm_object, is_canonical); |
| 403 type_args->ptr()->length_ = Smi::New(length); |
| 404 type_args->ptr()->hash_ = Smi::New(d->Read<int32_t>()); |
| 405 type_args->ptr()->instantiations_ = |
| 406 reinterpret_cast<RawArray*>(d->ReadRef()); |
| 407 for (intptr_t j = 0; j < length; j++) { |
| 408 type_args->ptr()->types()[j] = |
| 409 reinterpret_cast<RawAbstractType*>(d->ReadRef()); |
| 410 } |
| 411 } |
| 412 } |
| 413 }; |
| 414 |
| 415 |
| 416 class PatchClassSerializationCluster : public SerializationCluster { |
| 417 public: |
| 418 PatchClassSerializationCluster() { } |
| 419 virtual ~PatchClassSerializationCluster() { } |
| 420 |
| 421 void Trace(Serializer* s, RawObject* object) { |
| 422 RawPatchClass* cls = PatchClass::RawCast(object); |
| 423 objects_.Add(cls); |
| 424 |
| 425 RawObject** from = cls->from(); |
| 426 RawObject** to = cls->to(); |
| 427 for (RawObject** p = from; p <= to; p++) { |
| 428 s->Push(*p); |
| 429 } |
| 430 } |
| 431 |
| 432 void WriteAlloc(Serializer* s) { |
| 433 s->WriteCid(kPatchClassCid); |
| 434 intptr_t count = objects_.length(); |
| 435 s->Write<intptr_t>(count); |
| 436 for (intptr_t i = 0; i < count; i++) { |
| 437 RawPatchClass* cls = objects_[i]; |
| 438 s->AssignRef(cls); |
| 439 } |
| 440 } |
| 441 |
| 442 void WriteFill(Serializer* s) { |
| 443 intptr_t count = objects_.length(); |
| 444 for (intptr_t i = 0; i < count; i++) { |
| 445 RawPatchClass* cls = objects_[i]; |
| 446 RawObject** from = cls->from(); |
| 447 RawObject** to = cls->to(); |
| 448 for (RawObject** p = from; p <= to; p++) { |
| 449 s->WriteRef(*p); |
| 450 } |
| 451 } |
| 452 } |
| 453 |
| 454 private: |
| 455 GrowableArray<RawPatchClass*> objects_; |
| 456 }; |
| 457 |
| 458 class PatchClassDeserializationCluster : public DeserializationCluster { |
| 459 public: |
| 460 PatchClassDeserializationCluster() { } |
| 461 virtual ~PatchClassDeserializationCluster() { } |
| 462 |
| 463 void ReadAlloc(Deserializer* d) { |
| 464 start_index_ = d->next_index(); |
| 465 PageSpace* old_space = d->heap()->old_space(); |
| 466 intptr_t count = d->Read<intptr_t>(); |
| 467 for (intptr_t i = 0; i < count; i++) { |
| 468 d->AssignRef(AllocateUninitialized(old_space, |
| 469 PatchClass::InstanceSize())); |
| 470 } |
| 471 stop_index_ = d->next_index(); |
| 472 } |
| 473 |
| 474 void ReadFill(Deserializer* d) { |
| 475 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 476 |
| 477 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 478 RawPatchClass* cls = reinterpret_cast<RawPatchClass*>(d->Ref(id)); |
| 479 Deserializer::InitializeHeader(cls, kPatchClassCid, |
| 480 PatchClass::InstanceSize(), is_vm_object); |
| 481 RawObject** from = cls->from(); |
| 482 RawObject** to = cls->to(); |
| 483 for (RawObject** p = from; p <= to; p++) { |
| 484 *p = d->ReadRef(); |
| 485 } |
| 486 } |
| 487 } |
| 488 }; |
| 489 |
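| // Cluster for Function objects. The code-related fields serialized depend |
| // on the snapshot kind: kAppNoJIT keeps code_, kAppWithJIT keeps |
| // unoptimized_code_ and ic_data_array_. |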
| 490 class FunctionSerializationCluster : public SerializationCluster { |
| 491 public: |
| 492 FunctionSerializationCluster() { } |
| 493 virtual ~FunctionSerializationCluster() { } |
| 494 |
| 495 void Trace(Serializer* s, RawObject* object) { |
| 496 RawFunction* func = Function::RawCast(object); |
| 497 objects_.Add(func); |
| 498 |
| 499 RawObject** from = func->from(); |
| 500 RawObject** to = func->to_snapshot(); |
| 501 for (RawObject** p = from; p <= to; p++) { |
| 502 s->Push(*p); |
| 503 } |
| 504 if (s->kind() == Snapshot::kAppNoJIT) { |
| 505 s->Push(func->ptr()->code_); |
| 506 } else if (s->kind() == Snapshot::kAppWithJIT) { |
| 507 s->Push(func->ptr()->unoptimized_code_); |
| 508 s->Push(func->ptr()->ic_data_array_); |
| 509 } |
| 510 } |
| 511 |
| 512 void WriteAlloc(Serializer* s) { |
| 513 s->WriteCid(kFunctionCid); |
| 514 intptr_t count = objects_.length(); |
| 515 s->Write<intptr_t>(count); |
| 516 for (intptr_t i = 0; i < count; i++) { |
| 517 RawFunction* func = objects_[i]; |
| 518 s->AssignRef(func); |
| 519 } |
| 520 } |
| 521 |
| 522 void WriteFill(Serializer* s) { |
| 523 Snapshot::Kind kind = s->kind(); |
| 524 intptr_t count = objects_.length(); |
| 525 for (intptr_t i = 0; i < count; i++) { |
| 526 RawFunction* func = objects_[i]; |
| 527 RawObject** from = func->from(); |
| 528 RawObject** to = func->to_snapshot(); |
| 529 for (RawObject** p = from; p <= to; p++) { |
| 530 s->WriteRef(*p); |
| 531 } |
| 532 if (kind == Snapshot::kAppNoJIT) { |
| 533 s->WriteRef(func->ptr()->code_); |
| 534       } else if (kind == Snapshot::kAppWithJIT) { |
| 535 s->WriteRef(func->ptr()->unoptimized_code_); |
| 536 s->WriteRef(func->ptr()->ic_data_array_); |
| 537 } |
| 538 |
| 539 s->WriteTokenPosition(func->ptr()->token_pos_); |
| 540 s->WriteTokenPosition(func->ptr()->end_token_pos_); |
| 541 s->Write<int16_t>(func->ptr()->num_fixed_parameters_); |
| 542 s->Write<int16_t>(func->ptr()->num_optional_parameters_); |
| 543 s->Write<uint32_t>(func->ptr()->kind_tag_); |
| 544 if (kind == Snapshot::kAppNoJIT) { |
| 545 // Omit fields used to support de/reoptimization. |
| 546 } else { |
| 547 bool is_optimized = Code::IsOptimized(func->ptr()->code_); |
| 548 if (is_optimized) { |
| 549 s->Write<int32_t>(FLAG_optimization_counter_threshold); |
| 550 } else { |
| 551 s->Write<int32_t>(0); |
| 552 } |
| 553 s->Write<int8_t>(func->ptr()->deoptimization_counter_); |
| 554 s->Write<uint16_t>(func->ptr()->optimized_instruction_count_); |
| 555 s->Write<uint16_t>(func->ptr()->optimized_call_site_count_); |
| 556 } |
| 557 } |
| 558 } |
| 559 |
| 560 private: |
| 561 GrowableArray<RawFunction*> objects_; |
| 562 }; |
| 563 |
| 564 class FunctionDeserializationCluster : public DeserializationCluster { |
| 565 public: |
| 566 FunctionDeserializationCluster() { } |
| 567 virtual ~FunctionDeserializationCluster() { } |
| 568 |
| 569 void ReadAlloc(Deserializer* d) { |
| 570 start_index_ = d->next_index(); |
| 571 PageSpace* old_space = d->heap()->old_space(); |
| 572 intptr_t count = d->Read<intptr_t>(); |
| 573 for (intptr_t i = 0; i < count; i++) { |
| 574 d->AssignRef(AllocateUninitialized(old_space, |
| 575 Function::InstanceSize())); |
| 576 } |
| 577 stop_index_ = d->next_index(); |
| 578 } |
| 579 |
| 580 void ReadFill(Deserializer* d) { |
| 581 Snapshot::Kind kind = d->kind(); |
| 582 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 583 |
| 584 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 585 RawFunction* func = reinterpret_cast<RawFunction*>(d->Ref(id)); |
| 586 Deserializer::InitializeHeader(func, kFunctionCid, |
| 587 Function::InstanceSize(), is_vm_object); |
| 588 RawObject** from = func->from(); |
| 589 RawObject** to_snapshot = func->to_snapshot(); |
| 590 RawObject** to = func->to(); |
| 591 for (RawObject** p = from; p <= to_snapshot; p++) { |
| 592 *p = d->ReadRef(); |
| 593 } |
| 594 for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| 595 *p = Object::null(); |
| 596 } |
| 597 if (kind == Snapshot::kAppNoJIT) { |
| 598 func->ptr()->code_ = reinterpret_cast<RawCode*>(d->ReadRef()); |
| 599 } else if (kind == Snapshot::kAppWithJIT) { |
| 600 func->ptr()->unoptimized_code_ = |
| 601 reinterpret_cast<RawCode*>(d->ReadRef()); |
| 602 func->ptr()->ic_data_array_ = reinterpret_cast<RawArray*>(d->ReadRef()); |
| 603 } |
| 604 |
| 605 #if defined(DEBUG) |
| 606 func->ptr()->entry_point_ = 0; |
| 607 #endif |
| 608 |
| 609 func->ptr()->token_pos_ = d->ReadTokenPosition(); |
| 610 func->ptr()->end_token_pos_ = d->ReadTokenPosition(); |
| 611 func->ptr()->num_fixed_parameters_ = d->Read<int16_t>(); |
| 612 func->ptr()->num_optional_parameters_ = d->Read<int16_t>(); |
| 613 func->ptr()->kind_tag_ = d->Read<uint32_t>(); |
| 614 if (kind == Snapshot::kAppNoJIT) { |
| 615 // Omit fields used to support de/reoptimization. |
| 616 } else { |
| 617 func->ptr()->usage_counter_ = d->Read<int32_t>(); |
| 618 func->ptr()->deoptimization_counter_ = d->Read<int8_t>(); |
| 619 func->ptr()->optimized_instruction_count_ = d->Read<uint16_t>(); |
| 620 func->ptr()->optimized_call_site_count_ = d->Read<uint16_t>(); |
| 621 } |
| 622 } |
| 623 } |
| 624 |
| 625 void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { |
| 626 NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(), |
| 627 Timeline::GetIsolateStream(), "PostLoadFunction")); |
| 628 |
| 629 if (kind == Snapshot::kAppNoJIT) { |
| 630 Function& func = Function::Handle(zone); |
| 631 for (intptr_t i = start_index_; i < stop_index_; i++) { |
| 632 func ^= refs.At(i); |
| 633 ASSERT(func.raw()->ptr()->code_->IsCode()); |
| 634 uword entry_point = func.raw()->ptr()->code_->ptr()->entry_point_; |
| 635 ASSERT(entry_point != 0); |
| 636 func.raw()->ptr()->entry_point_ = entry_point; |
| 637 } |
| 638 } else if (kind == Snapshot::kAppWithJIT) { |
| 639 Function& func = Function::Handle(zone); |
| 640 Code& code = Code::Handle(zone); |
| 641 for (intptr_t i = start_index_; i < stop_index_; i++) { |
| 642 func ^= refs.At(i); |
| 643 code ^= func.unoptimized_code(); |
| 644 if (!code.IsNull()) { |
| 645 func.SetInstructions(code); |
| 646 func.set_was_compiled(true); |
| 647 } else { |
| 648 func.ClearCode(); |
| 649 func.set_was_compiled(false); |
| 650 } |
| 651 } |
| 652 } else { |
| 653 Function& func = Function::Handle(zone); |
| 654 for (intptr_t i = start_index_; i < stop_index_; i++) { |
| 655 func ^= refs.At(i); |
| 656 func.ClearICDataArray(); |
| 657 func.ClearCode(); |
| 658 func.set_was_compiled(false); |
| 659 } |
| 660 } |
| 661 } |
| 662 }; |
| 663 |
| 664 class ClosureDataSerializationCluster : public SerializationCluster { |
| 665 public: |
| 666 ClosureDataSerializationCluster() { } |
| 667 virtual ~ClosureDataSerializationCluster() { } |
| 668 |
| 669 void Trace(Serializer* s, RawObject* object) { |
| 670 RawClosureData* data = ClosureData::RawCast(object); |
| 671 objects_.Add(data); |
| 672 |
| 673 RawObject** from = data->from(); |
| 674 RawObject** to = data->to(); |
| 675 for (RawObject** p = from; p <= to; p++) { |
| 676 s->Push(*p); |
| 677 } |
| 678 } |
| 679 |
| 680 void WriteAlloc(Serializer* s) { |
| 681 s->WriteCid(kClosureDataCid); |
| 682 intptr_t count = objects_.length(); |
| 683 s->Write<intptr_t>(count); |
| 684 for (intptr_t i = 0; i < count; i++) { |
| 685 RawClosureData* data = objects_[i]; |
| 686 s->AssignRef(data); |
| 687 } |
| 688 } |
| 689 |
| 690 void WriteFill(Serializer* s) { |
| 691 intptr_t count = objects_.length(); |
| 692 for (intptr_t i = 0; i < count; i++) { |
| 693 RawClosureData* data = objects_[i]; |
| 694 RawObject** from = data->from(); |
| 695 RawObject** to = data->to(); |
| 696 for (RawObject** p = from; p <= to; p++) { |
| 697 s->WriteRef(*p); |
| 698 } |
| 699 } |
| 700 } |
| 701 |
| 702 private: |
| 703 GrowableArray<RawClosureData*> objects_; |
| 704 }; |
| 705 |
| 706 class ClosureDataDeserializationCluster : public DeserializationCluster { |
| 707 public: |
| 708 ClosureDataDeserializationCluster() { } |
| 709 virtual ~ClosureDataDeserializationCluster() { } |
| 710 |
| 711 void ReadAlloc(Deserializer* d) { |
| 712 start_index_ = d->next_index(); |
| 713 PageSpace* old_space = d->heap()->old_space(); |
| 714 intptr_t count = d->Read<intptr_t>(); |
| 715 for (intptr_t i = 0; i < count; i++) { |
| 716 d->AssignRef(AllocateUninitialized(old_space, |
| 717 ClosureData::InstanceSize())); |
| 718 } |
| 719 stop_index_ = d->next_index(); |
| 720 } |
| 721 |
| 722 void ReadFill(Deserializer* d) { |
| 723 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 724 |
| 725 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 726 RawClosureData* data = reinterpret_cast<RawClosureData*>(d->Ref(id)); |
| 727 Deserializer::InitializeHeader(data, kClosureDataCid, |
| 728 ClosureData::InstanceSize(), is_vm_object); |
| 729 RawObject** from = data->from(); |
| 730 RawObject** to = data->to(); |
| 731 for (RawObject** p = from; p <= to; p++) { |
| 732 *p = d->ReadRef(); |
| 733 } |
| 734 } |
| 735 } |
| 736 }; |
| 737 |
| 738 class RedirectionDataSerializationCluster : public SerializationCluster { |
| 739 public: |
| 740 RedirectionDataSerializationCluster() { } |
| 741 virtual ~RedirectionDataSerializationCluster() { } |
| 742 |
| 743 void Trace(Serializer* s, RawObject* object) { |
| 744 RawRedirectionData* data = RedirectionData::RawCast(object); |
| 745 objects_.Add(data); |
| 746 |
| 747 RawObject** from = data->from(); |
| 748 RawObject** to = data->to(); |
| 749 for (RawObject** p = from; p <= to; p++) { |
| 750 s->Push(*p); |
| 751 } |
| 752 } |
| 753 |
| 754 void WriteAlloc(Serializer* s) { |
| 755 s->WriteCid(kRedirectionDataCid); |
| 756 intptr_t count = objects_.length(); |
| 757 s->Write<intptr_t>(count); |
| 758 for (intptr_t i = 0; i < count; i++) { |
| 759 RawRedirectionData* data = objects_[i]; |
| 760 s->AssignRef(data); |
| 761 } |
| 762 } |
| 763 |
| 764 void WriteFill(Serializer* s) { |
| 765 intptr_t count = objects_.length(); |
| 766 for (intptr_t i = 0; i < count; i++) { |
| 767 RawRedirectionData* data = objects_[i]; |
| 768 RawObject** from = data->from(); |
| 769 RawObject** to = data->to(); |
| 770 for (RawObject** p = from; p <= to; p++) { |
| 771 s->WriteRef(*p); |
| 772 } |
| 773 } |
| 774 } |
| 775 |
| 776 private: |
| 777 GrowableArray<RawRedirectionData*> objects_; |
| 778 }; |
| 779 |
| 780 class RedirectionDataDeserializationCluster : public DeserializationCluster { |
| 781 public: |
| 782 RedirectionDataDeserializationCluster() { } |
| 783 virtual ~RedirectionDataDeserializationCluster() { } |
| 784 |
| 785 void ReadAlloc(Deserializer* d) { |
| 786 start_index_ = d->next_index(); |
| 787 PageSpace* old_space = d->heap()->old_space(); |
| 788 intptr_t count = d->Read<intptr_t>(); |
| 789 for (intptr_t i = 0; i < count; i++) { |
| 790 d->AssignRef(AllocateUninitialized(old_space, |
| 791 RedirectionData::InstanceSize())); |
| 792 } |
| 793 stop_index_ = d->next_index(); |
| 794 } |
| 795 |
| 796 void ReadFill(Deserializer* d) { |
| 797 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 798 |
| 799 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 800 RawRedirectionData* data = |
| 801 reinterpret_cast<RawRedirectionData*>(d->Ref(id)); |
| 802 Deserializer::InitializeHeader(data, kRedirectionDataCid, |
| 803 RedirectionData::InstanceSize(), |
| 804 is_vm_object); |
| 805 RawObject** from = data->from(); |
| 806 RawObject** to = data->to(); |
| 807 for (RawObject** p = from; p <= to; p++) { |
| 808 *p = d->ReadRef(); |
| 809 } |
| 810 } |
| 811 } |
| 812 }; |
| 813 |
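| // Cluster for Field objects. Static fields carry an initial value and |
| // instance fields carry their offset. Guard state (guarded cid, |
| // nullability, list length) is omitted from kAppNoJIT snapshots. |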
| 814 class FieldSerializationCluster : public SerializationCluster { |
| 815 public: |
| 816 FieldSerializationCluster() { } |
| 817 virtual ~FieldSerializationCluster() { } |
| 818 |
| 819 void Trace(Serializer* s, RawObject* object) { |
| 820 RawField* field = Field::RawCast(object); |
| 821 objects_.Add(field); |
| 822 |
| 823 Snapshot::Kind kind = s->kind(); |
| 824 |
| 825 s->Push(field->ptr()->name_); |
| 826 s->Push(field->ptr()->owner_); |
| 827 s->Push(field->ptr()->type_); |
| 828     // Push the initial static value or field offset. |
| 829 if (Field::StaticBit::decode(field->ptr()->kind_bits_)) { |
| 830 if (kind == Snapshot::kAppNoJIT) { |
| 831 // For precompiled static fields, the value was already reset and |
| 832 // initializer_ now contains a Function. |
| 833 s->Push(field->ptr()->value_.static_value_); |
| 834 } else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) { |
| 835 // Do not reset const fields. |
| 836 s->Push(field->ptr()->value_.static_value_); |
| 837 } else { |
| 838         // Otherwise, push the initial static value. |
| 839 s->Push(field->ptr()->initializer_.saved_value_); |
| 840 } |
| 841 } else { |
| 842 s->Push(field->ptr()->value_.offset_); |
| 843 } |
| 844     // Push the initializer function or saved initial value. |
| 845 if (kind == Snapshot::kAppNoJIT) { |
| 846 s->Push(field->ptr()->initializer_.precompiled_); |
| 847 } else { |
| 848 s->Push(field->ptr()->initializer_.saved_value_); |
| 849 } |
| 850 if (kind != Snapshot::kAppNoJIT) { |
| 851       // Push the guarded list length. |
| 852 s->Push(field->ptr()->guarded_list_length_); |
| 853 } |
| 854 } |
| 855 |
| 856 void WriteAlloc(Serializer* s) { |
| 857 s->WriteCid(kFieldCid); |
| 858 intptr_t count = objects_.length(); |
| 859 s->Write<intptr_t>(count); |
| 860 for (intptr_t i = 0; i < count; i++) { |
| 861 RawField* field = objects_[i]; |
| 862 s->AssignRef(field); |
| 863 } |
| 864 } |
| 865 |
| 866 void WriteFill(Serializer* s) { |
| 867 Snapshot::Kind kind = s->kind(); |
| 868 intptr_t count = objects_.length(); |
| 869 for (intptr_t i = 0; i < count; i++) { |
| 870 RawField* field = objects_[i]; |
| 871 |
| 872 s->WriteRef(field->ptr()->name_); |
| 873 s->WriteRef(field->ptr()->owner_); |
| 874 s->WriteRef(field->ptr()->type_); |
| 875 // Write out the initial static value or field offset. |
| 876 if (Field::StaticBit::decode(field->ptr()->kind_bits_)) { |
| 877 if (kind == Snapshot::kAppNoJIT) { |
| 878 // For precompiled static fields, the value was already reset and |
| 879 // initializer_ now contains a Function. |
| 880 s->WriteRef(field->ptr()->value_.static_value_); |
| 881 } else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) { |
| 882 // Do not reset const fields. |
| 883 s->WriteRef(field->ptr()->value_.static_value_); |
| 884 } else { |
| 885 // Otherwise, for static fields we write out the initial static value. |
| 886 s->WriteRef(field->ptr()->initializer_.saved_value_); |
| 887 } |
| 888 } else { |
| 889 s->WriteRef(field->ptr()->value_.offset_); |
| 890 } |
| 891 // Write out the initializer function or saved initial value. |
| 892 if (kind == Snapshot::kAppNoJIT) { |
| 893 s->WriteRef(field->ptr()->initializer_.precompiled_); |
| 894 } else { |
| 895 s->WriteRef(field->ptr()->initializer_.saved_value_); |
| 896 } |
| 897 if (kind != Snapshot::kAppNoJIT) { |
| 898 // Write out the guarded list length. |
| 899 s->WriteRef(field->ptr()->guarded_list_length_); |
| 900 } |
| 901 |
| 902 if (kind != Snapshot::kAppNoJIT) { |
| 903 s->WriteTokenPosition(field->ptr()->token_pos_); |
| 904 s->WriteCid(field->ptr()->guarded_cid_); |
| 905 s->WriteCid(field->ptr()->is_nullable_); |
| 906 } |
| 907 s->Write<uint8_t>(field->ptr()->kind_bits_); |
| 908 } |
| 909 } |
| 910 |
| 911 private: |
| 912 GrowableArray<RawField*> objects_; |
| 913 }; |
| 914 |
| 915 class FieldDeserializationCluster : public DeserializationCluster { |
| 916 public: |
| 917 FieldDeserializationCluster() { } |
| 918 virtual ~FieldDeserializationCluster() { } |
| 919 |
| 920 void ReadAlloc(Deserializer* d) { |
| 921 start_index_ = d->next_index(); |
| 922 PageSpace* old_space = d->heap()->old_space(); |
| 923 intptr_t count = d->Read<intptr_t>(); |
| 924 for (intptr_t i = 0; i < count; i++) { |
| 925 d->AssignRef(AllocateUninitialized(old_space, Field::InstanceSize())); |
| 926 } |
| 927 stop_index_ = d->next_index(); |
| 928 } |
| 929 |
| 930 void ReadFill(Deserializer* d) { |
| 931 Snapshot::Kind kind = d->kind(); |
| 932 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 933 |
| 934 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 935 RawField* field = reinterpret_cast<RawField*>(d->Ref(id)); |
| 936 Deserializer::InitializeHeader(field, kFieldCid, |
| 937 Field::InstanceSize(), is_vm_object); |
| 938 RawObject** from = field->from(); |
| 939 RawObject** to_snapshot = field->to_snapshot(kind); |
| 940 RawObject** to = field->to(); |
| 941 for (RawObject** p = from; p <= to_snapshot; p++) { |
| 942 *p = d->ReadRef(); |
| 943 } |
| 944 for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| 945 *p = Object::null(); |
| 946 } |
| 947 |
| 948 if (kind != Snapshot::kAppNoJIT) { |
| 949 field->ptr()->token_pos_ = d->ReadTokenPosition(); |
| 950 field->ptr()->guarded_cid_ = d->ReadCid(); |
| 951 field->ptr()->is_nullable_ = d->ReadCid(); |
| 952 } |
| 953 field->ptr()->kind_bits_ = d->Read<uint8_t>(); |
| 954 } |
| 955 } |
| 956 |
| 957 void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { |
| 958 NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(), |
| 959 Timeline::GetIsolateStream(), "PostLoadField")); |
| 960 |
| 961 Field& field = Field::Handle(zone); |
| 962 if (!FLAG_use_field_guards) { |
| 963 for (intptr_t i = start_index_; i < stop_index_; i++) { |
| 964 field ^= refs.At(i); |
| 965 field.set_guarded_cid(kDynamicCid); |
| 966 field.set_is_nullable(true); |
| 967 field.set_guarded_list_length(Field::kNoFixedLength); |
| 968 field.set_guarded_list_length_in_object_offset( |
| 969 Field::kUnknownLengthOffset); |
| 970 } |
| 971 } else { |
| 972 for (intptr_t i = start_index_; i < stop_index_; i++) { |
| 973 field ^= refs.At(i); |
| 974 field.InitializeGuardedListLengthInObjectOffset(); |
| 975 } |
| 976 } |
| 977 } |
| 978 }; |
| 979 |
| 980 class LiteralTokenSerializationCluster : public SerializationCluster { |
| 981 public: |
| 982 LiteralTokenSerializationCluster() { } |
| 983 virtual ~LiteralTokenSerializationCluster() { } |
| 984 |
| 985 void Trace(Serializer* s, RawObject* object) { |
| 986 RawLiteralToken* token = LiteralToken::RawCast(object); |
| 987 objects_.Add(token); |
| 988 |
| 989 RawObject** from = token->from(); |
| 990 RawObject** to = token->to(); |
| 991 for (RawObject** p = from; p <= to; p++) { |
| 992 s->Push(*p); |
| 993 } |
| 994 } |
| 995 |
| 996 void WriteAlloc(Serializer* s) { |
| 997 s->WriteCid(kLiteralTokenCid); |
| 998 intptr_t count = objects_.length(); |
| 999 s->Write<intptr_t>(count); |
| 1000 for (intptr_t i = 0; i < count; i++) { |
| 1001 RawLiteralToken* token = objects_[i]; |
| 1002 s->AssignRef(token); |
| 1003 } |
| 1004 } |
| 1005 |
| 1006 void WriteFill(Serializer* s) { |
| 1007 intptr_t count = objects_.length(); |
| 1008 for (intptr_t i = 0; i < count; i++) { |
| 1009 RawLiteralToken* token = objects_[i]; |
| 1010 RawObject** from = token->from(); |
| 1011 RawObject** to = token->to(); |
| 1012 for (RawObject** p = from; p <= to; p++) { |
| 1013 s->WriteRef(*p); |
| 1014 } |
| 1015 s->Write<int32_t>(token->ptr()->kind_); |
| 1016 } |
| 1017 } |
| 1018 |
| 1019 private: |
| 1020 GrowableArray<RawLiteralToken*> objects_; |
| 1021 }; |
| 1022 |
| 1023 class LiteralTokenDeserializationCluster : public DeserializationCluster { |
| 1024 public: |
| 1025 LiteralTokenDeserializationCluster() { } |
| 1026 virtual ~LiteralTokenDeserializationCluster() { } |
| 1027 |
| 1028 void ReadAlloc(Deserializer* d) { |
| 1029 start_index_ = d->next_index(); |
| 1030 PageSpace* old_space = d->heap()->old_space(); |
| 1031 intptr_t count = d->Read<intptr_t>(); |
| 1032 for (intptr_t i = 0; i < count; i++) { |
| 1033 d->AssignRef(AllocateUninitialized(old_space, |
| 1034 LiteralToken::InstanceSize())); |
| 1035 } |
| 1036 stop_index_ = d->next_index(); |
| 1037 } |
| 1038 |
| 1039 void ReadFill(Deserializer* d) { |
| 1040 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 1041 |
| 1042 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 1043 RawLiteralToken* token = reinterpret_cast<RawLiteralToken*>(d->Ref(id)); |
| 1044 Deserializer::InitializeHeader(token, kLiteralTokenCid, |
| 1045 LiteralToken::InstanceSize(), |
| 1046 is_vm_object); |
| 1047 RawObject** from = token->from(); |
| 1048 RawObject** to = token->to(); |
| 1049 for (RawObject** p = from; p <= to; p++) { |
| 1050 *p = d->ReadRef(); |
| 1051 } |
| 1052 token->ptr()->kind_ = static_cast<Token::Kind>(d->Read<int32_t>()); |
| 1053 } |
| 1054 } |
| 1055 }; |
| 1056 |
| 1057 class TokenStreamSerializationCluster : public SerializationCluster { |
| 1058 public: |
| 1059 TokenStreamSerializationCluster() { } |
| 1060 virtual ~TokenStreamSerializationCluster() { } |
| 1061 |
| 1062 void Trace(Serializer* s, RawObject* object) { |
| 1063 RawTokenStream* stream = TokenStream::RawCast(object); |
| 1064 objects_.Add(stream); |
| 1065 |
| 1066 RawObject** from = stream->from(); |
| 1067 RawObject** to = stream->to(); |
| 1068 for (RawObject** p = from; p <= to; p++) { |
| 1069 s->Push(*p); |
| 1070 } |
| 1071 } |
| 1072 |
| 1073 void WriteAlloc(Serializer* s) { |
| 1074 s->WriteCid(kTokenStreamCid); |
| 1075 intptr_t count = objects_.length(); |
| 1076 s->Write<intptr_t>(count); |
| 1077 for (intptr_t i = 0; i < count; i++) { |
| 1078 RawTokenStream* stream = objects_[i]; |
| 1079 s->AssignRef(stream); |
| 1080 } |
| 1081 } |
| 1082 |
| 1083 void WriteFill(Serializer* s) { |
| 1084 intptr_t count = objects_.length(); |
| 1085 for (intptr_t i = 0; i < count; i++) { |
| 1086 RawTokenStream* stream = objects_[i]; |
| 1087 RawObject** from = stream->from(); |
| 1088 RawObject** to = stream->to(); |
| 1089 for (RawObject** p = from; p <= to; p++) { |
| 1090 s->WriteRef(*p); |
| 1091 } |
| 1092 } |
| 1093 } |
| 1094 |
| 1095 private: |
| 1096 GrowableArray<RawTokenStream*> objects_; |
| 1097 }; |
| 1098 |
| 1099 class TokenStreamDeserializationCluster : public DeserializationCluster { |
| 1100 public: |
| 1101 TokenStreamDeserializationCluster() { } |
| 1102 virtual ~TokenStreamDeserializationCluster() { } |
| 1103 |
| 1104 void ReadAlloc(Deserializer* d) { |
| 1105 start_index_ = d->next_index(); |
| 1106 PageSpace* old_space = d->heap()->old_space(); |
| 1107 intptr_t count = d->Read<intptr_t>(); |
| 1108 for (intptr_t i = 0; i < count; i++) { |
| 1109 d->AssignRef(AllocateUninitialized(old_space, |
| 1110 TokenStream::InstanceSize())); |
| 1111 } |
| 1112 stop_index_ = d->next_index(); |
| 1113 } |
| 1114 |
| 1115 void ReadFill(Deserializer* d) { |
| 1116 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 1117 |
| 1118 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 1119 RawTokenStream* stream = reinterpret_cast<RawTokenStream*>(d->Ref(id)); |
| 1120 Deserializer::InitializeHeader(stream, kTokenStreamCid, |
| 1121 TokenStream::InstanceSize(), is_vm_object); |
| 1122 RawObject** from = stream->from(); |
| 1123 RawObject** to = stream->to(); |
| 1124 for (RawObject** p = from; p <= to; p++) { |
| 1125 *p = d->ReadRef(); |
| 1126 } |
| 1127 } |
| 1128 } |
| 1129 }; |
| 1130 |
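| // Cluster for Script objects. Only fields up to to_snapshot(kind) are |
| // serialized; the rest are nulled on read and the load timestamp is reset. |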
| 1131 class ScriptSerializationCluster : public SerializationCluster { |
| 1132 public: |
| 1133 ScriptSerializationCluster() { } |
| 1134 virtual ~ScriptSerializationCluster() { } |
| 1135 |
| 1136 void Trace(Serializer* s, RawObject* object) { |
| 1137 RawScript* script = Script::RawCast(object); |
| 1138 objects_.Add(script); |
| 1139 |
| 1140 RawObject** from = script->from(); |
| 1141 RawObject** to = script->to_snapshot(s->kind()); |
| 1142 for (RawObject** p = from; p <= to; p++) { |
| 1143 s->Push(*p); |
| 1144 } |
| 1145 } |
| 1146 |
| 1147 void WriteAlloc(Serializer* s) { |
| 1148 s->WriteCid(kScriptCid); |
| 1149 intptr_t count = objects_.length(); |
| 1150 s->Write<intptr_t>(count); |
| 1151 for (intptr_t i = 0; i < count; i++) { |
| 1152 RawScript* script = objects_[i]; |
| 1153 s->AssignRef(script); |
| 1154 } |
| 1155 } |
| 1156 |
| 1157 void WriteFill(Serializer* s) { |
| 1158 Snapshot::Kind kind = s->kind(); |
| 1159 intptr_t count = objects_.length(); |
| 1160 for (intptr_t i = 0; i < count; i++) { |
| 1161 RawScript* script = objects_[i]; |
| 1162 RawObject** from = script->from(); |
| 1163 RawObject** to = script->to_snapshot(kind); |
| 1164 for (RawObject** p = from; p <= to; p++) { |
| 1165 s->WriteRef(*p); |
| 1166 } |
| 1167 |
| 1168 s->Write<int32_t>(script->ptr()->line_offset_); |
| 1169 s->Write<int32_t>(script->ptr()->col_offset_); |
| 1170 s->Write<int8_t>(script->ptr()->kind_); |
| 1171 } |
| 1172 } |
| 1173 |
| 1174 private: |
| 1175 GrowableArray<RawScript*> objects_; |
| 1176 }; |
| 1177 |
| 1178 |
| 1179 class ScriptDeserializationCluster : public DeserializationCluster { |
| 1180 public: |
| 1181 ScriptDeserializationCluster() { } |
| 1182 virtual ~ScriptDeserializationCluster() { } |
| 1183 |
| 1184 void ReadAlloc(Deserializer* d) { |
| 1185 start_index_ = d->next_index(); |
| 1186 PageSpace* old_space = d->heap()->old_space(); |
| 1187 intptr_t count = d->Read<intptr_t>(); |
| 1188 for (intptr_t i = 0; i < count; i++) { |
| 1189 d->AssignRef(AllocateUninitialized(old_space, Script::InstanceSize())); |
| 1190 } |
| 1191 stop_index_ = d->next_index(); |
| 1192 } |
| 1193 |
| 1194 void ReadFill(Deserializer* d) { |
| 1195 Snapshot::Kind kind = d->kind(); |
| 1196 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 1197 |
| 1198 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 1199 RawScript* script = reinterpret_cast<RawScript*>(d->Ref(id)); |
| 1200 Deserializer::InitializeHeader(script, kScriptCid, |
| 1201 Script::InstanceSize(), is_vm_object); |
| 1202 RawObject** from = script->from(); |
| 1203 RawObject** to_snapshot = script->to_snapshot(kind); |
| 1204 RawObject** to = script->to(); |
| 1205 for (RawObject** p = from; p <= to_snapshot; p++) { |
| 1206 *p = d->ReadRef(); |
| 1207 } |
| 1208 for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| 1209 *p = Object::null(); |
| 1210 } |
| 1211 |
| 1212 script->ptr()->line_offset_ = d->Read<int32_t>(); |
| 1213 script->ptr()->col_offset_ = d->Read<int32_t>(); |
| 1214 script->ptr()->kind_ = d->Read<int8_t>(); |
| 1215 script->ptr()->load_timestamp_ = 0; |
| 1216 } |
| 1217 } |
| 1218 }; |
| 1219 |
| 1220 |
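| // Cluster for Library objects. Native entry resolvers are not serialized; |
| // they are cleared on read and each library is marked as being loaded |
| // from a full snapshot. |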
| 1221 class LibrarySerializationCluster : public SerializationCluster { |
| 1222 public: |
| 1223 LibrarySerializationCluster() { } |
| 1224 virtual ~LibrarySerializationCluster() { } |
| 1225 |
| 1226 void Trace(Serializer* s, RawObject* object) { |
| 1227 RawLibrary* lib = Library::RawCast(object); |
| 1228 objects_.Add(lib); |
| 1229 |
| 1230 RawObject** from = lib->from(); |
| 1231 RawObject** to = lib->to_snapshot(); |
| 1232 for (RawObject** p = from; p <= to; p++) { |
| 1233 s->Push(*p); |
| 1234 } |
| 1235 } |
| 1236 |
| 1237 void WriteAlloc(Serializer* s) { |
| 1238 s->WriteCid(kLibraryCid); |
| 1239 intptr_t count = objects_.length(); |
| 1240 s->Write<intptr_t>(count); |
| 1241 for (intptr_t i = 0; i < count; i++) { |
| 1242 RawLibrary* lib = objects_[i]; |
| 1243 s->AssignRef(lib); |
| 1244 } |
| 1245 } |
| 1246 |
| 1247 void WriteFill(Serializer* s) { |
| 1248 intptr_t count = objects_.length(); |
| 1249 for (intptr_t i = 0; i < count; i++) { |
| 1250 RawLibrary* lib = objects_[i]; |
| 1251 RawObject** from = lib->from(); |
| 1252 RawObject** to = lib->to_snapshot(); |
| 1253 for (RawObject** p = from; p <= to; p++) { |
| 1254 s->WriteRef(*p); |
| 1255 } |
| 1256 |
| 1257 s->Write<int32_t>(lib->ptr()->index_); |
| 1258 s->Write<uint16_t>(lib->ptr()->num_imports_); |
| 1259 s->Write<int8_t>(lib->ptr()->load_state_); |
| 1260 s->Write<bool>(lib->ptr()->corelib_imported_); |
| 1261 s->Write<bool>(lib->ptr()->is_dart_scheme_); |
| 1262 s->Write<bool>(lib->ptr()->debuggable_); |
| 1263 } |
| 1264 } |
| 1265 |
| 1266 private: |
| 1267 GrowableArray<RawLibrary*> objects_; |
| 1268 }; |
| 1269 |
| 1270 class LibraryDeserializationCluster : public DeserializationCluster { |
| 1271 public: |
| 1272 LibraryDeserializationCluster() { } |
| 1273 virtual ~LibraryDeserializationCluster() { } |
| 1274 |
| 1275 void ReadAlloc(Deserializer* d) { |
| 1276 start_index_ = d->next_index(); |
| 1277 PageSpace* old_space = d->heap()->old_space(); |
| 1278 intptr_t count = d->Read<intptr_t>(); |
| 1279 for (intptr_t i = 0; i < count; i++) { |
| 1280 d->AssignRef(AllocateUninitialized(old_space, Library::InstanceSize())); |
| 1281 } |
| 1282 stop_index_ = d->next_index(); |
| 1283 } |
| 1284 |
| 1285 void ReadFill(Deserializer* d) { |
| 1286 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 1287 |
| 1288 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 1289 RawLibrary* lib = reinterpret_cast<RawLibrary*>(d->Ref(id)); |
| 1290 Deserializer::InitializeHeader(lib, kLibraryCid, |
| 1291 Library::InstanceSize(), is_vm_object); |
| 1292 RawObject** from = lib->from(); |
| 1293 RawObject** to_snapshot = lib->to_snapshot(); |
| 1294 RawObject** to = lib->to(); |
| 1295 for (RawObject** p = from; p <= to_snapshot; p++) { |
| 1296 *p = d->ReadRef(); |
| 1297 } |
| 1298 for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| 1299 *p = Object::null(); |
| 1300 } |
| 1301 |
| 1302 lib->ptr()->native_entry_resolver_ = NULL; |
| 1303 lib->ptr()->native_entry_symbol_resolver_ = NULL; |
| 1304 lib->ptr()->index_ = d->Read<int32_t>(); |
| 1305 lib->ptr()->num_imports_ = d->Read<uint16_t>(); |
| 1306 lib->ptr()->load_state_ = d->Read<int8_t>(); |
| 1307 lib->ptr()->corelib_imported_ = d->Read<bool>(); |
| 1308 lib->ptr()->is_dart_scheme_ = d->Read<bool>(); |
| 1309 lib->ptr()->debuggable_ = d->Read<bool>(); |
| 1310 lib->ptr()->is_in_fullsnapshot_ = true; |
| 1311 } |
| 1312 } |
| 1313 |
| 1314 void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { |
| 1315 // TODO(rmacnak): This is surprisingly slow, roughly 20% of deserialization |
| 1316 // time for the JIT. Maybe make the lookups happy with a null? |
| 1317 |
| 1318 NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(), |
| 1319 Timeline::GetIsolateStream(), "PostLoadLibrary")); |
| 1320 |
| 1321 Library& lib = Library::Handle(zone); |
| 1322 for (intptr_t i = start_index_; i < stop_index_; i++) { |
| 1323 lib ^= refs.At(i); |
| 1324 const intptr_t kInitialNameCacheSize = 64; |
| 1325 lib.InitResolvedNamesCache(kInitialNameCacheSize); |
| 1326 } |
| 1327 } |
| 1328 }; |
| 1329 |
| 1330 class NamespaceSerializationCluster : public SerializationCluster { |
| 1331 public: |
| 1332 NamespaceSerializationCluster() { } |
| 1333 virtual ~NamespaceSerializationCluster() { } |
| 1334 |
| 1335 void Trace(Serializer* s, RawObject* object) { |
| 1336 RawNamespace* ns = Namespace::RawCast(object); |
| 1337 objects_.Add(ns); |
| 1338 |
| 1339 RawObject** from = ns->from(); |
| 1340 RawObject** to = ns->to(); |
| 1341 for (RawObject** p = from; p <= to; p++) { |
| 1342 s->Push(*p); |
| 1343 } |
| 1344 } |
| 1345 |
| 1346 void WriteAlloc(Serializer* s) { |
| 1347 s->WriteCid(kNamespaceCid); |
| 1348 intptr_t count = objects_.length(); |
| 1349 s->Write<intptr_t>(count); |
| 1350 for (intptr_t i = 0; i < count; i++) { |
| 1351 RawNamespace* ns = objects_[i]; |
| 1352 s->AssignRef(ns); |
| 1353 } |
| 1354 } |
| 1355 |
| 1356 void WriteFill(Serializer* s) { |
| 1357 intptr_t count = objects_.length(); |
| 1358 for (intptr_t i = 0; i < count; i++) { |
| 1359 RawNamespace* ns = objects_[i]; |
| 1360 RawObject** from = ns->from(); |
| 1361 RawObject** to = ns->to(); |
| 1362 for (RawObject** p = from; p <= to; p++) { |
| 1363 s->WriteRef(*p); |
| 1364 } |
| 1365 } |
| 1366 } |
| 1367 |
| 1368 private: |
| 1369 GrowableArray<RawNamespace*> objects_; |
| 1370 }; |
| 1371 |
| 1372 class NamespaceDeserializationCluster : public DeserializationCluster { |
| 1373 public: |
| 1374 NamespaceDeserializationCluster() { } |
| 1375 virtual ~NamespaceDeserializationCluster() { } |
| 1376 |
| 1377 void ReadAlloc(Deserializer* d) { |
| 1378 start_index_ = d->next_index(); |
| 1379 PageSpace* old_space = d->heap()->old_space(); |
| 1380 intptr_t count = d->Read<intptr_t>(); |
| 1381 for (intptr_t i = 0; i < count; i++) { |
| 1382 d->AssignRef(AllocateUninitialized(old_space, Namespace::InstanceSize())); |
| 1383 } |
| 1384 stop_index_ = d->next_index(); |
| 1385 } |
| 1386 |
| 1387 void ReadFill(Deserializer* d) { |
| 1388 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 1389 |
| 1390 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 1391 RawNamespace* ns = reinterpret_cast<RawNamespace*>(d->Ref(id)); |
| 1392 Deserializer::InitializeHeader(ns, kNamespaceCid, |
| 1393 Namespace::InstanceSize(), is_vm_object); |
| 1394 RawObject** from = ns->from(); |
| 1395 RawObject** to = ns->to(); |
| 1396 for (RawObject** p = from; p <= to; p++) { |
| 1397 *p = d->ReadRef(); |
| 1398 } |
| 1399 } |
| 1400 } |
| 1401 }; |
| 1402 |
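| // Cluster for Code objects. Instructions are not serialized here; a text |
| // offset into the instructions image is recorded instead and resolved via |
| // GetInstructionsAt during deserialization. |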
| 1403 class CodeSerializationCluster : public SerializationCluster { |
| 1404 public: |
| 1405 CodeSerializationCluster() { } |
| 1406 virtual ~CodeSerializationCluster() { } |
| 1407 |
| 1408 void Trace(Serializer* s, RawObject* object) { |
| 1409 RawCode* code = Code::RawCast(object); |
| 1410 objects_.Add(code); |
| 1411 |
| 1412 s->Push(code->ptr()->object_pool_); |
| 1413 s->Push(code->ptr()->owner_); |
| 1414 s->Push(code->ptr()->exception_handlers_); |
| 1415 s->Push(code->ptr()->pc_descriptors_); |
| 1416 s->Push(code->ptr()->stackmaps_); |
| 1417 } |
| 1418 |
| 1419 void WriteAlloc(Serializer* s) { |
| 1420 s->WriteCid(kCodeCid); |
| 1421 intptr_t count = objects_.length(); |
| 1422 s->Write<intptr_t>(count); |
| 1423 for (intptr_t i = 0; i < count; i++) { |
| 1424 RawCode* code = objects_[i]; |
| 1425 s->AssignRef(code); |
| 1426 } |
| 1427 } |
| 1428 |
| 1429 void WriteFill(Serializer* s) { |
| 1430 Snapshot::Kind kind = s->kind(); |
| 1431 intptr_t count = objects_.length(); |
| 1432 for (intptr_t i = 0; i < count; i++) { |
| 1433 RawCode* code = objects_[i]; |
| 1434 |
| 1435 intptr_t pointer_offsets_length = |
| 1436 Code::PtrOffBits::decode(code->ptr()->state_bits_); |
| 1437 if (pointer_offsets_length != 0) { |
| 1438 FATAL("Cannot serialize code with embedded pointers"); |
| 1439 } |
| 1440 if (kind == Snapshot::kAppNoJIT) { |
| 1441 // No disabled code in precompilation. |
| 1442 ASSERT(code->ptr()->instructions_ == code->ptr()->active_instructions_); |
| 1443 } else { |
| 1444 ASSERT(kind == Snapshot::kAppWithJIT); |
| 1445 // We never include optimized code in JIT precompilation. Deoptimization |
| 1446 // requires code patching and we cannot patch code that is shared |
| 1447 // between isolates and should not mutate memory allocated by the |
| 1448 // embedder. |
| 1449         bool is_optimized = Code::OptimizedBit::decode(code->ptr()->state_bits_); |
| 1450 if (is_optimized) { |
| 1451 FATAL("Cannot include optimized code in a JIT snapshot"); |
| 1452 } |
| 1453 } |
| 1454 |
| 1455 RawInstructions* instr = code->ptr()->instructions_; |
| 1456 int32_t text_offset = s->GetTextOffset(instr, code); |
| 1457 s->Write<int32_t>(text_offset); |
| 1458 |
| 1459 s->WriteRef(code->ptr()->object_pool_); |
| 1460 s->WriteRef(code->ptr()->owner_); |
| 1461 s->WriteRef(code->ptr()->exception_handlers_); |
| 1462 s->WriteRef(code->ptr()->pc_descriptors_); |
| 1463 s->WriteRef(code->ptr()->stackmaps_); |
| 1464 |
| 1465 s->Write<int32_t>(code->ptr()->state_bits_); |
| 1466 } |
| 1467 } |
| 1468 |
| 1469 private: |
| 1470 GrowableArray<RawCode*> objects_; |
| 1471 }; |
| 1472 |
| 1473 class CodeDeserializationCluster : public DeserializationCluster { |
| 1474 public: |
| 1475 CodeDeserializationCluster() { } |
| 1476 virtual ~CodeDeserializationCluster() { } |
| 1477 |
| 1478 void ReadAlloc(Deserializer* d) { |
| 1479 start_index_ = d->next_index(); |
| 1480 PageSpace* old_space = d->heap()->old_space(); |
| 1481 intptr_t count = d->Read<intptr_t>(); |
| 1482 for (intptr_t i = 0; i < count; i++) { |
| 1483 d->AssignRef(AllocateUninitialized(old_space, Code::InstanceSize(0))); |
| 1484 } |
| 1485 stop_index_ = d->next_index(); |
| 1486 } |
| 1487 |
| 1488 void ReadFill(Deserializer* d) { |
| 1489 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 1490 |
| 1491 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 1492 RawCode* code = reinterpret_cast<RawCode*>(d->Ref(id)); |
| 1493 Deserializer::InitializeHeader(code, kCodeCid, |
| 1494 Code::InstanceSize(0), is_vm_object); |
| 1495 |
| 1496 int32_t text_offset = d->Read<int32_t>(); |
| 1497 RawInstructions* instr = reinterpret_cast<RawInstructions*>( |
| 1498 d->GetInstructionsAt(text_offset) + kHeapObjectTag); |
| 1499 uword entry_point = Instructions::EntryPoint(instr); |
| 1500 |
| 1501 code->ptr()->entry_point_ = entry_point; |
| 1502 code->ptr()->active_instructions_ = instr; |
| 1503 code->ptr()->instructions_ = instr; |
| 1504 code->ptr()->object_pool_ = |
| 1505 reinterpret_cast<RawObjectPool*>(d->ReadRef()); |
| 1506 code->ptr()->owner_ = d->ReadRef(); |
| 1507 code->ptr()->exception_handlers_ = |
| 1508 reinterpret_cast<RawExceptionHandlers*>(d->ReadRef()); |
| 1509 code->ptr()->pc_descriptors_ = |
| 1510 reinterpret_cast<RawPcDescriptors*>(d->ReadRef()); |
| 1511 code->ptr()->stackmaps_ = |
| 1512 reinterpret_cast<RawArray*>(d->ReadRef()); |
| 1513 |
| 1514 code->ptr()->deopt_info_array_ = Array::null(); |
| 1515 code->ptr()->static_calls_target_table_ = Array::null(); |
| 1516 code->ptr()->var_descriptors_ = LocalVarDescriptors::null(); |
| 1517 code->ptr()->inlined_metadata_ = Array::null(); |
| 1518 code->ptr()->code_source_map_ = CodeSourceMap::null(); |
| 1519 code->ptr()->comments_ = Array::null(); |
| 1520 code->ptr()->return_address_metadata_ = Object::null(); |
| 1521 |
| 1522 code->ptr()->compile_timestamp_ = 0; |
| 1523 code->ptr()->state_bits_ = d->Read<int32_t>(); |
| 1524 code->ptr()->lazy_deopt_pc_offset_ = -1; |
| 1525 } |
| 1526 } |
| 1527 }; |
| 1528 |
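| // Cluster for ObjectPool objects. Each entry is written according to its |
| // type in the pool's info array: tagged object reference, raw immediate, |
| // or native entry (left for lazy linking on load). |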
| 1529 class ObjectPoolSerializationCluster : public SerializationCluster { |
| 1530 public: |
| 1531 ObjectPoolSerializationCluster() { } |
| 1532 virtual ~ObjectPoolSerializationCluster() { } |
| 1533 |
| 1534 void Trace(Serializer* s, RawObject* object) { |
| 1535 RawObjectPool* pool = ObjectPool::RawCast(object); |
| 1536 objects_.Add(pool); |
| 1537 |
| 1538 intptr_t length = pool->ptr()->length_; |
| 1539 RawTypedData* info_array = pool->ptr()->info_array_; |
| 1540 |
| 1541 for (intptr_t i = 0; i < length; i++) { |
| 1542 ObjectPool::EntryType entry_type = |
| 1543 static_cast<ObjectPool::EntryType>(info_array->ptr()->data()[i]); |
| 1544 if (entry_type == ObjectPool::kTaggedObject) { |
| 1545 s->Push(pool->ptr()->data()[i].raw_obj_); |
| 1546 } |
| 1547 } |
| 1548 |
| 1549 // We are going to allocate the object pool and its info array together, |
| 1550 // so steal a slot in the refs array to hold it between alloc and fill. |
| 1551 // s->NoteUntracedRef(); |
| 1552 } |
| 1553 |
| 1554 void WriteAlloc(Serializer* s) { |
| 1555 s->WriteCid(kObjectPoolCid); |
| 1556 intptr_t count = objects_.length(); |
| 1557 s->Write<intptr_t>(count); |
| 1558 for (intptr_t i = 0; i < count; i++) { |
| 1559 RawObjectPool* pool = objects_[i]; |
| 1560 /// RawTypedData* info_array = pool->ptr()->info_array_; |
| 1561 intptr_t length = pool->ptr()->length_; |
| 1562 s->Write<intptr_t>(length); |
| 1563 /// s->AssignRefNotTraced(info_array); |
| 1564 s->AssignRef(pool); |
| 1565 } |
| 1566 } |
| 1567 |
| 1568 void WriteFill(Serializer* s) { |
| 1569 intptr_t count = objects_.length(); |
| 1570 for (intptr_t i = 0; i < count; i++) { |
| 1571 RawObjectPool* pool = objects_[i]; |
| 1572 RawTypedData* info_array = pool->ptr()->info_array_; |
| 1573 intptr_t length = pool->ptr()->length_; |
| 1574 s->Write<intptr_t>(length); |
| 1575 for (intptr_t j = 0; j < length; j++) { |
| 1576 ObjectPool::EntryType entry_type = |
| 1577 static_cast<ObjectPool::EntryType>(info_array->ptr()->data()[j]); |
| 1578 s->Write<int8_t>(entry_type); |
| 1579 RawObjectPool::Entry& entry = pool->ptr()->data()[j]; |
| 1580 switch (entry_type) { |
| 1581 case ObjectPool::kTaggedObject: { |
| 1582 #if !defined(TARGET_ARCH_DBC) |
| 1583 if (entry.raw_obj_ == |
| 1584 StubCode::CallNativeCFunction_entry()->code()) { |
| 1585 // Natives can run while precompiling, becoming linked and |
| 1586 // switching their stub. Reset to the initial stub used for |
| 1587 // lazy-linking. |
| 1588 s->WriteRef(StubCode::CallBootstrapCFunction_entry()->code()); |
| 1589 break; |
| 1590 } |
| 1591 #endif |
| 1592 s->WriteRef(entry.raw_obj_); |
| 1593 break; |
| 1594 } |
| 1595 case ObjectPool::kImmediate: { |
| 1596 s->Write<intptr_t>(entry.raw_value_); |
| 1597 break; |
| 1598 } |
| 1599 case ObjectPool::kNativeEntry: { |
| 1600 // Write nothing. Will initialize with the lazy link entry. |
| 1601 #if defined(TARGET_ARCH_DBC) |
| 1602 UNREACHABLE(); // DBC does not support lazy native call linking. |
| 1603 #endif |
| 1604 break; |
| 1605 } |
| 1606 default: |
| 1607 UNREACHABLE(); |
| 1608 } |
| 1609 } |
| 1610 } |
| 1611 } |
| 1612 |
| 1613 private: |
| 1614 GrowableArray<RawObjectPool*> objects_; |
| 1615 }; |
| 1616 |
| 1617 |
| 1618 class ObjectPoolDeserializationCluster : public DeserializationCluster { |
| 1619 public: |
| 1620 ObjectPoolDeserializationCluster() { } |
| 1621 virtual ~ObjectPoolDeserializationCluster() { } |
| 1622 |
| 1623 void ReadAlloc(Deserializer* d) { |
| 1624 start_index_ = d->next_index(); |
| 1625 PageSpace* old_space = d->heap()->old_space(); |
| 1626 intptr_t count = d->Read<intptr_t>(); |
| 1627 for (intptr_t i = 0; i < count; i++) { |
| 1628 intptr_t length = d->Read<intptr_t>(); |
| 1629 /// d->AssignRef(AllocateUninitialized(old_space, |
| 1630 /// TypedData::InstanceSize(length))); |
| 1631 d->AssignRef(AllocateUninitialized(old_space, |
| 1632 ObjectPool::InstanceSize(length))); |
| 1633 } |
| 1634 stop_index_ = d->next_index(); |
| 1635 } |
| 1636 |
| 1637 void ReadFill(Deserializer* d) { |
| 1638 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 1639 PageSpace* old_space = d->heap()->old_space(); |
| 1640 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 1641 intptr_t length = d->Read<intptr_t>(); |
| 1642 /// RawTypedData* info_array = |
| 1643 /// reinterpret_cast<RawTypedData*>(d->Ref(id)); |
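| // Because the combined pool/info-array allocation above is disabled, the |
| // info array is allocated here during fill rather than in ReadAlloc. |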
| 1644 RawTypedData* info_array = reinterpret_cast<RawTypedData*>( |
| 1645 AllocateUninitialized(old_space, TypedData::InstanceSize(length))); |
| 1646 Deserializer::InitializeHeader(info_array, kTypedDataUint8ArrayCid, |
| 1647 TypedData::InstanceSize(length), |
| 1648 is_vm_object); |
| 1649 info_array->ptr()->length_ = Smi::New(length); |
| 1650 RawObjectPool* pool = reinterpret_cast<RawObjectPool*>(d->Ref(id + 0)); |
| 1651 Deserializer::InitializeHeader(pool, kObjectPoolCid, |
| 1652 ObjectPool::InstanceSize(length), |
| 1653 is_vm_object); |
| 1654 pool->ptr()->length_ = length; |
| 1655 pool->ptr()->info_array_ = info_array; |
| 1656 for (intptr_t j = 0; j < length; j++) { |
| 1657 ObjectPool::EntryType entry_type = |
| 1658 static_cast<ObjectPool::EntryType>(d->Read<int8_t>()); |
| 1659 info_array->ptr()->data()[j] = entry_type; |
| 1660 RawObjectPool::Entry& entry = pool->ptr()->data()[j]; |
| 1661 switch (entry_type) { |
| 1662 case ObjectPool::kTaggedObject: |
| 1663 entry.raw_obj_ = d->ReadRef(); |
| 1664 break; |
| 1665 case ObjectPool::kImmediate: |
| 1666 entry.raw_value_ = d->Read<intptr_t>(); |
| 1667 break; |
| 1668 case ObjectPool::kNativeEntry: { |
| 1669 #if !defined(TARGET_ARCH_DBC) |
| 1670 // Read nothing. Initialize with the lazy link entry. |
| 1671 uword new_entry = NativeEntry::LinkNativeCallEntry(); |
| 1672 entry.raw_value_ = static_cast<intptr_t>(new_entry); |
| 1673 #else |
| 1674 UNREACHABLE(); // DBC does not support lazy native call linking. |
| 1675 #endif |
| 1676 break; |
| 1677 } |
| 1678 default: |
| 1679 UNREACHABLE(); |
| 1680 } |
| 1681 } |
| 1682 } |
| 1683 } |
| 1684 }; |
| 1685 |
| 1686 |
| 1687 // Read-only data objects: PcDescriptors, Stackmap, OneByteString, TwoByteString. |
| 1688 class RODataSerializationCluster : public SerializationCluster { |
| 1689 public: |
| 1690 explicit RODataSerializationCluster(intptr_t cid) : cid_(cid) { } |
| 1691 virtual ~RODataSerializationCluster() { } |
| 1692 |
| 1693 void Trace(Serializer* s, RawObject* object) { |
| 1694 objects_.Add(object); |
| 1695 |
| 1696 // A string's hash must already be computed when we write it because it |
| 1697 // will be loaded into read-only memory. |
| 1698 if (cid_ == kOneByteStringCid) { |
| 1699 RawOneByteString* str = static_cast<RawOneByteString*>(object); |
| 1700 if (str->ptr()->hash_ == Smi::New(0)) { |
| 1701 intptr_t hash = String::Hash(str->ptr()->data(), |
| 1702 Smi::Value(str->ptr()->length_)); |
| 1703 str->ptr()->hash_ = Smi::New(hash); |
| 1704 } |
| 1705 ASSERT(str->ptr()->hash_ != Smi::New(0)); |
| 1706 } else if (cid_ == kTwoByteStringCid) { |
| 1707 RawTwoByteString* str = static_cast<RawTwoByteString*>(object); |
| 1708 if (str->ptr()->hash_ == Smi::New(0)) { |
| 1709 intptr_t hash = String::Hash(str->ptr()->data(), |
| 1710 Smi::Value(str->ptr()->length_) * 2); |
| 1711 str->ptr()->hash_ = Smi::New(hash); |
| 1712 } |
| 1713 ASSERT(str->ptr()->hash_ != Smi::New(0)); |
| 1714 } |
| 1715 } |
| 1716 |
| 1717 void WriteAlloc(Serializer* s) { |
| 1718 s->WriteCid(cid_); |
| 1719 intptr_t count = objects_.length(); |
| 1720 s->Write<intptr_t>(count); |
| 1721 for (intptr_t i = 0; i < count; i++) { |
| 1722 RawObject* object = objects_[i]; |
| 1723 int32_t rodata_offset = s->GetRODataOffset(object); |
| 1724 s->Write<int32_t>(rodata_offset); |
| 1725 s->AssignRef(object); |
| 1726 } |
| 1727 } |
| 1728 |
| 1729 void WriteFill(Serializer* s) { |
| 1730 // No-op. |
| 1731 } |
| 1732 |
| 1733 private: |
| 1734 const intptr_t cid_; |
| 1735 GrowableArray<RawObject*> objects_; |
| 1736 }; |
| 1737 |
| 1738 |
| 1739 class RODataDeserializationCluster : public DeserializationCluster { |
| 1740 public: |
| 1741 RODataDeserializationCluster() { } |
| 1742 virtual ~RODataDeserializationCluster() { } |
| 1743 |
| 1744 void ReadAlloc(Deserializer* d) { |
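| // These objects already live in the snapshot's read-only data section, so |
| // record references to them instead of allocating copies. |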
| 1745 intptr_t count = d->Read<intptr_t>(); |
| 1746 for (intptr_t i = 0; i < count; i++) { |
| 1747 int32_t rodata_offset = d->Read<int32_t>(); |
| 1748 d->AssignRef(d->GetObjectAt(rodata_offset)); |
| 1749 } |
| 1750 } |
| 1751 |
| 1752 void ReadFill(Deserializer* d) { |
| 1753 // No-op. |
| 1754 } |
| 1755 }; |
| 1756 |
| 1757 |
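| // LocalVarDescriptors are not expected in snapshots (the Code cluster resets |
| // var_descriptors_ to null on read), so tracing one here is unimplemented. |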
| 1758 class LocalVarDescriptorsSerializationCluster : public SerializationCluster { |
| 1759 public: |
| 1760 LocalVarDescriptorsSerializationCluster() { } |
| 1761 virtual ~LocalVarDescriptorsSerializationCluster() { } |
| 1762 |
| 1763 void Trace(Serializer* s, RawObject* object) { UNIMPLEMENTED(); } |
| 1764 void WriteAlloc(Serializer* s) {} |
| 1765 void WriteFill(Serializer* s) {} |
| 1766 |
| 1767 private: |
| 1768 GrowableArray<RawLocalVarDescriptors*> objects_; |
| 1769 }; |
| 1770 |
| 1771 |
| 1772 class ExceptionHandlersSerializationCluster : public SerializationCluster { |
| 1773 public: |
| 1774 ExceptionHandlersSerializationCluster() { } |
| 1775 virtual ~ExceptionHandlersSerializationCluster() { } |
| 1776 |
| 1777 void Trace(Serializer* s, RawObject* object) { |
| 1778 RawExceptionHandlers* handlers = ExceptionHandlers::RawCast(object); |
| 1779 objects_.Add(handlers); |
| 1780 |
| 1781 s->Push(handlers->ptr()->handled_types_data_); |
| 1782 } |
| 1783 |
| 1784 void WriteAlloc(Serializer* s) { |
| 1785 s->WriteCid(kExceptionHandlersCid); |
| 1786 intptr_t count = objects_.length(); |
| 1787 s->Write<intptr_t>(count); |
| 1788 for (intptr_t i = 0; i < count; i++) { |
| 1789 RawExceptionHandlers* handlers = objects_[i]; |
| 1790 intptr_t length = handlers->ptr()->num_entries_; |
| 1791 s->Write<intptr_t>(length); |
| 1792 s->AssignRef(handlers); |
| 1793 } |
| 1794 } |
| 1795 |
| 1796 void WriteFill(Serializer* s) { |
| 1797 intptr_t count = objects_.length(); |
| 1798 for (intptr_t i = 0; i < count; i++) { |
| 1799 RawExceptionHandlers* handlers = objects_[i]; |
| 1800 intptr_t length = handlers->ptr()->num_entries_; |
| 1801 s->Write<intptr_t>(length); |
| 1802 s->WriteRef(handlers->ptr()->handled_types_data_); |
| 1803 |
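| // HandlerInfo entries contain no object pointers, so they are copied as |
| // raw bytes. |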
| 1804 uint8_t* data = reinterpret_cast<uint8_t*>(handlers->ptr()->data()); |
| 1805 intptr_t length_in_bytes = |
| 1806 length * sizeof(RawExceptionHandlers::HandlerInfo); |
| 1807 s->WriteBytes(data, length_in_bytes); |
| 1808 } |
| 1809 } |
| 1810 |
| 1811 private: |
| 1812 GrowableArray<RawExceptionHandlers*> objects_; |
| 1813 }; |
| 1814 |
| 1815 class ExceptionHandlersDeserializationCluster : public DeserializationCluster { |
| 1816 public: |
| 1817 ExceptionHandlersDeserializationCluster() { } |
| 1818 virtual ~ExceptionHandlersDeserializationCluster() { } |
| 1819 |
| 1820 void ReadAlloc(Deserializer* d) { |
| 1821 start_index_ = d->next_index(); |
| 1822 PageSpace* old_space = d->heap()->old_space(); |
| 1823 intptr_t count = d->Read<intptr_t>(); |
| 1824 for (intptr_t i = 0; i < count; i++) { |
| 1825 intptr_t length = d->Read<intptr_t>(); |
| 1826 d->AssignRef(AllocateUninitialized(old_space, |
| 1827 ExceptionHandlers::InstanceSize(length))); |
| 1828 } |
| 1829 stop_index_ = d->next_index(); |
| 1830 } |
| 1831 |
| 1832 void ReadFill(Deserializer* d) { |
| 1833 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 1834 |
| 1835 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 1836 RawExceptionHandlers* handlers = |
| 1837 reinterpret_cast<RawExceptionHandlers*>(d->Ref(id)); |
| 1838 intptr_t length = d->Read<intptr_t>(); |
| 1839 Deserializer::InitializeHeader(handlers, kExceptionHandlersCid, |
| 1840 ExceptionHandlers::InstanceSize(length), |
| 1841 is_vm_object); |
| 1842 handlers->ptr()->num_entries_ = length; |
| 1843 handlers->ptr()->handled_types_data_ = |
| 1844 reinterpret_cast<RawArray*>(d->ReadRef()); |
| 1845 |
| 1846 uint8_t* data = reinterpret_cast<uint8_t*>(handlers->ptr()->data()); |
| 1847 intptr_t length_in_bytes = |
| 1848 length * sizeof(RawExceptionHandlers::HandlerInfo); |
| 1849 d->ReadBytes(data, length_in_bytes); |
| 1850 } |
| 1851 } |
| 1852 }; |
| 1853 |
| 1854 class ContextSerializationCluster : public SerializationCluster { |
| 1855 public: |
| 1856 ContextSerializationCluster() { } |
| 1857 virtual ~ContextSerializationCluster() { } |
| 1858 |
| 1859 void Trace(Serializer* s, RawObject* object) { |
| 1860 RawContext* context = Context::RawCast(object); |
| 1861 objects_.Add(context); |
| 1862 |
| 1863 s->Push(context->ptr()->parent_); |
| 1864 intptr_t length = context->ptr()->num_variables_; |
| 1865 for (intptr_t i = 0; i < length; i++) { |
| 1866 s->Push(context->ptr()->data()[i]); |
| 1867 } |
| 1868 } |
| 1869 |
| 1870 void WriteAlloc(Serializer* s) { |
| 1871 s->WriteCid(kContextCid); |
| 1872 intptr_t count = objects_.length(); |
| 1873 s->Write<intptr_t>(count); |
| 1874 for (intptr_t i = 0; i < count; i++) { |
| 1875 RawContext* context = objects_[i]; |
| 1876 intptr_t length = context->ptr()->num_variables_; |
| 1877 s->Write<intptr_t>(length); |
| 1878 s->AssignRef(context); |
| 1879 } |
| 1880 } |
| 1881 |
| 1882 void WriteFill(Serializer* s) { |
| 1883 intptr_t count = objects_.length(); |
| 1884 for (intptr_t i = 0; i < count; i++) { |
| 1885 RawContext* context = objects_[i]; |
| 1886 intptr_t length = context->ptr()->num_variables_; |
| 1887 s->Write<intptr_t>(length); |
| 1888 s->WriteRef(context->ptr()->parent_); |
| 1889 for (intptr_t j = 0; j < length; j++) { |
| 1890 s->WriteRef(context->ptr()->data()[j]); |
| 1891 } |
| 1892 } |
| 1893 } |
| 1894 |
| 1895 private: |
| 1896 GrowableArray<RawContext*> objects_; |
| 1897 }; |
| 1898 |
| 1899 class ContextDeserializationCluster : public DeserializationCluster { |
| 1900 public: |
| 1901 ContextDeserializationCluster() { } |
| 1902 virtual ~ContextDeserializationCluster() { } |
| 1903 |
| 1904 void ReadAlloc(Deserializer* d) { |
| 1905 start_index_ = d->next_index(); |
| 1906 PageSpace* old_space = d->heap()->old_space(); |
| 1907 intptr_t count = d->Read<intptr_t>(); |
| 1908 for (intptr_t i = 0; i < count; i++) { |
| 1909 intptr_t length = d->Read<intptr_t>(); |
| 1910 d->AssignRef(AllocateUninitialized(old_space, |
| 1911 Context::InstanceSize(length))); |
| 1912 } |
| 1913 stop_index_ = d->next_index(); |
| 1914 } |
| 1915 |
| 1916 void ReadFill(Deserializer* d) { |
| 1917 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 1918 |
| 1919 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 1920 RawContext* context = reinterpret_cast<RawContext*>(d->Ref(id)); |
| 1921 intptr_t length = d->Read<intptr_t>(); |
| 1922 Deserializer::InitializeHeader(context, kContextCid, |
| 1923 Context::InstanceSize(length), |
| 1924 is_vm_object); |
| 1925 context->ptr()->num_variables_ = length; |
| 1926 context->ptr()->parent_ = reinterpret_cast<RawContext*>(d->ReadRef()); |
| 1927 for (intptr_t j = 0; j < length; j++) { |
| 1928 context->ptr()->data()[j] = d->ReadRef(); |
| 1929 } |
| 1930 } |
| 1931 } |
| 1932 }; |
| 1933 |
| 1934 class ContextScopeSerializationCluster : public SerializationCluster { |
| 1935 public: |
| 1936 ContextScopeSerializationCluster() { } |
| 1937 virtual ~ContextScopeSerializationCluster() { } |
| 1938 |
| 1939 void Trace(Serializer* s, RawObject* object) { |
| 1940 RawContextScope* scope = ContextScope::RawCast(object); |
| 1941 objects_.Add(scope); |
| 1942 |
| 1943 intptr_t length = scope->ptr()->num_variables_; |
| 1944 RawObject** from = scope->from(); |
| 1945 RawObject** to = scope->to(length); |
| 1946 for (RawObject** p = from; p <= to; p++) { |
| 1947 s->Push(*p); |
| 1948 } |
| 1949 } |
| 1950 |
| 1951 void WriteAlloc(Serializer* s) { |
| 1952 s->WriteCid(kContextScopeCid); |
| 1953 intptr_t count = objects_.length(); |
| 1954 s->Write<intptr_t>(count); |
| 1955 for (intptr_t i = 0; i < count; i++) { |
| 1956 RawContextScope* scope = objects_[i]; |
| 1957 intptr_t length = scope->ptr()->num_variables_; |
| 1958 s->Write<intptr_t>(length); |
| 1959 s->AssignRef(scope); |
| 1960 } |
| 1961 } |
| 1962 |
| 1963 void WriteFill(Serializer* s) { |
| 1964 intptr_t count = objects_.length(); |
| 1965 for (intptr_t i = 0; i < count; i++) { |
| 1966 RawContextScope* scope = objects_[i]; |
| 1967 intptr_t length = scope->ptr()->num_variables_; |
| 1968 s->Write<intptr_t>(length); |
| 1969 s->Write<bool>(scope->ptr()->is_implicit_); |
| 1970 RawObject** from = scope->from(); |
| 1971 RawObject** to = scope->to(length); |
| 1972 for (RawObject** p = from; p <= to; p++) { |
| 1973 s->WriteRef(*p); |
| 1974 } |
| 1975 } |
| 1976 } |
| 1977 |
| 1978 private: |
| 1979 GrowableArray<RawContextScope*> objects_; |
| 1980 }; |
| 1981 |
| 1982 class ContextScopeDeserializationCluster : public DeserializationCluster { |
| 1983 public: |
| 1984 ContextScopeDeserializationCluster() { } |
| 1985 virtual ~ContextScopeDeserializationCluster() { } |
| 1986 |
| 1987 void ReadAlloc(Deserializer* d) { |
| 1988 start_index_ = d->next_index(); |
| 1989 PageSpace* old_space = d->heap()->old_space(); |
| 1990 intptr_t count = d->Read<intptr_t>(); |
| 1991 for (intptr_t i = 0; i < count; i++) { |
| 1992 intptr_t length = d->Read<intptr_t>(); |
| 1993 d->AssignRef(AllocateUninitialized(old_space, |
| 1994 ContextScope::InstanceSize(length))); |
| 1995 } |
| 1996 stop_index_ = d->next_index(); |
| 1997 } |
| 1998 |
| 1999 void ReadFill(Deserializer* d) { |
| 2000 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2001 |
| 2002 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2003 RawContextScope* scope = reinterpret_cast<RawContextScope*>(d->Ref(id)); |
| 2004 intptr_t length = d->Read<intptr_t>(); |
| 2005 Deserializer::InitializeHeader(scope, kContextScopeCid, |
| 2006 ContextScope::InstanceSize(length), |
| 2007 is_vm_object); |
| 2008 scope->ptr()->num_variables_ = length; |
| 2009 scope->ptr()->is_implicit_ = d->Read<bool>(); |
| 2010 RawObject** from = scope->from(); |
| 2011 RawObject** to = scope->to(length); |
| 2012 for (RawObject** p = from; p <= to; p++) { |
| 2013 *p = d->ReadRef(); |
| 2014 } |
| 2015 } |
| 2016 } |
| 2017 }; |
| 2018 |
| 2019 |
| 2020 class ICDataSerializationCluster : public SerializationCluster { |
| 2021 public: |
| 2022 ICDataSerializationCluster() { } |
| 2023 virtual ~ICDataSerializationCluster() { } |
| 2024 |
| 2025 void Trace(Serializer* s, RawObject* object) { |
| 2026 RawICData* ic = ICData::RawCast(object); |
| 2027 objects_.Add(ic); |
| 2028 |
| 2029 RawObject** from = ic->from(); |
| 2030 RawObject** to = ic->to_snapshot(s->kind()); |
| 2031 for (RawObject** p = from; p <= to; p++) { |
| 2032 s->Push(*p); |
| 2033 } |
| 2034 } |
| 2035 |
| 2036 void WriteAlloc(Serializer* s) { |
| 2037 s->WriteCid(kICDataCid); |
| 2038 intptr_t count = objects_.length(); |
| 2039 s->Write<intptr_t>(count); |
| 2040 for (intptr_t i = 0; i < count; i++) { |
| 2041 RawICData* ic = objects_[i]; |
| 2042 s->AssignRef(ic); |
| 2043 } |
| 2044 } |
| 2045 |
| 2046 void WriteFill(Serializer* s) { |
| 2047 Snapshot::Kind kind = s->kind(); |
| 2048 intptr_t count = objects_.length(); |
| 2049 for (intptr_t i = 0; i < count; i++) { |
| 2050 RawICData* ic = objects_[i]; |
| 2051 RawObject** from = ic->from(); |
| 2052 RawObject** to = ic->to_snapshot(kind); |
| 2053 for (RawObject** p = from; p <= to; p++) { |
| 2054 s->WriteRef(*p); |
| 2055 } |
| 2056 s->Write<int32_t>(ic->ptr()->deopt_id_); |
| 2057 s->Write<uint32_t>(ic->ptr()->state_bits_); |
| 2058 #if defined(TAG_IC_DATA) |
| 2059 s->Write<intptr_t>(ic->ptr()->tag_); |
| 2060 #endif |
| 2061 } |
| 2062 } |
| 2063 |
| 2064 private: |
| 2065 GrowableArray<RawICData*> objects_; |
| 2066 }; |
| 2067 |
| 2068 class ICDataDeserializationCluster : public DeserializationCluster { |
| 2069 public: |
| 2070 ICDataDeserializationCluster() { } |
| 2071 virtual ~ICDataDeserializationCluster() { } |
| 2072 |
| 2073 void ReadAlloc(Deserializer* d) { |
| 2074 start_index_ = d->next_index(); |
| 2075 PageSpace* old_space = d->heap()->old_space(); |
| 2076 intptr_t count = d->Read<intptr_t>(); |
| 2077 for (intptr_t i = 0; i < count; i++) { |
| 2078 d->AssignRef(AllocateUninitialized(old_space, ICData::InstanceSize())); |
| 2079 } |
| 2080 stop_index_ = d->next_index(); |
| 2081 } |
| 2082 |
| 2083 void ReadFill(Deserializer* d) { |
| 2084 Snapshot::Kind kind = d->kind(); |
| 2085 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2086 |
| 2087 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2088 RawICData* ic = reinterpret_cast<RawICData*>(d->Ref(id)); |
| 2089 Deserializer::InitializeHeader(ic, kICDataCid, |
| 2090 ICData::InstanceSize(), is_vm_object); |
| 2091 RawObject** from = ic->from(); |
| 2092 RawObject** to_snapshot = ic->to_snapshot(kind); |
| 2093 RawObject** to = ic->to(); |
| 2094 for (RawObject** p = from; p <= to_snapshot; p++) { |
| 2095 *p = d->ReadRef(); |
| 2096 } |
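| // Fields beyond to_snapshot(kind) were not serialized; start them out null. |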
| 2097 for (RawObject** p = to_snapshot + 1; p <= to; p++) { |
| 2098 *p = Object::null(); |
| 2099 } |
| 2100 ic->ptr()->deopt_id_ = d->Read<int32_t>(); |
| 2101 ic->ptr()->state_bits_ = d->Read<uint32_t>();  // Written as uint32_t by the serializer. |
| 2102 #if defined(TAG_IC_DATA) |
| 2103 ic->ptr()->tag_ = d->Read<intptr_t>(); |
| 2104 #endif |
| 2105 } |
| 2106 } |
| 2107 |
| 2108 void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { |
| 2109 NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(), |
| 2110 Timeline::GetIsolateStream(), "PostLoadICData")); |
| 2111 |
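| // In precompiled (kAppNoJIT) snapshots, IC entries that target a Code |
| // object are rewritten as Smi-encoded entry points so that calls can use |
| // the instructions directly. |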
| 2112 if (kind == Snapshot::kAppNoJIT) { |
| 2113 ICData& ic = ICData::Handle(zone); |
| 2114 Object& func_or_code = Object::Handle(zone); |
| 2115 Code& code = Code::Handle(zone); |
| 2116 Smi& entry_point = Smi::Handle(zone); |
| 2117 for (intptr_t i = start_index_; i < stop_index_; i++) { |
| 2118 ic ^= refs.At(i); |
| 2119 for (intptr_t j = 0; j < ic.NumberOfChecks(); j++) { |
| 2120 func_or_code = ic.GetTargetOrCodeAt(j); |
| 2121 if (func_or_code.IsCode()) { |
| 2122 code ^= func_or_code.raw(); |
| 2123 entry_point = Smi::FromAlignedAddress(code.EntryPoint()); |
| 2124 ic.SetEntryPointAt(j, entry_point); |
| 2125 } |
| 2126 } |
| 2127 } |
| 2128 } |
| 2129 } |
| 2130 }; |
| 2131 |
| 2132 class MegamorphicCacheSerializationCluster : public SerializationCluster { |
| 2133 public: |
| 2134 MegamorphicCacheSerializationCluster() { } |
| 2135 virtual ~MegamorphicCacheSerializationCluster() { } |
| 2136 |
| 2137 void Trace(Serializer* s, RawObject* object) { |
| 2138 RawMegamorphicCache* cache = MegamorphicCache::RawCast(object); |
| 2139 objects_.Add(cache); |
| 2140 |
| 2141 RawObject** from = cache->from(); |
| 2142 RawObject** to = cache->to(); |
| 2143 for (RawObject** p = from; p <= to; p++) { |
| 2144 s->Push(*p); |
| 2145 } |
| 2146 } |
| 2147 |
| 2148 void WriteAlloc(Serializer* s) { |
| 2149 s->WriteCid(kMegamorphicCacheCid); |
| 2150 intptr_t count = objects_.length(); |
| 2151 s->Write<intptr_t>(count); |
| 2152 for (intptr_t i = 0; i < count; i++) { |
| 2153 RawMegamorphicCache* cache = objects_[i]; |
| 2154 s->AssignRef(cache); |
| 2155 } |
| 2156 } |
| 2157 |
| 2158 void WriteFill(Serializer* s) { |
| 2159 intptr_t count = objects_.length(); |
| 2160 for (intptr_t i = 0; i < count; i++) { |
| 2161 RawMegamorphicCache* cache = objects_[i]; |
| 2162 RawObject** from = cache->from(); |
| 2163 RawObject** to = cache->to(); |
| 2164 for (RawObject** p = from; p <= to; p++) { |
| 2165 s->WriteRef(*p); |
| 2166 } |
| 2167 s->Write<int32_t>(cache->ptr()->filled_entry_count_); |
| 2168 } |
| 2169 } |
| 2170 |
| 2171 private: |
| 2172 GrowableArray<RawMegamorphicCache*> objects_; |
| 2173 }; |
| 2174 |
| 2175 |
| 2176 class MegamorphicCacheDeserializationCluster : public DeserializationCluster { |
| 2177 public: |
| 2178 MegamorphicCacheDeserializationCluster() { } |
| 2179 virtual ~MegamorphicCacheDeserializationCluster() { } |
| 2180 |
| 2181 void ReadAlloc(Deserializer* d) { |
| 2182 start_index_ = d->next_index(); |
| 2183 PageSpace* old_space = d->heap()->old_space(); |
| 2184 intptr_t count = d->Read<intptr_t>(); |
| 2185 for (intptr_t i = 0; i < count; i++) { |
| 2186 d->AssignRef(AllocateUninitialized(old_space, |
| 2187 MegamorphicCache::InstanceSize())); |
| 2188 } |
| 2189 stop_index_ = d->next_index(); |
| 2190 } |
| 2191 |
| 2192 void ReadFill(Deserializer* d) { |
| 2193 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2194 |
| 2195 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2196 RawMegamorphicCache* cache = |
| 2197 reinterpret_cast<RawMegamorphicCache*>(d->Ref(id)); |
| 2198 Deserializer::InitializeHeader(cache, kMegamorphicCacheCid, |
| 2199 MegamorphicCache::InstanceSize(), |
| 2200 is_vm_object); |
| 2201 RawObject** from = cache->from(); |
| 2202 RawObject** to = cache->to(); |
| 2203 for (RawObject** p = from; p <= to; p++) { |
| 2204 *p = d->ReadRef(); |
| 2205 } |
| 2206 cache->ptr()->filled_entry_count_ = d->Read<int32_t>(); |
| 2207 } |
| 2208 } |
| 2209 }; |
| 2210 |
| 2211 |
| 2212 class SubtypeTestCacheSerializationCluster : public SerializationCluster { |
| 2213 public: |
| 2214 SubtypeTestCacheSerializationCluster() { } |
| 2215 virtual ~SubtypeTestCacheSerializationCluster() { } |
| 2216 |
| 2217 void Trace(Serializer* s, RawObject* object) { |
| 2218 RawSubtypeTestCache* cache = SubtypeTestCache::RawCast(object); |
| 2219 objects_.Add(cache); |
| 2220 s->Push(cache->ptr()->cache_); |
| 2221 } |
| 2222 |
| 2223 void WriteAlloc(Serializer* s) { |
| 2224 s->WriteCid(kSubtypeTestCacheCid); |
| 2225 intptr_t count = objects_.length(); |
| 2226 s->Write<intptr_t>(count); |
| 2227 for (intptr_t i = 0; i < count; i++) { |
| 2228 RawSubtypeTestCache* cache = objects_[i]; |
| 2229 s->AssignRef(cache); |
| 2230 } |
| 2231 } |
| 2232 |
| 2233 void WriteFill(Serializer* s) { |
| 2234 intptr_t count = objects_.length(); |
| 2235 for (intptr_t i = 0; i < count; i++) { |
| 2236 RawSubtypeTestCache* cache = objects_[i]; |
| 2237 s->WriteRef(cache->ptr()->cache_); |
| 2238 } |
| 2239 } |
| 2240 |
| 2241 private: |
| 2242 GrowableArray<RawSubtypeTestCache*> objects_; |
| 2243 }; |
| 2244 |
| 2245 class SubtypeTestCacheDeserializationCluster : public DeserializationCluster { |
| 2246 public: |
| 2247 SubtypeTestCacheDeserializationCluster() { } |
| 2248 virtual ~SubtypeTestCacheDeserializationCluster() { } |
| 2249 |
| 2250 void ReadAlloc(Deserializer* d) { |
| 2251 start_index_ = d->next_index(); |
| 2252 PageSpace* old_space = d->heap()->old_space(); |
| 2253 intptr_t count = d->Read<intptr_t>(); |
| 2254 for (intptr_t i = 0; i < count; i++) { |
| 2255 d->AssignRef(AllocateUninitialized(old_space, |
| 2256 SubtypeTestCache::InstanceSize())); |
| 2257 } |
| 2258 stop_index_ = d->next_index(); |
| 2259 } |
| 2260 |
| 2261 void ReadFill(Deserializer* d) { |
| 2262 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2263 |
| 2264 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2265 RawSubtypeTestCache* cache = |
| 2266 reinterpret_cast<RawSubtypeTestCache*>(d->Ref(id)); |
| 2267 Deserializer::InitializeHeader(cache, kSubtypeTestCacheCid, |
| 2268 SubtypeTestCache::InstanceSize(), |
| 2269 is_vm_object); |
| 2270 cache->ptr()->cache_ = reinterpret_cast<RawArray*>(d->ReadRef()); |
| 2271 } |
| 2272 } |
| 2273 }; |
| 2274 |
| 2275 class LanguageErrorSerializationCluster : public SerializationCluster { |
| 2276 public: |
| 2277 LanguageErrorSerializationCluster() { } |
| 2278 virtual ~LanguageErrorSerializationCluster() { } |
| 2279 |
| 2280 void Trace(Serializer* s, RawObject* object) { |
| 2281 RawLanguageError* error = LanguageError::RawCast(object); |
| 2282 objects_.Add(error); |
| 2283 |
| 2284 RawObject** from = error->from(); |
| 2285 RawObject** to = error->to(); |
| 2286 for (RawObject** p = from; p <= to; p++) { |
| 2287 s->Push(*p); |
| 2288 } |
| 2289 } |
| 2290 |
| 2291 void WriteAlloc(Serializer* s) { |
| 2292 s->WriteCid(kLanguageErrorCid); |
| 2293 intptr_t count = objects_.length(); |
| 2294 s->Write<intptr_t>(count); |
| 2295 for (intptr_t i = 0; i < count; i++) { |
| 2296 RawLanguageError* error = objects_[i]; |
| 2297 s->AssignRef(error); |
| 2298 } |
| 2299 } |
| 2300 |
| 2301 void WriteFill(Serializer* s) { |
| 2302 intptr_t count = objects_.length(); |
| 2303 for (intptr_t i = 0; i < count; i++) { |
| 2304 RawLanguageError* error = objects_[i]; |
| 2305 RawObject** from = error->from(); |
| 2306 RawObject** to = error->to(); |
| 2307 for (RawObject** p = from; p <= to; p++) { |
| 2308 s->WriteRef(*p); |
| 2309 } |
| 2310 s->WriteTokenPosition(error->ptr()->token_pos_); |
| 2311 s->Write<bool>(error->ptr()->report_after_token_); |
| 2312 s->Write<int8_t>(error->ptr()->kind_); |
| 2313 } |
| 2314 } |
| 2315 |
| 2316 private: |
| 2317 GrowableArray<RawLanguageError*> objects_; |
| 2318 }; |
| 2319 |
| 2320 class LanguageErrorDeserializationCluster : public DeserializationCluster { |
| 2321 public: |
| 2322 LanguageErrorDeserializationCluster() { } |
| 2323 virtual ~LanguageErrorDeserializationCluster() { } |
| 2324 |
| 2325 void ReadAlloc(Deserializer* d) { |
| 2326 start_index_ = d->next_index(); |
| 2327 PageSpace* old_space = d->heap()->old_space(); |
| 2328 intptr_t count = d->Read<intptr_t>(); |
| 2329 for (intptr_t i = 0; i < count; i++) { |
| 2330 d->AssignRef(AllocateUninitialized(old_space, |
| 2331 LanguageError::InstanceSize())); |
| 2332 } |
| 2333 stop_index_ = d->next_index(); |
| 2334 } |
| 2335 |
| 2336 void ReadFill(Deserializer* d) { |
| 2337 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2338 |
| 2339 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2340 RawLanguageError* error = reinterpret_cast<RawLanguageError*>(d->Ref(id)); |
| 2341 Deserializer::InitializeHeader(error, kLanguageErrorCid, |
| 2342 LanguageError::InstanceSize(), |
| 2343 is_vm_object); |
| 2344 RawObject** from = error->from(); |
| 2345 RawObject** to = error->to(); |
| 2346 for (RawObject** p = from; p <= to; p++) { |
| 2347 *p = d->ReadRef(); |
| 2348 } |
| 2349 error->ptr()->token_pos_ = d->ReadTokenPosition(); |
| 2350 error->ptr()->report_after_token_ = d->Read<bool>(); |
| 2351 error->ptr()->kind_ = d->Read<int8_t>(); |
| 2352 } |
| 2353 } |
| 2354 }; |
| 2355 |
| 2356 class UnhandledExceptionSerializationCluster : public SerializationCluster { |
| 2357 public: |
| 2358 UnhandledExceptionSerializationCluster() { } |
| 2359 virtual ~UnhandledExceptionSerializationCluster() { } |
| 2360 |
| 2361 void Trace(Serializer* s, RawObject* object) { |
| 2362 RawUnhandledException* exception = UnhandledException::RawCast(object); |
| 2363 objects_.Add(exception); |
| 2364 |
| 2365 RawObject** from = exception->from(); |
| 2366 RawObject** to = exception->to(); |
| 2367 for (RawObject** p = from; p <= to; p++) { |
| 2368 s->Push(*p); |
| 2369 } |
| 2370 } |
| 2371 |
| 2372 void WriteAlloc(Serializer* s) { |
| 2373 s->WriteCid(kUnhandledExceptionCid); |
| 2374 intptr_t count = objects_.length(); |
| 2375 s->Write<intptr_t>(count); |
| 2376 for (intptr_t i = 0; i < count; i++) { |
| 2377 RawUnhandledException* exception = objects_[i]; |
| 2378 s->AssignRef(exception); |
| 2379 } |
| 2380 } |
| 2381 |
| 2382 void WriteFill(Serializer* s) { |
| 2383 intptr_t count = objects_.length(); |
| 2384 for (intptr_t i = 0; i < count; i++) { |
| 2385 RawUnhandledException* exception = objects_[i]; |
| 2386 RawObject** from = exception->from(); |
| 2387 RawObject** to = exception->to(); |
| 2388 for (RawObject** p = from; p <= to; p++) { |
| 2389 s->WriteRef(*p); |
| 2390 } |
| 2391 } |
| 2392 } |
| 2393 |
| 2394 private: |
| 2395 GrowableArray<RawUnhandledException*> objects_; |
| 2396 }; |
| 2397 |
| 2398 class UnhandledExceptionDeserializationCluster : public DeserializationCluster { |
| 2399 public: |
| 2400 UnhandledExceptionDeserializationCluster() { } |
| 2401 virtual ~UnhandledExceptionDeserializationCluster() { } |
| 2402 |
| 2403 void ReadAlloc(Deserializer* d) { |
| 2404 start_index_ = d->next_index(); |
| 2405 PageSpace* old_space = d->heap()->old_space(); |
| 2406 intptr_t count = d->Read<intptr_t>(); |
| 2407 for (intptr_t i = 0; i < count; i++) { |
| 2408 d->AssignRef(AllocateUninitialized(old_space, |
| 2409 UnhandledException::InstanceSize())); |
| 2410 } |
| 2411 stop_index_ = d->next_index(); |
| 2412 } |
| 2413 |
| 2414 void ReadFill(Deserializer* d) { |
| 2415 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2416 |
| 2417 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2418 RawUnhandledException* exception = |
| 2419 reinterpret_cast<RawUnhandledException*>(d->Ref(id)); |
| 2420 Deserializer::InitializeHeader(exception, kUnhandledExceptionCid, |
| 2421 UnhandledException::InstanceSize(), |
| 2422 is_vm_object); |
| 2423 RawObject** from = exception->from(); |
| 2424 RawObject** to = exception->to(); |
| 2425 for (RawObject** p = from; p <= to; p++) { |
| 2426 *p = d->ReadRef(); |
| 2427 } |
| 2428 } |
| 2429 } |
| 2430 }; |
| 2431 |
| 2432 class InstanceSerializationCluster : public SerializationCluster { |
| 2433 public: |
| 2434 explicit InstanceSerializationCluster(intptr_t cid) : cid_(cid) { |
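| // Cache this class's layout so Trace and WriteFill can walk an instance's |
| // fields without consulting the class table for every object. |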
| 2435 RawClass* cls = Isolate::Current()->class_table()->At(cid); |
| 2436 next_field_offset_ = |
| 2437 cls->ptr()->next_field_offset_in_words_ << kWordSizeLog2; |
| 2438 instance_size_in_words_ = cls->ptr()->instance_size_in_words_; |
| 2439 ASSERT(next_field_offset_ > 0); |
| 2440 ASSERT(instance_size_in_words_ > 0); |
| 2441 } |
| 2442 virtual ~InstanceSerializationCluster() { } |
| 2443 |
| 2444 void Trace(Serializer* s, RawObject* object) { |
| 2445 RawInstance* instance = Instance::RawCast(object); |
| 2446 objects_.Add(instance); |
| 2447 |
| 2448 intptr_t offset = Instance::NextFieldOffset(); |
| 2449 while (offset < next_field_offset_) { |
| 2450 RawObject* raw_obj = *reinterpret_cast<RawObject**>( |
| 2451 reinterpret_cast<uword>(instance->ptr()) + offset); |
| 2452 s->Push(raw_obj); |
| 2453 offset += kWordSize; |
| 2454 } |
| 2455 } |
| 2456 |
| 2457 void WriteAlloc(Serializer* s) { |
| 2458 s->Write<intptr_t>(cid_); |
| 2459 intptr_t count = objects_.length(); |
| 2460 s->Write<intptr_t>(count); |
| 2461 |
| 2462 s->Write<intptr_t>(next_field_offset_); |
| 2463 s->Write<intptr_t>(instance_size_in_words_); |
| 2464 |
| 2465 for (intptr_t i = 0; i < count; i++) { |
| 2466 RawInstance* instance = objects_[i]; |
| 2467 s->AssignRef(instance); |
| 2468 } |
| 2469 } |
| 2470 |
| 2471 void WriteFill(Serializer* s) { |
| 2472 intptr_t count = objects_.length(); |
| 2473 for (intptr_t i = 0; i < count; i++) { |
| 2474 RawInstance* instance = objects_[i]; |
| 2475 s->Write<bool>(instance->IsCanonical()); |
| 2476 intptr_t offset = Instance::NextFieldOffset(); |
| 2477 while (offset < next_field_offset_) { |
| 2478 RawObject* raw_obj = *reinterpret_cast<RawObject**>( |
| 2479 reinterpret_cast<uword>(instance->ptr()) + offset); |
| 2480 s->WriteRef(raw_obj); |
| 2481 offset += kWordSize; |
| 2482 } |
| 2483 } |
| 2484 } |
| 2485 |
| 2486 private: |
| 2487 const intptr_t cid_; |
| 2488 intptr_t next_field_offset_; |
| 2489 intptr_t instance_size_in_words_; |
| 2490 GrowableArray<RawInstance*> objects_; |
| 2491 }; |
| 2492 |
| 2493 class InstanceDeserializationCluster : public DeserializationCluster { |
| 2494 public: |
| 2495 explicit InstanceDeserializationCluster(intptr_t cid) : cid_(cid) { } |
| 2496 virtual ~InstanceDeserializationCluster() { } |
| 2497 |
| 2498 void ReadAlloc(Deserializer* d) { |
| 2499 start_index_ = d->next_index(); |
| 2500 PageSpace* old_space = d->heap()->old_space(); |
| 2501 intptr_t count = d->Read<intptr_t>(); |
| 2502 next_field_offset_ = d->Read<intptr_t>(); |
| 2503 instance_size_in_words_ = d->Read<intptr_t>(); |
| 2504 intptr_t instance_size = |
| 2505 Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize); |
| 2506 for (intptr_t i = 0; i < count; i++) { |
| 2507 d->AssignRef(AllocateUninitialized(old_space, instance_size)); |
| 2508 } |
| 2509 stop_index_ = d->next_index(); |
| 2510 } |
| 2511 |
| 2512 void ReadFill(Deserializer* d) { |
| 2513 intptr_t instance_size = |
| 2514 Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize); |
| 2515 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2516 |
| 2517 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2518 RawInstance* instance = reinterpret_cast<RawInstance*>(d->Ref(id)); |
| 2519 bool is_canonical = d->Read<bool>(); |
| 2520 Deserializer::InitializeHeader(instance, cid_, |
| 2521 instance_size, |
| 2522 is_vm_object, is_canonical); |
| 2523 intptr_t offset = Instance::NextFieldOffset(); |
| 2524 while (offset < next_field_offset_) { |
| 2525 RawObject** p = reinterpret_cast<RawObject**>( |
| 2526 reinterpret_cast<uword>(instance->ptr()) + offset); |
| 2527 *p = d->ReadRef(); |
| 2528 offset += kWordSize; |
| 2529 } |
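| // Allocation rounding may leave one trailing word past the last field; |
| // null it so the heap never holds an uninitialized pointer. |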
| 2530 if (offset < instance_size) { |
| 2531 RawObject** p = reinterpret_cast<RawObject**>( |
| 2532 reinterpret_cast<uword>(instance->ptr()) + offset); |
| 2533 *p = Object::null(); |
| 2534 offset += kWordSize; |
| 2535 } |
| 2536 ASSERT(offset == instance_size); |
| 2537 } |
| 2538 } |
| 2539 |
| 2540 private: |
| 2541 const intptr_t cid_; |
| 2542 intptr_t next_field_offset_; |
| 2543 intptr_t instance_size_in_words_; |
| 2544 }; |
| 2545 |
| 2546 class LibraryPrefixSerializationCluster : public SerializationCluster { |
| 2547 public: |
| 2548 LibraryPrefixSerializationCluster() { } |
| 2549 virtual ~LibraryPrefixSerializationCluster() { } |
| 2550 |
| 2551 void Trace(Serializer* s, RawObject* object) { |
| 2552 RawLibraryPrefix* prefix = LibraryPrefix::RawCast(object); |
| 2553 objects_.Add(prefix); |
| 2554 |
| 2555 RawObject** from = prefix->from(); |
| 2556 RawObject** to = prefix->to(); |
| 2557 for (RawObject** p = from; p <= to; p++) { |
| 2558 s->Push(*p); |
| 2559 } |
| 2560 } |
| 2561 |
| 2562 void WriteAlloc(Serializer* s) { |
| 2563 s->WriteCid(kLibraryPrefixCid); |
| 2564 intptr_t count = objects_.length(); |
| 2565 s->Write<intptr_t>(count); |
| 2566 for (intptr_t i = 0; i < count; i++) { |
| 2567 RawLibraryPrefix* prefix = objects_[i]; |
| 2568 s->AssignRef(prefix); |
| 2569 } |
| 2570 } |
| 2571 |
| 2572 void WriteFill(Serializer* s) { |
| 2573 intptr_t count = objects_.length(); |
| 2574 for (intptr_t i = 0; i < count; i++) { |
| 2575 RawLibraryPrefix* prefix = objects_[i]; |
| 2576 RawObject** from = prefix->from(); |
| 2577 RawObject** to = prefix->to(); |
| 2578 for (RawObject** p = from; p <= to; p++) { |
| 2579 s->WriteRef(*p); |
| 2580 } |
| 2581 s->Write<uint16_t>(prefix->ptr()->num_imports_); |
| 2582 s->Write<bool>(prefix->ptr()->is_deferred_load_); |
| 2583 s->Write<bool>(prefix->ptr()->is_loaded_); |
| 2584 } |
| 2585 } |
| 2586 |
| 2587 private: |
| 2588 GrowableArray<RawLibraryPrefix*> objects_; |
| 2589 }; |
| 2590 |
| 2591 class LibraryPrefixDeserializationCluster : public DeserializationCluster { |
| 2592 public: |
| 2593 LibraryPrefixDeserializationCluster() { } |
| 2594 virtual ~LibraryPrefixDeserializationCluster() { } |
| 2595 |
| 2596 void ReadAlloc(Deserializer* d) { |
| 2597 start_index_ = d->next_index(); |
| 2598 PageSpace* old_space = d->heap()->old_space(); |
| 2599 intptr_t count = d->Read<intptr_t>(); |
| 2600 for (intptr_t i = 0; i < count; i++) { |
| 2601 d->AssignRef(AllocateUninitialized(old_space, |
| 2602 LibraryPrefix::InstanceSize())); |
| 2603 } |
| 2604 stop_index_ = d->next_index(); |
| 2605 } |
| 2606 |
| 2607 void ReadFill(Deserializer* d) { |
| 2608 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2609 |
| 2610 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2611 RawLibraryPrefix* prefix = |
| 2612 reinterpret_cast<RawLibraryPrefix*>(d->Ref(id)); |
| 2613 Deserializer::InitializeHeader(prefix, kLibraryPrefixCid, |
| 2614 LibraryPrefix::InstanceSize(), |
| 2615 is_vm_object); |
| 2616 RawObject** from = prefix->from(); |
| 2617 RawObject** to = prefix->to(); |
| 2618 for (RawObject** p = from; p <= to; p++) { |
| 2619 *p = d->ReadRef(); |
| 2620 } |
| 2621 prefix->ptr()->num_imports_ = d->Read<uint16_t>(); |
| 2622 prefix->ptr()->is_deferred_load_ = d->Read<bool>(); |
| 2623 prefix->ptr()->is_loaded_ = d->Read<bool>(); |
| 2624 } |
| 2625 } |
| 2626 }; |
| 2627 |
| 2628 class TypeSerializationCluster : public SerializationCluster { |
| 2629 public: |
| 2630 TypeSerializationCluster() { } |
| 2631 virtual ~TypeSerializationCluster() { } |
| 2632 |
| 2633 void Trace(Serializer* s, RawObject* object) { |
| 2634 RawType* type = Type::RawCast(object); |
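| // Canonical and non-canonical types are written as separate groups so the |
| // deserializer can mark the canonical ones while filling them in. |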
| 2635 if (type->IsCanonical()) { |
| 2636 canonical_objects_.Add(type); |
| 2637 } else { |
| 2638 objects_.Add(type); |
| 2639 } |
| 2640 |
| 2641 RawObject** from = type->from(); |
| 2642 RawObject** to = type->to(); |
| 2643 for (RawObject** p = from; p <= to; p++) { |
| 2644 s->Push(*p); |
| 2645 } |
| 2646 |
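| // The type class is stored as a class id rather than a pointer; push the |
| // corresponding class so it is included in the snapshot. |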
| 2647 RawSmi* raw_type_class_id = Smi::RawCast(type->ptr()->type_class_id_); |
| 2648 RawClass* type_class = |
| 2649 s->isolate()->class_table()->At(Smi::Value(raw_type_class_id)); |
| 2650 s->Push(type_class); |
| 2651 } |
| 2652 |
| 2653 void WriteAlloc(Serializer* s) { |
| 2654 s->WriteCid(kTypeCid); |
| 2655 intptr_t count = canonical_objects_.length(); |
| 2656 s->Write<intptr_t>(count); |
| 2657 for (intptr_t i = 0; i < count; i++) { |
| 2658 RawType* type = canonical_objects_[i]; |
| 2659 s->AssignRef(type); |
| 2660 } |
| 2661 count = objects_.length(); |
| 2662 s->Write<intptr_t>(count); |
| 2663 for (intptr_t i = 0; i < count; i++) { |
| 2664 RawType* type = objects_[i]; |
| 2665 s->AssignRef(type); |
| 2666 } |
| 2667 } |
| 2668 |
| 2669 void WriteFill(Serializer* s) { |
| 2670 intptr_t count = canonical_objects_.length(); |
| 2671 for (intptr_t i = 0; i < count; i++) { |
| 2672 RawType* type = canonical_objects_[i]; |
| 2673 RawObject** from = type->from(); |
| 2674 RawObject** to = type->to(); |
| 2675 for (RawObject** p = from; p <= to; p++) { |
| 2676 s->WriteRef(*p); |
| 2677 } |
| 2678 s->WriteTokenPosition(type->ptr()->token_pos_); |
| 2679 s->Write<int8_t>(type->ptr()->type_state_); |
| 2680 } |
| 2681 count = objects_.length(); |
| 2682 for (intptr_t i = 0; i < count; i++) { |
| 2683 RawType* type = objects_[i]; |
| 2684 RawObject** from = type->from(); |
| 2685 RawObject** to = type->to(); |
| 2686 for (RawObject** p = from; p <= to; p++) { |
| 2687 s->WriteRef(*p); |
| 2688 } |
| 2689 s->WriteTokenPosition(type->ptr()->token_pos_); |
| 2690 s->Write<int8_t>(type->ptr()->type_state_); |
| 2691 } |
| 2692 } |
| 2693 |
| 2694 private: |
| 2695 GrowableArray<RawType*> canonical_objects_; |
| 2696 GrowableArray<RawType*> objects_; |
| 2697 }; |
| 2698 |
| 2699 class TypeDeserializationCluster : public DeserializationCluster { |
| 2700 public: |
| 2701 TypeDeserializationCluster() { } |
| 2702 virtual ~TypeDeserializationCluster() { } |
| 2703 |
| 2704 void ReadAlloc(Deserializer* d) { |
| 2705 canonical_start_index_ = d->next_index(); |
| 2706 PageSpace* old_space = d->heap()->old_space(); |
| 2707 intptr_t count = d->Read<intptr_t>(); |
| 2708 for (intptr_t i = 0; i < count; i++) { |
| 2709 d->AssignRef(AllocateUninitialized(old_space, Type::InstanceSize())); |
| 2710 } |
| 2711 canonical_stop_index_ = d->next_index(); |
| 2712 |
| 2713 start_index_ = d->next_index(); |
| 2714 count = d->Read<intptr_t>(); |
| 2715 for (intptr_t i = 0; i < count; i++) { |
| 2716 d->AssignRef(AllocateUninitialized(old_space, Type::InstanceSize())); |
| 2717 } |
| 2718 stop_index_ = d->next_index(); |
| 2719 } |
| 2720 |
| 2721 void ReadFill(Deserializer* d) { |
| 2722 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2723 |
| 2724 for (intptr_t id = canonical_start_index_; |
| 2725 id < canonical_stop_index_; |
| 2726 id++) { |
| 2727 RawType* type = reinterpret_cast<RawType*>(d->Ref(id)); |
| 2728 Deserializer::InitializeHeader(type, kTypeCid, |
| 2729 Type::InstanceSize(), is_vm_object, true); |
| 2730 RawObject** from = type->from(); |
| 2731 RawObject** to = type->to(); |
| 2732 for (RawObject** p = from; p <= to; p++) { |
| 2733 *p = d->ReadRef(); |
| 2734 } |
| 2735 type->ptr()->token_pos_ = d->ReadTokenPosition(); |
| 2736 type->ptr()->type_state_ = d->Read<int8_t>(); |
| 2737 } |
| 2738 |
| 2739 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2740 RawType* type = reinterpret_cast<RawType*>(d->Ref(id)); |
| 2741 Deserializer::InitializeHeader(type, kTypeCid, |
| 2742 Type::InstanceSize(), is_vm_object); |
| 2743 RawObject** from = type->from(); |
| 2744 RawObject** to = type->to(); |
| 2745 for (RawObject** p = from; p <= to; p++) { |
| 2746 *p = d->ReadRef(); |
| 2747 } |
| 2748 type->ptr()->token_pos_ = d->ReadTokenPosition(); |
| 2749 type->ptr()->type_state_ = d->Read<int8_t>(); |
| 2750 } |
| 2751 } |
| 2752 |
| 2753 private: |
| 2754 intptr_t canonical_start_index_; |
| 2755 intptr_t canonical_stop_index_; |
| 2756 }; |
| 2757 |
| 2758 class TypeRefSerializationCluster : public SerializationCluster { |
| 2759 public: |
| 2760 TypeRefSerializationCluster() { } |
| 2761 virtual ~TypeRefSerializationCluster() { } |
| 2762 |
| 2763 void Trace(Serializer* s, RawObject* object) { |
| 2764 RawTypeRef* type = TypeRef::RawCast(object); |
| 2765 objects_.Add(type); |
| 2766 |
| 2767 RawObject** from = type->from(); |
| 2768 RawObject** to = type->to(); |
| 2769 for (RawObject** p = from; p <= to; p++) { |
| 2770 s->Push(*p); |
| 2771 } |
| 2772 } |
| 2773 |
| 2774 void WriteAlloc(Serializer* s) { |
| 2775 s->WriteCid(kTypeRefCid); |
| 2776 intptr_t count = objects_.length(); |
| 2777 s->Write<intptr_t>(count); |
| 2778 for (intptr_t i = 0; i < count; i++) { |
| 2779 RawTypeRef* type = objects_[i]; |
| 2780 s->AssignRef(type); |
| 2781 } |
| 2782 } |
| 2783 |
| 2784 void WriteFill(Serializer* s) { |
| 2785 intptr_t count = objects_.length(); |
| 2786 for (intptr_t i = 0; i < count; i++) { |
| 2787 RawTypeRef* type = objects_[i]; |
| 2788 RawObject** from = type->from(); |
| 2789 RawObject** to = type->to(); |
| 2790 for (RawObject** p = from; p <= to; p++) { |
| 2791 s->WriteRef(*p); |
| 2792 } |
| 2793 } |
| 2794 } |
| 2795 |
| 2796 private: |
| 2797 GrowableArray<RawTypeRef*> objects_; |
| 2798 }; |
| 2799 |
| 2800 class TypeRefDeserializationCluster : public DeserializationCluster { |
| 2801 public: |
| 2802 TypeRefDeserializationCluster() { } |
| 2803 virtual ~TypeRefDeserializationCluster() { } |
| 2804 |
| 2805 void ReadAlloc(Deserializer* d) { |
| 2806 start_index_ = d->next_index(); |
| 2807 PageSpace* old_space = d->heap()->old_space(); |
| 2808 intptr_t count = d->Read<intptr_t>(); |
| 2809 for (intptr_t i = 0; i < count; i++) { |
| 2810 d->AssignRef(AllocateUninitialized(old_space, TypeRef::InstanceSize())); |
| 2811 } |
| 2812 stop_index_ = d->next_index(); |
| 2813 } |
| 2814 |
| 2815 void ReadFill(Deserializer* d) { |
| 2816 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2817 |
| 2818 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2819 RawTypeRef* type = reinterpret_cast<RawTypeRef*>(d->Ref(id)); |
| 2820 Deserializer::InitializeHeader(type, kTypeRefCid, |
| 2821 TypeRef::InstanceSize(), is_vm_object); |
| 2822 RawObject** from = type->from(); |
| 2823 RawObject** to = type->to(); |
| 2824 for (RawObject** p = from; p <= to; p++) { |
| 2825 *p = d->ReadRef(); |
| 2826 } |
| 2827 } |
| 2828 } |
| 2829 }; |
| 2830 |
| 2831 class TypeParameterSerializationCluster : public SerializationCluster { |
| 2832 public: |
| 2833 TypeParameterSerializationCluster() { } |
| 2834 virtual ~TypeParameterSerializationCluster() { } |
| 2835 |
| 2836 void Trace(Serializer* s, RawObject* object) { |
| 2837 RawTypeParameter* type = TypeParameter::RawCast(object); |
| 2838 objects_.Add(type); |
| 2839 ASSERT(!type->IsCanonical()); |
| 2840 |
| 2841 RawObject** from = type->from(); |
| 2842 RawObject** to = type->to(); |
| 2843 for (RawObject** p = from; p <= to; p++) { |
| 2844 s->Push(*p); |
| 2845 } |
| 2846 } |
| 2847 |
| 2848 void WriteAlloc(Serializer* s) { |
| 2849 s->WriteCid(kTypeParameterCid); |
| 2850 intptr_t count = objects_.length(); |
| 2851 s->Write<intptr_t>(count); |
| 2852 for (intptr_t i = 0; i < count; i++) { |
| 2853 RawTypeParameter* type = objects_[i]; |
| 2854 s->AssignRef(type); |
| 2855 } |
| 2856 } |
| 2857 |
| 2858 void WriteFill(Serializer* s) { |
| 2859 intptr_t count = objects_.length(); |
| 2860 for (intptr_t i = 0; i < count; i++) { |
| 2861 RawTypeParameter* type = objects_[i]; |
| 2862 RawObject** from = type->from(); |
| 2863 RawObject** to = type->to(); |
| 2864 for (RawObject** p = from; p <= to; p++) { |
| 2865 s->WriteRef(*p); |
| 2866 } |
| 2867 s->Write<intptr_t>(type->ptr()->parameterized_class_id_); |
| 2868 s->WriteTokenPosition(type->ptr()->token_pos_); |
| 2869 s->Write<int16_t>(type->ptr()->index_); |
| 2870 s->Write<int8_t>(type->ptr()->type_state_); |
| 2871 } |
| 2872 } |
| 2873 |
| 2874 private: |
| 2875 GrowableArray<RawTypeParameter*> objects_; |
| 2876 }; |
| 2877 |
| 2878 |
| 2879 class TypeParameterDeserializationCluster : public DeserializationCluster { |
| 2880 public: |
| 2881 TypeParameterDeserializationCluster() { } |
| 2882 virtual ~TypeParameterDeserializationCluster() { } |
| 2883 |
| 2884 void ReadAlloc(Deserializer* d) { |
| 2885 start_index_ = d->next_index(); |
| 2886 PageSpace* old_space = d->heap()->old_space(); |
| 2887 intptr_t count = d->Read<intptr_t>(); |
| 2888 for (intptr_t i = 0; i < count; i++) { |
| 2889 d->AssignRef(AllocateUninitialized(old_space, |
| 2890 TypeParameter::InstanceSize())); |
| 2891 } |
| 2892 stop_index_ = d->next_index(); |
| 2893 } |
| 2894 |
| 2895 void ReadFill(Deserializer* d) { |
| 2896 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2897 |
| 2898 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2899 RawTypeParameter* type = reinterpret_cast<RawTypeParameter*>(d->Ref(id)); |
| 2900 Deserializer::InitializeHeader(type, kTypeParameterCid, |
| 2901 TypeParameter::InstanceSize(), |
| 2902 is_vm_object); |
| 2903 RawObject** from = type->from(); |
| 2904 RawObject** to = type->to(); |
| 2905 for (RawObject** p = from; p <= to; p++) { |
| 2906 *p = d->ReadRef(); |
| 2907 } |
| 2908 type->ptr()->parameterized_class_id_ = d->Read<intptr_t>(); |
| 2909 type->ptr()->token_pos_ = d->ReadTokenPosition(); |
| 2910 type->ptr()->index_ = d->Read<int16_t>(); |
| 2911 type->ptr()->type_state_ = d->Read<int8_t>(); |
| 2912 } |
| 2913 } |
| 2914 }; |
| 2915 |
| 2916 class BoundedTypeSerializationCluster : public SerializationCluster { |
| 2917 public: |
| 2918 BoundedTypeSerializationCluster() { } |
| 2919 virtual ~BoundedTypeSerializationCluster() { } |
| 2920 |
| 2921 void Trace(Serializer* s, RawObject* object) { |
| 2922 RawBoundedType* type = BoundedType::RawCast(object); |
| 2923 objects_.Add(type); |
| 2924 |
| 2925 RawObject** from = type->from(); |
| 2926 RawObject** to = type->to(); |
| 2927 for (RawObject** p = from; p <= to; p++) { |
| 2928 s->Push(*p); |
| 2929 } |
| 2930 } |
| 2931 |
| 2932 void WriteAlloc(Serializer* s) { |
| 2933 s->WriteCid(kBoundedTypeCid); |
| 2934 intptr_t count = objects_.length(); |
| 2935 s->Write<intptr_t>(count); |
| 2936 for (intptr_t i = 0; i < count; i++) { |
| 2937 RawBoundedType* type = objects_[i]; |
| 2938 s->AssignRef(type); |
| 2939 } |
| 2940 } |
| 2941 |
| 2942 void WriteFill(Serializer* s) { |
| 2943 intptr_t count = objects_.length(); |
| 2944 for (intptr_t i = 0; i < count; i++) { |
| 2945 RawBoundedType* type = objects_[i]; |
| 2946 RawObject** from = type->from(); |
| 2947 RawObject** to = type->to(); |
| 2948 for (RawObject** p = from; p <= to; p++) { |
| 2949 s->WriteRef(*p); |
| 2950 } |
| 2951 } |
| 2952 } |
| 2953 |
| 2954 private: |
| 2955 GrowableArray<RawBoundedType*> objects_; |
| 2956 }; |
| 2957 |
| 2958 class BoundedTypeDeserializationCluster : public DeserializationCluster { |
| 2959 public: |
| 2960 BoundedTypeDeserializationCluster() { } |
| 2961 virtual ~BoundedTypeDeserializationCluster() { } |
| 2962 |
| 2963 void ReadAlloc(Deserializer* d) { |
| 2964 start_index_ = d->next_index(); |
| 2965 PageSpace* old_space = d->heap()->old_space(); |
| 2966 intptr_t count = d->Read<intptr_t>(); |
| 2967 for (intptr_t i = 0; i < count; i++) { |
| 2968 d->AssignRef(AllocateUninitialized(old_space, |
| 2969 BoundedType::InstanceSize())); |
| 2970 } |
| 2971 stop_index_ = d->next_index(); |
| 2972 } |
| 2973 |
| 2974 void ReadFill(Deserializer* d) { |
| 2975 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 2976 |
| 2977 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 2978 RawBoundedType* type = reinterpret_cast<RawBoundedType*>(d->Ref(id)); |
| 2979 Deserializer::InitializeHeader(type, kBoundedTypeCid, |
| 2980 BoundedType::InstanceSize(), is_vm_object); |
| 2981 RawObject** from = type->from(); |
| 2982 RawObject** to = type->to(); |
| 2983 for (RawObject** p = from; p <= to; p++) { |
| 2984 *p = d->ReadRef(); |
| 2985 } |
| 2986 } |
| 2987 } |
| 2988 }; |
| 2989 |
| 2990 class ClosureSerializationCluster : public SerializationCluster { |
| 2991 public: |
| 2992 ClosureSerializationCluster() { } |
| 2993 virtual ~ClosureSerializationCluster() { } |
| 2994 |
| 2995 void Trace(Serializer* s, RawObject* object) { |
| 2996 RawClosure* closure = Closure::RawCast(object); |
| 2997 objects_.Add(closure); |
| 2998 |
| 2999 RawObject** from = closure->from(); |
| 3000 RawObject** to = closure->to(); |
| 3001 for (RawObject** p = from; p <= to; p++) { |
| 3002 s->Push(*p); |
| 3003 } |
| 3004 } |
| 3005 |
| 3006 void WriteAlloc(Serializer* s) { |
| 3007 s->WriteCid(kClosureCid); |
| 3008 intptr_t count = objects_.length(); |
| 3009 s->Write<intptr_t>(count); |
| 3010 for (intptr_t i = 0; i < count; i++) { |
| 3011 RawClosure* closure = objects_[i]; |
| 3012 s->AssignRef(closure); |
| 3013 } |
| 3014 } |
| 3015 |
| 3016 void WriteFill(Serializer* s) { |
| 3017 intptr_t count = objects_.length(); |
| 3018 for (intptr_t i = 0; i < count; i++) { |
| 3019 RawClosure* closure = objects_[i]; |
| 3020 s->Write<bool>(closure->IsCanonical()); |
| 3021 RawObject** from = closure->from(); |
| 3022 RawObject** to = closure->to(); |
| 3023 for (RawObject** p = from; p <= to; p++) { |
| 3024 s->WriteRef(*p); |
| 3025 } |
| 3026 } |
| 3027 } |
| 3028 |
| 3029 private: |
| 3030 GrowableArray<RawClosure*> objects_; |
| 3031 }; |
| 3032 |
| 3033 |
| 3034 class ClosureDeserializationCluster : public DeserializationCluster { |
| 3035 public: |
| 3036 ClosureDeserializationCluster() { } |
| 3037 virtual ~ClosureDeserializationCluster() { } |
| 3038 |
| 3039 void ReadAlloc(Deserializer* d) { |
| 3040 start_index_ = d->next_index(); |
| 3041 PageSpace* old_space = d->heap()->old_space(); |
| 3042 intptr_t count = d->Read<intptr_t>(); |
| 3043 for (intptr_t i = 0; i < count; i++) { |
| 3044 d->AssignRef(AllocateUninitialized(old_space, Closure::InstanceSize())); |
| 3045 } |
| 3046 stop_index_ = d->next_index(); |
| 3047 } |
| 3048 |
| 3049 void ReadFill(Deserializer* d) { |
| 3050 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3051 |
| 3052 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3053 RawClosure* closure = reinterpret_cast<RawClosure*>(d->Ref(id)); |
| 3054 bool is_canonical = d->Read<bool>(); |
| 3055 Deserializer::InitializeHeader(closure, kClosureCid, |
| 3056 Closure::InstanceSize(), |
| 3057 is_vm_object, is_canonical); |
| 3058 RawObject** from = closure->from(); |
| 3059 RawObject** to = closure->to(); |
| 3060 for (RawObject** p = from; p <= to; p++) { |
| 3061 *p = d->ReadRef(); |
| 3062 } |
| 3063 } |
| 3064 } |
| 3065 }; |
| 3066 |
| 3067 |
| 3068 class MintSerializationCluster : public SerializationCluster { |
| 3069 public: |
| 3070 MintSerializationCluster() { } |
| 3071 virtual ~MintSerializationCluster() { } |
| 3072 |
| 3073 void Trace(Serializer* s, RawObject* object) { |
| 3074 RawMint* mint = Mint::RawCast(object); |
| 3075 objects_.Add(mint); |
| 3076 } |
| 3077 |
| 3078 void WriteAlloc(Serializer* s) { |
| 3079 s->WriteCid(kMintCid); |
| 3080 intptr_t count = objects_.length(); |
| 3081 s->Write<intptr_t>(count); |
| 3082 for (intptr_t i = 0; i < count; i++) { |
| 3083 RawMint* mint = objects_[i]; |
| 3084 s->AssignRef(mint); |
| 3085 } |
| 3086 } |
| 3087 |
| 3088 void WriteFill(Serializer* s) { |
| 3089 intptr_t count = objects_.length(); |
| 3090 for (intptr_t i = 0; i < count; i++) { |
| 3091 RawMint* mint = objects_[i]; |
| 3092 s->Write<bool>(mint->IsCanonical()); |
| 3093 s->Write<int64_t>(mint->ptr()->value_); |
| 3094 } |
| 3095 } |
| 3096 |
| 3097 private: |
| 3098 GrowableArray<RawMint*> objects_; |
| 3099 }; |
| 3100 |
| 3101 class MintDeserializationCluster : public DeserializationCluster { |
| 3102 public: |
| 3103 MintDeserializationCluster() { } |
| 3104 virtual ~MintDeserializationCluster() { } |
| 3105 |
| 3106 void ReadAlloc(Deserializer* d) { |
| 3107 start_index_ = d->next_index(); |
| 3108 PageSpace* old_space = d->heap()->old_space(); |
| 3109 intptr_t count = d->Read<intptr_t>(); |
| 3110 for (intptr_t i = 0; i < count; i++) { |
| 3111 d->AssignRef(AllocateUninitialized(old_space, Mint::InstanceSize())); |
| 3112 } |
| 3113 stop_index_ = d->next_index(); |
| 3114 } |
| 3115 |
| 3116 void ReadFill(Deserializer* d) { |
| 3117 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3118 |
| 3119 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3120 RawMint* mint = reinterpret_cast<RawMint*>(d->Ref(id)); |
| 3121 bool is_canonical = d->Read<bool>(); |
| 3122 Deserializer::InitializeHeader(mint, kMintCid, |
| 3123 Mint::InstanceSize(), |
| 3124 is_vm_object, is_canonical); |
| 3125 mint->ptr()->value_ = d->Read<int64_t>(); |
| 3126 } |
| 3127 } |
| 3128 }; |
| 3129 |
| 3130 class BigintSerializationCluster : public SerializationCluster { |
| 3131 public: |
| 3132 BigintSerializationCluster() { } |
| 3133 virtual ~BigintSerializationCluster() { } |
| 3134 |
| 3135 void Trace(Serializer* s, RawObject* object) { |
| 3136 RawBigint* bigint = Bigint::RawCast(object); |
| 3137 objects_.Add(bigint); |
| 3138 |
| 3139 RawObject** from = bigint->from(); |
| 3140 RawObject** to = bigint->to(); |
| 3141 for (RawObject** p = from; p <= to; p++) { |
| 3142 s->Push(*p); |
| 3143 } |
| 3144 } |
| 3145 |
| 3146 void WriteAlloc(Serializer* s) { |
| 3147 s->WriteCid(kBigintCid); |
| 3148 intptr_t count = objects_.length(); |
| 3149 s->Write<intptr_t>(count); |
| 3150 for (intptr_t i = 0; i < count; i++) { |
| 3151 RawBigint* bigint = objects_[i]; |
| 3152 s->AssignRef(bigint); |
| 3153 } |
| 3154 } |
| 3155 |
| 3156 void WriteFill(Serializer* s) { |
| 3157 intptr_t count = objects_.length(); |
| 3158 for (intptr_t i = 0; i < count; i++) { |
| 3159 RawBigint* bigint = objects_[i]; |
| 3160 s->Write<bool>(bigint->IsCanonical()); |
| 3161 RawObject** from = bigint->from(); |
| 3162 RawObject** to = bigint->to(); |
| 3163 for (RawObject** p = from; p <= to; p++) { |
| 3164 s->WriteRef(*p); |
| 3165 } |
| 3166 } |
| 3167 } |
| 3168 |
| 3169 private: |
| 3170 GrowableArray<RawBigint*> objects_; |
| 3171 }; |
| 3172 |
| 3173 class BigintDeserializationCluster : public DeserializationCluster { |
| 3174 public: |
| 3175 BigintDeserializationCluster() { } |
| 3176 virtual ~BigintDeserializationCluster() { } |
| 3177 |
| 3178 void ReadAlloc(Deserializer* d) { |
| 3179 start_index_ = d->next_index(); |
| 3180 PageSpace* old_space = d->heap()->old_space(); |
| 3181 intptr_t count = d->Read<intptr_t>(); |
| 3182 for (intptr_t i = 0; i < count; i++) { |
| 3183 d->AssignRef(AllocateUninitialized(old_space, Bigint::InstanceSize())); |
| 3184 } |
| 3185 stop_index_ = d->next_index(); |
| 3186 } |
| 3187 |
| 3188 void ReadFill(Deserializer* d) { |
| 3189 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3190 |
| 3191 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3192 RawBigint* bigint = reinterpret_cast<RawBigint*>(d->Ref(id)); |
| 3193 bool is_canonical = d->Read<bool>(); |
| 3194 Deserializer::InitializeHeader(bigint, kBigintCid, |
| 3195 Bigint::InstanceSize(), |
| 3196 is_vm_object, is_canonical); |
| 3197 RawObject** from = bigint->from(); |
| 3198 RawObject** to = bigint->to(); |
| 3199 for (RawObject** p = from; p <= to; p++) { |
| 3200 *p = d->ReadRef(); |
| 3201 } |
| 3202 } |
| 3203 } |
| 3204 }; |
| 3205 |
| 3206 class DoubleSerializationCluster : public SerializationCluster { |
| 3207 public: |
| 3208 DoubleSerializationCluster() { } |
| 3209 virtual ~DoubleSerializationCluster() { } |
| 3210 |
| 3211 void Trace(Serializer* s, RawObject* object) { |
| 3212 RawDouble* dbl = Double::RawCast(object); |
| 3213 objects_.Add(dbl); |
| 3214 } |
| 3215 |
| 3216 void WriteAlloc(Serializer* s) { |
| 3217 s->WriteCid(kDoubleCid); |
| 3218 intptr_t count = objects_.length(); |
| 3219 s->Write<intptr_t>(count); |
| 3220 for (intptr_t i = 0; i < count; i++) { |
| 3221 RawDouble* dbl = objects_[i]; |
| 3222 s->AssignRef(dbl); |
| 3223 } |
| 3224 } |
| 3225 |
| 3226 void WriteFill(Serializer* s) { |
| 3227 intptr_t count = objects_.length(); |
| 3228 for (intptr_t i = 0; i < count; i++) { |
| 3229 RawDouble* dbl = objects_[i]; |
| 3230 s->Write<bool>(dbl->IsCanonical()); |
| 3231 s->Write<double>(dbl->ptr()->value_); |
| 3232 } |
| 3233 } |
| 3234 |
| 3235 private: |
| 3236 GrowableArray<RawDouble*> objects_; |
| 3237 }; |
| 3238 |
| 3239 class DoubleDeserializationCluster : public DeserializationCluster { |
| 3240 public: |
| 3241 DoubleDeserializationCluster() { } |
| 3242 virtual ~DoubleDeserializationCluster() { } |
| 3243 |
| 3244 void ReadAlloc(Deserializer* d) { |
| 3245 start_index_ = d->next_index(); |
| 3246 PageSpace* old_space = d->heap()->old_space(); |
| 3247 intptr_t count = d->Read<intptr_t>(); |
| 3248 for (intptr_t i = 0; i < count; i++) { |
| 3249 d->AssignRef(AllocateUninitialized(old_space, Double::InstanceSize())); |
| 3250 } |
| 3251 stop_index_ = d->next_index(); |
| 3252 } |
| 3253 |
| 3254 void ReadFill(Deserializer* d) { |
| 3255 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3256 |
| 3257 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3258 RawDouble* dbl = reinterpret_cast<RawDouble*>(d->Ref(id)); |
| 3259 bool is_canonical = d->Read<bool>(); |
| 3260 Deserializer::InitializeHeader(dbl, kDoubleCid, |
| 3261 Double::InstanceSize(), |
| 3262 is_vm_object, is_canonical); |
| 3263 dbl->ptr()->value_ = d->Read<double>(); |
| 3264 } |
| 3265 } |
| 3266 }; |
| 3267 |
| 3268 |
| 3269 class GrowableObjectArraySerializationCluster : public SerializationCluster { |
| 3270 public: |
| 3271 GrowableObjectArraySerializationCluster() { } |
| 3272 virtual ~GrowableObjectArraySerializationCluster() { } |
| 3273 |
| 3274 void Trace(Serializer* s, RawObject* object) { |
| 3275 RawGrowableObjectArray* array = GrowableObjectArray::RawCast(object); |
| 3276 objects_.Add(array); |
| 3277 |
| 3278 RawObject** from = array->from(); |
| 3279 RawObject** to = array->to(); |
| 3280 for (RawObject** p = from; p <= to; p++) { |
| 3281 s->Push(*p); |
| 3282 } |
| 3283 } |
| 3284 |
| 3285 void WriteAlloc(Serializer* s) { |
| 3286 s->WriteCid(kGrowableObjectArrayCid); |
| 3287 intptr_t count = objects_.length(); |
| 3288 s->Write<intptr_t>(count); |
| 3289 for (intptr_t i = 0; i < count; i++) { |
| 3290 RawGrowableObjectArray* array = objects_[i]; |
| 3291 s->AssignRef(array); |
| 3292 } |
| 3293 } |
| 3294 |
| 3295 void WriteFill(Serializer* s) { |
| 3296 intptr_t count = objects_.length(); |
| 3297 for (intptr_t i = 0; i < count; i++) { |
| 3298 RawGrowableObjectArray* array = objects_[i]; |
| 3299 s->Write<bool>(array->IsCanonical()); |
| 3300 RawObject** from = array->from(); |
| 3301 RawObject** to = array->to(); |
| 3302 for (RawObject** p = from; p <= to; p++) { |
| 3303 s->WriteRef(*p); |
| 3304 } |
| 3305 } |
| 3306 } |
| 3307 |
| 3308 private: |
| 3309 GrowableArray<RawGrowableObjectArray*> objects_; |
| 3310 }; |
| 3311 |
| 3312 class GrowableObjectArrayDeserializationCluster |
| 3313 : public DeserializationCluster { |
| 3314 public: |
| 3315 GrowableObjectArrayDeserializationCluster() { } |
| 3316 virtual ~GrowableObjectArrayDeserializationCluster() { } |
| 3317 |
| 3318 void ReadAlloc(Deserializer* d) { |
| 3319 start_index_ = d->next_index(); |
| 3320 PageSpace* old_space = d->heap()->old_space(); |
| 3321 intptr_t count = d->Read<intptr_t>(); |
| 3322 for (intptr_t i = 0; i < count; i++) { |
| 3323 d->AssignRef(AllocateUninitialized(old_space, |
| 3324 GrowableObjectArray::InstanceSize())); |
| 3325 } |
| 3326 stop_index_ = d->next_index(); |
| 3327 } |
| 3328 |
| 3329 void ReadFill(Deserializer* d) { |
| 3330 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3331 |
| 3332 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3333 RawGrowableObjectArray* list = |
| 3334 reinterpret_cast<RawGrowableObjectArray*>(d->Ref(id)); |
| 3335 bool is_canonical = d->Read<bool>(); |
| 3336 Deserializer::InitializeHeader(list, kGrowableObjectArrayCid, |
| 3337 GrowableObjectArray::InstanceSize(), |
| 3338 is_vm_object, is_canonical); |
| 3339 RawObject** from = list->from(); |
| 3340 RawObject** to = list->to(); |
| 3341 for (RawObject** p = from; p <= to; p++) { |
| 3342 *p = d->ReadRef(); |
| 3343 } |
| 3344 } |
| 3345 } |
| 3346 }; |
| 3347 |
| 3348 class TypedDataSerializationCluster : public SerializationCluster { |
| 3349 public: |
| 3350 explicit TypedDataSerializationCluster(intptr_t cid) : cid_(cid) { } |
| 3351 virtual ~TypedDataSerializationCluster() { } |
| 3352 |
| 3353 void Trace(Serializer* s, RawObject* object) { |
| 3354 RawTypedData* data = TypedData::RawCast(object); |
| 3355 objects_.Add(data); |
| 3356 } |
| 3357 |
| 3358 void WriteAlloc(Serializer* s) { |
| 3359 s->Write<intptr_t>(cid_); |
| 3360 intptr_t count = objects_.length(); |
| 3361 s->Write<intptr_t>(count); |
| 3362 for (intptr_t i = 0; i < count; i++) { |
| 3363 RawTypedData* data = objects_[i]; |
| 3364 intptr_t length = Smi::Value(data->ptr()->length_); |
| 3365 s->Write<intptr_t>(length); |
| 3366 s->AssignRef(data); |
| 3367 } |
| 3368 } |
| 3369 |
| 3370 void WriteFill(Serializer* s) { |
| 3371 intptr_t count = objects_.length(); |
| 3372 intptr_t element_size = TypedData::ElementSizeInBytes(cid_); |
| 3373 for (intptr_t i = 0; i < count; i++) { |
| 3374 RawTypedData* data = objects_[i]; |
| 3375 intptr_t length = Smi::Value(data->ptr()->length_); |
| 3376 s->Write<intptr_t>(length); |
| 3377 s->Write<bool>(data->IsCanonical()); |
| 3378 uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data()); |
| 3379 s->WriteBytes(cdata, length * element_size); |
| 3380 } |
| 3381 } |
| 3382 |
| 3383 private: |
| 3384 const intptr_t cid_; |
| 3385 GrowableArray<RawTypedData*> objects_; |
| 3386 }; |
| 3387 |
| 3388 |
| 3389 class TypedDataDeserializationCluster : public DeserializationCluster { |
| 3390 public: |
| 3391 explicit TypedDataDeserializationCluster(intptr_t cid) : cid_(cid) { } |
| 3392 virtual ~TypedDataDeserializationCluster() { } |
| 3393 |
| 3394 void ReadAlloc(Deserializer* d) { |
| 3395 start_index_ = d->next_index(); |
| 3396 PageSpace* old_space = d->heap()->old_space(); |
| 3397 intptr_t count = d->Read<intptr_t>(); |
| 3398 intptr_t element_size = TypedData::ElementSizeInBytes(cid_); |
| 3399 for (intptr_t i = 0; i < count; i++) { |
| 3400 intptr_t length = d->Read<intptr_t>(); |
| 3401 d->AssignRef(AllocateUninitialized(old_space, |
| 3402 TypedData::InstanceSize(length * element_size))); |
| 3403 } |
| 3404 stop_index_ = d->next_index(); |
| 3405 } |
| 3406 |
| 3407 void ReadFill(Deserializer* d) { |
| 3408 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3409 intptr_t element_size = TypedData::ElementSizeInBytes(cid_); |
| 3410 |
| 3411 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3412 RawTypedData* data = reinterpret_cast<RawTypedData*>(d->Ref(id)); |
| 3413 intptr_t length = d->Read<intptr_t>(); |
| 3414 bool is_canonical = d->Read<bool>(); |
| 3415 intptr_t length_in_bytes = length * element_size; |
| 3416 Deserializer::InitializeHeader(data, cid_, |
| 3417 TypedData::InstanceSize(length_in_bytes), |
| 3418 is_vm_object, is_canonical); |
| 3419 data->ptr()->length_ = Smi::New(length); |
| 3420 uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data()); |
| 3421 d->ReadBytes(cdata, length_in_bytes); |
| 3422 } |
| 3423 } |
| 3424 |
| 3425 private: |
| 3426 const intptr_t cid_; |
| 3427 }; |
| 3428 |
| 3429 |
| 3430 class ExternalTypedDataSerializationCluster : public SerializationCluster { |
| 3431 public: |
| 3432 explicit ExternalTypedDataSerializationCluster(intptr_t cid) : cid_(cid) { } |
| 3433 virtual ~ExternalTypedDataSerializationCluster() { } |
| 3434 |
| 3435 void Trace(Serializer* s, RawObject* object) { |
| 3436 RawExternalTypedData* data = ExternalTypedData::RawCast(object); |
| 3437 objects_.Add(data); |
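| // External typed data is never canonicalized, so this cluster writes no |
| // 'is canonical' bit in WriteFill. |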
| 3438 ASSERT(!data->IsCanonical()); |
| 3439 } |
| 3440 |
| 3441 void WriteAlloc(Serializer* s) { |
| 3442 s->Write<intptr_t>(cid_); |
| 3443 intptr_t count = objects_.length(); |
| 3444 s->Write<intptr_t>(count); |
| 3445 for (intptr_t i = 0; i < count; i++) { |
| 3446 RawExternalTypedData* data = objects_[i]; |
| 3447 s->AssignRef(data); |
| 3448 } |
| 3449 } |
| 3450 |
| 3451 void WriteFill(Serializer* s) { |
| 3452 intptr_t count = objects_.length(); |
| 3453 intptr_t element_size = ExternalTypedData::ElementSizeInBytes(cid_); |
| 3454 for (intptr_t i = 0; i < count; i++) { |
| 3455 RawExternalTypedData* data = objects_[i]; |
| 3456 intptr_t length = Smi::Value(data->ptr()->length_); |
| 3457 s->Write<intptr_t>(length); |
| 3458 uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data_); |
| 3459 s->WriteBytes(cdata, length * element_size); |
| 3460 } |
| 3461 } |
| 3462 |
| 3463 private: |
| 3464 const intptr_t cid_; |
| 3465 GrowableArray<RawExternalTypedData*> objects_; |
| 3466 }; |
| 3467 |
| 3468 |
| 3469 class ExternalTypedDataDeserializationCluster : public DeserializationCluster { |
| 3470 public: |
| 3471 explicit ExternalTypedDataDeserializationCluster(intptr_t cid) : cid_(cid) { } |
| 3472 virtual ~ExternalTypedDataDeserializationCluster() { } |
| 3473 |
| 3474 void ReadAlloc(Deserializer* d) { |
| 3475 start_index_ = d->next_index(); |
| 3476 PageSpace* old_space = d->heap()->old_space(); |
| 3477 intptr_t count = d->Read<intptr_t>(); |
| 3478 for (intptr_t i = 0; i < count; i++) { |
| 3479 d->AssignRef(AllocateUninitialized(old_space, |
| 3480 ExternalTypedData::InstanceSize())); |
| 3481 } |
| 3482 stop_index_ = d->next_index(); |
| 3483 } |
| 3484 |
| 3485 void ReadFill(Deserializer* d) { |
| 3486 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3487 intptr_t element_size = ExternalTypedData::ElementSizeInBytes(cid_); |
| 3488 |
| 3489 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3490 RawExternalTypedData* data = |
| 3491 reinterpret_cast<RawExternalTypedData*>(d->Ref(id)); |
| 3492 intptr_t length = d->Read<intptr_t>(); |
| 3493 Deserializer::InitializeHeader(data, cid_, |
| 3494 ExternalTypedData::InstanceSize(), |
| 3495 is_vm_object); |
| 3496 data->ptr()->length_ = Smi::New(length); |
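| // The payload is not copied: data_ points directly into the snapshot buffer |
| // at the reader's current position. |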
| 3497 data->ptr()->data_ = const_cast<uint8_t*>(d->CurrentBufferAddress()); |
| 3498 d->Advance(length * element_size); |
| 3499 } |
| 3500 } |
| 3501 |
| 3502 private: |
| 3503 const intptr_t cid_; |
| 3504 }; |
| 3505 |
| 3506 class StacktraceSerializationCluster : public SerializationCluster { |
| 3507 public: |
| 3508 StacktraceSerializationCluster() { } |
| 3509 virtual ~StacktraceSerializationCluster() { } |
| 3510 |
| 3511 void Trace(Serializer* s, RawObject* object) { |
| 3512 RawStacktrace* trace = Stacktrace::RawCast(object); |
| 3513 objects_.Add(trace); |
| 3514 |
| 3515 RawObject** from = trace->from(); |
| 3516 RawObject** to = trace->to(); |
| 3517 for (RawObject** p = from; p <= to; p++) { |
| 3518 s->Push(*p); |
| 3519 } |
| 3520 } |
| 3521 |
| 3522 void WriteAlloc(Serializer* s) { |
| 3523 s->WriteCid(kStacktraceCid); |
| 3524 intptr_t count = objects_.length(); |
| 3525 s->Write<intptr_t>(count); |
| 3526 for (intptr_t i = 0; i < count; i++) { |
| 3527 RawStacktrace* trace = objects_[i]; |
| 3528 s->AssignRef(trace); |
| 3529 } |
| 3530 } |
| 3531 |
| 3532 void WriteFill(Serializer* s) { |
| 3533 intptr_t count = objects_.length(); |
| 3534 for (intptr_t i = 0; i < count; i++) { |
| 3535 RawStacktrace* trace = objects_[i]; |
| 3536 RawObject** from = trace->from(); |
| 3537 RawObject** to = trace->to(); |
| 3538 for (RawObject** p = from; p <= to; p++) { |
| 3539 s->WriteRef(*p); |
| 3540 } |
| 3541 } |
| 3542 } |
| 3543 |
| 3544 private: |
| 3545 GrowableArray<RawStacktrace*> objects_; |
| 3546 }; |
| 3547 |
| 3548 class StacktraceDeserializationCluster : public DeserializationCluster { |
| 3549 public: |
| 3550 StacktraceDeserializationCluster() { } |
| 3551 virtual ~StacktraceDeserializationCluster() { } |
| 3552 |
| 3553 void ReadAlloc(Deserializer* d) { |
| 3554 start_index_ = d->next_index(); |
| 3555 PageSpace* old_space = d->heap()->old_space(); |
| 3556 intptr_t count = d->Read<intptr_t>(); |
| 3557 for (intptr_t i = 0; i < count; i++) { |
| 3558 d->AssignRef(AllocateUninitialized(old_space, |
| 3559 Stacktrace::InstanceSize())); |
| 3560 } |
| 3561 stop_index_ = d->next_index(); |
| 3562 } |
| 3563 |
| 3564 void ReadFill(Deserializer* d) { |
| 3565 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3566 |
| 3567 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3568 RawStacktrace* trace = reinterpret_cast<RawStacktrace*>(d->Ref(id)); |
| 3569 Deserializer::InitializeHeader(trace, kStacktraceCid, |
| 3570 Stacktrace::InstanceSize(), is_vm_object); |
| 3571 RawObject** from = trace->from(); |
| 3572 RawObject** to = trace->to(); |
| 3573 for (RawObject** p = from; p <= to; p++) { |
| 3574 *p = d->ReadRef(); |
| 3575 } |
| 3576 } |
| 3577 } |
| 3578 }; |
| 3579 |
| 3580 class RegExpSerializationCluster : public SerializationCluster { |
| 3581 public: |
| 3582 RegExpSerializationCluster() { } |
| 3583 virtual ~RegExpSerializationCluster() { } |
| 3584 |
| 3585 void Trace(Serializer* s, RawObject* object) { |
| 3586 RawRegExp* regexp = RegExp::RawCast(object); |
| 3587 objects_.Add(regexp); |
| 3588 |
| 3589 RawObject** from = regexp->from(); |
| 3590 RawObject** to = regexp->to(); |
| 3591 for (RawObject** p = from; p <= to; p++) { |
| 3592 s->Push(*p); |
| 3593 } |
| 3594 } |
| 3595 |
| 3596 void WriteAlloc(Serializer* s) { |
| 3597 s->WriteCid(kRegExpCid); |
| 3598 intptr_t count = objects_.length(); |
| 3599 s->Write<intptr_t>(count); |
| 3600 for (intptr_t i = 0; i < count; i++) { |
| 3601 RawRegExp* regexp = objects_[i]; |
| 3602 s->AssignRef(regexp); |
| 3603 } |
| 3604 } |
| 3605 |
| 3606 void WriteFill(Serializer* s) { |
| 3607 intptr_t count = objects_.length(); |
| 3608 for (intptr_t i = 0; i < count; i++) { |
| 3609 RawRegExp* regexp = objects_[i]; |
| 3610 RawObject** from = regexp->from(); |
| 3611 RawObject** to = regexp->to(); |
| 3612 for (RawObject** p = from; p <= to; p++) { |
| 3613 s->WriteRef(*p); |
| 3614 } |
| 3615 |
| 3616 s->Write<intptr_t>(regexp->ptr()->num_registers_); |
| 3617 s->Write<int8_t>(regexp->ptr()->type_flags_); |
| 3618 } |
| 3619 } |
| 3620 |
| 3621 private: |
| 3622 GrowableArray<RawRegExp*> objects_; |
| 3623 }; |
| 3624 |
| 3625 class RegExpDeserializationCluster : public DeserializationCluster { |
| 3626 public: |
| 3627 RegExpDeserializationCluster() { } |
| 3628 virtual ~RegExpDeserializationCluster() { } |
| 3629 |
| 3630 void ReadAlloc(Deserializer* d) { |
| 3631 start_index_ = d->next_index(); |
| 3632 PageSpace* old_space = d->heap()->old_space(); |
| 3633 intptr_t count = d->Read<intptr_t>(); |
| 3634 for (intptr_t i = 0; i < count; i++) { |
| 3635 d->AssignRef(AllocateUninitialized(old_space, |
| 3636 RegExp::InstanceSize())); |
| 3637 } |
| 3638 stop_index_ = d->next_index(); |
| 3639 } |
| 3640 |
| 3641 void ReadFill(Deserializer* d) { |
| 3642 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3643 |
| 3644 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3645 RawRegExp* regexp = reinterpret_cast<RawRegExp*>(d->Ref(id)); |
| 3646 Deserializer::InitializeHeader(regexp, kRegExpCid, |
| 3647 RegExp::InstanceSize(), is_vm_object); |
| 3648 RawObject** from = regexp->from(); |
| 3649 RawObject** to = regexp->to(); |
| 3650 for (RawObject** p = from; p <= to; p++) { |
| 3651 *p = d->ReadRef(); |
| 3652 } |
| 3653 |
| 3654 regexp->ptr()->num_registers_ = d->Read<intptr_t>(); |
| 3655 regexp->ptr()->type_flags_ = d->Read<int8_t>(); |
| 3656 } |
| 3657 } |
| 3658 }; |
| 3659 |
| 3660 class LinkedHashMapSerializationCluster : public SerializationCluster { |
| 3661 public: |
| 3662 LinkedHashMapSerializationCluster() { } |
| 3663 virtual ~LinkedHashMapSerializationCluster() { } |
| 3664 |
| 3665 void Trace(Serializer* s, RawObject* object) { |
| 3666 RawLinkedHashMap* map = LinkedHashMap::RawCast(object); |
| 3667 objects_.Add(map); |
| 3668 |
| 3669 s->Push(map->ptr()->type_arguments_); |
| 3670 |
| 3671 intptr_t used_data = Smi::Value(map->ptr()->used_data_); |
| 3672 RawArray* data_array = map->ptr()->data_; |
| 3673 RawObject** data_elements = data_array->ptr()->data(); |
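| // A deleted pair is marked by its key slot pointing back at the data array itself; |
| // skip such pairs so only live entries are traced. |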
| 3674 for (intptr_t i = 0; i < used_data; i += 2) { |
| 3675 RawObject* key = data_elements[i]; |
| 3676 if (key != data_array) { |
| 3677 RawObject* value = data_elements[i + 1]; |
| 3678 s->Push(key); |
| 3679 s->Push(value); |
| 3680 } |
| 3681 } |
| 3682 } |
| 3683 |
| 3684 void WriteAlloc(Serializer* s) { |
| 3685 s->WriteCid(kLinkedHashMapCid); |
| 3686 intptr_t count = objects_.length(); |
| 3687 s->Write<intptr_t>(count); |
| 3688 for (intptr_t i = 0; i < count; i++) { |
| 3689 RawLinkedHashMap* map = objects_[i]; |
| 3690 s->AssignRef(map); |
| 3691 } |
| 3692 } |
| 3693 |
| 3694 void WriteFill(Serializer* s) { |
| 3695 intptr_t count = objects_.length(); |
| 3696 for (intptr_t i = 0; i < count; i++) { |
| 3697 RawLinkedHashMap* map = objects_[i]; |
| 3698 s->Write<bool>(map->IsCanonical()); |
| 3699 |
| 3700 s->WriteRef(map->ptr()->type_arguments_); |
| 3701 |
| 3702 const intptr_t used_data = Smi::Value(map->ptr()->used_data_); |
| 3703 ASSERT((used_data & 1) == 0); // Keys + values, so must be even. |
| 3704 const intptr_t deleted_keys = Smi::Value(map->ptr()->deleted_keys_); |
| 3705 |
| 3706 // Write out the number of (not deleted) key/value pairs that will follow. |
| 3707 s->Write<intptr_t>((used_data >> 1) - deleted_keys); |
| 3708 |
| 3709 RawArray* data_array = map->ptr()->data_; |
| 3710 RawObject** data_elements = data_array->ptr()->data(); |
| 3711 for (intptr_t i = 0; i < used_data; i += 2) { |
| 3712 RawObject* key = data_elements[i]; |
| 3713 if (key != data_array) { |
| 3714 RawObject* value = data_elements[i + 1]; |
| 3715 s->WriteRef(key); |
| 3716 s->WriteRef(value); |
| 3717 } |
| 3718 } |
| 3719 } |
| 3720 } |
| 3721 |
| 3722 private: |
| 3723 GrowableArray<RawLinkedHashMap*> objects_; |
| 3724 }; |
| 3725 |
| 3726 class LinkedHashMapDeserializationCluster : public DeserializationCluster { |
| 3727 public: |
| 3728 LinkedHashMapDeserializationCluster() { } |
| 3729 virtual ~LinkedHashMapDeserializationCluster() { } |
| 3730 |
| 3731 void ReadAlloc(Deserializer* d) { |
| 3732 start_index_ = d->next_index(); |
| 3733 PageSpace* old_space = d->heap()->old_space(); |
| 3734 intptr_t count = d->Read<intptr_t>(); |
| 3735 for (intptr_t i = 0; i < count; i++) { |
| 3736 d->AssignRef(AllocateUninitialized(old_space, |
| 3737 LinkedHashMap::InstanceSize())); |
| 3738 } |
| 3739 stop_index_ = d->next_index(); |
| 3740 } |
| 3741 |
| 3742 void ReadFill(Deserializer* d) { |
| 3743 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3744 PageSpace* old_space = d->heap()->old_space(); |
| 3745 |
| 3746 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3747 RawLinkedHashMap* map = reinterpret_cast<RawLinkedHashMap*>(d->Ref(id)); |
| 3748 bool is_canonical = d->Read<bool>(); |
| 3749 Deserializer::InitializeHeader(map, kLinkedHashMapCid, |
| 3750 LinkedHashMap::InstanceSize(), |
| 3751 is_vm_object, is_canonical); |
| 3752 |
| 3753 map->ptr()->type_arguments_ = |
| 3754 reinterpret_cast<RawTypeArguments*>(d->ReadRef()); |
| 3755 |
| 3756 // TODO(rmacnak): Reserve ref ids and co-allocate in ReadAlloc. |
| 3757 intptr_t pairs = d->Read<intptr_t>(); |
| 3758 intptr_t used_data = pairs << 1; |
| 3759 intptr_t data_size = Utils::Maximum( |
| 3760 Utils::RoundUpToPowerOfTwo(used_data), |
| 3761 static_cast<uintptr_t>(LinkedHashMap::kInitialIndexSize)); |
| 3762 |
| 3763 RawArray* data = reinterpret_cast<RawArray*>( |
| 3764 AllocateUninitialized(old_space, Array::InstanceSize(data_size))); |
| 3765 data->ptr()->type_arguments_ = TypeArguments::null(); |
| 3766 data->ptr()->length_ = Smi::New(data_size); |
| 3767 intptr_t i; |
| 3768 for (i = 0; i < used_data; i++) { |
| 3769 data->ptr()->data()[i] = d->ReadRef(); |
| 3770 } |
| 3771 for (; i < data_size; i++) { |
| 3772 data->ptr()->data()[i] = Object::null(); |
| 3773 } |
| 3774 |
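| // The hash index is not serialized; index_ and hash_mask_ are reset so the index |
| // is rebuilt lazily when the map is next used. |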
| 3775 map->ptr()->index_ = TypedData::null(); |
| 3776 map->ptr()->hash_mask_ = Smi::New(0); |
| 3777 map->ptr()->data_ = data; |
| 3778 map->ptr()->used_data_ = Smi::New(used_data); |
| 3779 map->ptr()->deleted_keys_ = Smi::New(0); |
| 3780 } |
| 3781 } |
| 3782 }; |
| 3783 |
| 3784 class ArraySerializationCluster : public SerializationCluster { |
| 3785 public: |
| 3786 explicit ArraySerializationCluster(intptr_t cid) : cid_(cid) { } |
| 3787 virtual ~ArraySerializationCluster() { } |
| 3788 |
| 3789 void Trace(Serializer* s, RawObject* object) { |
| 3790 RawArray* array = Array::RawCast(object); |
| 3791 objects_.Add(array); |
| 3792 |
| 3793 s->Push(array->ptr()->type_arguments_); |
| 3794 intptr_t length = Smi::Value(array->ptr()->length_); |
| 3795 for (intptr_t i = 0; i < length; i++) { |
| 3796 s->Push(array->ptr()->data()[i]); |
| 3797 } |
| 3798 } |
| 3799 |
| 3800 void WriteAlloc(Serializer* s) { |
| 3801 s->WriteCid(cid_); |
| 3802 intptr_t count = objects_.length(); |
| 3803 s->Write<intptr_t>(count); |
| 3804 for (intptr_t i = 0; i < count; i++) { |
| 3805 RawArray* array = objects_[i]; |
| 3806 intptr_t length = Smi::Value(array->ptr()->length_); |
| 3807 s->Write<intptr_t>(length); |
| 3808 s->AssignRef(array); |
| 3809 } |
| 3810 } |
| 3811 |
| 3812 void WriteFill(Serializer* s) { |
| 3813 intptr_t count = objects_.length(); |
| 3814 for (intptr_t i = 0; i < count; i++) { |
| 3815 RawArray* array = objects_[i]; |
| 3816 intptr_t length = Smi::Value(array->ptr()->length_); |
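| // The length is repeated in the fill section so the deserializer can compute the |
| // instance size for the header without revisiting the allocation section. |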
| 3817 s->Write<intptr_t>(length); |
| 3818 s->Write<bool>(array->IsCanonical()); |
| 3819 s->WriteRef(array->ptr()->type_arguments_); |
| 3820 for (intptr_t j = 0; j < length; j++) { |
| 3821 s->WriteRef(array->ptr()->data()[j]); |
| 3822 } |
| 3823 } |
| 3824 } |
| 3825 |
| 3826 private: |
| 3827 intptr_t cid_; |
| 3828 GrowableArray<RawArray*> objects_; |
| 3829 }; |
| 3830 |
| 3831 class ArrayDeserializationCluster : public DeserializationCluster { |
| 3832 public: |
| 3833 explicit ArrayDeserializationCluster(intptr_t cid) : cid_(cid) { } |
| 3834 virtual ~ArrayDeserializationCluster() { } |
| 3835 |
| 3836 void ReadAlloc(Deserializer* d) { |
| 3837 start_index_ = d->next_index(); |
| 3838 PageSpace* old_space = d->heap()->old_space(); |
| 3839 intptr_t count = d->Read<intptr_t>(); |
| 3840 for (intptr_t i = 0; i < count; i++) { |
| 3841 intptr_t length = d->Read<intptr_t>(); |
| 3842 d->AssignRef(AllocateUninitialized(old_space, |
| 3843 Array::InstanceSize(length))); |
| 3844 } |
| 3845 stop_index_ = d->next_index(); |
| 3846 } |
| 3847 |
| 3848 void ReadFill(Deserializer* d) { |
| 3849 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3850 |
| 3851 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3852 RawArray* array = reinterpret_cast<RawArray*>(d->Ref(id)); |
| 3853 intptr_t length = d->Read<intptr_t>(); |
| 3854 bool is_canonical = d->Read<bool>(); |
| 3855 Deserializer::InitializeHeader(array, cid_, |
| 3856 Array::InstanceSize(length), |
| 3857 is_vm_object, is_canonical); |
| 3858 array->ptr()->type_arguments_ = |
| 3859 reinterpret_cast<RawTypeArguments*>(d->ReadRef()); |
| 3860 array->ptr()->length_ = Smi::New(length); |
| 3861 for (intptr_t j = 0; j < length; j++) { |
| 3862 array->ptr()->data()[j] = d->ReadRef(); |
| 3863 } |
| 3864 } |
| 3865 } |
| 3866 |
| 3867 private: |
| 3868 const intptr_t cid_; |
| 3869 }; |
| 3870 |
| 3871 class OneByteStringSerializationCluster : public SerializationCluster { |
| 3872 public: |
| 3873 OneByteStringSerializationCluster() { } |
| 3874 virtual ~OneByteStringSerializationCluster() { } |
| 3875 |
| 3876 void Trace(Serializer* s, RawObject* object) { |
| 3877 RawOneByteString* str = reinterpret_cast<RawOneByteString*>(object); |
| 3878 objects_.Add(str); |
| 3879 } |
| 3880 |
| 3881 void WriteAlloc(Serializer* s) { |
| 3882 s->WriteCid(kOneByteStringCid); |
| 3883 intptr_t count = objects_.length(); |
| 3884 s->Write<intptr_t>(count); |
| 3885 for (intptr_t i = 0; i < count; i++) { |
| 3886 RawOneByteString* str = objects_[i]; |
| 3887 intptr_t length = Smi::Value(str->ptr()->length_); |
| 3888 s->Write<intptr_t>(length); |
| 3889 s->AssignRef(str); |
| 3890 } |
| 3891 } |
| 3892 |
| 3893 void WriteFill(Serializer* s) { |
| 3894 intptr_t count = objects_.length(); |
| 3895 for (intptr_t i = 0; i < count; i++) { |
| 3896 RawOneByteString* str = objects_[i]; |
| 3897 intptr_t length = Smi::Value(str->ptr()->length_); |
| 3898 s->Write<intptr_t>(length); |
| 3899 s->Write<bool>(str->IsCanonical()); |
| 3900 intptr_t hash = Smi::Value(str->ptr()->hash_); |
| 3901 s->Write<int32_t>(hash); |
| 3902 s->WriteBytes(str->ptr()->data(), length); |
| 3903 } |
| 3904 } |
| 3905 |
| 3906 private: |
| 3907 GrowableArray<RawOneByteString*> objects_; |
| 3908 }; |
| 3909 |
| 3910 class OneByteStringDeserializationCluster : public DeserializationCluster { |
| 3911 public: |
| 3912 OneByteStringDeserializationCluster() { } |
| 3913 virtual ~OneByteStringDeserializationCluster() { } |
| 3914 |
| 3915 void ReadAlloc(Deserializer* d) { |
| 3916 start_index_ = d->next_index(); |
| 3917 PageSpace* old_space = d->heap()->old_space(); |
| 3918 intptr_t count = d->Read<intptr_t>(); |
| 3919 for (intptr_t i = 0; i < count; i++) { |
| 3920 intptr_t length = d->Read<intptr_t>(); |
| 3921 d->AssignRef(AllocateUninitialized(old_space, |
| 3922 OneByteString::InstanceSize(length))); |
| 3923 } |
| 3924 stop_index_ = d->next_index(); |
| 3925 } |
| 3926 |
| 3927 void ReadFill(Deserializer* d) { |
| 3928 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 3929 |
| 3930 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 3931 RawOneByteString* str = reinterpret_cast<RawOneByteString*>(d->Ref(id)); |
| 3932 intptr_t length = d->Read<intptr_t>(); |
| 3933 bool is_canonical = d->Read<bool>(); |
| 3934 Deserializer::InitializeHeader(str, kOneByteStringCid, |
| 3935 OneByteString::InstanceSize(length), |
| 3936 is_vm_object, is_canonical); |
| 3937 str->ptr()->length_ = Smi::New(length); |
| 3938 str->ptr()->hash_ = Smi::New(d->Read<int32_t>()); |
| 3939 for (intptr_t j = 0; j < length; j++) { |
| 3940 str->ptr()->data()[j] = d->Read<uint8_t>(); |
| 3941 } |
| 3942 } |
| 3943 } |
| 3944 }; |
| 3945 |
| 3946 class TwoByteStringSerializationCluster : public SerializationCluster { |
| 3947 public: |
| 3948 TwoByteStringSerializationCluster() { } |
| 3949 virtual ~TwoByteStringSerializationCluster() { } |
| 3950 |
| 3951 void Trace(Serializer* s, RawObject* object) { |
| 3952 RawTwoByteString* str = reinterpret_cast<RawTwoByteString*>(object); |
| 3953 objects_.Add(str); |
| 3954 } |
| 3955 |
| 3956 void WriteAlloc(Serializer* s) { |
| 3957 s->WriteCid(kTwoByteStringCid); |
| 3958 intptr_t count = objects_.length(); |
| 3959 s->Write<intptr_t>(count); |
| 3960 for (intptr_t i = 0; i < count; i++) { |
| 3961 RawTwoByteString* str = objects_[i]; |
| 3962 intptr_t length = Smi::Value(str->ptr()->length_); |
| 3963 s->Write<intptr_t>(length); |
| 3964 s->AssignRef(str); |
| 3965 } |
| 3966 } |
| 3967 |
| 3968 void WriteFill(Serializer* s) { |
| 3969 intptr_t count = objects_.length(); |
| 3970 for (intptr_t i = 0; i < count; i++) { |
| 3971 RawTwoByteString* str = objects_[i]; |
| 3972 intptr_t length = Smi::Value(str->ptr()->length_); |
| 3973 s->Write<intptr_t>(length); |
| 3974 s->Write<bool>(str->IsCanonical()); |
| 3975 intptr_t hash = Smi::Value(str->ptr()->hash_); |
| 3976 s->Write<int32_t>(hash); |
| 3977 s->WriteBytes(reinterpret_cast<uint8_t*>(str->ptr()->data()), length * 2); |
| 3978 } |
| 3979 } |
| 3980 |
| 3981 private: |
| 3982 GrowableArray<RawTwoByteString*> objects_; |
| 3983 }; |
| 3984 |
| 3985 class TwoByteStringDeserializationCluster : public DeserializationCluster { |
| 3986 public: |
| 3987 TwoByteStringDeserializationCluster() { } |
| 3988 virtual ~TwoByteStringDeserializationCluster() { } |
| 3989 |
| 3990 void ReadAlloc(Deserializer* d) { |
| 3991 start_index_ = d->next_index(); |
| 3992 PageSpace* old_space = d->heap()->old_space(); |
| 3993 intptr_t count = d->Read<intptr_t>(); |
| 3994 for (intptr_t i = 0; i < count; i++) { |
| 3995 intptr_t length = d->Read<intptr_t>(); |
| 3996 d->AssignRef(AllocateUninitialized(old_space, |
| 3997 TwoByteString::InstanceSize(length))); |
| 3998 } |
| 3999 stop_index_ = d->next_index(); |
| 4000 } |
| 4001 |
| 4002 void ReadFill(Deserializer* d) { |
| 4003 bool is_vm_object = d->isolate() == Dart::vm_isolate(); |
| 4004 |
| 4005 for (intptr_t id = start_index_; id < stop_index_; id++) { |
| 4006 RawTwoByteString* str = |
| 4007 reinterpret_cast<RawTwoByteString*>(d->Ref(id)); |
| 4008 intptr_t length = d->Read<intptr_t>(); |
| 4009 bool is_canonical = d->Read<bool>(); |
| 4010 Deserializer::InitializeHeader(str, kTwoByteStringCid, |
| 4011 TwoByteString::InstanceSize(length), |
| 4012 is_vm_object, is_canonical); |
| 4013 str->ptr()->length_ = Smi::New(length); |
| 4014 str->ptr()->hash_ = Smi::New(d->Read<int32_t>()); |
| 4015 uint8_t* cdata = reinterpret_cast<uint8_t*>(str->ptr()->data()); |
| 4016 d->ReadBytes(cdata, length * 2); |
| 4017 } |
| 4018 } |
| 4019 }; |
| 4020 |
| 4021 |
| 4022 Serializer::Serializer(Thread* thread, |
| 4023 Snapshot::Kind kind, |
| 4024 uint8_t** buffer, |
| 4025 ReAlloc alloc, |
| 4026 intptr_t initial_size, |
| 4027 InstructionsWriter* instructions_writer) |
| 4028 : StackResource(thread), |
| 4029 heap_(thread->isolate()->heap()), |
| 4030 zone_(thread->zone()), |
| 4031 kind_(kind), |
| 4032 stream_(buffer, alloc, initial_size), |
| 4033 instructions_writer_(instructions_writer), |
| 4034 clusters_by_cid_(NULL), |
| 4035 stack_(), |
| 4036 num_cids_(0), |
| 4037 num_base_objects_(0), |
| 4038 num_written_objects_(0), |
| 4039 next_ref_index_(1) { |
| 4040 num_cids_ = thread->isolate()->class_table()->NumCids(); |
| 4041 clusters_by_cid_ = new SerializationCluster*[num_cids_]; |
| 4042 for (intptr_t i = 0; i < num_cids_; i++) { |
| 4043 clusters_by_cid_[i] = NULL; |
| 4044 } |
| 4045 } |
| 4046 |
| 4047 |
| 4048 Serializer::~Serializer() { |
| 4049 delete[] clusters_by_cid_; |
| 4050 } |
| 4051 |
| 4052 |
| 4053 SerializationCluster* Serializer::NewClusterForClass(intptr_t cid) { |
| 4054 Zone* Z = zone_; |
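| // User-defined classes, plain instances, and typed data views are all written |
| // field-by-field by the generic instance cluster. |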
| 4055 if ((cid >= kNumPredefinedCids) || |
| 4056 (cid == kInstanceCid) || |
| 4057 RawObject::IsTypedDataViewClassId(cid)) { |
| 4058 Push(isolate()->class_table()->At(cid)); |
| 4059 return new (Z) InstanceSerializationCluster(cid); |
| 4060 } |
| 4061 if (RawObject::IsExternalTypedDataClassId(cid)) { |
| 4062 return new (Z) ExternalTypedDataSerializationCluster(cid); |
| 4063 } |
| 4064 if (RawObject::IsTypedDataClassId(cid)) { |
| 4065 return new (Z) TypedDataSerializationCluster(cid); |
| 4066 } |
| 4067 |
| 4068 switch (cid) { |
| 4069 case kClassCid: return new (Z) ClassSerializationCluster(); |
| 4070 case kUnresolvedClassCid: |
| 4071 return new (Z) UnresolvedClassSerializationCluster(); |
| 4072 case kTypeArgumentsCid: return new (Z) TypeArgumentsSerializationCluster(); |
| 4073 case kPatchClassCid: return new (Z) PatchClassSerializationCluster(); |
| 4074 case kFunctionCid: return new (Z) FunctionSerializationCluster(); |
| 4075 case kClosureDataCid: return new (Z) ClosureDataSerializationCluster(); |
| 4076 case kRedirectionDataCid: |
| 4077 return new (Z) RedirectionDataSerializationCluster(); |
| 4078 case kFieldCid: return new (Z) FieldSerializationCluster(); |
| 4079 case kLiteralTokenCid: return new (Z) LiteralTokenSerializationCluster(); |
| 4080 case kTokenStreamCid: return new (Z) TokenStreamSerializationCluster(); |
| 4081 case kScriptCid: return new (Z) ScriptSerializationCluster(); |
| 4082 case kLibraryCid: return new (Z) LibrarySerializationCluster(); |
| 4083 case kNamespaceCid: return new (Z) NamespaceSerializationCluster(); |
| 4084 case kCodeCid: return new (Z) CodeSerializationCluster(); |
| 4085 case kObjectPoolCid: return new (Z) ObjectPoolSerializationCluster(); |
| 4086 case kPcDescriptorsCid: |
| 4087 return new (Z) RODataSerializationCluster(kPcDescriptorsCid); |
| 4088 case kStackmapCid: |
| 4089 return new (Z) RODataSerializationCluster(kStackmapCid); |
| 4090 case kExceptionHandlersCid: |
| 4091 return new (Z) ExceptionHandlersSerializationCluster(); |
| 4092 case kContextCid: return new (Z) ContextSerializationCluster(); |
| 4093 case kContextScopeCid: return new (Z) ContextScopeSerializationCluster(); |
| 4094 case kICDataCid: return new (Z) ICDataSerializationCluster(); |
| 4095 case kMegamorphicCacheCid: |
| 4096 return new (Z) MegamorphicCacheSerializationCluster(); |
| 4097 case kSubtypeTestCacheCid: |
| 4098 return new (Z) SubtypeTestCacheSerializationCluster(); |
| 4099 case kLanguageErrorCid: |
| 4100 return new (Z) LanguageErrorSerializationCluster(); |
| 4101 case kUnhandledExceptionCid: |
| 4102 return new (Z) UnhandledExceptionSerializationCluster(); |
| 4103 case kLibraryPrefixCid: return new (Z) LibraryPrefixSerializationCluster(); |
| 4104 case kTypeCid: return new (Z) TypeSerializationCluster(); |
| 4105 case kTypeRefCid: return new (Z) TypeRefSerializationCluster(); |
| 4106 case kTypeParameterCid: return new (Z) TypeParameterSerializationCluster(); |
| 4107 case kBoundedTypeCid: return new (Z) BoundedTypeSerializationCluster(); |
| 4108 case kClosureCid: return new (Z) ClosureSerializationCluster(); |
| 4109 case kMintCid: return new (Z) MintSerializationCluster(); |
| 4110 case kBigintCid: return new (Z) BigintSerializationCluster(); |
| 4111 case kDoubleCid: return new (Z) DoubleSerializationCluster(); |
| 4112 case kGrowableObjectArrayCid: |
| 4113 return new (Z) GrowableObjectArraySerializationCluster(); |
| 4114 case kStacktraceCid: return new (Z) StacktraceSerializationCluster(); |
| 4115 case kRegExpCid: return new (Z) RegExpSerializationCluster(); |
| 4116 case kLinkedHashMapCid: return new (Z) LinkedHashMapSerializationCluster(); |
| 4117 case kArrayCid: |
| 4118 return new (Z) ArraySerializationCluster(kArrayCid); |
| 4119 case kImmutableArrayCid: |
| 4120 return new (Z) ArraySerializationCluster(kImmutableArrayCid); |
| 4121 case kOneByteStringCid: { |
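| // When the snapshot includes code, strings are emitted through the read-only data |
| // cluster rather than the regular string cluster. |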
| 4122 if (Snapshot::IncludesCode(kind_)) { |
| 4123 return new (Z) RODataSerializationCluster(kOneByteStringCid); |
| 4124 } else { |
| 4125 return new (Z) OneByteStringSerializationCluster(); |
| 4126 } |
| 4127 } |
| 4128 case kTwoByteStringCid: { |
| 4129 if (Snapshot::IncludesCode(kind_)) { |
| 4130 return new (Z) RODataSerializationCluster(kTwoByteStringCid); |
| 4131 } else { |
| 4132 return new (Z) TwoByteStringSerializationCluster(); |
| 4133 } |
| 4134 } |
| 4135 default: break; |
| 4136 } |
| 4137 |
| 4138 FATAL1("No cluster defined for cid %" Pd, cid); |
| 4139 return NULL; |
| 4140 } |
| 4141 |
| 4142 |
| 4143 void Serializer::Trace(RawObject* object) { |
| 4144 intptr_t cid; |
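| // Smis are immediate values rather than heap objects, so they are traced under |
| // the Smi pseudo-cid instead of a class id read from an object header. |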
| 4145 if (!object->IsHeapObject()) { |
| 4146 cid = kSmiCid; |
| 4147 } else { |
| 4148 cid = object->GetClassId(); |
| 4149 } |
| 4150 |
| 4151 SerializationCluster* cluster = clusters_by_cid_[cid]; |
| 4152 if (cluster == NULL) { |
| 4153 cluster = NewClusterForClass(cid); |
| 4154 clusters_by_cid_[cid] = cluster; |
| 4155 } |
| 4156 ASSERT(cluster != NULL); |
| 4157 cluster->Trace(this, object); |
| 4158 } |
| 4159 |
| 4160 |
| 4161 void Serializer::WriteVersionAndFeatures() { |
| 4162 const char* expected_version = Version::SnapshotString(); |
| 4163 ASSERT(expected_version != NULL); |
| 4164 const intptr_t version_len = strlen(expected_version); |
| 4165 WriteBytes(reinterpret_cast<const uint8_t*>(expected_version), version_len); |
| 4166 |
| 4167 const char* expected_features = Dart::FeaturesString(kind_); |
| 4168 ASSERT(expected_features != NULL); |
| 4169 const intptr_t features_len = strlen(expected_features); |
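| // Unlike the version string, the features string is written with its terminating |
| // NUL so the reader can find where it ends. |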
| 4170 WriteBytes(reinterpret_cast<const uint8_t*>(expected_features), |
| 4171 features_len + 1); |
| 4172 free(const_cast<char*>(expected_features)); |
| 4173 } |
| 4174 |
| 4175 |
| 4176 #if defined(DEBUG) |
| 4177 static const intptr_t kSectionMarker = 0xABAB; |
| 4178 #endif |
| 4179 |
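| // Serialization runs two passes over the discovered clusters: WriteAlloc emits what the |
| // deserializer needs to allocate every object and assign it a ref id, then WriteFill |
| // emits each object's contents in terms of those ref ids. |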
| 4180 void Serializer::Serialize() { |
| 4181 while (stack_.length() > 0) { |
| 4182 Trace(stack_.RemoveLast()); |
| 4183 } |
| 4184 |
| 4185 intptr_t num_clusters = 0; |
| 4186 for (intptr_t cid = 1; cid < num_cids_; cid++) { |
| 4187 SerializationCluster* cluster = clusters_by_cid_[cid]; |
| 4188 if (cluster != NULL) { |
| 4189 num_clusters++; |
| 4190 } |
| 4191 } |
| 4192 |
| 4193 intptr_t num_objects = num_base_objects_ + num_written_objects_; |
| 4194 |
| 4195 Write<int32_t>(num_objects); |
| 4196 Write<int32_t>(num_clusters); |
| 4197 |
| 4198 for (intptr_t cid = 1; cid < num_cids_; cid++) { |
| 4199 SerializationCluster* cluster = clusters_by_cid_[cid]; |
| 4200 if (cluster != NULL) { |
| 4201 cluster->WriteAlloc(this); |
| 4202 #if defined(DEBUG) |
| 4203 Write<intptr_t>(next_ref_index_); |
| 4204 #endif |
| 4205 } |
| 4206 } |
| 4207 |
| 4208 // We should have assigned a ref to every object we pushed. |
| 4209 ASSERT((next_ref_index_ - 1) == num_objects); |
| 4210 |
| 4211 for (intptr_t cid = 1; cid < num_cids_; cid++) { |
| 4212 SerializationCluster* cluster = clusters_by_cid_[cid]; |
| 4213 if (cluster != NULL) { |
| 4214 cluster->WriteFill(this); |
| 4215 #if defined(DEBUG) |
| 4216 Write<intptr_t>(kSectionMarker); |
| 4217 #endif |
| 4218 } |
| 4219 } |
| 4220 } |
| 4221 |
| 4222 |
| 4223 void Serializer::AddVMIsolateBaseObjects() { |
| 4224 // These objects are always allocated by Object::InitOnce, so they are not |
| 4225 // written into the snapshot. |
| 4226 |
| 4227 AddBaseObject(Object::null()); |
| 4228 AddBaseObject(Object::sentinel().raw()); |
| 4229 AddBaseObject(Object::transition_sentinel().raw()); |
| 4230 AddBaseObject(Object::empty_array().raw()); |
| 4231 AddBaseObject(Object::zero_array().raw()); |
| 4232 AddBaseObject(Object::dynamic_type().raw()); |
| 4233 AddBaseObject(Object::void_type().raw()); |
| 4234 AddBaseObject(Bool::True().raw()); |
| 4235 AddBaseObject(Bool::False().raw()); |
| 4236 AddBaseObject(Object::extractor_parameter_types().raw()); |
| 4237 AddBaseObject(Object::extractor_parameter_names().raw()); |
| 4238 AddBaseObject(Object::empty_context_scope().raw()); |
| 4239 AddBaseObject(Object::empty_descriptors().raw()); |
| 4240 AddBaseObject(Object::empty_var_descriptors().raw()); |
| 4241 AddBaseObject(Object::empty_exception_handlers().raw()); |
| 4242 |
| 4243 for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) { |
| 4244 AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i]); |
| 4245 } |
| 4246 for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) { |
| 4247 AddBaseObject(ICData::cached_icdata_arrays_[i]); |
| 4248 } |
| 4249 |
| 4250 ClassTable* table = isolate()->class_table(); |
| 4251 for (intptr_t cid = kClassCid; cid <= kUnwindErrorCid; cid++) { |
| 4252 // Error has no class object. |
| 4253 if (cid != kErrorCid) { |
| 4254 ASSERT(table->HasValidClassAt(cid)); |
| 4255 AddBaseObject(table->At(cid)); |
| 4256 } |
| 4257 } |
| 4258 AddBaseObject(table->At(kDynamicCid)); |
| 4259 AddBaseObject(table->At(kVoidCid)); |
| 4260 } |
| 4261 |
| 4262 |
| 4263 intptr_t Serializer::WriteVMSnapshot(const Array& symbols, |
| 4264 const Array& scripts) { |
| 4265 NoSafepointScope no_safepoint; |
| 4266 |
| 4267 AddVMIsolateBaseObjects(); |
| 4268 |
| 4269 // Push roots. |
| 4270 Push(symbols.raw()); |
| 4271 Push(scripts.raw()); |
| 4272 if (Snapshot::IncludesCode(kind_)) { |
| 4273 StubCode::Push(this); |
| 4274 } |
| 4275 |
| 4276 Serialize(); |
| 4277 |
| 4278 // Write roots. |
| 4279 WriteRef(symbols.raw()); |
| 4280 WriteRef(scripts.raw()); |
| 4281 if (Snapshot::IncludesCode(kind_)) { |
| 4282 StubCode::WriteRef(this); |
| 4283 } |
| 4284 |
| 4285 #if defined(DEBUG) |
| 4286 Write<intptr_t>(kSectionMarker); |
| 4287 #endif |
| 4288 |
| 4289 // Note we are not clearing the object id table. The full ref table |
| 4290 // of the vm isolate snapshot serves as the base objects for the |
| 4291 // regular isolate snapshot. |
| 4292 |
| 4293 // Return the number of objects; the -1 accounts for the unused ref 0. |
| 4294 return next_ref_index_ - 1; |
| 4295 } |
| 4296 |
| 4297 |
| 4298 void Serializer::WriteFullSnapshot(intptr_t num_base_objects, |
| 4299 ObjectStore* object_store) { |
| 4300 NoSafepointScope no_safepoint; |
| 4301 |
| 4302 if (num_base_objects == 0) { |
| 4303 // Unit tests not writing a new VM isolate. |
| 4304 const Array& base_objects = Object::vm_isolate_snapshot_object_table(); |
| 4305 for (intptr_t i = 1; i < base_objects.Length(); i++) { |
| 4306 AddBaseObject(base_objects.At(i)); |
| 4307 } |
| 4308 } else { |
| 4309 // Base objects carried over from WriteVMSnapshot. |
| 4310 num_base_objects_ += num_base_objects; |
| 4311 next_ref_index_ += num_base_objects; |
| 4312 } |
| 4313 |
| 4314 // Push roots. |
| 4315 RawObject** from = object_store->from(); |
| 4316 RawObject** to = object_store->to_snapshot(kind_); |
| 4317 for (RawObject** p = from; p <= to; p++) { |
| 4318 Push(*p); |
| 4319 } |
| 4320 |
| 4321 Serialize(); |
| 4322 |
| 4323 // Write roots. |
| 4324 for (RawObject** p = from; p <= to; p++) { |
| 4325 WriteRef(*p); |
| 4326 } |
| 4327 |
| 4328 #if defined(DEBUG) |
| 4329 Write<intptr_t>(kSectionMarker); |
| 4330 #endif |
| 4331 |
| 4332 heap_->ResetObjectIdTable(); |
| 4333 } |
| 4334 |
| 4335 |
| 4336 Deserializer::Deserializer(Thread* thread, |
| 4337 Snapshot::Kind kind, |
| 4338 const uint8_t* buffer, |
| 4339 intptr_t size, |
| 4340 const uint8_t* instructions_buffer, |
| 4341 const uint8_t* data_buffer) |
| 4342 : StackResource(thread), |
| 4343 heap_(thread->isolate()->heap()), |
| 4344 zone_(thread->zone()), |
| 4345 kind_(kind), |
| 4346 stream_(buffer, size), |
| 4347 instructions_reader_(NULL), |
| 4348 refs_(NULL), |
| 4349 next_ref_index_(1), |
| 4350 clusters_(NULL) { |
| 4351 if (Snapshot::IncludesCode(kind)) { |
| 4352 ASSERT(instructions_buffer != NULL); |
| 4353 } |
| 4354 if (instructions_buffer != NULL) { |
| 4355 instructions_reader_ = |
| 4356 new (zone_) InstructionsReader(instructions_buffer, data_buffer); |
| 4357 } |
| 4358 } |
| 4359 |
| 4360 |
| 4361 Deserializer::~Deserializer() { |
| 4362 delete[] clusters_; |
| 4363 } |
| 4364 |
| 4365 |
| 4366 DeserializationCluster* Deserializer::ReadCluster() { |
| 4367 intptr_t cid = ReadCid(); |
| 4368 |
| 4369 Zone* Z = zone_; |
| 4370 if ((cid >= kNumPredefinedCids) || |
| 4371 (cid == kInstanceCid) || |
| 4372 RawObject::IsTypedDataViewClassId(cid)) { |
| 4373 return new (Z) InstanceDeserializationCluster(cid); |
| 4374 } |
| 4375 if (RawObject::IsExternalTypedDataClassId(cid)) { |
| 4376 return new (Z) ExternalTypedDataDeserializationCluster(cid); |
| 4377 } |
| 4378 if (RawObject::IsTypedDataClassId(cid)) { |
| 4379 return new (Z) TypedDataDeserializationCluster(cid); |
| 4380 } |
| 4381 |
| 4382 switch (cid) { |
| 4383 case kClassCid: return new (Z) ClassDeserializationCluster(); |
| 4384 case kUnresolvedClassCid: |
| 4385 return new (Z) UnresolvedClassDeserializationCluster(); |
| 4386 case kTypeArgumentsCid: |
| 4387 return new (Z) TypeArgumentsDeserializationCluster(); |
| 4388 case kPatchClassCid: return new (Z) PatchClassDeserializationCluster(); |
| 4389 case kFunctionCid: return new (Z) FunctionDeserializationCluster(); |
| 4390 case kClosureDataCid: return new (Z) ClosureDataDeserializationCluster(); |
| 4391 case kRedirectionDataCid: |
| 4392 return new (Z) RedirectionDataDeserializationCluster(); |
| 4393 case kFieldCid: return new (Z) FieldDeserializationCluster(); |
| 4394 case kLiteralTokenCid: return new (Z) LiteralTokenDeserializationCluster(); |
| 4395 case kTokenStreamCid: return new (Z) TokenStreamDeserializationCluster(); |
| 4396 case kScriptCid: return new (Z) ScriptDeserializationCluster(); |
| 4397 case kLibraryCid: return new (Z) LibraryDeserializationCluster(); |
| 4398 case kNamespaceCid: return new (Z) NamespaceDeserializationCluster(); |
| 4399 case kCodeCid: return new (Z) CodeDeserializationCluster(); |
| 4400 case kObjectPoolCid: return new (Z) ObjectPoolDeserializationCluster(); |
| 4401 case kPcDescriptorsCid: |
| 4402 case kStackmapCid: |
| 4403 return new (Z) RODataDeserializationCluster(); |
| 4404 case kExceptionHandlersCid: |
| 4405 return new (Z) ExceptionHandlersDeserializationCluster(); |
| 4406 case kContextCid: return new (Z) ContextDeserializationCluster(); |
| 4407 case kContextScopeCid: return new (Z) ContextScopeDeserializationCluster(); |
| 4408 case kICDataCid: return new (Z) ICDataDeserializationCluster(); |
| 4409 case kMegamorphicCacheCid: |
| 4410 return new (Z) MegamorphicCacheDeserializationCluster(); |
| 4411 case kSubtypeTestCacheCid: |
| 4412 return new (Z) SubtypeTestCacheDeserializationCluster(); |
| 4413 case kLanguageErrorCid: |
| 4414 return new (Z) LanguageErrorDeserializationCluster(); |
| 4415 case kUnhandledExceptionCid: |
| 4416 return new (Z) UnhandledExceptionDeserializationCluster(); |
| 4417 case kLibraryPrefixCid: |
| 4418 return new (Z) LibraryPrefixDeserializationCluster(); |
| 4419 case kTypeCid: return new (Z) TypeDeserializationCluster(); |
| 4420 case kTypeRefCid: return new (Z) TypeRefDeserializationCluster(); |
| 4421 case kTypeParameterCid: |
| 4422 return new (Z) TypeParameterDeserializationCluster(); |
| 4423 case kBoundedTypeCid: return new (Z) BoundedTypeDeserializationCluster(); |
| 4424 case kClosureCid: return new (Z) ClosureDeserializationCluster(); |
| 4425 case kMintCid: return new (Z) MintDeserializationCluster(); |
| 4426 case kBigintCid: return new (Z) BigintDeserializationCluster(); |
| 4427 case kDoubleCid: return new (Z) DoubleDeserializationCluster(); |
| 4428 case kGrowableObjectArrayCid: |
| 4429 return new (Z) GrowableObjectArrayDeserializationCluster(); |
| 4430 case kStacktraceCid: return new (Z) StacktraceDeserializationCluster(); |
| 4431 case kRegExpCid: return new (Z) RegExpDeserializationCluster(); |
| 4432 case kLinkedHashMapCid: |
| 4433 return new (Z) LinkedHashMapDeserializationCluster(); |
| 4434 case kArrayCid: |
| 4435 return new (Z) ArrayDeserializationCluster(kArrayCid); |
| 4436 case kImmutableArrayCid: |
| 4437 return new (Z) ArrayDeserializationCluster(kImmutableArrayCid); |
| 4438 case kOneByteStringCid: { |
| 4439 if (Snapshot::IncludesCode(kind_)) { |
| 4440 return new (Z) RODataDeserializationCluster(); |
| 4441 } else { |
| 4442 return new (Z) OneByteStringDeserializationCluster(); |
| 4443 } |
| 4444 } |
| 4445 case kTwoByteStringCid: { |
| 4446 if (Snapshot::IncludesCode(kind_)) { |
| 4447 return new (Z) RODataDeserializationCluster(); |
| 4448 } else { |
| 4449 return new (Z) TwoByteStringDeserializationCluster(); |
| 4450 } |
| 4451 } |
| 4452 default: break; |
| 4453 } |
| 4454 FATAL1("No cluster defined for cid %" Pd, cid); |
| 4455 return NULL; |
| 4456 } |
| 4457 |
| 4458 |
| 4459 RawApiError* Deserializer::VerifyVersionAndFeatures() { |
| 4460 // If the version string doesn't match, return an error. |
| 4461 // Note: New things are allocated only if we're going to return an error. |
| 4462 |
| 4463 const char* expected_version = Version::SnapshotString(); |
| 4464 ASSERT(expected_version != NULL); |
| 4465 const intptr_t version_len = strlen(expected_version); |
| 4466 if (PendingBytes() < version_len) { |
| 4467 const intptr_t kMessageBufferSize = 128; |
| 4468 char message_buffer[kMessageBufferSize]; |
| 4469 OS::SNPrint(message_buffer, |
| 4470 kMessageBufferSize, |
| 4471 "No full snapshot version found, expected '%s'", |
| 4472 expected_version); |
| 4473 // This can also fail while bringing up the VM isolate, so make sure to |
| 4474 // allocate the error message in old space. |
| 4475 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); |
| 4476 return ApiError::New(msg, Heap::kOld); |
| 4477 } |
| 4478 |
| 4479 const char* version = reinterpret_cast<const char*>(CurrentBufferAddress()); |
| 4480 ASSERT(version != NULL); |
| 4481 if (strncmp(version, expected_version, version_len)) { |
| 4482 const intptr_t kMessageBufferSize = 256; |
| 4483 char message_buffer[kMessageBufferSize]; |
| 4484 char* actual_version = OS::StrNDup(version, version_len); |
| 4485 OS::SNPrint(message_buffer, |
| 4486 kMessageBufferSize, |
| 4487 "Wrong %s snapshot version, expected '%s' found '%s'", |
| 4488 (Snapshot::IsFull(kind_)) ? "full" : "script", |
| 4489 expected_version, |
| 4490 actual_version); |
| 4491 free(actual_version); |
| 4492 // This can also fail while bringing up the VM isolate, so make sure to |
| 4493 // allocate the error message in old space. |
| 4494 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); |
| 4495 return ApiError::New(msg, Heap::kOld); |
| 4496 } |
| 4497 Advance(version_len); |
| 4498 |
| 4499 const char* expected_features = Dart::FeaturesString(kind_); |
| 4500 ASSERT(expected_features != NULL); |
| 4501 const intptr_t expected_len = strlen(expected_features); |
| 4502 |
| 4503 const char* features = reinterpret_cast<const char*>(CurrentBufferAddress()); |
| 4504 ASSERT(features != NULL); |
| 4505 intptr_t buffer_len = OS::StrNLen(features, PendingBytes()); |
| 4506 if ((buffer_len != expected_len) || |
| 4507 strncmp(features, expected_features, expected_len)) { |
| 4508 const intptr_t kMessageBufferSize = 256; |
| 4509 char message_buffer[kMessageBufferSize]; |
| 4510 char* actual_features = OS::StrNDup(features, buffer_len < 128 ? buffer_len |
| 4511 : 128); |
| 4512 OS::SNPrint(message_buffer, |
| 4513 kMessageBufferSize, |
| 4514 "Wrong features in snapshot, expected '%s' found '%s'", |
| 4515 expected_features, |
| 4516 actual_features); |
| 4517 free(const_cast<char*>(expected_features)); |
| 4518 free(actual_features); |
| 4519 // This can also fail while bringing up the VM isolate, so make sure to |
| 4520 // allocate the error message in old space. |
| 4521 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); |
| 4522 return ApiError::New(msg, Heap::kOld); |
| 4523 } |
| 4524 free(const_cast<char*>(expected_features)); |
| 4525 Advance(expected_len + 1); |
| 4526 return ApiError::null(); |
| 4527 } |
| 4528 |
| 4529 |
| 4530 void Deserializer::Prepare() { |
| 4531 num_objects_ = Read<int32_t>(); |
| 4532 num_clusters_ = Read<int32_t>(); |
| 4533 |
| 4534 clusters_ = new DeserializationCluster*[num_clusters_]; |
| 4535 refs_ = Array::New(num_objects_ + 1, Heap::kOld); |
| 4536 } |
| 4537 |
| 4538 |
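| // Mirrors Serializer::Serialize: allocate every object in the first pass, then |
| // fill in object contents in the second. |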
| 4539 void Deserializer::Deserialize() { |
| 4540 // TODO(rmacnak): Verify num of base objects. |
| 4541 |
| 4542 { |
| 4543 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), |
| 4544 Timeline::GetIsolateStream(), "ReadAlloc")); |
| 4545 for (intptr_t i = 0; i < num_clusters_; i++) { |
| 4546 clusters_[i] = ReadCluster(); |
| 4547 clusters_[i]->ReadAlloc(this); |
| 4548 #if defined(DEBUG) |
| 4549 intptr_t serializers_next_ref_index_ = Read<intptr_t>(); |
| 4550 ASSERT(serializers_next_ref_index_ == next_ref_index_); |
| 4551 #endif |
| 4552 } |
| 4553 } |
| 4554 |
| 4555 // We should have completely filled the ref array. |
| 4556 ASSERT((next_ref_index_ - 1) == num_objects_); |
| 4557 |
| 4558 { |
| 4559 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), |
| 4560 Timeline::GetIsolateStream(), "ReadFill")); |
| 4561 for (intptr_t i = 0; i < num_clusters_; i++) { |
| 4562 clusters_[i]->ReadFill(this); |
| 4563 #if defined(DEBUG) |
| 4564 intptr_t section_marker = Read<intptr_t>(); |
| 4565 ASSERT(section_marker == kSectionMarker); |
| 4566 #endif |
| 4567 } |
| 4568 } |
| 4569 } |
| 4570 |
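| // Holds the old-space data lock for the duration of deserialization so that |
| // AllocateUninitialized can bump-allocate without taking the lock per object. |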
| 4571 class HeapLocker : public StackResource { |
| 4572 public: |
| 4573 HeapLocker(Thread* thread, PageSpace* page_space) |
| 4574 : StackResource(thread), page_space_(page_space) { |
| 4575 page_space_->AcquireDataLock(); |
| 4576 } |
| 4577 ~HeapLocker() { |
| 4578 page_space_->ReleaseDataLock(); |
| 4579 } |
| 4580 |
| 4581 private: |
| 4582 PageSpace* page_space_; |
| 4583 }; |
| 4584 |
| 4585 |
| 4586 void Deserializer::AddVMIsolateBaseObjects() { |
| 4587 // These objects are always allocated by Object::InitOnce, so they are not |
| 4588 // written into the snapshot. |
| 4589 |
| 4590 AddBaseObject(Object::null()); |
| 4591 AddBaseObject(Object::sentinel().raw()); |
| 4592 AddBaseObject(Object::transition_sentinel().raw()); |
| 4593 AddBaseObject(Object::empty_array().raw()); |
| 4594 AddBaseObject(Object::zero_array().raw()); |
| 4595 AddBaseObject(Object::dynamic_type().raw()); |
| 4596 AddBaseObject(Object::void_type().raw()); |
| 4597 AddBaseObject(Bool::True().raw()); |
| 4598 AddBaseObject(Bool::False().raw()); |
| 4599 AddBaseObject(Object::extractor_parameter_types().raw()); |
| 4600 AddBaseObject(Object::extractor_parameter_names().raw()); |
| 4601 AddBaseObject(Object::empty_context_scope().raw()); |
| 4602 AddBaseObject(Object::empty_descriptors().raw()); |
| 4603 AddBaseObject(Object::empty_var_descriptors().raw()); |
| 4604 AddBaseObject(Object::empty_exception_handlers().raw()); |
| 4605 |
| 4606 for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) { |
| 4607 AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i]); |
| 4608 } |
| 4609 for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) { |
| 4610 AddBaseObject(ICData::cached_icdata_arrays_[i]); |
| 4611 } |
| 4612 |
| 4613 ClassTable* table = isolate()->class_table(); |
| 4614 for (intptr_t cid = kClassCid; cid <= kUnwindErrorCid; cid++) { |
| 4615 // Error has no class object. |
| 4616 if (cid != kErrorCid) { |
| 4617 ASSERT(table->HasValidClassAt(cid)); |
| 4618 AddBaseObject(table->At(cid)); |
| 4619 } |
| 4620 } |
| 4621 AddBaseObject(table->At(kDynamicCid)); |
| 4622 AddBaseObject(table->At(kVoidCid)); |
| 4623 } |
| 4624 |
| 4625 |
| 4626 void Deserializer::ReadVMSnapshot() { |
| 4627 Array& symbol_table = Array::Handle(zone_); |
| 4628 Array& refs = Array::Handle(zone_); |
| 4629 Prepare(); |
| 4630 |
| 4631 { |
| 4632 NoSafepointScope no_safepoint; |
| 4633 HeapLocker hl(thread(), heap_->old_space()); |
| 4634 |
| 4635 AddVMIsolateBaseObjects(); |
| 4636 |
| 4637 Deserialize(); |
| 4638 |
| 4639 // Read roots. |
| 4640 symbol_table ^= ReadRef(); |
| 4641 isolate()->object_store()->set_symbol_table(symbol_table); |
| 4642 ReadRef(); // Script list. |
| 4643 if (Snapshot::IncludesCode(kind_)) { |
| 4644 StubCode::ReadRef(this); |
| 4645 } |
| 4646 |
| 4647 #if defined(DEBUG) |
| 4648 intptr_t section_marker = Read<intptr_t>(); |
| 4649 ASSERT(section_marker == kSectionMarker); |
| 4650 #endif |
| 4651 |
| 4652 refs = refs_; |
| 4653 refs_ = NULL; |
| 4654 } |
| 4655 |
| 4656 Symbols::InitOnceFromSnapshot(isolate()); |
| 4657 |
| 4658 Object::set_vm_isolate_snapshot_object_table(refs); |
| 4659 |
| 4660 #if defined(DEBUG) |
| 4661 isolate()->ValidateClassTable(); |
| 4662 #endif |
| 4663 } |
| 4664 |
| 4665 void Deserializer::ReadFullSnapshot(ObjectStore* object_store) { |
| 4666 Array& refs = Array::Handle(); |
| 4667 Prepare(); |
| 4668 |
| 4669 { |
| 4670 NoSafepointScope no_safepoint; |
| 4671 HeapLocker hl(thread(), heap_->old_space()); |
| 4672 |
| 4673 // N.B.: Skipping index 0 because ref 0 is illegal. |
| 4674 const Array& base_objects = Object::vm_isolate_snapshot_object_table(); |
| 4675 for (intptr_t i = 1; i < base_objects.Length(); i++) { |
| 4676 AddBaseObject(base_objects.At(i)); |
| 4677 } |
| 4678 |
| 4679 Deserialize(); |
| 4680 |
| 4681 // Read roots. |
| 4682 RawObject** from = object_store->from(); |
| 4683 RawObject** to = object_store->to_snapshot(kind_); |
| 4684 for (RawObject** p = from; p <= to; p++) { |
| 4685 *p = ReadRef(); |
| 4686 } |
| 4687 |
| 4688 #if defined(DEBUG) |
| 4689 intptr_t section_marker = Read<intptr_t>(); |
| 4690 ASSERT(section_marker == kSectionMarker); |
| 4691 #endif |
| 4692 |
| 4693 refs = refs_; |
| 4694 refs_ = NULL; |
| 4695 } |
| 4696 |
| 4697 #if defined(DEBUG) |
| 4698 Isolate* isolate = thread()->isolate(); |
| 4699 isolate->ValidateClassTable(); |
| 4700 isolate->heap()->Verify(); |
| 4701 #endif |
| 4702 |
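|   // Let each cluster run its post-load hook now that every object has been |
|   // deserialized (e.g. to fix up state that depends on the full heap). |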
| 4703 { |
| 4704 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), |
| 4705 Timeline::GetIsolateStream(), "PostLoad")); |
| 4706 for (intptr_t i = 0; i < num_clusters_; i++) { |
| 4707 clusters_[i]->PostLoad(refs, kind_, zone_); |
| 4708 } |
| 4709 } |
| 4710 |
| 4711   // Set up the native resolver for the bootstrap implementation. |
| 4712 Bootstrap::SetupNativeResolver(); |
| 4713 } |
| 4714 |
| 4715 |
| 4716 // An object visitor that iterates over all the script objects in the heap and |
| 4717 // either counts them or collects them into an array. This is used during full |
| 4718 // snapshot generation of the VM isolate to write out all script objects and |
| 4719 // their accompanying token streams. |
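| // The writer runs it twice: once without a target array to count the scripts, |
| // and again with a preallocated array to collect them. |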
| 4720 class ScriptVisitor : public ObjectVisitor { |
| 4721 public: |
| 4722 explicit ScriptVisitor(Thread* thread) : |
| 4723       obj_handle_(Object::Handle(thread->zone())), |
| 4724 count_(0), |
| 4725 scripts_(NULL) {} |
| 4726 |
| 4727 ScriptVisitor(Thread* thread, const Array* scripts) : |
| 4728       obj_handle_(Object::Handle(thread->zone())), |
| 4729 count_(0), |
| 4730 scripts_(scripts) {} |
| 4731 |
| 4732 void VisitObject(RawObject* obj) { |
| 4733 if (obj->IsScript()) { |
| 4734 if (scripts_ != NULL) { |
| 4735         obj_handle_ = obj; |
| 4736         scripts_->SetAt(count_, obj_handle_); |
| 4737 } |
| 4738 count_ += 1; |
| 4739 } |
| 4740 } |
| 4741 |
| 4742 intptr_t count() const { return count_; } |
| 4743 |
| 4744 private: |
| 4745   Object& obj_handle_; |
| 4746 intptr_t count_; |
| 4747 const Array* scripts_; |
| 4748 }; |
| 4749 |
| 4750 |
| 4752 FullSnapshotWriter::FullSnapshotWriter(Snapshot::Kind kind, |
| 4753 uint8_t** vm_isolate_snapshot_buffer, |
| 4754 uint8_t** isolate_snapshot_buffer, |
| 4755 ReAlloc alloc, |
| 4756 InstructionsWriter* instructions_writer) |
| 4757 : thread_(Thread::Current()), |
| 4758 kind_(kind), |
| 4759 vm_isolate_snapshot_buffer_(vm_isolate_snapshot_buffer), |
| 4760 isolate_snapshot_buffer_(isolate_snapshot_buffer), |
| 4761 alloc_(alloc), |
| 4762 vm_isolate_snapshot_size_(0), |
| 4763 isolate_snapshot_size_(0), |
| 4764 instructions_writer_(instructions_writer), |
| 4765 scripts_(Array::Handle(zone())), |
| 4766 saved_symbol_table_(Array::Handle(zone())), |
| 4767 new_vm_symbol_table_(Array::Handle(zone())) { |
| 4768 ASSERT(isolate_snapshot_buffer_ != NULL); |
| 4769 ASSERT(alloc_ != NULL); |
| 4770 ASSERT(isolate() != NULL); |
| 4771 ASSERT(ClassFinalizer::AllClassesFinalized()); |
| 4773 ASSERT(heap() != NULL); |
| 4774 ObjectStore* object_store = isolate()->object_store(); |
| 4775 ASSERT(object_store != NULL); |
| 4776 |
| 4777 #if defined(DEBUG) |
| 4778 // Ensure the class table is valid. |
| 4779 isolate()->ValidateClassTable(); |
| 4780 #endif |
| 4781 // Can't have any mutation happening while we're serializing. |
| 4782 ASSERT(isolate()->background_compiler() == NULL); |
| 4783 |
| 4784 if (vm_isolate_snapshot_buffer != NULL) { |
| 4785 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), |
| 4786 Timeline::GetIsolateStream(), "PrepareNewVMIsolate")); |
| 4787 |
| 4788 // Collect all the script objects and their accompanying token stream |
| 4789 // objects into an array so that we can write it out as part of the VM |
| 4790 // isolate snapshot. We first count the number of script objects, allocate |
| 4791 // an array and then fill it up with the script objects. |
| 4792 ScriptVisitor scripts_counter(thread()); |
| 4793 heap()->IterateOldObjects(&scripts_counter); |
| 4794 Dart::vm_isolate()->heap()->IterateOldObjects(&scripts_counter); |
| 4795 intptr_t count = scripts_counter.count(); |
| 4796 scripts_ = Array::New(count, Heap::kOld); |
| 4797 ScriptVisitor script_visitor(thread(), &scripts_); |
| 4798 heap()->IterateOldObjects(&script_visitor); |
| 4799 Dart::vm_isolate()->heap()->IterateOldObjects(&script_visitor); |
| 4800 ASSERT(script_visitor.count() == count); |
| 4801 |
| 4802 // Tuck away the current symbol table. |
| 4803 saved_symbol_table_ = object_store->symbol_table(); |
| 4804 |
| 4805 // Create a unified symbol table that will be written as the vm isolate's |
| 4806 // symbol table. |
| 4807 new_vm_symbol_table_ = Symbols::UnifiedSymbolTable(); |
| 4808 |
| 4809 // Create an empty symbol table that will be written as the isolate's symbol |
| 4810 // table. |
| 4811 Symbols::SetupSymbolTable(isolate()); |
| 4812 } else { |
| 4813 // Reuse the current vm isolate. |
| 4814 } |
| 4815 } |
| 4816 |
| 4817 FullSnapshotWriter::~FullSnapshotWriter() { |
| 4818 // We may run Dart code afterwards, restore the symbol table if needed. |
| 4819 if (!saved_symbol_table_.IsNull()) { |
| 4820 isolate()->object_store()->set_symbol_table(saved_symbol_table_); |
| 4821 saved_symbol_table_ = Array::null(); |
| 4822 } |
| 4823 new_vm_symbol_table_ = Array::null(); |
| 4824 scripts_ = Array::null(); |
| 4825 } |
| 4826 |
| 4827 |
| 4828 intptr_t FullSnapshotWriter::WriteVmIsolateSnapshot() { |
| 4829 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), |
| 4830 Timeline::GetIsolateStream(), "WriteVmIsolateSnapshot")); |
| 4831 |
| 4832 ASSERT(vm_isolate_snapshot_buffer_ != NULL); |
| 4833 Serializer serializer(thread(), |
| 4834 kind_, |
| 4835 vm_isolate_snapshot_buffer_, |
| 4836 alloc_, |
| 4837 kInitialSize, |
| 4838 instructions_writer_); |
| 4839 |
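|   // Space for the snapshot header is reserved up front and filled in by |
|   // FillHeader once the body has been written and the final size is known. |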
| 4840 serializer.ReserveHeader(); |
| 4841 serializer.WriteVersionAndFeatures(); |
| 4842   /* |
| 4843    * Now write out the following: |
| 4844    * - the symbol table |
| 4845    * - all the scripts and token streams for these scripts |
| 4846    * - the stub code (precompiled snapshots only) |
| 4847    */ |
| 4848 intptr_t num_objects = serializer.WriteVMSnapshot(new_vm_symbol_table_, |
| 4849 scripts_); |
| 4850 serializer.FillHeader(serializer.kind()); |
| 4851 |
| 4852 vm_isolate_snapshot_size_ = serializer.bytes_written(); |
| 4853 return num_objects; |
| 4854 } |
| 4855 |
| 4856 |
| 4857 void FullSnapshotWriter::WriteIsolateFullSnapshot( |
| 4858 intptr_t num_base_objects) { |
| 4859 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), |
| 4860 Timeline::GetIsolateStream(), "WriteIsolateFullSnapshot")); |
| 4861 |
| 4862 Serializer serializer(thread(), |
| 4863 kind_, |
| 4864 isolate_snapshot_buffer_, |
| 4865 alloc_, |
| 4866 kInitialSize, |
| 4867 instructions_writer_); |
| 4868 ObjectStore* object_store = isolate()->object_store(); |
| 4869 ASSERT(object_store != NULL); |
| 4870 |
| 4871 serializer.ReserveHeader(); |
| 4872 serializer.WriteVersionAndFeatures(); |
| 4873 serializer.WriteFullSnapshot(num_base_objects, object_store); |
| 4874 serializer.FillHeader(serializer.kind()); |
| 4875 |
| 4876 isolate_snapshot_size_ = serializer.bytes_written(); |
| 4877 } |
| 4878 |
| 4879 |
| 4880 void FullSnapshotWriter::WriteFullSnapshot() { |
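|   // If a new VM isolate snapshot is written, the objects it contains become |
|   // the base objects of the isolate snapshot, so the isolate snapshot can |
|   // refer back to them. |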
| 4881 intptr_t num_base_objects; |
| 4882 if (vm_isolate_snapshot_buffer() != NULL) { |
| 4883 num_base_objects = WriteVmIsolateSnapshot(); |
| 4884 ASSERT(num_base_objects != 0); |
| 4885 } else { |
| 4886 num_base_objects = 0; |
| 4887 } |
| 4888 |
| 4889 WriteIsolateFullSnapshot(num_base_objects); |
| 4890 |
| 4891 if (Snapshot::IncludesCode(kind_)) { |
| 4892 instructions_writer_->Write(); |
| 4893 |
| 4894 OS::Print("VMIsolate(CodeSize): %" Pd "\n", VmIsolateSnapshotSize()); |
| 4895 OS::Print("Isolate(CodeSize): %" Pd "\n", IsolateSnapshotSize()); |
| 4896 OS::Print("Instructions(CodeSize): %" Pd "\n", |
| 4897 instructions_writer_->binary_size()); |
| 4898 intptr_t total = VmIsolateSnapshotSize() + |
| 4899 IsolateSnapshotSize() + |
| 4900 instructions_writer_->binary_size(); |
| 4901 OS::Print("Total(CodeSize): %" Pd "\n", total); |
| 4902 } |
| 4903 } |
| 4904 |
| 4905 |
| 4906 RawApiError* IsolateSnapshotReader::ReadFullSnapshot() { |
| 4907 Deserializer deserializer(thread_, |
| 4908 kind_, |
| 4909 buffer_, |
| 4910 size_, |
| 4911 instructions_buffer_, |
| 4912 data_buffer_); |
| 4913 |
| 4914 RawApiError* error = deserializer.VerifyVersionAndFeatures(); |
| 4915 if (error != ApiError::null()) { |
| 4916 return error; |
| 4917 } |
| 4918 |
| 4919 deserializer.ReadFullSnapshot(thread_->isolate()->object_store()); |
| 4920 |
| 4921 return ApiError::null(); |
| 4922 } |
| 4923 |
| 4924 |
| 4925 RawApiError* VmIsolateSnapshotReader::ReadVmIsolateSnapshot() { |
| 4926 Deserializer deserializer(thread_, |
| 4927 kind_, |
| 4928 buffer_, |
| 4929 size_, |
| 4930 instructions_buffer_, |
| 4931 data_buffer_); |
| 4932 |
| 4933 RawApiError* error = deserializer.VerifyVersionAndFeatures(); |
| 4934 if (error != ApiError::null()) { |
| 4935 return error; |
| 4936 } |
| 4937 |
| 4938 deserializer.ReadVMSnapshot(); |
| 4939 |
| 4940 Dart::set_instructions_snapshot_buffer(instructions_buffer_); |
| 4941 Dart::set_data_snapshot_buffer(data_buffer_); |
| 4942 |
| 4943 return ApiError::null(); |
| 4944 } |
| 4945 |
| 4946 } // namespace dart |