OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/snapshot.h" | 5 #include "vm/snapshot.h" |
6 | 6 |
7 #include "platform/assert.h" | 7 #include "platform/assert.h" |
8 #include "vm/bootstrap.h" | 8 #include "vm/bootstrap.h" |
9 #include "vm/class_finalizer.h" | 9 #include "vm/class_finalizer.h" |
10 #include "vm/dart.h" | 10 #include "vm/dart.h" |
11 #include "vm/dart_entry.h" | 11 #include "vm/dart_entry.h" |
12 #include "vm/exceptions.h" | 12 #include "vm/exceptions.h" |
13 #include "vm/heap.h" | 13 #include "vm/heap.h" |
14 #include "vm/lockers.h" | 14 #include "vm/lockers.h" |
15 #include "vm/longjump.h" | 15 #include "vm/longjump.h" |
16 #include "vm/object.h" | 16 #include "vm/object.h" |
17 #include "vm/object_store.h" | 17 #include "vm/object_store.h" |
18 #include "vm/snapshot_ids.h" | 18 #include "vm/snapshot_ids.h" |
19 #include "vm/stub_code.h" | 19 #include "vm/stub_code.h" |
20 #include "vm/symbols.h" | 20 #include "vm/symbols.h" |
21 #include "vm/timeline.h" | 21 #include "vm/timeline.h" |
22 #include "vm/version.h" | 22 #include "vm/version.h" |
23 | 23 |
24 // We currently only expect the Dart mutator to read snapshots. | 24 // We currently only expect the Dart mutator to read snapshots. |
25 #define ASSERT_NO_SAFEPOINT_SCOPE() \ | 25 #define ASSERT_NO_SAFEPOINT_SCOPE() \ |
26 isolate()->AssertCurrentThreadIsMutator(); \ | 26 isolate()->AssertCurrentThreadIsMutator(); \ |
27 ASSERT(thread()->no_safepoint_scope_depth() != 0) | 27 ASSERT(thread()->no_safepoint_scope_depth() != 0) |
28 | 28 |
29 namespace dart { | 29 namespace dart { |
30 | 30 |
31 static const int kNumInitialReferences = 64; | 31 static const int kNumInitialReferences = 64; |
32 | 32 |
33 | 33 |
34 static bool IsSingletonClassId(intptr_t class_id) { | 34 static bool IsSingletonClassId(intptr_t class_id) { |
35 // Check if this is a singleton object class which is shared by all isolates. | 35 // Check if this is a singleton object class which is shared by all isolates. |
36 return ((class_id >= kClassCid && class_id <= kUnwindErrorCid) || | 36 return ((class_id >= kClassCid && class_id <= kUnwindErrorCid) || |
37 (class_id >= kNullCid && class_id <= kVoidCid)); | 37 (class_id >= kNullCid && class_id <= kVoidCid)); |
(...skipping 15 matching lines...)
53 static bool IsObjectStoreTypeId(intptr_t index) { | 53 static bool IsObjectStoreTypeId(intptr_t index) { |
54 // Check if this is a type which is stored in the object store. | 54 // Check if this is a type which is stored in the object store. |
55 return (index >= kObjectType && index <= kArrayType); | 55 return (index >= kObjectType && index <= kArrayType); |
56 } | 56 } |
57 | 57 |
58 | 58 |
59 static bool IsSplitClassId(intptr_t class_id) { | 59 static bool IsSplitClassId(intptr_t class_id) { |
60 // Return whether this class is serialized in two steps: first a reference, | 60 // Return whether this class is serialized in two steps: first a reference, |
61 // with sufficient information to allocate a correctly sized object, and then | 61 // with sufficient information to allocate a correctly sized object, and then |
62 // later inline with complete contents. | 62 // later inline with complete contents. |
63 return class_id >= kNumPredefinedCids || | 63 return class_id >= kNumPredefinedCids || class_id == kArrayCid || |
64 class_id == kArrayCid || | 64 class_id == kImmutableArrayCid || class_id == kObjectPoolCid || |
65 class_id == kImmutableArrayCid || | |
66 class_id == kObjectPoolCid || | |
67 RawObject::IsImplicitFieldClassId(class_id); | 65 RawObject::IsImplicitFieldClassId(class_id); |
68 } | 66 } |
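The comment above describes the split scheme: a reference carries just enough information (class id plus length) to allocate a correctly sized object, and the contents arrive later, so back references can form cycles. A minimal standalone sketch of that idea, with assumed names and none of the VM's actual reader machinery:

  #include <cassert>
  #include <cstddef>
  #include <vector>

  struct FakeArray {
    std::vector<FakeArray*> slots;
    explicit FakeArray(std::size_t len) : slots(len, nullptr) {}
  };

  int main() {
    std::vector<FakeArray*> back_refs;
    // Pass 1: "references" -- the length alone is enough to allocate.
    back_refs.push_back(new FakeArray(1));  // object id 0
    back_refs.push_back(new FakeArray(1));  // object id 1
    // Pass 2: "inline contents" -- slots are patched via the back-ref table,
    // so the two arrays can point at each other without recursion.
    back_refs[0]->slots[0] = back_refs[1];
    back_refs[1]->slots[0] = back_refs[0];
    assert(back_refs[0]->slots[0]->slots[0] == back_refs[0]);
    delete back_refs[0];
    delete back_refs[1];
    return 0;
  }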
69 | 67 |
70 | 68 |
71 static intptr_t ClassIdFromObjectId(intptr_t object_id) { | 69 static intptr_t ClassIdFromObjectId(intptr_t object_id) { |
72 ASSERT(object_id > kClassIdsOffset); | 70 ASSERT(object_id > kClassIdsOffset); |
73 intptr_t class_id = (object_id - kClassIdsOffset); | 71 intptr_t class_id = (object_id - kClassIdsOffset); |
74 return class_id; | 72 return class_id; |
75 } | 73 } |
76 | 74 |
77 | 75 |
78 static intptr_t ObjectIdFromClassId(intptr_t class_id) { | 76 static intptr_t ObjectIdFromClassId(intptr_t class_id) { |
79 ASSERT((class_id > kIllegalCid) && (class_id < kNumPredefinedCids)); | 77 ASSERT((class_id > kIllegalCid) && (class_id < kNumPredefinedCids)); |
80 ASSERT(!(RawObject::IsImplicitFieldClassId(class_id))); | 78 ASSERT(!(RawObject::IsImplicitFieldClassId(class_id))); |
81 return (class_id + kClassIdsOffset); | 79 return (class_id + kClassIdsOffset); |
82 } | 80 } |
83 | 81 |
84 | 82 |
85 static RawType* GetType(ObjectStore* object_store, intptr_t index) { | 83 static RawType* GetType(ObjectStore* object_store, intptr_t index) { |
86 switch (index) { | 84 switch (index) { |
87 case kObjectType: return object_store->object_type(); | 85 case kObjectType: |
88 case kNullType: return object_store->null_type(); | 86 return object_store->object_type(); |
89 case kFunctionType: return object_store->function_type(); | 87 case kNullType: |
90 case kNumberType: return object_store->number_type(); | 88 return object_store->null_type(); |
91 case kSmiType: return object_store->smi_type(); | 89 case kFunctionType: |
92 case kMintType: return object_store->mint_type(); | 90 return object_store->function_type(); |
93 case kDoubleType: return object_store->double_type(); | 91 case kNumberType: |
94 case kIntType: return object_store->int_type(); | 92 return object_store->number_type(); |
95 case kBoolType: return object_store->bool_type(); | 93 case kSmiType: |
96 case kStringType: return object_store->string_type(); | 94 return object_store->smi_type(); |
97 case kArrayType: return object_store->array_type(); | 95 case kMintType: |
98 default: break; | 96 return object_store->mint_type(); |
| 97 case kDoubleType: |
| 98 return object_store->double_type(); |
| 99 case kIntType: |
| 100 return object_store->int_type(); |
| 101 case kBoolType: |
| 102 return object_store->bool_type(); |
| 103 case kStringType: |
| 104 return object_store->string_type(); |
| 105 case kArrayType: |
| 106 return object_store->array_type(); |
| 107 default: |
| 108 break; |
99 } | 109 } |
100 UNREACHABLE(); | 110 UNREACHABLE(); |
101 return Type::null(); | 111 return Type::null(); |
102 } | 112 } |
103 | 113 |
104 | 114 |
105 static intptr_t GetTypeIndex( | 115 static intptr_t GetTypeIndex(ObjectStore* object_store, |
106 ObjectStore* object_store, const RawType* raw_type) { | 116 const RawType* raw_type) { |
107 ASSERT(raw_type->IsHeapObject()); | 117 ASSERT(raw_type->IsHeapObject()); |
108 if (raw_type == object_store->object_type()) { | 118 if (raw_type == object_store->object_type()) { |
109 return kObjectType; | 119 return kObjectType; |
110 } else if (raw_type == object_store->null_type()) { | 120 } else if (raw_type == object_store->null_type()) { |
111 return kNullType; | 121 return kNullType; |
112 } else if (raw_type == object_store->function_type()) { | 122 } else if (raw_type == object_store->function_type()) { |
113 return kFunctionType; | 123 return kFunctionType; |
114 } else if (raw_type == object_store->number_type()) { | 124 } else if (raw_type == object_store->number_type()) { |
115 return kNumberType; | 125 return kNumberType; |
116 } else if (raw_type == object_store->smi_type()) { | 126 } else if (raw_type == object_store->smi_type()) { |
(...skipping 39 matching lines...)
156 ASSERT((value & kSmiTagMask) == kSmiTag); | 166 ASSERT((value & kSmiTagMask) == kSmiTag); |
157 return reinterpret_cast<RawSmi*>(value); | 167 return reinterpret_cast<RawSmi*>(value); |
158 } | 168 } |
159 | 169 |
160 | 170 |
161 intptr_t BaseReader::ReadSmiValue() { | 171 intptr_t BaseReader::ReadSmiValue() { |
162 return Smi::Value(ReadAsSmi()); | 172 return Smi::Value(ReadAsSmi()); |
163 } | 173 } |
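ReadAsSmi and ReadSmiValue lean on the Smi tagging convention: a header whose low bit matches kSmiTag is itself the encoded small integer. A sketch of that encoding with assumed constants (one tag bit, value shifted left by one); the VM's real definitions of kSmiTag, kSmiTagMask and Smi::Value are authoritative:

  #include <cassert>
  #include <cstdint>

  constexpr intptr_t kSmiTag = 0;       // assumed for illustration
  constexpr intptr_t kSmiTagMask = 1;
  constexpr intptr_t kSmiTagShift = 1;

  int main() {
    intptr_t raw = 42 << kSmiTagShift;        // encode: shift left, low bit 0
    assert((raw & kSmiTagMask) == kSmiTag);   // the tag test used above
    assert((raw >> kSmiTagShift) == 42);      // decode recovers the value
    return 0;
  }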
164 | 174 |
165 | 175 |
166 SnapshotReader::SnapshotReader( | 176 SnapshotReader::SnapshotReader(const uint8_t* buffer, |
167 const uint8_t* buffer, | 177 intptr_t size, |
168 intptr_t size, | 178 Snapshot::Kind kind, |
169 Snapshot::Kind kind, | 179 ZoneGrowableArray<BackRefNode>* backward_refs, |
170 ZoneGrowableArray<BackRefNode>* backward_refs, | 180 Thread* thread) |
171 Thread* thread) | |
172 : BaseReader(buffer, size), | 181 : BaseReader(buffer, size), |
173 kind_(kind), | 182 kind_(kind), |
174 thread_(thread), | 183 thread_(thread), |
175 zone_(thread->zone()), | 184 zone_(thread->zone()), |
176 heap_(isolate()->heap()), | 185 heap_(isolate()->heap()), |
177 old_space_(thread_->isolate()->heap()->old_space()), | 186 old_space_(thread_->isolate()->heap()->old_space()), |
178 cls_(Class::Handle(zone_)), | 187 cls_(Class::Handle(zone_)), |
179 obj_(Object::Handle(zone_)), | 188 obj_(Object::Handle(zone_)), |
180 pobj_(PassiveObject::Handle(zone_)), | 189 pobj_(PassiveObject::Handle(zone_)), |
181 array_(Array::Handle(zone_)), | 190 array_(Array::Handle(zone_)), |
182 field_(Field::Handle(zone_)), | 191 field_(Field::Handle(zone_)), |
183 str_(String::Handle(zone_)), | 192 str_(String::Handle(zone_)), |
184 library_(Library::Handle(zone_)), | 193 library_(Library::Handle(zone_)), |
185 type_(AbstractType::Handle(zone_)), | 194 type_(AbstractType::Handle(zone_)), |
186 type_arguments_(TypeArguments::Handle(zone_)), | 195 type_arguments_(TypeArguments::Handle(zone_)), |
187 tokens_(GrowableObjectArray::Handle(zone_)), | 196 tokens_(GrowableObjectArray::Handle(zone_)), |
188 stream_(TokenStream::Handle(zone_)), | 197 stream_(TokenStream::Handle(zone_)), |
189 data_(ExternalTypedData::Handle(zone_)), | 198 data_(ExternalTypedData::Handle(zone_)), |
190 typed_data_(TypedData::Handle(zone_)), | 199 typed_data_(TypedData::Handle(zone_)), |
191 function_(Function::Handle(zone_)), | 200 function_(Function::Handle(zone_)), |
192 error_(UnhandledException::Handle(zone_)), | 201 error_(UnhandledException::Handle(zone_)), |
193 max_vm_isolate_object_id_( | 202 max_vm_isolate_object_id_( |
194 (Snapshot::IsFull(kind)) ? | 203 (Snapshot::IsFull(kind)) |
195 Object::vm_isolate_snapshot_object_table().Length() : 0), | 204 ? Object::vm_isolate_snapshot_object_table().Length() |
196 backward_references_(backward_refs) { | 205 : 0), |
197 } | 206 backward_references_(backward_refs) {} |
198 | 207 |
199 | 208 |
200 RawObject* SnapshotReader::ReadObject() { | 209 RawObject* SnapshotReader::ReadObject() { |
201 // Setup for long jump in case there is an exception while reading. | 210 // Setup for long jump in case there is an exception while reading. |
202 LongJumpScope jump; | 211 LongJumpScope jump; |
203 if (setjmp(*jump.Set()) == 0) { | 212 if (setjmp(*jump.Set()) == 0) { |
204 PassiveObject& obj = | 213 PassiveObject& obj = |
205 PassiveObject::Handle(zone(), ReadObjectImpl(kAsInlinedObject)); | 214 PassiveObject::Handle(zone(), ReadObjectImpl(kAsInlinedObject)); |
206 for (intptr_t i = 0; i < backward_references_->length(); i++) { | 215 for (intptr_t i = 0; i < backward_references_->length(); i++) { |
207 if (!(*backward_references_)[i].is_deserialized()) { | 216 if (!(*backward_references_)[i].is_deserialized()) { |
(...skipping 112 matching lines...)
320 func = func.ImplicitClosureFunction(); | 329 func = func.ImplicitClosureFunction(); |
321 ASSERT(!func.IsNull()); | 330 ASSERT(!func.IsNull()); |
322 | 331 |
323 // Return the associated implicit static closure. | 332 // Return the associated implicit static closure. |
324 obj = func.ImplicitStaticClosure(); | 333 obj = func.ImplicitStaticClosure(); |
325 return obj.raw(); | 334 return obj.raw(); |
326 } | 335 } |
327 | 336 |
328 | 337 |
329 intptr_t SnapshotReader::NextAvailableObjectId() const { | 338 intptr_t SnapshotReader::NextAvailableObjectId() const { |
330 return backward_references_->length() + | 339 return backward_references_->length() + kMaxPredefinedObjectIds + |
331 kMaxPredefinedObjectIds + max_vm_isolate_object_id_; | 340 max_vm_isolate_object_id_; |
332 } | 341 } |
333 | 342 |
334 | 343 |
335 void SnapshotReader::SetReadException(const char* msg) { | 344 void SnapshotReader::SetReadException(const char* msg) { |
336 const String& error_str = String::Handle(zone(), String::New(msg)); | 345 const String& error_str = String::Handle(zone(), String::New(msg)); |
337 const Array& args = Array::Handle(zone(), Array::New(1)); | 346 const Array& args = Array::Handle(zone(), Array::New(1)); |
338 args.SetAt(0, error_str); | 347 args.SetAt(0, error_str); |
339 Object& result = Object::Handle(zone()); | 348 Object& result = Object::Handle(zone()); |
340 const Library& library = Library::Handle(zone(), Library::CoreLibrary()); | 349 const Library& library = Library::Handle(zone(), Library::CoreLibrary()); |
341 result = DartLibraryCalls::InstanceCreate(library, | 350 result = DartLibraryCalls::InstanceCreate(library, Symbols::ArgumentError(), |
342 Symbols::ArgumentError(), | 351 Symbols::Dot(), args); |
343 Symbols::Dot(), | |
344 args); | |
345 const Stacktrace& stacktrace = Stacktrace::Handle(zone()); | 352 const Stacktrace& stacktrace = Stacktrace::Handle(zone()); |
346 const UnhandledException& error = UnhandledException::Handle( | 353 const UnhandledException& error = UnhandledException::Handle( |
347 zone(), UnhandledException::New(Instance::Cast(result), stacktrace)); | 354 zone(), UnhandledException::New(Instance::Cast(result), stacktrace)); |
348 thread()->long_jump_base()->Jump(1, error); | 355 thread()->long_jump_base()->Jump(1, error); |
349 } | 356 } |
350 | 357 |
351 | 358 |
352 RawObject* SnapshotReader::VmIsolateSnapshotObject(intptr_t index) const { | 359 RawObject* SnapshotReader::VmIsolateSnapshotObject(intptr_t index) const { |
353 return Object::vm_isolate_snapshot_object_table().At(index); | 360 return Object::vm_isolate_snapshot_object_table().At(index); |
354 } | 361 } |
355 | 362 |
356 | 363 |
357 bool SnapshotReader::is_vm_isolate() const { | 364 bool SnapshotReader::is_vm_isolate() const { |
358 return isolate() == Dart::vm_isolate(); | 365 return isolate() == Dart::vm_isolate(); |
359 } | 366 } |
360 | 367 |
361 | 368 |
362 RawObject* SnapshotReader::ReadObjectImpl(bool as_reference, | 369 RawObject* SnapshotReader::ReadObjectImpl(bool as_reference, |
363 intptr_t patch_object_id, | 370 intptr_t patch_object_id, |
364 intptr_t patch_offset) { | 371 intptr_t patch_offset) { |
365 int64_t header_value = Read<int64_t>(); | 372 int64_t header_value = Read<int64_t>(); |
366 if ((header_value & kSmiTagMask) == kSmiTag) { | 373 if ((header_value & kSmiTagMask) == kSmiTag) { |
367 return NewInteger(header_value); | 374 return NewInteger(header_value); |
368 } | 375 } |
369 ASSERT((header_value <= kIntptrMax) && (header_value >= kIntptrMin)); | 376 ASSERT((header_value <= kIntptrMax) && (header_value >= kIntptrMin)); |
370 return ReadObjectImpl(static_cast<intptr_t>(header_value), | 377 return ReadObjectImpl(static_cast<intptr_t>(header_value), as_reference, |
371 as_reference, | 378 patch_object_id, patch_offset); |
372 patch_object_id, | |
373 patch_offset); | |
374 } | 379 } |
375 | 380 |
376 | 381 |
377 RawObject* SnapshotReader::ReadObjectImpl(intptr_t header_value, | 382 RawObject* SnapshotReader::ReadObjectImpl(intptr_t header_value, |
378 bool as_reference, | 383 bool as_reference, |
379 intptr_t patch_object_id, | 384 intptr_t patch_object_id, |
380 intptr_t patch_offset) { | 385 intptr_t patch_offset) { |
381 if (IsVMIsolateObject(header_value)) { | 386 if (IsVMIsolateObject(header_value)) { |
382 return ReadVMIsolateObject(header_value); | 387 return ReadVMIsolateObject(header_value); |
383 } | 388 } |
384 if (SerializedHeaderTag::decode(header_value) == kObjectId) { | 389 if (SerializedHeaderTag::decode(header_value) == kObjectId) { |
385 return ReadIndexedObject(SerializedHeaderData::decode(header_value), | 390 return ReadIndexedObject(SerializedHeaderData::decode(header_value), |
386 patch_object_id, | 391 patch_object_id, patch_offset); |
387 patch_offset); | |
388 } | 392 } |
389 ASSERT(SerializedHeaderTag::decode(header_value) == kInlined); | 393 ASSERT(SerializedHeaderTag::decode(header_value) == kInlined); |
390 intptr_t object_id = SerializedHeaderData::decode(header_value); | 394 intptr_t object_id = SerializedHeaderData::decode(header_value); |
391 if (object_id == kOmittedObjectId) { | 395 if (object_id == kOmittedObjectId) { |
392 object_id = NextAvailableObjectId(); | 396 object_id = NextAvailableObjectId(); |
393 } | 397 } |
394 | 398 |
395 // Read the class header information. | 399 // Read the class header information. |
396 intptr_t class_header = Read<int32_t>(); | 400 intptr_t class_header = Read<int32_t>(); |
397 intptr_t tags = ReadTags(); | 401 intptr_t tags = ReadTags(); |
398 bool read_as_reference = as_reference && !RawObject::IsCanonical(tags); | 402 bool read_as_reference = as_reference && !RawObject::IsCanonical(tags); |
399 intptr_t header_id = SerializedHeaderData::decode(class_header); | 403 intptr_t header_id = SerializedHeaderData::decode(class_header); |
400 if (header_id == kInstanceObjectId) { | 404 if (header_id == kInstanceObjectId) { |
401 return ReadInstance(object_id, tags, read_as_reference); | 405 return ReadInstance(object_id, tags, read_as_reference); |
402 } else if (header_id == kStaticImplicitClosureObjectId) { | 406 } else if (header_id == kStaticImplicitClosureObjectId) { |
403 // We skip the tags that have been written as the implicit static | 407 // We skip the tags that have been written as the implicit static |
404 // closure is going to be created in this isolate or the canonical | 408 // closure is going to be created in this isolate or the canonical |
405 // version already created in the isolate will be used. | 409 // version already created in the isolate will be used. |
406 return ReadStaticImplicitClosure(object_id, class_header); | 410 return ReadStaticImplicitClosure(object_id, class_header); |
407 } | 411 } |
408 ASSERT((class_header & kSmiTagMask) != kSmiTag); | 412 ASSERT((class_header & kSmiTagMask) != kSmiTag); |
409 | 413 |
410 intptr_t class_id = LookupInternalClass(class_header); | 414 intptr_t class_id = LookupInternalClass(class_header); |
411 switch (class_id) { | 415 switch (class_id) { |
412 #define SNAPSHOT_READ(clazz) \ | 416 #define SNAPSHOT_READ(clazz) \ |
413 case clazz::kClassId: { \ | 417 case clazz::kClassId: { \ |
414 pobj_ = clazz::ReadFrom(this, object_id, tags, kind_, read_as_reference);\ | 418 pobj_ = clazz::ReadFrom(this, object_id, tags, kind_, read_as_reference); \ |
415 break; \ | 419 break; \ |
416 } | 420 } |
417 CLASS_LIST_NO_OBJECT(SNAPSHOT_READ) | 421 CLASS_LIST_NO_OBJECT(SNAPSHOT_READ) |
418 #undef SNAPSHOT_READ | 422 #undef SNAPSHOT_READ |
419 #define SNAPSHOT_READ(clazz) \ | 423 #define SNAPSHOT_READ(clazz) case kTypedData##clazz##Cid: |
420 case kTypedData##clazz##Cid: \ | |
421 | 424 |
422 CLASS_LIST_TYPED_DATA(SNAPSHOT_READ) { | 425 CLASS_LIST_TYPED_DATA(SNAPSHOT_READ) { |
423 tags = RawObject::ClassIdTag::update(class_id, tags); | 426 tags = RawObject::ClassIdTag::update(class_id, tags); |
424 pobj_ = TypedData::ReadFrom( | 427 pobj_ = |
425 this, object_id, tags, kind_, read_as_reference); | 428 TypedData::ReadFrom(this, object_id, tags, kind_, read_as_reference); |
426 break; | 429 break; |
427 } | 430 } |
428 #undef SNAPSHOT_READ | 431 #undef SNAPSHOT_READ |
429 #define SNAPSHOT_READ(clazz) \ | 432 #define SNAPSHOT_READ(clazz) case kExternalTypedData##clazz##Cid: |
430 case kExternalTypedData##clazz##Cid: \ | |
431 | 433 |
432 CLASS_LIST_TYPED_DATA(SNAPSHOT_READ) { | 434 CLASS_LIST_TYPED_DATA(SNAPSHOT_READ) { |
433 tags = RawObject::ClassIdTag::update(class_id, tags); | 435 tags = RawObject::ClassIdTag::update(class_id, tags); |
434 pobj_ = ExternalTypedData::ReadFrom(this, object_id, tags, kind_, true); | 436 pobj_ = ExternalTypedData::ReadFrom(this, object_id, tags, kind_, true); |
435 break; | 437 break; |
436 } | 438 } |
437 #undef SNAPSHOT_READ | 439 #undef SNAPSHOT_READ |
438 default: UNREACHABLE(); break; | 440 default: |
| 441 UNREACHABLE(); |
| 442 break; |
439 } | 443 } |
440 if (!read_as_reference) { | 444 if (!read_as_reference) { |
441 AddPatchRecord(object_id, patch_object_id, patch_offset); | 445 AddPatchRecord(object_id, patch_object_id, patch_offset); |
442 } | 446 } |
443 return pobj_.raw(); | 447 return pobj_.raw(); |
444 } | 448 } |
445 | 449 |
446 | 450 |
447 RawObject* SnapshotReader::ReadInstance(intptr_t object_id, | 451 RawObject* SnapshotReader::ReadInstance(intptr_t object_id, |
448 intptr_t tags, | 452 intptr_t tags, |
(...skipping 32 matching lines...)
481 ASSERT(next_field_offset > 0); | 485 ASSERT(next_field_offset > 0); |
482 // Instance::NextFieldOffset() returns the offset of the first field in | 486 // Instance::NextFieldOffset() returns the offset of the first field in |
483 // a Dart object. | 487 // a Dart object. |
484 bool read_as_reference = RawObject::IsCanonical(tags) ? false : true; | 488 bool read_as_reference = RawObject::IsCanonical(tags) ? false : true; |
485 intptr_t offset = Instance::NextFieldOffset(); | 489 intptr_t offset = Instance::NextFieldOffset(); |
486 intptr_t result_cid = result->GetClassId(); | 490 intptr_t result_cid = result->GetClassId(); |
487 while (offset < next_field_offset) { | 491 while (offset < next_field_offset) { |
488 pobj_ = ReadObjectImpl(read_as_reference); | 492 pobj_ = ReadObjectImpl(read_as_reference); |
489 result->SetFieldAtOffset(offset, pobj_); | 493 result->SetFieldAtOffset(offset, pobj_); |
490 if ((offset != type_argument_field_offset) && | 494 if ((offset != type_argument_field_offset) && |
491 (kind_ == Snapshot::kMessage) && | 495 (kind_ == Snapshot::kMessage) && FLAG_use_field_guards) { |
492 FLAG_use_field_guards) { | |
493 // TODO(fschneider): Consider hoisting these lookups out of the loop. | 496 // TODO(fschneider): Consider hoisting these lookups out of the loop. |
494 // This would involve creating a handle, since cls_ can't be reused | 497 // This would involve creating a handle, since cls_ can't be reused |
495 // across the call to ReadObjectImpl. | 498 // across the call to ReadObjectImpl. |
496 cls_ = isolate()->class_table()->At(result_cid); | 499 cls_ = isolate()->class_table()->At(result_cid); |
497 array_ = cls_.OffsetToFieldMap(); | 500 array_ = cls_.OffsetToFieldMap(); |
498 field_ ^= array_.At(offset >> kWordSizeLog2); | 501 field_ ^= array_.At(offset >> kWordSizeLog2); |
499 ASSERT(!field_.IsNull()); | 502 ASSERT(!field_.IsNull()); |
500 ASSERT(field_.Offset() == offset); | 503 ASSERT(field_.Offset() == offset); |
501 obj_ = pobj_.raw(); | 504 obj_ = pobj_.raw(); |
502 field_.RecordStore(obj_); | 505 field_.RecordStore(obj_); |
(...skipping 33 matching lines...)
536 return (*backward_references_)[index].reference(); | 539 return (*backward_references_)[index].reference(); |
537 } | 540 } |
538 return NULL; | 541 return NULL; |
539 } | 542 } |
540 | 543 |
541 | 544 |
542 class HeapLocker : public StackResource { | 545 class HeapLocker : public StackResource { |
543 public: | 546 public: |
544 HeapLocker(Thread* thread, PageSpace* page_space) | 547 HeapLocker(Thread* thread, PageSpace* page_space) |
545 : StackResource(thread), page_space_(page_space) { | 548 : StackResource(thread), page_space_(page_space) { |
546 page_space_->AcquireDataLock(); | 549 page_space_->AcquireDataLock(); |
547 } | 550 } |
548 ~HeapLocker() { | 551 ~HeapLocker() { page_space_->ReleaseDataLock(); } |
549 page_space_->ReleaseDataLock(); | |
550 } | |
551 | 552 |
552 private: | 553 private: |
553 PageSpace* page_space_; | 554 PageSpace* page_space_; |
554 }; | 555 }; |
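HeapLocker is a plain RAII guard: the constructor acquires the page-space data lock and the destructor releases it, so every exit path out of the reading code unlocks. A rough analogue using std::mutex as a stand-in for the VM's PageSpace lock (illustration only, not the VM type):

  #include <cstdio>
  #include <mutex>

  class ScopedDataLock {
   public:
    explicit ScopedDataLock(std::mutex* m) : mutex_(m) { mutex_->lock(); }
    ~ScopedDataLock() { mutex_->unlock(); }

   private:
    std::mutex* mutex_;
  };

  int main() {
    std::mutex page_space_lock;
    {
      ScopedDataLock locker(&page_space_lock);  // acquired on entry
      std::printf("allocating while the data lock is held\n");
    }                                           // released when scope ends
    return 0;
  }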
555 | 556 |
556 | 557 |
557 RawObject* SnapshotReader::ReadScriptSnapshot() { | 558 RawObject* SnapshotReader::ReadScriptSnapshot() { |
558 ASSERT(kind_ == Snapshot::kScript); | 559 ASSERT(kind_ == Snapshot::kScript); |
559 | 560 |
560 // First read the version string, and check that it matches. | 561 // First read the version string, and check that it matches. |
561 RawApiError* error = VerifyVersionAndFeatures(); | 562 RawApiError* error = VerifyVersionAndFeatures(); |
562 if (error != ApiError::null()) { | 563 if (error != ApiError::null()) { |
563 return error; | 564 return error; |
564 } | 565 } |
565 | 566 |
566 // The version string matches. Read the rest of the snapshot. | 567 // The version string matches. Read the rest of the snapshot. |
567 obj_ = ReadObject(); | 568 obj_ = ReadObject(); |
568 if (!obj_.IsLibrary()) { | 569 if (!obj_.IsLibrary()) { |
569 if (!obj_.IsError()) { | 570 if (!obj_.IsError()) { |
570 const intptr_t kMessageBufferSize = 128; | 571 const intptr_t kMessageBufferSize = 128; |
571 char message_buffer[kMessageBufferSize]; | 572 char message_buffer[kMessageBufferSize]; |
572 OS::SNPrint(message_buffer, | 573 OS::SNPrint(message_buffer, kMessageBufferSize, |
573 kMessageBufferSize, | |
574 "Invalid object %s found in script snapshot", | 574 "Invalid object %s found in script snapshot", |
575 obj_.ToCString()); | 575 obj_.ToCString()); |
576 const String& msg = String::Handle(String::New(message_buffer)); | 576 const String& msg = String::Handle(String::New(message_buffer)); |
577 obj_ = ApiError::New(msg); | 577 obj_ = ApiError::New(msg); |
578 } | 578 } |
579 } | 579 } |
580 return obj_.raw(); | 580 return obj_.raw(); |
581 } | 581 } |
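VerifyVersionAndFeatures below is essentially a prefix check on the raw buffer: enough pending bytes, an exact version string, then a NUL-terminated feature string. A minimal sketch of that check with a hypothetical buffer layout (the real strings come from Version::SnapshotString() and Dart::FeaturesString()):

  #include <cstddef>
  #include <cstdio>
  #include <cstring>

  static bool HasExpectedPrefix(const char* buffer, std::size_t available,
                                const char* expected) {
    const std::size_t len = std::strlen(expected);
    // Too few bytes, or a differing prefix, means a version mismatch.
    return available >= len && std::strncmp(buffer, expected, len) == 0;
  }

  int main() {
    const char snapshot[] = "9.9.9-dev\0release";  // hypothetical contents
    std::printf("%d\n", HasExpectedPrefix(snapshot, sizeof(snapshot), "9.9.9-dev"));
    std::printf("%d\n", HasExpectedPrefix(snapshot, sizeof(snapshot), "1.0.0"));
    return 0;
  }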
582 | 582 |
583 | 583 |
584 RawApiError* SnapshotReader::VerifyVersionAndFeatures() { | 584 RawApiError* SnapshotReader::VerifyVersionAndFeatures() { |
585 // If the version string doesn't match, return an error. | 585 // If the version string doesn't match, return an error. |
586 // Note: New things are allocated only if we're going to return an error. | 586 // Note: New things are allocated only if we're going to return an error. |
587 | 587 |
588 const char* expected_version = Version::SnapshotString(); | 588 const char* expected_version = Version::SnapshotString(); |
589 ASSERT(expected_version != NULL); | 589 ASSERT(expected_version != NULL); |
590 const intptr_t version_len = strlen(expected_version); | 590 const intptr_t version_len = strlen(expected_version); |
591 if (PendingBytes() < version_len) { | 591 if (PendingBytes() < version_len) { |
592 const intptr_t kMessageBufferSize = 128; | 592 const intptr_t kMessageBufferSize = 128; |
593 char message_buffer[kMessageBufferSize]; | 593 char message_buffer[kMessageBufferSize]; |
594 OS::SNPrint(message_buffer, | 594 OS::SNPrint(message_buffer, kMessageBufferSize, |
595 kMessageBufferSize, | |
596 "No full snapshot version found, expected '%s'", | 595 "No full snapshot version found, expected '%s'", |
597 expected_version); | 596 expected_version); |
598 // This can also fail while bringing up the VM isolate, so make sure to | 597 // This can also fail while bringing up the VM isolate, so make sure to |
599 // allocate the error message in old space. | 598 // allocate the error message in old space. |
600 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); | 599 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); |
601 return ApiError::New(msg, Heap::kOld); | 600 return ApiError::New(msg, Heap::kOld); |
602 } | 601 } |
603 | 602 |
604 const char* version = reinterpret_cast<const char*>(CurrentBufferAddress()); | 603 const char* version = reinterpret_cast<const char*>(CurrentBufferAddress()); |
605 ASSERT(version != NULL); | 604 ASSERT(version != NULL); |
606 if (strncmp(version, expected_version, version_len)) { | 605 if (strncmp(version, expected_version, version_len)) { |
607 const intptr_t kMessageBufferSize = 256; | 606 const intptr_t kMessageBufferSize = 256; |
608 char message_buffer[kMessageBufferSize]; | 607 char message_buffer[kMessageBufferSize]; |
609 char* actual_version = OS::StrNDup(version, version_len); | 608 char* actual_version = OS::StrNDup(version, version_len); |
610 OS::SNPrint(message_buffer, | 609 OS::SNPrint(message_buffer, kMessageBufferSize, |
611 kMessageBufferSize, | |
612 "Wrong %s snapshot version, expected '%s' found '%s'", | 610 "Wrong %s snapshot version, expected '%s' found '%s'", |
613 (Snapshot::IsFull(kind_)) ? "full" : "script", | 611 (Snapshot::IsFull(kind_)) ? "full" : "script", expected_version, |
614 expected_version, | |
615 actual_version); | 612 actual_version); |
616 free(actual_version); | 613 free(actual_version); |
617 // This can also fail while bringing up the VM isolate, so make sure to | 614 // This can also fail while bringing up the VM isolate, so make sure to |
618 // allocate the error message in old space. | 615 // allocate the error message in old space. |
619 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); | 616 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); |
620 return ApiError::New(msg, Heap::kOld); | 617 return ApiError::New(msg, Heap::kOld); |
621 } | 618 } |
622 Advance(version_len); | 619 Advance(version_len); |
623 | 620 |
624 const char* expected_features = Dart::FeaturesString(kind_); | 621 const char* expected_features = Dart::FeaturesString(kind_); |
625 ASSERT(expected_features != NULL); | 622 ASSERT(expected_features != NULL); |
626 const intptr_t expected_len = strlen(expected_features); | 623 const intptr_t expected_len = strlen(expected_features); |
627 | 624 |
628 const char* features = reinterpret_cast<const char*>(CurrentBufferAddress()); | 625 const char* features = reinterpret_cast<const char*>(CurrentBufferAddress()); |
629 ASSERT(features != NULL); | 626 ASSERT(features != NULL); |
630 intptr_t buffer_len = OS::StrNLen(features, PendingBytes()); | 627 intptr_t buffer_len = OS::StrNLen(features, PendingBytes()); |
631 if ((buffer_len != expected_len) || | 628 if ((buffer_len != expected_len) || |
632 strncmp(features, expected_features, expected_len)) { | 629 strncmp(features, expected_features, expected_len)) { |
633 const intptr_t kMessageBufferSize = 256; | 630 const intptr_t kMessageBufferSize = 256; |
634 char message_buffer[kMessageBufferSize]; | 631 char message_buffer[kMessageBufferSize]; |
635 char* actual_features = OS::StrNDup(features, buffer_len < 128 ? buffer_len | 632 char* actual_features = |
636 : 128); | 633 OS::StrNDup(features, buffer_len < 128 ? buffer_len : 128); |
637 OS::SNPrint(message_buffer, | 634 OS::SNPrint(message_buffer, kMessageBufferSize, |
638 kMessageBufferSize, | |
639 "Wrong features in snapshot, expected '%s' found '%s'", | 635 "Wrong features in snapshot, expected '%s' found '%s'", |
640 expected_features, | 636 expected_features, actual_features); |
641 actual_features); | |
642 free(const_cast<char*>(expected_features)); | 637 free(const_cast<char*>(expected_features)); |
643 free(actual_features); | 638 free(actual_features); |
644 // This can also fail while bringing up the VM isolate, so make sure to | 639 // This can also fail while bringing up the VM isolate, so make sure to |
645 // allocate the error message in old space. | 640 // allocate the error message in old space. |
646 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); | 641 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); |
647 return ApiError::New(msg, Heap::kOld); | 642 return ApiError::New(msg, Heap::kOld); |
648 } | 643 } |
649 free(const_cast<char*>(expected_features)); | 644 free(const_cast<char*>(expected_features)); |
650 Advance(expected_len + 1); | 645 Advance(expected_len + 1); |
651 return ApiError::null(); | 646 return ApiError::null(); |
(...skipping 35 matching lines...)
687 intptr_t heap_size = raw_object->Size(); | 682 intptr_t heap_size = raw_object->Size(); |
688 intptr_t offset = next_object_offset_; | 683 intptr_t offset = next_object_offset_; |
689 next_object_offset_ += heap_size; | 684 next_object_offset_ += heap_size; |
690 objects_.Add(ObjectData(raw_object)); | 685 objects_.Add(ObjectData(raw_object)); |
691 return offset; | 686 return offset; |
692 } | 687 } |
693 | 688 |
694 | 689 |
695 static void EnsureIdentifier(char* label) { | 690 static void EnsureIdentifier(char* label) { |
696 for (char c = *label; c != '\0'; c = *++label) { | 691 for (char c = *label; c != '\0'; c = *++label) { |
697 if (((c >= 'a') && (c <= 'z')) || | 692 if (((c >= 'a') && (c <= 'z')) || ((c >= 'A') && (c <= 'Z')) || |
698 ((c >= 'A') && (c <= 'Z')) || | |
699 ((c >= '0') && (c <= '9'))) { | 693 ((c >= '0') && (c <= '9'))) { |
700 continue; | 694 continue; |
701 } | 695 } |
702 *label = '_'; | 696 *label = '_'; |
703 } | 697 } |
704 } | 698 } |
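EnsureIdentifier rewrites, in place, every character outside [A-Za-z0-9] to '_' so that class and function names can be emitted as assembler labels further down. A standalone usage sketch with the same loop and a hypothetical input:

  #include <cstdio>

  static void EnsureIdentifier(char* label) {
    for (char c = *label; c != '\0'; c = *++label) {
      if (((c >= 'a') && (c <= 'z')) || ((c >= 'A') && (c <= 'Z')) ||
          ((c >= '0') && (c <= '9'))) {
        continue;
      }
      *label = '_';
    }
  }

  int main() {
    char label[] = "dart:core_List.add";  // hypothetical qualified name
    EnsureIdentifier(label);
    std::printf("%s\n", label);           // prints dart_core_List_add
    return 0;
  }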
705 | 699 |
706 | 700 |
707 void AssemblyInstructionsWriter::Write(uint8_t* vmisolate_buffer, | 701 void AssemblyInstructionsWriter::Write(uint8_t* vmisolate_buffer, |
708 intptr_t vmisolate_length, | 702 intptr_t vmisolate_length, |
709 uint8_t* isolate_buffer, | 703 uint8_t* isolate_buffer, |
710 intptr_t isolate_length) { | 704 intptr_t isolate_length) { |
711 Thread* thread = Thread::Current(); | 705 Thread* thread = Thread::Current(); |
712 Zone* zone = thread->zone(); | 706 Zone* zone = thread->zone(); |
713 NOT_IN_PRODUCT(TimelineDurationScope tds(thread, | 707 NOT_IN_PRODUCT(TimelineDurationScope tds(thread, Timeline::GetIsolateStream(), |
714 Timeline::GetIsolateStream(), "WriteInstructions")); | 708 "WriteInstructions")); |
715 | 709 |
716 // Handlify collected raw pointers as building the names below | 710 // Handlify collected raw pointers as building the names below |
717 // will allocate on the Dart heap. | 711 // will allocate on the Dart heap. |
718 for (intptr_t i = 0; i < instructions_.length(); i++) { | 712 for (intptr_t i = 0; i < instructions_.length(); i++) { |
719 InstructionsData& data = instructions_[i]; | 713 InstructionsData& data = instructions_[i]; |
720 data.insns_ = &Instructions::Handle(zone, data.raw_insns_); | 714 data.insns_ = &Instructions::Handle(zone, data.raw_insns_); |
721 ASSERT(data.raw_code_ != NULL); | 715 ASSERT(data.raw_code_ != NULL); |
722 data.code_ = &Code::Handle(zone, data.raw_code_); | 716 data.code_ = &Code::Handle(zone, data.raw_code_); |
723 } | 717 } |
724 for (intptr_t i = 0; i < objects_.length(); i++) { | 718 for (intptr_t i = 0; i < objects_.length(); i++) { |
(...skipping 38 matching lines...)
763 | 757 |
764 // Write Instructions with the mark and VM heap bits set. | 758 // Write Instructions with the mark and VM heap bits set. |
765 uword marked_tags = insns.raw_ptr()->tags_; | 759 uword marked_tags = insns.raw_ptr()->tags_; |
766 marked_tags = RawObject::VMHeapObjectTag::update(true, marked_tags); | 760 marked_tags = RawObject::VMHeapObjectTag::update(true, marked_tags); |
767 marked_tags = RawObject::MarkBit::update(true, marked_tags); | 761 marked_tags = RawObject::MarkBit::update(true, marked_tags); |
768 | 762 |
769 WriteWordLiteralText(marked_tags); | 763 WriteWordLiteralText(marked_tags); |
770 beginning += sizeof(uword); | 764 beginning += sizeof(uword); |
771 | 765 |
772 for (uword* cursor = reinterpret_cast<uword*>(beginning); | 766 for (uword* cursor = reinterpret_cast<uword*>(beginning); |
773 cursor < reinterpret_cast<uword*>(entry); | 767 cursor < reinterpret_cast<uword*>(entry); cursor++) { |
774 cursor++) { | |
775 WriteWordLiteralText(*cursor); | 768 WriteWordLiteralText(*cursor); |
776 } | 769 } |
777 } | 770 } |
778 | 771 |
779 // 2. Write a label at the entry point. | 772 // 2. Write a label at the entry point. |
780 owner = code.owner(); | 773 owner = code.owner(); |
781 if (owner.IsNull()) { | 774 if (owner.IsNull()) { |
782 const char* name = StubCode::NameOfStub(insns.UncheckedEntryPoint()); | 775 const char* name = StubCode::NameOfStub(insns.UncheckedEntryPoint()); |
783 assembly_stream_.Print("Precompiled_Stub_%s:\n", name); | 776 assembly_stream_.Print("Precompiled_Stub_%s:\n", name); |
784 } else if (owner.IsClass()) { | 777 } else if (owner.IsClass()) { |
785 str = Class::Cast(owner).Name(); | 778 str = Class::Cast(owner).Name(); |
786 const char* name = str.ToCString(); | 779 const char* name = str.ToCString(); |
787 EnsureIdentifier(const_cast<char*>(name)); | 780 EnsureIdentifier(const_cast<char*>(name)); |
788 assembly_stream_.Print("Precompiled_AllocationStub_%s_%" Pd ":\n", | 781 assembly_stream_.Print("Precompiled_AllocationStub_%s_%" Pd ":\n", name, |
789 name, i); | 782 i); |
790 } else if (owner.IsFunction()) { | 783 } else if (owner.IsFunction()) { |
791 const char* name = Function::Cast(owner).ToQualifiedCString(); | 784 const char* name = Function::Cast(owner).ToQualifiedCString(); |
792 EnsureIdentifier(const_cast<char*>(name)); | 785 EnsureIdentifier(const_cast<char*>(name)); |
793 assembly_stream_.Print("Precompiled_%s_%" Pd ":\n", name, i); | 786 assembly_stream_.Print("Precompiled_%s_%" Pd ":\n", name, i); |
794 } else { | 787 } else { |
795 UNREACHABLE(); | 788 UNREACHABLE(); |
796 } | 789 } |
797 | 790 |
798 { | 791 { |
799 // 3. Write from the entry point to the end. | 792 // 3. Write from the entry point to the end. |
800 NoSafepointScope no_safepoint; | 793 NoSafepointScope no_safepoint; |
801 uword beginning = reinterpret_cast<uword>(insns.raw()) - kHeapObjectTag; | 794 uword beginning = reinterpret_cast<uword>(insns.raw()) - kHeapObjectTag; |
802 uword entry = beginning + Instructions::HeaderSize(); | 795 uword entry = beginning + Instructions::HeaderSize(); |
803 uword payload_size = insns.size(); | 796 uword payload_size = insns.size(); |
804 payload_size = Utils::RoundUp(payload_size, OS::PreferredCodeAlignment()); | 797 payload_size = Utils::RoundUp(payload_size, OS::PreferredCodeAlignment()); |
805 uword end = entry + payload_size; | 798 uword end = entry + payload_size; |
806 | 799 |
807 ASSERT(Utils::IsAligned(beginning, sizeof(uint64_t))); | 800 ASSERT(Utils::IsAligned(beginning, sizeof(uint64_t))); |
808 ASSERT(Utils::IsAligned(entry, sizeof(uint64_t))); | 801 ASSERT(Utils::IsAligned(entry, sizeof(uint64_t))); |
809 ASSERT(Utils::IsAligned(end, sizeof(uint64_t))); | 802 ASSERT(Utils::IsAligned(end, sizeof(uint64_t))); |
810 | 803 |
811 for (uword* cursor = reinterpret_cast<uword*>(entry); | 804 for (uword* cursor = reinterpret_cast<uword*>(entry); |
812 cursor < reinterpret_cast<uword*>(end); | 805 cursor < reinterpret_cast<uword*>(end); cursor++) { |
813 cursor++) { | |
814 WriteWordLiteralText(*cursor); | 806 WriteWordLiteralText(*cursor); |
815 } | 807 } |
816 } | 808 } |
817 } | 809 } |
818 #if defined(TARGET_OS_LINUX) | 810 #if defined(TARGET_OS_LINUX) |
819 assembly_stream_.Print(".section .rodata\n"); | 811 assembly_stream_.Print(".section .rodata\n"); |
820 #elif defined(TARGET_OS_MACOS) | 812 #elif defined(TARGET_OS_MACOS) |
821 assembly_stream_.Print(".const\n"); | 813 assembly_stream_.Print(".const\n"); |
822 #else | 814 #else |
823 // Unsupported platform. | 815 // Unsupported platform. |
(...skipping 16 matching lines...)
840 uword start = reinterpret_cast<uword>(obj.raw()) - kHeapObjectTag; | 832 uword start = reinterpret_cast<uword>(obj.raw()) - kHeapObjectTag; |
841 uword end = start + obj.raw()->Size(); | 833 uword end = start + obj.raw()->Size(); |
842 | 834 |
843 // Write object header with the mark and VM heap bits set. | 835 // Write object header with the mark and VM heap bits set. |
844 uword marked_tags = obj.raw()->ptr()->tags_; | 836 uword marked_tags = obj.raw()->ptr()->tags_; |
845 marked_tags = RawObject::VMHeapObjectTag::update(true, marked_tags); | 837 marked_tags = RawObject::VMHeapObjectTag::update(true, marked_tags); |
846 marked_tags = RawObject::MarkBit::update(true, marked_tags); | 838 marked_tags = RawObject::MarkBit::update(true, marked_tags); |
847 WriteWordLiteralData(marked_tags); | 839 WriteWordLiteralData(marked_tags); |
848 start += sizeof(uword); | 840 start += sizeof(uword); |
849 for (uword* cursor = reinterpret_cast<uword*>(start); | 841 for (uword* cursor = reinterpret_cast<uword*>(start); |
850 cursor < reinterpret_cast<uword*>(end); | 842 cursor < reinterpret_cast<uword*>(end); cursor++) { |
851 cursor++) { | |
852 WriteWordLiteralData(*cursor); | 843 WriteWordLiteralData(*cursor); |
853 } | 844 } |
854 } | 845 } |
855 | 846 |
856 | 847 |
857 assembly_stream_.Print(".globl _kVmIsolateSnapshot\n"); | 848 assembly_stream_.Print(".globl _kVmIsolateSnapshot\n"); |
858 assembly_stream_.Print(".balign %" Pd ", 0\n", VirtualMemory::PageSize()); | 849 assembly_stream_.Print(".balign %" Pd ", 0\n", VirtualMemory::PageSize()); |
859 assembly_stream_.Print("_kVmIsolateSnapshot:\n"); | 850 assembly_stream_.Print("_kVmIsolateSnapshot:\n"); |
860 for (intptr_t i = 0; i < vmisolate_length; i++) { | 851 for (intptr_t i = 0; i < vmisolate_length; i++) { |
861 assembly_stream_.Print(".byte %" Pd "\n", vmisolate_buffer[i]); | 852 assembly_stream_.Print(".byte %" Pd "\n", vmisolate_buffer[i]); |
862 } | 853 } |
863 | 854 |
864 assembly_stream_.Print(".globl _kIsolateSnapshot\n"); | 855 assembly_stream_.Print(".globl _kIsolateSnapshot\n"); |
865 assembly_stream_.Print(".balign %" Pd ", 0\n", VirtualMemory::PageSize()); | 856 assembly_stream_.Print(".balign %" Pd ", 0\n", VirtualMemory::PageSize()); |
866 assembly_stream_.Print("_kIsolateSnapshot:\n"); | 857 assembly_stream_.Print("_kIsolateSnapshot:\n"); |
867 for (intptr_t i = 0; i < isolate_length; i++) { | 858 for (intptr_t i = 0; i < isolate_length; i++) { |
868 assembly_stream_.Print(".byte %" Pd "\n", isolate_buffer[i]); | 859 assembly_stream_.Print(".byte %" Pd "\n", isolate_buffer[i]); |
869 } | 860 } |
870 } | 861 } |
871 | 862 |
872 | 863 |
873 void BlobInstructionsWriter::Write(uint8_t* vmisolate_buffer, | 864 void BlobInstructionsWriter::Write(uint8_t* vmisolate_buffer, |
874 intptr_t vmisolate_len, | 865 intptr_t vmisolate_len, |
875 uint8_t* isolate_buffer, | 866 uint8_t* isolate_buffer, |
876 intptr_t isolate_length) { | 867 intptr_t isolate_length) { |
877 Thread* thread = Thread::Current(); | 868 Thread* thread = Thread::Current(); |
878 Zone* zone = thread->zone(); | 869 Zone* zone = thread->zone(); |
879 NOT_IN_PRODUCT(TimelineDurationScope tds(thread, | 870 NOT_IN_PRODUCT(TimelineDurationScope tds(thread, Timeline::GetIsolateStream(), |
880 Timeline::GetIsolateStream(), "WriteInstructions")); | 871 "WriteInstructions")); |
881 | 872 |
882 // Handlify collected raw pointers as building the names below | 873 // Handlify collected raw pointers as building the names below |
883 // will allocate on the Dart heap. | 874 // will allocate on the Dart heap. |
884 for (intptr_t i = 0; i < instructions_.length(); i++) { | 875 for (intptr_t i = 0; i < instructions_.length(); i++) { |
885 InstructionsData& data = instructions_[i]; | 876 InstructionsData& data = instructions_[i]; |
886 data.insns_ = &Instructions::Handle(zone, data.raw_insns_); | 877 data.insns_ = &Instructions::Handle(zone, data.raw_insns_); |
887 ASSERT(data.raw_code_ != NULL); | 878 ASSERT(data.raw_code_ != NULL); |
888 data.code_ = &Code::Handle(zone, data.raw_code_); | 879 data.code_ = &Code::Handle(zone, data.raw_code_); |
889 } | 880 } |
890 for (intptr_t i = 0; i < objects_.length(); i++) { | 881 for (intptr_t i = 0; i < objects_.length(); i++) { |
(...skipping 25 matching lines...)
916 | 907 |
917 // Write Instructions with the mark and VM heap bits set. | 908 // Write Instructions with the mark and VM heap bits set. |
918 uword marked_tags = insns.raw_ptr()->tags_; | 909 uword marked_tags = insns.raw_ptr()->tags_; |
919 marked_tags = RawObject::VMHeapObjectTag::update(true, marked_tags); | 910 marked_tags = RawObject::VMHeapObjectTag::update(true, marked_tags); |
920 marked_tags = RawObject::MarkBit::update(true, marked_tags); | 911 marked_tags = RawObject::MarkBit::update(true, marked_tags); |
921 | 912 |
922 instructions_blob_stream_.WriteWord(marked_tags); | 913 instructions_blob_stream_.WriteWord(marked_tags); |
923 beginning += sizeof(uword); | 914 beginning += sizeof(uword); |
924 | 915 |
925 for (uword* cursor = reinterpret_cast<uword*>(beginning); | 916 for (uword* cursor = reinterpret_cast<uword*>(beginning); |
926 cursor < reinterpret_cast<uword*>(entry); | 917 cursor < reinterpret_cast<uword*>(entry); cursor++) { |
927 cursor++) { | |
928 instructions_blob_stream_.WriteWord(*cursor); | 918 instructions_blob_stream_.WriteWord(*cursor); |
929 } | 919 } |
930 } | 920 } |
931 | 921 |
932 // 2. Write from the entry point to the end. | 922 // 2. Write from the entry point to the end. |
933 { | 923 { |
934 NoSafepointScope no_safepoint; | 924 NoSafepointScope no_safepoint; |
935 uword beginning = reinterpret_cast<uword>(insns.raw()) - kHeapObjectTag; | 925 uword beginning = reinterpret_cast<uword>(insns.raw()) - kHeapObjectTag; |
936 uword entry = beginning + Instructions::HeaderSize(); | 926 uword entry = beginning + Instructions::HeaderSize(); |
937 uword payload_size = insns.size(); | 927 uword payload_size = insns.size(); |
938 payload_size = Utils::RoundUp(payload_size, OS::PreferredCodeAlignment()); | 928 payload_size = Utils::RoundUp(payload_size, OS::PreferredCodeAlignment()); |
939 uword end = entry + payload_size; | 929 uword end = entry + payload_size; |
940 | 930 |
941 ASSERT(Utils::IsAligned(beginning, sizeof(uint64_t))); | 931 ASSERT(Utils::IsAligned(beginning, sizeof(uint64_t))); |
942 ASSERT(Utils::IsAligned(entry, sizeof(uint64_t))); | 932 ASSERT(Utils::IsAligned(entry, sizeof(uint64_t))); |
943 ASSERT(Utils::IsAligned(end, sizeof(uint64_t))); | 933 ASSERT(Utils::IsAligned(end, sizeof(uint64_t))); |
944 | 934 |
945 for (uword* cursor = reinterpret_cast<uword*>(entry); | 935 for (uword* cursor = reinterpret_cast<uword*>(entry); |
946 cursor < reinterpret_cast<uword*>(end); | 936 cursor < reinterpret_cast<uword*>(end); cursor++) { |
947 cursor++) { | |
948 instructions_blob_stream_.WriteWord(*cursor); | 937 instructions_blob_stream_.WriteWord(*cursor); |
949 } | 938 } |
950 } | 939 } |
951 } | 940 } |
952 | 941 |
953 rodata_blob_stream_.WriteWord(next_object_offset_); // Data length. | 942 rodata_blob_stream_.WriteWord(next_object_offset_); // Data length. |
954 COMPILE_ASSERT(OS::kMaxPreferredCodeAlignment >= kObjectAlignment); | 943 COMPILE_ASSERT(OS::kMaxPreferredCodeAlignment >= kObjectAlignment); |
955 while (!Utils::IsAligned(rodata_blob_stream_.bytes_written(), | 944 while (!Utils::IsAligned(rodata_blob_stream_.bytes_written(), |
956 OS::kMaxPreferredCodeAlignment)) { | 945 OS::kMaxPreferredCodeAlignment)) { |
957 rodata_blob_stream_.WriteWord(0); | 946 rodata_blob_stream_.WriteWord(0); |
958 } | 947 } |
959 | 948 |
960 for (intptr_t i = 0; i < objects_.length(); i++) { | 949 for (intptr_t i = 0; i < objects_.length(); i++) { |
961 const Object& obj = *objects_[i].obj_; | 950 const Object& obj = *objects_[i].obj_; |
962 | 951 |
963 NoSafepointScope no_safepoint; | 952 NoSafepointScope no_safepoint; |
964 uword start = reinterpret_cast<uword>(obj.raw()) - kHeapObjectTag; | 953 uword start = reinterpret_cast<uword>(obj.raw()) - kHeapObjectTag; |
965 uword end = start + obj.raw()->Size(); | 954 uword end = start + obj.raw()->Size(); |
966 | 955 |
967 // Write object header with the mark and VM heap bits set. | 956 // Write object header with the mark and VM heap bits set. |
968 uword marked_tags = obj.raw()->ptr()->tags_; | 957 uword marked_tags = obj.raw()->ptr()->tags_; |
969 marked_tags = RawObject::VMHeapObjectTag::update(true, marked_tags); | 958 marked_tags = RawObject::VMHeapObjectTag::update(true, marked_tags); |
970 marked_tags = RawObject::MarkBit::update(true, marked_tags); | 959 marked_tags = RawObject::MarkBit::update(true, marked_tags); |
971 rodata_blob_stream_.WriteWord(marked_tags); | 960 rodata_blob_stream_.WriteWord(marked_tags); |
972 start += sizeof(uword); | 961 start += sizeof(uword); |
973 for (uword* cursor = reinterpret_cast<uword*>(start); | 962 for (uword* cursor = reinterpret_cast<uword*>(start); |
974 cursor < reinterpret_cast<uword*>(end); | 963 cursor < reinterpret_cast<uword*>(end); cursor++) { |
975 cursor++) { | |
976 rodata_blob_stream_.WriteWord(*cursor); | 964 rodata_blob_stream_.WriteWord(*cursor); |
977 } | 965 } |
978 } | 966 } |
979 } | 967 } |
980 | 968 |
981 | 969 |
982 uword InstructionsReader::GetInstructionsAt(int32_t offset) { | 970 uword InstructionsReader::GetInstructionsAt(int32_t offset) { |
983 ASSERT(Utils::IsAligned(offset, OS::PreferredCodeAlignment())); | 971 ASSERT(Utils::IsAligned(offset, OS::PreferredCodeAlignment())); |
984 return reinterpret_cast<uword>(instructions_buffer_) + offset; | 972 return reinterpret_cast<uword>(instructions_buffer_) + offset; |
985 } | 973 } |
986 | 974 |
987 | 975 |
988 RawObject* InstructionsReader::GetObjectAt(int32_t offset) { | 976 RawObject* InstructionsReader::GetObjectAt(int32_t offset) { |
989 ASSERT(Utils::IsAligned(offset, kWordSize)); | 977 ASSERT(Utils::IsAligned(offset, kWordSize)); |
990 | 978 |
991 RawObject* result = | 979 RawObject* result = reinterpret_cast<RawObject*>( |
992 reinterpret_cast<RawObject*>( | 980 reinterpret_cast<uword>(data_buffer_) + offset + kHeapObjectTag); |
993 reinterpret_cast<uword>(data_buffer_) + offset + kHeapObjectTag); | |
994 ASSERT(result->IsMarked()); | 981 ASSERT(result->IsMarked()); |
995 | 982 |
996 return result; | 983 return result; |
997 } | 984 } |
998 | 985 |
999 | 986 |
1000 intptr_t SnapshotReader::LookupInternalClass(intptr_t class_header) { | 987 intptr_t SnapshotReader::LookupInternalClass(intptr_t class_header) { |
1001 // If the header is an object Id, lookup singleton VM classes or classes | 988 // If the header is an object Id, lookup singleton VM classes or classes |
1002 // stored in the object store. | 989 // stored in the object store. |
1003 if (IsVMIsolateObject(class_header)) { | 990 if (IsVMIsolateObject(class_header)) { |
1004 intptr_t class_id = GetVMIsolateObjectId(class_header); | 991 intptr_t class_id = GetVMIsolateObjectId(class_header); |
1005 ASSERT(IsSingletonClassId(class_id)); | 992 ASSERT(IsSingletonClassId(class_id)); |
1006 return class_id; | 993 return class_id; |
1007 } | 994 } |
1008 ASSERT(SerializedHeaderTag::decode(class_header) == kObjectId); | 995 ASSERT(SerializedHeaderTag::decode(class_header) == kObjectId); |
1009 intptr_t class_id = SerializedHeaderData::decode(class_header); | 996 intptr_t class_id = SerializedHeaderData::decode(class_header); |
1010 ASSERT(IsObjectStoreClassId(class_id) || IsSingletonClassId(class_id)); | 997 ASSERT(IsObjectStoreClassId(class_id) || IsSingletonClassId(class_id)); |
1011 return class_id; | 998 return class_id; |
1012 } | 999 } |
1013 | 1000 |
1014 | 1001 |
1015 #define READ_VM_SINGLETON_OBJ(id, obj) \ | 1002 #define READ_VM_SINGLETON_OBJ(id, obj) \ |
1016 if (object_id == id) { \ | 1003 if (object_id == id) { \ |
1017 return obj; \ | 1004 return obj; \ |
1018 } \ | 1005 } |
1019 | 1006 |
1020 RawObject* SnapshotReader::ReadVMIsolateObject(intptr_t header_value) { | 1007 RawObject* SnapshotReader::ReadVMIsolateObject(intptr_t header_value) { |
1021 intptr_t object_id = GetVMIsolateObjectId(header_value); | 1008 intptr_t object_id = GetVMIsolateObjectId(header_value); |
1022 | 1009 |
1023 // First check if it is one of the singleton objects. | 1010 // First check if it is one of the singleton objects. |
1024 READ_VM_SINGLETON_OBJ(kNullObject, Object::null()); | 1011 READ_VM_SINGLETON_OBJ(kNullObject, Object::null()); |
1025 READ_VM_SINGLETON_OBJ(kSentinelObject, Object::sentinel().raw()); | 1012 READ_VM_SINGLETON_OBJ(kSentinelObject, Object::sentinel().raw()); |
1026 READ_VM_SINGLETON_OBJ(kTransitionSentinelObject, | 1013 READ_VM_SINGLETON_OBJ(kTransitionSentinelObject, |
1027 Object::transition_sentinel().raw()); | 1014 Object::transition_sentinel().raw()); |
1028 READ_VM_SINGLETON_OBJ(kEmptyArrayObject, Object::empty_array().raw()); | 1015 READ_VM_SINGLETON_OBJ(kEmptyArrayObject, Object::empty_array().raw()); |
(...skipping 98 matching lines...)
1127 typeargs ^= objref->raw(); | 1114 typeargs ^= objref->raw(); |
1128 newobj = typeargs.Canonicalize(); | 1115 newobj = typeargs.Canonicalize(); |
1129 } | 1116 } |
1130 if (newobj.raw() != objref->raw()) { | 1117 if (newobj.raw() != objref->raw()) { |
1131 ZoneGrowableArray<intptr_t>* patches = backref.patch_records(); | 1118 ZoneGrowableArray<intptr_t>* patches = backref.patch_records(); |
1132 ASSERT(newobj.IsCanonical()); | 1119 ASSERT(newobj.IsCanonical()); |
1133 ASSERT(patches != NULL); | 1120 ASSERT(patches != NULL); |
1134 // First we replace the back ref table with the canonical object. | 1121 // First we replace the back ref table with the canonical object. |
1135 *objref = newobj.raw(); | 1122 *objref = newobj.raw(); |
1136 // Now we go over all the patch records and patch the canonical object. | 1123 // Now we go over all the patch records and patch the canonical object. |
1137 for (intptr_t j = 0; j < patches->length(); j+=2) { | 1124 for (intptr_t j = 0; j < patches->length(); j += 2) { |
1138 NoSafepointScope no_safepoint; | 1125 NoSafepointScope no_safepoint; |
1139 intptr_t patch_object_id = (*patches)[j]; | 1126 intptr_t patch_object_id = (*patches)[j]; |
1140 intptr_t patch_offset = (*patches)[j + 1]; | 1127 intptr_t patch_offset = (*patches)[j + 1]; |
1141 Object* target = GetBackRef(patch_object_id); | 1128 Object* target = GetBackRef(patch_object_id); |
1142 // We should not backpatch an object that is canonical. | 1129 // We should not backpatch an object that is canonical. |
1143 if (!target->IsCanonical()) { | 1130 if (!target->IsCanonical()) { |
1144 RawObject** rawptr = | 1131 RawObject** rawptr = |
1145 reinterpret_cast<RawObject**>(target->raw()->ptr()); | 1132 reinterpret_cast<RawObject**>(target->raw()->ptr()); |
1146 target->StorePointer((rawptr + patch_offset), newobj.raw()); | 1133 target->StorePointer((rawptr + patch_offset), newobj.raw()); |
1147 } | 1134 } |
1148 } | 1135 } |
1149 } else { | 1136 } else { |
1150 ASSERT(objref->IsCanonical()); | 1137 ASSERT(objref->IsCanonical()); |
1151 } | 1138 } |
1152 } | 1139 } |
1153 } | 1140 } |
1154 } | 1141 } |
1155 | 1142 |
1156 | 1143 |
1157 void SnapshotReader::ArrayReadFrom(intptr_t object_id, | 1144 void SnapshotReader::ArrayReadFrom(intptr_t object_id, |
1158 const Array& result, | 1145 const Array& result, |
1159 intptr_t len, | 1146 intptr_t len, |
1160 intptr_t tags) { | 1147 intptr_t tags) { |
1161 // Setup the object fields. | 1148 // Setup the object fields. |
1162 const intptr_t typeargs_offset = | 1149 const intptr_t typeargs_offset = |
1163 GrowableObjectArray::type_arguments_offset() / kWordSize; | 1150 GrowableObjectArray::type_arguments_offset() / kWordSize; |
1164 *TypeArgumentsHandle() ^= ReadObjectImpl(kAsInlinedObject, | 1151 *TypeArgumentsHandle() ^= |
1165 object_id, | 1152 ReadObjectImpl(kAsInlinedObject, object_id, typeargs_offset); |
1166 typeargs_offset); | |
1167 result.SetTypeArguments(*TypeArgumentsHandle()); | 1153 result.SetTypeArguments(*TypeArgumentsHandle()); |
1168 | 1154 |
1169 bool as_reference = RawObject::IsCanonical(tags) ? false : true; | 1155 bool as_reference = RawObject::IsCanonical(tags) ? false : true; |
1170 intptr_t offset = result.raw_ptr()->data() - | 1156 intptr_t offset = result.raw_ptr()->data() - |
1171 reinterpret_cast<RawObject**>(result.raw()->ptr()); | 1157 reinterpret_cast<RawObject**>(result.raw()->ptr()); |
1172 for (intptr_t i = 0; i < len; i++) { | 1158 for (intptr_t i = 0; i < len; i++) { |
1173 *PassiveObjectHandle() = ReadObjectImpl(as_reference, | 1159 *PassiveObjectHandle() = |
1174 object_id, | 1160 ReadObjectImpl(as_reference, object_id, (i + offset)); |
1175 (i + offset)); | |
1176 result.SetAt(i, *PassiveObjectHandle()); | 1161 result.SetAt(i, *PassiveObjectHandle()); |
1177 } | 1162 } |
1178 } | 1163 } |
1179 | 1164 |
1180 | 1165 |
1181 ScriptSnapshotReader::ScriptSnapshotReader(const uint8_t* buffer, | 1166 ScriptSnapshotReader::ScriptSnapshotReader(const uint8_t* buffer, |
1182 intptr_t size, | 1167 intptr_t size, |
1183 Thread* thread) | 1168 Thread* thread) |
1184 : SnapshotReader(buffer, | 1169 : SnapshotReader(buffer, |
1185 size, | 1170 size, |
1186 Snapshot::kScript, | 1171 Snapshot::kScript, |
1187 new ZoneGrowableArray<BackRefNode>(kNumInitialReferences), | 1172 new ZoneGrowableArray<BackRefNode>(kNumInitialReferences), |
1188 thread) { | 1173 thread) {} |
1189 } | |
1190 | 1174 |
1191 | 1175 |
1192 ScriptSnapshotReader::~ScriptSnapshotReader() { | 1176 ScriptSnapshotReader::~ScriptSnapshotReader() { |
1193 ResetBackwardReferenceTable(); | 1177 ResetBackwardReferenceTable(); |
1194 } | 1178 } |
1195 | 1179 |
1196 | 1180 |
1197 MessageSnapshotReader::MessageSnapshotReader(const uint8_t* buffer, | 1181 MessageSnapshotReader::MessageSnapshotReader(const uint8_t* buffer, |
1198 intptr_t size, | 1182 intptr_t size, |
1199 Thread* thread) | 1183 Thread* thread) |
1200 : SnapshotReader(buffer, | 1184 : SnapshotReader(buffer, |
1201 size, | 1185 size, |
1202 Snapshot::kMessage, | 1186 Snapshot::kMessage, |
1203 new ZoneGrowableArray<BackRefNode>(kNumInitialReferences), | 1187 new ZoneGrowableArray<BackRefNode>(kNumInitialReferences), |
1204 thread) { | 1188 thread) {} |
1205 } | |
1206 | 1189 |
1207 | 1190 |
1208 MessageSnapshotReader::~MessageSnapshotReader() { | 1191 MessageSnapshotReader::~MessageSnapshotReader() { |
1209 ResetBackwardReferenceTable(); | 1192 ResetBackwardReferenceTable(); |
1210 } | 1193 } |
1211 | 1194 |
1212 | 1195 |
1213 SnapshotWriter::SnapshotWriter(Thread* thread, | 1196 SnapshotWriter::SnapshotWriter(Thread* thread, |
1214 Snapshot::Kind kind, | 1197 Snapshot::Kind kind, |
1215 uint8_t** buffer, | 1198 uint8_t** buffer, |
(...skipping 24 matching lines...)
1240 return raw->ptr()->tags_; | 1223 return raw->ptr()->tags_; |
1241 } | 1224 } |
1242 | 1225 |
1243 | 1226 |
1244 #define VM_OBJECT_CLASS_LIST(V) \ | 1227 #define VM_OBJECT_CLASS_LIST(V) \ |
1245 V(OneByteString) \ | 1228 V(OneByteString) \ |
1246 V(TwoByteString) \ | 1229 V(TwoByteString) \ |
1247 V(Mint) \ | 1230 V(Mint) \ |
1248 V(Bigint) \ | 1231 V(Bigint) \ |
1249 V(Double) \ | 1232 V(Double) \ |
1250 V(ImmutableArray) \ | 1233 V(ImmutableArray) |
1251 | 1234 |
1252 #define VM_OBJECT_WRITE(clazz) \ | 1235 #define VM_OBJECT_WRITE(clazz) \ |
1253 case clazz::kClassId: { \ | 1236 case clazz::kClassId: { \ |
1254 object_id = forward_list_->AddObject(zone(), rawobj, kIsSerialized); \ | 1237 object_id = forward_list_->AddObject(zone(), rawobj, kIsSerialized); \ |
1255 Raw##clazz* raw_obj = reinterpret_cast<Raw##clazz*>(rawobj); \ | 1238 Raw##clazz* raw_obj = reinterpret_cast<Raw##clazz*>(rawobj); \ |
1256 raw_obj->WriteTo(this, object_id, kind(), false); \ | 1239 raw_obj->WriteTo(this, object_id, kind(), false); \ |
1257 return true; \ | 1240 return true; \ |
1258 } \ | 1241 } |
1259 | 1242 |
1260 #define WRITE_VM_SINGLETON_OBJ(obj, id) \ | 1243 #define WRITE_VM_SINGLETON_OBJ(obj, id) \ |
1261 if (rawobj == obj) { \ | 1244 if (rawobj == obj) { \ |
1262 WriteVMIsolateObject(id); \ | 1245 WriteVMIsolateObject(id); \ |
1263 return true; \ | 1246 return true; \ |
1264 } \ | 1247 } |
1265 | 1248 |
1266 bool SnapshotWriter::HandleVMIsolateObject(RawObject* rawobj) { | 1249 bool SnapshotWriter::HandleVMIsolateObject(RawObject* rawobj) { |
1267 // Check if it is one of the singleton VM objects. | 1250 // Check if it is one of the singleton VM objects. |
1268 WRITE_VM_SINGLETON_OBJ(Object::null(), kNullObject); | 1251 WRITE_VM_SINGLETON_OBJ(Object::null(), kNullObject); |
1269 WRITE_VM_SINGLETON_OBJ(Object::sentinel().raw(), kSentinelObject); | 1252 WRITE_VM_SINGLETON_OBJ(Object::sentinel().raw(), kSentinelObject); |
1270 WRITE_VM_SINGLETON_OBJ(Object::transition_sentinel().raw(), | 1253 WRITE_VM_SINGLETON_OBJ(Object::transition_sentinel().raw(), |
1271 kTransitionSentinelObject); | 1254 kTransitionSentinelObject); |
1272 WRITE_VM_SINGLETON_OBJ(Object::empty_array().raw(), kEmptyArrayObject); | 1255 WRITE_VM_SINGLETON_OBJ(Object::empty_array().raw(), kEmptyArrayObject); |
1273 WRITE_VM_SINGLETON_OBJ(Object::zero_array().raw(), kZeroArrayObject); | 1256 WRITE_VM_SINGLETON_OBJ(Object::zero_array().raw(), kZeroArrayObject); |
1274 WRITE_VM_SINGLETON_OBJ(Object::dynamic_type().raw(), kDynamicType); | 1257 WRITE_VM_SINGLETON_OBJ(Object::dynamic_type().raw(), kDynamicType); |
(...skipping 71 matching lines...)
1346 | 1329 |
1347 #undef VM_OBJECT_WRITE | 1330 #undef VM_OBJECT_WRITE |
1348 | 1331 |
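// The VM_OBJECT_CLASS_LIST / VM_OBJECT_WRITE pair above (and the
// SNAPSHOT_WRITE macros further down) use the X-macro pattern: a single list
// macro is expanded into one switch case per entry, so adding a class to the
// list automatically adds its serialization case. A hedged, self-contained
// illustration with made-up names (SHAPE_LIST, SHAPE_CASE, HandleShape):
#include <cstdio>

#define SHAPE_LIST(V) \
  V(Circle, 1)        \
  V(Square, 2)        \
  V(Triangle, 3)

#define SHAPE_CASE(name, id)                  \
  case id:                                    \
    std::printf("serialize a %s\n", #name);   \
    return true;

bool HandleShape(int class_id) {
  switch (class_id) {
    SHAPE_LIST(SHAPE_CASE)  // expands to case 1: ... case 2: ... case 3: ...
    default:
      return false;
  }
}
#undef SHAPE_CASE
#undef SHAPE_LIST

int main() {
  HandleShape(2);   // prints "serialize a Square"
  HandleShape(99);  // unknown id falls through to default
  return 0;
}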
1349 | 1332 |
1350 // An object visitor which will iterate over all the script objects in the heap | 1333 // An object visitor which will iterate over all the script objects in the heap |
1351 // and either count them or collect them into an array. This is used during | 1334 // and either count them or collect them into an array. This is used during |
1352 // full snapshot generation of the VM isolate to write out all script | 1335 // full snapshot generation of the VM isolate to write out all script |
1353 // objects and their accompanying token streams. | 1336 // objects and their accompanying token streams. |
1354 class ScriptVisitor : public ObjectVisitor { | 1337 class ScriptVisitor : public ObjectVisitor { |
1355 public: | 1338 public: |
1356 explicit ScriptVisitor(Thread* thread) : | 1339 explicit ScriptVisitor(Thread* thread) |
1357 objHandle_(Object::Handle(thread->zone())), | 1340 : objHandle_(Object::Handle(thread->zone())), count_(0), scripts_(NULL) {} |
1358 count_(0), | |
1359 scripts_(NULL) {} | |
1360 | 1341 |
1361 ScriptVisitor(Thread* thread, const Array* scripts) : | 1342 ScriptVisitor(Thread* thread, const Array* scripts) |
1362 objHandle_(Object::Handle(thread->zone())), | 1343 : objHandle_(Object::Handle(thread->zone())), |
1363 count_(0), | 1344 count_(0), |
1364 scripts_(scripts) {} | 1345 scripts_(scripts) {} |
1365 | 1346 |
1366 void VisitObject(RawObject* obj) { | 1347 void VisitObject(RawObject* obj) { |
1367 if (obj->IsScript()) { | 1348 if (obj->IsScript()) { |
1368 if (scripts_ != NULL) { | 1349 if (scripts_ != NULL) { |
1369 objHandle_ = obj; | 1350 objHandle_ = obj; |
1370 scripts_->SetAt(count_, objHandle_); | 1351 scripts_->SetAt(count_, objHandle_); |
1371 } | 1352 } |
1372 count_ += 1; | 1353 count_ += 1; |
1373 } | 1354 } |
1374 } | 1355 } |
(...skipping 151 matching lines...)
1526 RawClass* cls = class_table_->At(RawObject::ClassIdTag::decode(tags)); | 1507 RawClass* cls = class_table_->At(RawObject::ClassIdTag::decode(tags)); |
1527 intptr_t class_id = cls->ptr()->id_; | 1508 intptr_t class_id = cls->ptr()->id_; |
1528 ASSERT(class_id == RawObject::ClassIdTag::decode(tags)); | 1509 ASSERT(class_id == RawObject::ClassIdTag::decode(tags)); |
1529 if (class_id >= kNumPredefinedCids || | 1510 if (class_id >= kNumPredefinedCids || |
1530 RawObject::IsImplicitFieldClassId(class_id)) { | 1511 RawObject::IsImplicitFieldClassId(class_id)) { |
1531 WriteInstance(raw, cls, tags, object_id, as_reference); | 1512 WriteInstance(raw, cls, tags, object_id, as_reference); |
1532 return; | 1513 return; |
1533 } | 1514 } |
1534 switch (class_id) { | 1515 switch (class_id) { |
1535 #define SNAPSHOT_WRITE(clazz) \ | 1516 #define SNAPSHOT_WRITE(clazz) \ |
1536 case clazz::kClassId: { \ | 1517 case clazz::kClassId: { \ |
1537 Raw##clazz* raw_obj = reinterpret_cast<Raw##clazz*>(raw); \ | 1518 Raw##clazz* raw_obj = reinterpret_cast<Raw##clazz*>(raw); \ |
1538 raw_obj->WriteTo(this, object_id, kind_, as_reference); \ | 1519 raw_obj->WriteTo(this, object_id, kind_, as_reference); \ |
1539 return; \ | 1520 return; \ |
1540 } \ | 1521 } |
1541 | 1522 |
1542 CLASS_LIST_NO_OBJECT(SNAPSHOT_WRITE) | 1523 CLASS_LIST_NO_OBJECT(SNAPSHOT_WRITE) |
1543 #undef SNAPSHOT_WRITE | 1524 #undef SNAPSHOT_WRITE |
1544 #define SNAPSHOT_WRITE(clazz) \ | 1525 #define SNAPSHOT_WRITE(clazz) case kTypedData##clazz##Cid: |
1545 case kTypedData##clazz##Cid: \ | |
1546 | 1526 |
1547 CLASS_LIST_TYPED_DATA(SNAPSHOT_WRITE) { | 1527 CLASS_LIST_TYPED_DATA(SNAPSHOT_WRITE) { |
1548 RawTypedData* raw_obj = reinterpret_cast<RawTypedData*>(raw); | 1528 RawTypedData* raw_obj = reinterpret_cast<RawTypedData*>(raw); |
1549 raw_obj->WriteTo(this, object_id, kind_, as_reference); | 1529 raw_obj->WriteTo(this, object_id, kind_, as_reference); |
1550 return; | 1530 return; |
1551 } | 1531 } |
1552 #undef SNAPSHOT_WRITE | 1532 #undef SNAPSHOT_WRITE |
1553 #define SNAPSHOT_WRITE(clazz) \ | 1533 #define SNAPSHOT_WRITE(clazz) case kExternalTypedData##clazz##Cid: |
1554 case kExternalTypedData##clazz##Cid: \ | |
1555 | 1534 |
1556 CLASS_LIST_TYPED_DATA(SNAPSHOT_WRITE) { | 1535 CLASS_LIST_TYPED_DATA(SNAPSHOT_WRITE) { |
1557 RawExternalTypedData* raw_obj = | 1536 RawExternalTypedData* raw_obj = |
1558 reinterpret_cast<RawExternalTypedData*>(raw); | 1537 reinterpret_cast<RawExternalTypedData*>(raw); |
1559 raw_obj->WriteTo(this, object_id, kind_, as_reference); | 1538 raw_obj->WriteTo(this, object_id, kind_, as_reference); |
1560 return; | 1539 return; |
1561 } | 1540 } |
1562 #undef SNAPSHOT_WRITE | 1541 #undef SNAPSHOT_WRITE |
1563 default: break; | 1542 default: |
| 1543 break; |
1564 } | 1544 } |
1565 | 1545 |
1566 const Object& obj = Object::Handle(raw); | 1546 const Object& obj = Object::Handle(raw); |
1567 FATAL1("Unexpected object: %s\n", obj.ToCString()); | 1547 FATAL1("Unexpected object: %s\n", obj.ToCString()); |
1568 } | 1548 } |
1569 | 1549 |
1570 | 1550 |
1571 class WriteInlinedObjectVisitor : public ObjectVisitor { | 1551 class WriteInlinedObjectVisitor : public ObjectVisitor { |
1572 public: | 1552 public: |
1573 explicit WriteInlinedObjectVisitor(SnapshotWriter* writer) | 1553 explicit WriteInlinedObjectVisitor(SnapshotWriter* writer) |
(...skipping 11 matching lines...) Expand all Loading... |
1585 }; | 1565 }; |
1586 | 1566 |
1587 | 1567 |
1588 void SnapshotWriter::WriteForwardedObjects() { | 1568 void SnapshotWriter::WriteForwardedObjects() { |
1589 WriteInlinedObjectVisitor visitor(this); | 1569 WriteInlinedObjectVisitor visitor(this); |
1590 forward_list_->SerializeAll(&visitor); | 1570 forward_list_->SerializeAll(&visitor); |
1591 } | 1571 } |
1592 | 1572 |
1593 | 1573 |
1594 void ForwardList::SerializeAll(ObjectVisitor* writer) { | 1574 void ForwardList::SerializeAll(ObjectVisitor* writer) { |
1595 // Write out all objects that were added to the forward list and have | 1575 // Write out all objects that were added to the forward list and have |
1596 // not been serialized yet. These would typically be fields of instance | 1576 // not been serialized yet. These would typically be fields of instance |
1597 // objects, arrays or immutable arrays (this is done in order to avoid | 1577 // objects, arrays or immutable arrays (this is done in order to avoid |
1598 // deep recursive calls to WriteObjectImpl). | 1578 // deep recursive calls to WriteObjectImpl). |
1599 // NOTE: The forward list might grow as we process the list. | 1579 // NOTE: The forward list might grow as we process the list. |
1600 #ifdef DEBUG | 1580 #ifdef DEBUG |
1601 for (intptr_t i = first_object_id(); i < first_unprocessed_object_id_; ++i) { | 1581 for (intptr_t i = first_object_id(); i < first_unprocessed_object_id_; ++i) { |
1602 ASSERT(NodeForObjectId(i)->is_serialized()); | 1582 ASSERT(NodeForObjectId(i)->is_serialized()); |
1603 } | 1583 } |
1604 #endif // DEBUG | 1584 #endif // DEBUG |
1605 for (intptr_t id = first_unprocessed_object_id_; | 1585 for (intptr_t id = first_unprocessed_object_id_; id < next_object_id(); |
1606 id < next_object_id(); | |
1607 ++id) { | 1586 ++id) { |
1608 if (!NodeForObjectId(id)->is_serialized()) { | 1587 if (!NodeForObjectId(id)->is_serialized()) { |
1609 // Write the object out in the stream. | 1588 // Write the object out in the stream. |
1610 RawObject* raw = NodeForObjectId(id)->obj()->raw(); | 1589 RawObject* raw = NodeForObjectId(id)->obj()->raw(); |
1611 writer->VisitObject(raw); | 1590 writer->VisitObject(raw); |
1612 | 1591 |
1613 // Mark object as serialized. | 1592 // Mark object as serialized. |
1614 NodeForObjectId(id)->set_state(kIsSerialized); | 1593 NodeForObjectId(id)->set_state(kIsSerialized); |
1615 } | 1594 } |
1616 } | 1595 } |
1617 first_unprocessed_object_id_ = next_object_id(); | 1596 first_unprocessed_object_id_ = next_object_id(); |
1618 } | 1597 } |
1619 | 1598 |
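// A sketch of the forward-list idea behind SerializeAll above: container
// objects enqueue their children instead of serializing them recursively, and
// a single loop drains the queue, which may keep growing while it is
// processed. This illustration (Node, SerializeAll below) omits the object-id
// and is_serialized bookkeeping the real forward list keeps.
#include <cstddef>
#include <cstdio>
#include <vector>

struct Node {
  int payload;
  std::vector<Node*> children;
};

void SerializeAll(Node* root) {
  std::vector<Node*> worklist = {root};
  // The index advances while new entries are appended, so nesting depth never
  // translates into C++ call-stack depth.
  for (size_t id = 0; id < worklist.size(); ++id) {
    Node* obj = worklist[id];
    std::printf("write payload %d\n", obj->payload);
    for (Node* child : obj->children) {
      worklist.push_back(child);  // defer the child instead of recursing
    }
  }
}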
1620 | 1599 |
1621 void SnapshotWriter::WriteClassId(RawClass* cls) { | 1600 void SnapshotWriter::WriteClassId(RawClass* cls) { |
1622 ASSERT(!Snapshot::IsFull(kind_)); | 1601 ASSERT(!Snapshot::IsFull(kind_)); |
1623 int class_id = cls->ptr()->id_; | 1602 int class_id = cls->ptr()->id_; |
1624 ASSERT(!IsSingletonClassId(class_id) && !IsObjectStoreClassId(class_id)); | 1603 ASSERT(!IsSingletonClassId(class_id) && !IsObjectStoreClassId(class_id)); |
1625 | 1604 |
1626 // Write out the library url and class name. | 1605 // Write out the library url and class name. |
1627 RawLibrary* library = cls->ptr()->library_; | 1606 RawLibrary* library = cls->ptr()->library_; |
1628 ASSERT(library != Library::null()); | 1607 ASSERT(library != Library::null()); |
1629 WriteObjectImpl(library->ptr()->url_, kAsInlinedObject); | 1608 WriteObjectImpl(library->ptr()->url_, kAsInlinedObject); |
1630 WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject); | 1609 WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject); |
1631 } | 1610 } |
1632 | 1611 |
1633 | 1612 |
1634 void SnapshotWriter::WriteFunctionId(RawFunction* func, bool owner_is_class) { | 1613 void SnapshotWriter::WriteFunctionId(RawFunction* func, bool owner_is_class) { |
1635 ASSERT(kind_ == Snapshot::kScript); | 1614 ASSERT(kind_ == Snapshot::kScript); |
1636 RawClass* cls = (owner_is_class) ? | 1615 RawClass* cls = (owner_is_class) |
1637 reinterpret_cast<RawClass*>(func->ptr()->owner_) : | 1616 ? reinterpret_cast<RawClass*>(func->ptr()->owner_) |
1638 reinterpret_cast<RawPatchClass*>( | 1617 : reinterpret_cast<RawPatchClass*>(func->ptr()->owner_) |
1639 func->ptr()->owner_)->ptr()->patched_class_; | 1618 ->ptr() |
| 1619 ->patched_class_; |
1640 | 1620 |
1641 // Write out the library url and class name. | 1621 // Write out the library url and class name. |
1642 RawLibrary* library = cls->ptr()->library_; | 1622 RawLibrary* library = cls->ptr()->library_; |
1643 ASSERT(library != Library::null()); | 1623 ASSERT(library != Library::null()); |
1644 WriteObjectImpl(library->ptr()->url_, kAsInlinedObject); | 1624 WriteObjectImpl(library->ptr()->url_, kAsInlinedObject); |
1645 WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject); | 1625 WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject); |
1646 WriteObjectImpl(func->ptr()->name_, kAsInlinedObject); | 1626 WriteObjectImpl(func->ptr()->name_, kAsInlinedObject); |
1647 } | 1627 } |
1648 | 1628 |
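// A sketch of the name-based identity written by WriteClassId and
// WriteFunctionId above: a script snapshot identifies a class or function by
// the chain (library url, class name[, function name]) so the receiving side
// can resolve it by lookup rather than embedding the definition. The registry
// below is a hypothetical stand-in, not the VM's lookup path.
#include <cstdio>
#include <map>
#include <string>
#include <tuple>

using FunctionKey = std::tuple<std::string, std::string, std::string>;

int main() {
  std::map<FunctionKey, int> registry;  // pretend table of loaded functions
  FunctionKey key{"dart:core", "Object", "toString"};  // as named in a snapshot
  registry[key] = 42;

  auto it = registry.find(key);
  if (it != registry.end()) {
    std::printf("resolved to function #%d\n", it->second);
  } else {
    std::printf("lookup failed\n");
  }
  return 0;
}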
1649 | 1629 |
(...skipping 73 matching lines...)
1723 return func; | 1703 return func; |
1724 } | 1704 } |
1725 // Not a closure of a top level method or static function, throw an | 1705 // Not a closure of a top level method or static function, throw an |
1726 // exception as we do not allow these objects to be serialized. | 1706 // exception as we do not allow these objects to be serialized. |
1727 HANDLESCOPE(thread()); | 1707 HANDLESCOPE(thread()); |
1728 | 1708 |
1729 const Function& errorFunc = Function::Handle(zone(), func); | 1709 const Function& errorFunc = Function::Handle(zone(), func); |
1730 ASSERT(!errorFunc.IsNull()); | 1710 ASSERT(!errorFunc.IsNull()); |
1731 | 1711 |
1732 // All other closures are errors. | 1712 // All other closures are errors. |
1733 char* chars = OS::SCreate(thread()->zone(), | 1713 char* chars = OS::SCreate( |
| 1714 thread()->zone(), |
1734 "Illegal argument in isolate message : (object is a closure - %s)", | 1715 "Illegal argument in isolate message : (object is a closure - %s)", |
1735 errorFunc.ToCString()); | 1716 errorFunc.ToCString()); |
1736 SetWriteException(Exceptions::kArgument, chars); | 1717 SetWriteException(Exceptions::kArgument, chars); |
1737 return Function::null(); | 1718 return Function::null(); |
1738 } | 1719 } |
1739 | 1720 |
1740 | 1721 |
1741 RawClass* SnapshotWriter::GetFunctionOwner(RawFunction* func) { | 1722 RawClass* SnapshotWriter::GetFunctionOwner(RawFunction* func) { |
1742 RawObject* owner = func->ptr()->owner_; | 1723 RawObject* owner = func->ptr()->owner_; |
1743 uword tags = GetObjectTags(owner); | 1724 uword tags = GetObjectTags(owner); |
1744 intptr_t class_id = RawObject::ClassIdTag::decode(tags); | 1725 intptr_t class_id = RawObject::ClassIdTag::decode(tags); |
1745 if (class_id == kClassCid) { | 1726 if (class_id == kClassCid) { |
1746 return reinterpret_cast<RawClass*>(owner); | 1727 return reinterpret_cast<RawClass*>(owner); |
1747 } | 1728 } |
1748 ASSERT(class_id == kPatchClassCid); | 1729 ASSERT(class_id == kPatchClassCid); |
1749 return reinterpret_cast<RawPatchClass*>(owner)->ptr()->patched_class_; | 1730 return reinterpret_cast<RawPatchClass*>(owner)->ptr()->patched_class_; |
1750 } | 1731 } |
1751 | 1732 |
1752 | 1733 |
1753 void SnapshotWriter::CheckForNativeFields(RawClass* cls) { | 1734 void SnapshotWriter::CheckForNativeFields(RawClass* cls) { |
1754 if (cls->ptr()->num_native_fields_ != 0) { | 1735 if (cls->ptr()->num_native_fields_ != 0) { |
1755 // We do not allow objects with native fields in an isolate message. | 1736 // We do not allow objects with native fields in an isolate message. |
1756 HANDLESCOPE(thread()); | 1737 HANDLESCOPE(thread()); |
1757 const Class& clazz = Class::Handle(zone(), cls); | 1738 const Class& clazz = Class::Handle(zone(), cls); |
1758 char* chars = OS::SCreate(thread()->zone(), | 1739 char* chars = OS::SCreate(thread()->zone(), |
1759 "Illegal argument in isolate message" | 1740 "Illegal argument in isolate message" |
1760 " : (object extends NativeWrapper - %s)", | 1741 " : (object extends NativeWrapper - %s)", |
1761 clazz.ToCString()); | 1742 clazz.ToCString()); |
1762 SetWriteException(Exceptions::kArgument, chars); | 1743 SetWriteException(Exceptions::kArgument, chars); |
1763 } | 1744 } |
1764 } | 1745 } |
1765 | 1746 |
1766 | 1747 |
1767 void SnapshotWriter::SetWriteException(Exceptions::ExceptionType type, | 1748 void SnapshotWriter::SetWriteException(Exceptions::ExceptionType type, |
1768 const char* msg) { | 1749 const char* msg) { |
1769 set_exception_type(type); | 1750 set_exception_type(type); |
1770 set_exception_msg(msg); | 1751 set_exception_msg(msg); |
1771 // The more specific error is set up in SnapshotWriter::ThrowException(). | 1752 // The more specific error is set up in SnapshotWriter::ThrowException(). |
1772 thread()->long_jump_base()-> | 1753 thread()->long_jump_base()->Jump(1, Object::snapshot_writer_error()); |
1773 Jump(1, Object::snapshot_writer_error()); | |
1774 } | 1754 } |
1775 | 1755 |
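// A self-contained sketch of the setjmp/longjmp escape used by
// SetWriteException above: the top-level writer establishes a jump point,
// deeply nested code records the error and jumps back, and the caller reports
// it. The names g_jump, g_error_msg, FailWrite and WriteGraph are
// illustrative only.
#include <csetjmp>
#include <cstdio>

static std::jmp_buf g_jump;
static const char* g_error_msg = nullptr;

void FailWrite(const char* msg) {
  g_error_msg = msg;        // record the error for the jump target
  std::longjmp(g_jump, 1);  // unwind straight back to the setjmp site
}

void WriteGraph(bool poisoned) {
  if (poisoned) FailWrite("object is not serializable");
  std::printf("graph written\n");
}

int main() {
  if (setjmp(g_jump) == 0) {
    WriteGraph(/*poisoned=*/true);
  } else {
    std::printf("write failed: %s\n", g_error_msg);
  }
  return 0;
}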
1776 | 1756 |
1777 void SnapshotWriter::WriteInstance(RawObject* raw, | 1757 void SnapshotWriter::WriteInstance(RawObject* raw, |
1778 RawClass* cls, | 1758 RawClass* cls, |
1779 intptr_t tags, | 1759 intptr_t tags, |
1780 intptr_t object_id, | 1760 intptr_t object_id, |
1781 bool as_reference) { | 1761 bool as_reference) { |
1782 // Closure instances are handled by RawClosure::WriteTo(). | 1762 // Closure instances are handled by RawClosure::WriteTo(). |
1783 ASSERT(!Class::IsClosureClass(cls)); | 1763 ASSERT(!Class::IsClosureClass(cls)); |
1784 | 1764 |
1785 // Check if the instance has native fields and throw an exception if it does. | 1765 // Check if the instance has native fields and throw an exception if it does. |
1786 CheckForNativeFields(cls); | 1766 CheckForNativeFields(cls); |
1787 | 1767 |
1788 // Object is regular dart instance. | 1768 // Object is regular dart instance. |
1789 if (as_reference) { | 1769 if (as_reference) { |
1790 // Write out the serialization header value for this object. | 1770 // Write out the serialization header value for this object. |
1791 WriteInlinedObjectHeader(kOmittedObjectId); | 1771 WriteInlinedObjectHeader(kOmittedObjectId); |
1792 | 1772 |
1793 // Indicate this is an instance object. | 1773 // Indicate this is an instance object. |
1794 Write<int32_t>(SerializedHeaderData::encode(kInstanceObjectId)); | 1774 Write<int32_t>(SerializedHeaderData::encode(kInstanceObjectId)); |
1795 WriteTags(tags); | 1775 WriteTags(tags); |
1796 | 1776 |
1797 // Write out the class information for this object. | 1777 // Write out the class information for this object. |
1798 WriteObjectImpl(cls, kAsInlinedObject); | 1778 WriteObjectImpl(cls, kAsInlinedObject); |
1799 } else { | 1779 } else { |
1800 intptr_t next_field_offset = | 1780 intptr_t next_field_offset = cls->ptr()->next_field_offset_in_words_ |
1801 cls->ptr()->next_field_offset_in_words_ << kWordSizeLog2; | 1781 << kWordSizeLog2; |
1802 ASSERT(next_field_offset > 0); | 1782 ASSERT(next_field_offset > 0); |
1803 | 1783 |
1804 // Write out the serialization header value for this object. | 1784 // Write out the serialization header value for this object. |
1805 WriteInlinedObjectHeader(object_id); | 1785 WriteInlinedObjectHeader(object_id); |
1806 | 1786 |
1807 // Indicate this is an instance object. | 1787 // Indicate this is an instance object. |
1808 Write<int32_t>(SerializedHeaderData::encode(kInstanceObjectId)); | 1788 Write<int32_t>(SerializedHeaderData::encode(kInstanceObjectId)); |
1809 | 1789 |
1810 // Write out the tags. | 1790 // Write out the tags. |
1811 WriteTags(tags); | 1791 WriteTags(tags); |
(...skipping 58 matching lines...)
1870 | 1850 |
1871 const char* expected_features = Dart::FeaturesString(kind_); | 1851 const char* expected_features = Dart::FeaturesString(kind_); |
1872 ASSERT(expected_features != NULL); | 1852 ASSERT(expected_features != NULL); |
1873 const intptr_t features_len = strlen(expected_features); | 1853 const intptr_t features_len = strlen(expected_features); |
1874 WriteBytes(reinterpret_cast<const uint8_t*>(expected_features), | 1854 WriteBytes(reinterpret_cast<const uint8_t*>(expected_features), |
1875 features_len + 1); | 1855 features_len + 1); |
1876 free(const_cast<char*>(expected_features)); | 1856 free(const_cast<char*>(expected_features)); |
1877 } | 1857 } |
1878 | 1858 |
1879 | 1859 |
1880 ScriptSnapshotWriter::ScriptSnapshotWriter(uint8_t** buffer, | 1860 ScriptSnapshotWriter::ScriptSnapshotWriter(uint8_t** buffer, ReAlloc alloc) |
1881 ReAlloc alloc) | |
1882 : SnapshotWriter(Thread::Current(), | 1861 : SnapshotWriter(Thread::Current(), |
1883 Snapshot::kScript, | 1862 Snapshot::kScript, |
1884 buffer, | 1863 buffer, |
1885 alloc, | 1864 alloc, |
1886 kInitialSize, | 1865 kInitialSize, |
1887 &forward_list_, | 1866 &forward_list_, |
1888 true /* can_send_any_object */), | 1867 true /* can_send_any_object */), |
1889 forward_list_(thread(), kMaxPredefinedObjectIds) { | 1868 forward_list_(thread(), kMaxPredefinedObjectIds) { |
1890 ASSERT(buffer != NULL); | 1869 ASSERT(buffer != NULL); |
1891 ASSERT(alloc != NULL); | 1870 ASSERT(alloc != NULL); |
(...skipping 64 matching lines...)
1956 if (setjmp(*jump.Set()) == 0) { | 1935 if (setjmp(*jump.Set()) == 0) { |
1957 NoSafepointScope no_safepoint; | 1936 NoSafepointScope no_safepoint; |
1958 WriteObject(obj.raw()); | 1937 WriteObject(obj.raw()); |
1959 } else { | 1938 } else { |
1960 ThrowException(exception_type(), exception_msg()); | 1939 ThrowException(exception_type(), exception_msg()); |
1961 } | 1940 } |
1962 } | 1941 } |
1963 | 1942 |
1964 | 1943 |
1965 } // namespace dart | 1944 } // namespace dart |