Chromium Code Reviews

Side by Side Diff: runtime/vm/snapshot.cc

Issue 2974233002: VM: Re-format to use at most one newline between functions (Closed)
Patch Set: Rebase and merge (created 3 years, 5 months ago)
OLD | NEW
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/snapshot.h" 5 #include "vm/snapshot.h"
6 6
7 #include "platform/assert.h" 7 #include "platform/assert.h"
8 #include "vm/bootstrap.h" 8 #include "vm/bootstrap.h"
9 #include "vm/class_finalizer.h" 9 #include "vm/class_finalizer.h"
10 #include "vm/dart.h" 10 #include "vm/dart.h"
(...skipping 13 matching lines...)
24 24
25 // We currently only expect the Dart mutator to read snapshots. 25 // We currently only expect the Dart mutator to read snapshots.
26 #define ASSERT_NO_SAFEPOINT_SCOPE() \ 26 #define ASSERT_NO_SAFEPOINT_SCOPE() \
27 isolate()->AssertCurrentThreadIsMutator(); \ 27 isolate()->AssertCurrentThreadIsMutator(); \
28 ASSERT(thread()->no_safepoint_scope_depth() != 0) 28 ASSERT(thread()->no_safepoint_scope_depth() != 0)
29 29
30 namespace dart { 30 namespace dart {
31 31
32 static const int kNumInitialReferences = 32; 32 static const int kNumInitialReferences = 32;
33 33
34
35 static bool IsSingletonClassId(intptr_t class_id) { 34 static bool IsSingletonClassId(intptr_t class_id) {
36 // Check if this is a singleton object class which is shared by all isolates. 35 // Check if this is a singleton object class which is shared by all isolates.
37 return ((class_id >= kClassCid && class_id <= kUnwindErrorCid) || 36 return ((class_id >= kClassCid && class_id <= kUnwindErrorCid) ||
38 (class_id >= kNullCid && class_id <= kVoidCid)); 37 (class_id >= kNullCid && class_id <= kVoidCid));
39 } 38 }
40 39
41
42 static bool IsObjectStoreClassId(intptr_t class_id) { 40 static bool IsObjectStoreClassId(intptr_t class_id) {
43 // Check if this is a class which is stored in the object store. 41 // Check if this is a class which is stored in the object store.
44 return (class_id == kObjectCid || 42 return (class_id == kObjectCid ||
45 (class_id >= kInstanceCid && class_id <= kUserTagCid) || 43 (class_id >= kInstanceCid && class_id <= kUserTagCid) ||
46 class_id == kArrayCid || class_id == kImmutableArrayCid || 44 class_id == kArrayCid || class_id == kImmutableArrayCid ||
47 RawObject::IsStringClassId(class_id) || 45 RawObject::IsStringClassId(class_id) ||
48 RawObject::IsTypedDataClassId(class_id) || 46 RawObject::IsTypedDataClassId(class_id) ||
49 RawObject::IsExternalTypedDataClassId(class_id) || 47 RawObject::IsExternalTypedDataClassId(class_id) ||
50 class_id == kNullCid); 48 class_id == kNullCid);
51 } 49 }
52 50
53
54 static bool IsObjectStoreTypeId(intptr_t index) { 51 static bool IsObjectStoreTypeId(intptr_t index) {
55 // Check if this is a type which is stored in the object store. 52 // Check if this is a type which is stored in the object store.
56 return (index >= kObjectType && index <= kArrayType); 53 return (index >= kObjectType && index <= kArrayType);
57 } 54 }
58 55
59
60 static bool IsSplitClassId(intptr_t class_id) { 56 static bool IsSplitClassId(intptr_t class_id) {
61 // Return whether this class is serialized in two steps: first a reference, 57 // Return whether this class is serialized in two steps: first a reference,
62 // with sufficient information to allocate a correctly sized object, and then 58 // with sufficient information to allocate a correctly sized object, and then
63 // later inline with complete contents. 59 // later inline with complete contents.
64 return class_id >= kNumPredefinedCids || class_id == kArrayCid || 60 return class_id >= kNumPredefinedCids || class_id == kArrayCid ||
65 class_id == kImmutableArrayCid || class_id == kObjectPoolCid || 61 class_id == kImmutableArrayCid || class_id == kObjectPoolCid ||
66 RawObject::IsImplicitFieldClassId(class_id); 62 RawObject::IsImplicitFieldClassId(class_id);
67 } 63 }
68 64
69
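
The split serialization that IsSplitClassId selects (first a reference with enough information to allocate a correctly sized object, then the complete contents later) can be illustrated with a small standalone sketch. The SplitWriter type, its stream layout, and the ids below are invented for illustration only; they are not the VM's wire format.

    #include <cstdint>
    #include <vector>

    // Illustrative two-pass writer: pass 1 emits a reference record (id plus
    // length) so a reader could pre-allocate a correctly sized object; pass 2
    // emits the full contents once every object already has an id.
    struct SplitWriter {
      std::vector<int64_t> stream;

      void WriteReference(int64_t object_id, int64_t length) {
        stream.push_back(object_id);
        stream.push_back(length);
      }

      void WriteContents(const std::vector<int64_t>& elements) {
        for (int64_t element : elements) {
          stream.push_back(element);
        }
      }
    };

    int main() {
      SplitWriter writer;
      writer.WriteReference(/*object_id=*/1, /*length=*/2);  // pass 1: array A
      writer.WriteReference(/*object_id=*/2, /*length=*/2);  // pass 1: array B
      writer.WriteContents({2, 2});  // pass 2: A's slots refer to B by id
      writer.WriteContents({1, 1});  // pass 2: B's slots refer to A by id
      return 0;
    }

Splitting is what lets mutually referencing objects (such as the two arrays above) be serialized without either body needing the other to be fully written first.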
70 static intptr_t ClassIdFromObjectId(intptr_t object_id) { 65 static intptr_t ClassIdFromObjectId(intptr_t object_id) {
71 ASSERT(object_id > kClassIdsOffset); 66 ASSERT(object_id > kClassIdsOffset);
72 intptr_t class_id = (object_id - kClassIdsOffset); 67 intptr_t class_id = (object_id - kClassIdsOffset);
73 return class_id; 68 return class_id;
74 } 69 }
75 70
76
77 static intptr_t ObjectIdFromClassId(intptr_t class_id) { 71 static intptr_t ObjectIdFromClassId(intptr_t class_id) {
78 ASSERT((class_id > kIllegalCid) && (class_id < kNumPredefinedCids)); 72 ASSERT((class_id > kIllegalCid) && (class_id < kNumPredefinedCids));
79 ASSERT(!(RawObject::IsImplicitFieldClassId(class_id))); 73 ASSERT(!(RawObject::IsImplicitFieldClassId(class_id)));
80 return (class_id + kClassIdsOffset); 74 return (class_id + kClassIdsOffset);
81 } 75 }
82 76
83
84 static RawType* GetType(ObjectStore* object_store, intptr_t index) { 77 static RawType* GetType(ObjectStore* object_store, intptr_t index) {
85 switch (index) { 78 switch (index) {
86 case kObjectType: 79 case kObjectType:
87 return object_store->object_type(); 80 return object_store->object_type();
88 case kNullType: 81 case kNullType:
89 return object_store->null_type(); 82 return object_store->null_type();
90 case kFunctionType: 83 case kFunctionType:
91 return object_store->function_type(); 84 return object_store->function_type();
92 case kNumberType: 85 case kNumberType:
93 return object_store->number_type(); 86 return object_store->number_type();
(...skipping 11 matching lines...)
105 return object_store->string_type(); 98 return object_store->string_type();
106 case kArrayType: 99 case kArrayType:
107 return object_store->array_type(); 100 return object_store->array_type();
108 default: 101 default:
109 break; 102 break;
110 } 103 }
111 UNREACHABLE(); 104 UNREACHABLE();
112 return Type::null(); 105 return Type::null();
113 } 106 }
114 107
115
116 static intptr_t GetTypeIndex(ObjectStore* object_store, 108 static intptr_t GetTypeIndex(ObjectStore* object_store,
117 const RawType* raw_type) { 109 const RawType* raw_type) {
118 ASSERT(raw_type->IsHeapObject()); 110 ASSERT(raw_type->IsHeapObject());
119 if (raw_type == object_store->object_type()) { 111 if (raw_type == object_store->object_type()) {
120 return kObjectType; 112 return kObjectType;
121 } else if (raw_type == object_store->null_type()) { 113 } else if (raw_type == object_store->null_type()) {
122 return kNullType; 114 return kNullType;
123 } else if (raw_type == object_store->function_type()) { 115 } else if (raw_type == object_store->function_type()) {
124 return kFunctionType; 116 return kFunctionType;
125 } else if (raw_type == object_store->number_type()) { 117 } else if (raw_type == object_store->number_type()) {
126 return kNumberType; 118 return kNumberType;
127 } else if (raw_type == object_store->smi_type()) { 119 } else if (raw_type == object_store->smi_type()) {
128 return kSmiType; 120 return kSmiType;
129 } else if (raw_type == object_store->mint_type()) { 121 } else if (raw_type == object_store->mint_type()) {
130 return kMintType; 122 return kMintType;
131 } else if (raw_type == object_store->double_type()) { 123 } else if (raw_type == object_store->double_type()) {
132 return kDoubleType; 124 return kDoubleType;
133 } else if (raw_type == object_store->int_type()) { 125 } else if (raw_type == object_store->int_type()) {
134 return kIntType; 126 return kIntType;
135 } else if (raw_type == object_store->bool_type()) { 127 } else if (raw_type == object_store->bool_type()) {
136 return kBoolType; 128 return kBoolType;
137 } else if (raw_type == object_store->string_type()) { 129 } else if (raw_type == object_store->string_type()) {
138 return kStringType; 130 return kStringType;
139 } else if (raw_type == object_store->array_type()) { 131 } else if (raw_type == object_store->array_type()) {
140 return kArrayType; 132 return kArrayType;
141 } 133 }
142 return kInvalidIndex; 134 return kInvalidIndex;
143 } 135 }
144 136
145
146 const char* Snapshot::KindToCString(Kind kind) { 137 const char* Snapshot::KindToCString(Kind kind) {
147 switch (kind) { 138 switch (kind) {
148 case kFull: 139 case kFull:
149 return "full"; 140 return "full";
150 case kScript: 141 case kScript:
151 return "script"; 142 return "script";
152 case kMessage: 143 case kMessage:
153 return "message"; 144 return "message";
154 case kFullJIT: 145 case kFullJIT:
155 return "full-jit"; 146 return "full-jit";
156 case kFullAOT: 147 case kFullAOT:
157 return "full-aot"; 148 return "full-aot";
158 case kNone: 149 case kNone:
159 return "none"; 150 return "none";
160 case kInvalid: 151 case kInvalid:
161 default: 152 default:
162 return "invalid"; 153 return "invalid";
163 } 154 }
164 } 155 }
165 156
166
167 // TODO(5411462): Temporary setup of snapshot for testing purposes, 157 // TODO(5411462): Temporary setup of snapshot for testing purposes,
168 // the actual creation of a snapshot may be done differently. 158 // the actual creation of a snapshot may be done differently.
169 const Snapshot* Snapshot::SetupFromBuffer(const void* raw_memory) { 159 const Snapshot* Snapshot::SetupFromBuffer(const void* raw_memory) {
170 ASSERT(raw_memory != NULL); 160 ASSERT(raw_memory != NULL);
171 ASSERT(kHeaderSize == sizeof(Snapshot)); 161 ASSERT(kHeaderSize == sizeof(Snapshot));
172 ASSERT(kLengthIndex == length_offset()); 162 ASSERT(kLengthIndex == length_offset());
173 ASSERT((kSnapshotFlagIndex * sizeof(int64_t)) == kind_offset()); 163 ASSERT((kSnapshotFlagIndex * sizeof(int64_t)) == kind_offset());
174 ASSERT((kHeapObjectTag & kInlined)); 164 ASSERT((kHeapObjectTag & kInlined));
175 const Snapshot* snapshot = reinterpret_cast<const Snapshot*>(raw_memory); 165 const Snapshot* snapshot = reinterpret_cast<const Snapshot*>(raw_memory);
176 // If the raw length is negative or greater than what the local machine can 166 // If the raw length is negative or greater than what the local machine can
177 // handle, then signal an error. 167 // handle, then signal an error.
178 int64_t snapshot_length = ReadUnaligned(&snapshot->unaligned_length_); 168 int64_t snapshot_length = ReadUnaligned(&snapshot->unaligned_length_);
179 if ((snapshot_length < 0) || (snapshot_length > kIntptrMax)) { 169 if ((snapshot_length < 0) || (snapshot_length > kIntptrMax)) {
180 return NULL; 170 return NULL;
181 } 171 }
182 return snapshot; 172 return snapshot;
183 } 173 }
184 174
185
186 RawSmi* BaseReader::ReadAsSmi() { 175 RawSmi* BaseReader::ReadAsSmi() {
187 intptr_t value = Read<int32_t>(); 176 intptr_t value = Read<int32_t>();
188 ASSERT((value & kSmiTagMask) == kSmiTag); 177 ASSERT((value & kSmiTagMask) == kSmiTag);
189 return reinterpret_cast<RawSmi*>(value); 178 return reinterpret_cast<RawSmi*>(value);
190 } 179 }
191 180
192
193 intptr_t BaseReader::ReadSmiValue() { 181 intptr_t BaseReader::ReadSmiValue() {
194 return Smi::Value(ReadAsSmi()); 182 return Smi::Value(ReadAsSmi());
195 } 183 }
196 184
197
198 SnapshotReader::SnapshotReader(const uint8_t* buffer, 185 SnapshotReader::SnapshotReader(const uint8_t* buffer,
199 intptr_t size, 186 intptr_t size,
200 Snapshot::Kind kind, 187 Snapshot::Kind kind,
201 ZoneGrowableArray<BackRefNode>* backward_refs, 188 ZoneGrowableArray<BackRefNode>* backward_refs,
202 Thread* thread) 189 Thread* thread)
203 : BaseReader(buffer, size), 190 : BaseReader(buffer, size),
204 kind_(kind), 191 kind_(kind),
205 thread_(thread), 192 thread_(thread),
206 zone_(thread->zone()), 193 zone_(thread->zone()),
207 heap_(isolate()->heap()), 194 heap_(isolate()->heap()),
(...skipping 12 matching lines...)
220 data_(ExternalTypedData::Handle(zone_)), 207 data_(ExternalTypedData::Handle(zone_)),
221 typed_data_(TypedData::Handle(zone_)), 208 typed_data_(TypedData::Handle(zone_)),
222 function_(Function::Handle(zone_)), 209 function_(Function::Handle(zone_)),
223 error_(UnhandledException::Handle(zone_)), 210 error_(UnhandledException::Handle(zone_)),
224 max_vm_isolate_object_id_( 211 max_vm_isolate_object_id_(
225 (Snapshot::IsFull(kind)) 212 (Snapshot::IsFull(kind))
226 ? Object::vm_isolate_snapshot_object_table().Length() 213 ? Object::vm_isolate_snapshot_object_table().Length()
227 : 0), 214 : 0),
228 backward_references_(backward_refs) {} 215 backward_references_(backward_refs) {}
229 216
230
231 RawObject* SnapshotReader::ReadObject() { 217 RawObject* SnapshotReader::ReadObject() {
232 // Setup for long jump in case there is an exception while reading. 218 // Setup for long jump in case there is an exception while reading.
233 LongJumpScope jump; 219 LongJumpScope jump;
234 if (setjmp(*jump.Set()) == 0) { 220 if (setjmp(*jump.Set()) == 0) {
235 PassiveObject& obj = 221 PassiveObject& obj =
236 PassiveObject::Handle(zone(), ReadObjectImpl(kAsInlinedObject)); 222 PassiveObject::Handle(zone(), ReadObjectImpl(kAsInlinedObject));
237 for (intptr_t i = 0; i < backward_references_->length(); i++) { 223 for (intptr_t i = 0; i < backward_references_->length(); i++) {
238 if (!(*backward_references_)[i].is_deserialized()) { 224 if (!(*backward_references_)[i].is_deserialized()) {
239 ReadObjectImpl(kAsInlinedObject); 225 ReadObjectImpl(kAsInlinedObject);
240 (*backward_references_)[i].set_state(kIsDeserialized); 226 (*backward_references_)[i].set_state(kIsDeserialized);
241 } 227 }
242 } 228 }
243 if (backward_references_->length() > 0) { 229 if (backward_references_->length() > 0) {
244 ProcessDeferredCanonicalizations(); 230 ProcessDeferredCanonicalizations();
245 if (kind() == Snapshot::kScript) { 231 if (kind() == Snapshot::kScript) {
246 FixSubclassesAndImplementors(); 232 FixSubclassesAndImplementors();
247 } 233 }
248 return (*backward_references_)[0].reference()->raw(); 234 return (*backward_references_)[0].reference()->raw();
249 } else { 235 } else {
250 return obj.raw(); 236 return obj.raw();
251 } 237 }
252 } else { 238 } else {
253 // An error occurred while reading, return the error object. 239 // An error occurred while reading, return the error object.
254 const Error& err = Error::Handle(thread()->sticky_error()); 240 const Error& err = Error::Handle(thread()->sticky_error());
255 thread()->clear_sticky_error(); 241 thread()->clear_sticky_error();
256 return err.raw(); 242 return err.raw();
257 } 243 }
258 } 244 }
259 245
260
261 RawClass* SnapshotReader::ReadClassId(intptr_t object_id) { 246 RawClass* SnapshotReader::ReadClassId(intptr_t object_id) {
262 ASSERT(!Snapshot::IsFull(kind_)); 247 ASSERT(!Snapshot::IsFull(kind_));
263 // Read the class header information and lookup the class. 248 // Read the class header information and lookup the class.
264 intptr_t class_header = Read<int32_t>(); 249 intptr_t class_header = Read<int32_t>();
265 ASSERT((class_header & kSmiTagMask) != kSmiTag); 250 ASSERT((class_header & kSmiTagMask) != kSmiTag);
266 ASSERT(!IsVMIsolateObject(class_header) || 251 ASSERT(!IsVMIsolateObject(class_header) ||
267 !IsSingletonClassId(GetVMIsolateObjectId(class_header))); 252 !IsSingletonClassId(GetVMIsolateObjectId(class_header)));
268 ASSERT((SerializedHeaderTag::decode(class_header) != kObjectId) || 253 ASSERT((SerializedHeaderTag::decode(class_header) != kObjectId) ||
269 !IsObjectStoreClassId(SerializedHeaderData::decode(class_header))); 254 !IsObjectStoreClassId(SerializedHeaderData::decode(class_header)));
270 Class& cls = Class::ZoneHandle(zone(), Class::null()); 255 Class& cls = Class::ZoneHandle(zone(), Class::null());
(...skipping 10 matching lines...)
281 } else { 266 } else {
282 cls = library_.LookupClassAllowPrivate(str_); 267 cls = library_.LookupClassAllowPrivate(str_);
283 } 268 }
284 if (cls.IsNull()) { 269 if (cls.IsNull()) {
285 SetReadException("Invalid object found in message."); 270 SetReadException("Invalid object found in message.");
286 } 271 }
287 cls.EnsureIsFinalized(thread()); 272 cls.EnsureIsFinalized(thread());
288 return cls.raw(); 273 return cls.raw();
289 } 274 }
290 275
291
292 RawFunction* SnapshotReader::ReadFunctionId(intptr_t object_id) { 276 RawFunction* SnapshotReader::ReadFunctionId(intptr_t object_id) {
293 ASSERT(kind_ == Snapshot::kScript); 277 ASSERT(kind_ == Snapshot::kScript);
294 // Read the function header information and lookup the function. 278 // Read the function header information and lookup the function.
295 intptr_t func_header = Read<int32_t>(); 279 intptr_t func_header = Read<int32_t>();
296 ASSERT((func_header & kSmiTagMask) != kSmiTag); 280 ASSERT((func_header & kSmiTagMask) != kSmiTag);
297 ASSERT(!IsVMIsolateObject(func_header) || 281 ASSERT(!IsVMIsolateObject(func_header) ||
298 !IsSingletonClassId(GetVMIsolateObjectId(func_header))); 282 !IsSingletonClassId(GetVMIsolateObjectId(func_header)));
299 ASSERT((SerializedHeaderTag::decode(func_header) != kObjectId) || 283 ASSERT((SerializedHeaderTag::decode(func_header) != kObjectId) ||
300 !IsObjectStoreClassId(SerializedHeaderData::decode(func_header))); 284 !IsObjectStoreClassId(SerializedHeaderData::decode(func_header)));
301 Function& func = Function::ZoneHandle(zone(), Function::null()); 285 Function& func = Function::ZoneHandle(zone(), Function::null());
(...skipping 16 matching lines...)
318 cls_.EnsureIsFinalized(thread()); 302 cls_.EnsureIsFinalized(thread());
319 str_ ^= ReadObjectImpl(kAsInlinedObject); 303 str_ ^= ReadObjectImpl(kAsInlinedObject);
320 func ^= cls_.LookupFunctionAllowPrivate(str_); 304 func ^= cls_.LookupFunctionAllowPrivate(str_);
321 } 305 }
322 if (func.IsNull()) { 306 if (func.IsNull()) {
323 SetReadException("Expected a function name, but found an invalid name."); 307 SetReadException("Expected a function name, but found an invalid name.");
324 } 308 }
325 return func.raw(); 309 return func.raw();
326 } 310 }
327 311
328
329 RawObject* SnapshotReader::ReadStaticImplicitClosure(intptr_t object_id, 312 RawObject* SnapshotReader::ReadStaticImplicitClosure(intptr_t object_id,
330 intptr_t class_header) { 313 intptr_t class_header) {
331 ASSERT(!Snapshot::IsFull(kind_)); 314 ASSERT(!Snapshot::IsFull(kind_));
332 315
333 // First create a function object and associate it with the specified 316 // First create a function object and associate it with the specified
334 // 'object_id'. 317 // 'object_id'.
335 Function& func = Function::Handle(zone(), Function::null()); 318 Function& func = Function::Handle(zone(), Function::null());
336 Instance& obj = Instance::ZoneHandle(zone(), Instance::null()); 319 Instance& obj = Instance::ZoneHandle(zone(), Instance::null());
337 AddBackRef(object_id, &obj, kIsDeserialized); 320 AddBackRef(object_id, &obj, kIsDeserialized);
338 321
(...skipping 21 matching lines...)
360 SetReadException("Invalid function object found in message."); 343 SetReadException("Invalid function object found in message.");
361 } 344 }
362 func = func.ImplicitClosureFunction(); 345 func = func.ImplicitClosureFunction();
363 ASSERT(!func.IsNull()); 346 ASSERT(!func.IsNull());
364 347
365 // Return the associated implicit static closure. 348 // Return the associated implicit static closure.
366 obj = func.ImplicitStaticClosure(); 349 obj = func.ImplicitStaticClosure();
367 return obj.raw(); 350 return obj.raw();
368 } 351 }
369 352
370
371 intptr_t SnapshotReader::NextAvailableObjectId() const { 353 intptr_t SnapshotReader::NextAvailableObjectId() const {
372 return backward_references_->length() + kMaxPredefinedObjectIds + 354 return backward_references_->length() + kMaxPredefinedObjectIds +
373 max_vm_isolate_object_id_; 355 max_vm_isolate_object_id_;
374 } 356 }
375 357
376
377 void SnapshotReader::SetReadException(const char* msg) { 358 void SnapshotReader::SetReadException(const char* msg) {
378 const String& error_str = String::Handle(zone(), String::New(msg)); 359 const String& error_str = String::Handle(zone(), String::New(msg));
379 const Array& args = Array::Handle(zone(), Array::New(1)); 360 const Array& args = Array::Handle(zone(), Array::New(1));
380 args.SetAt(0, error_str); 361 args.SetAt(0, error_str);
381 Object& result = Object::Handle(zone()); 362 Object& result = Object::Handle(zone());
382 const Library& library = Library::Handle(zone(), Library::CoreLibrary()); 363 const Library& library = Library::Handle(zone(), Library::CoreLibrary());
383 result = DartLibraryCalls::InstanceCreate(library, Symbols::ArgumentError(), 364 result = DartLibraryCalls::InstanceCreate(library, Symbols::ArgumentError(),
384 Symbols::Dot(), args); 365 Symbols::Dot(), args);
385 const StackTrace& stacktrace = StackTrace::Handle(zone()); 366 const StackTrace& stacktrace = StackTrace::Handle(zone());
386 const UnhandledException& error = UnhandledException::Handle( 367 const UnhandledException& error = UnhandledException::Handle(
387 zone(), UnhandledException::New(Instance::Cast(result), stacktrace)); 368 zone(), UnhandledException::New(Instance::Cast(result), stacktrace));
388 thread()->long_jump_base()->Jump(1, error); 369 thread()->long_jump_base()->Jump(1, error);
389 } 370 }
390 371
391
392 RawObject* SnapshotReader::VmIsolateSnapshotObject(intptr_t index) const { 372 RawObject* SnapshotReader::VmIsolateSnapshotObject(intptr_t index) const {
393 return Object::vm_isolate_snapshot_object_table().At(index); 373 return Object::vm_isolate_snapshot_object_table().At(index);
394 } 374 }
395 375
396
397 bool SnapshotReader::is_vm_isolate() const { 376 bool SnapshotReader::is_vm_isolate() const {
398 return isolate() == Dart::vm_isolate(); 377 return isolate() == Dart::vm_isolate();
399 } 378 }
400 379
401
402 RawObject* SnapshotReader::ReadObjectImpl(bool as_reference, 380 RawObject* SnapshotReader::ReadObjectImpl(bool as_reference,
403 intptr_t patch_object_id, 381 intptr_t patch_object_id,
404 intptr_t patch_offset) { 382 intptr_t patch_offset) {
405 int64_t header_value = Read<int64_t>(); 383 int64_t header_value = Read<int64_t>();
406 if ((header_value & kSmiTagMask) == kSmiTag) { 384 if ((header_value & kSmiTagMask) == kSmiTag) {
407 return NewInteger(header_value); 385 return NewInteger(header_value);
408 } 386 }
409 ASSERT((header_value <= kIntptrMax) && (header_value >= kIntptrMin)); 387 ASSERT((header_value <= kIntptrMax) && (header_value >= kIntptrMin));
410 return ReadObjectImpl(static_cast<intptr_t>(header_value), as_reference, 388 return ReadObjectImpl(static_cast<intptr_t>(header_value), as_reference,
411 patch_object_id, patch_offset); 389 patch_object_id, patch_offset);
412 } 390 }
413 391
414
415 RawObject* SnapshotReader::ReadObjectImpl(intptr_t header_value, 392 RawObject* SnapshotReader::ReadObjectImpl(intptr_t header_value,
416 bool as_reference, 393 bool as_reference,
417 intptr_t patch_object_id, 394 intptr_t patch_object_id,
418 intptr_t patch_offset) { 395 intptr_t patch_offset) {
419 if (IsVMIsolateObject(header_value)) { 396 if (IsVMIsolateObject(header_value)) {
420 return ReadVMIsolateObject(header_value); 397 return ReadVMIsolateObject(header_value);
421 } 398 }
422 if (SerializedHeaderTag::decode(header_value) == kObjectId) { 399 if (SerializedHeaderTag::decode(header_value) == kObjectId) {
423 return ReadIndexedObject(SerializedHeaderData::decode(header_value), 400 return ReadIndexedObject(SerializedHeaderData::decode(header_value),
424 patch_object_id, patch_offset); 401 patch_object_id, patch_offset);
(...skipping 48 matching lines...)
473 default: 450 default:
474 UNREACHABLE(); 451 UNREACHABLE();
475 break; 452 break;
476 } 453 }
477 if (!read_as_reference) { 454 if (!read_as_reference) {
478 AddPatchRecord(object_id, patch_object_id, patch_offset); 455 AddPatchRecord(object_id, patch_object_id, patch_offset);
479 } 456 }
480 return pobj_.raw(); 457 return pobj_.raw();
481 } 458 }
482 459
483
484 RawObject* SnapshotReader::ReadInstance(intptr_t object_id, 460 RawObject* SnapshotReader::ReadInstance(intptr_t object_id,
485 intptr_t tags, 461 intptr_t tags,
486 bool as_reference) { 462 bool as_reference) {
487 // Object is regular dart instance. 463 // Object is regular dart instance.
488 intptr_t instance_size = 0; 464 intptr_t instance_size = 0;
489 Instance* result = NULL; 465 Instance* result = NULL;
490 DeserializeState state; 466 DeserializeState state;
491 if (!as_reference) { 467 if (!as_reference) {
492 result = reinterpret_cast<Instance*>(GetBackRef(object_id)); 468 result = reinterpret_cast<Instance*>(GetBackRef(object_id));
493 state = kIsDeserialized; 469 state = kIsDeserialized;
(...skipping 49 matching lines...)
543 offset += kWordSize; 519 offset += kWordSize;
544 } 520 }
545 if (RawObject::IsCanonical(tags)) { 521 if (RawObject::IsCanonical(tags)) {
546 *result = result->CheckAndCanonicalize(thread(), NULL); 522 *result = result->CheckAndCanonicalize(thread(), NULL);
547 ASSERT(!result->IsNull()); 523 ASSERT(!result->IsNull());
548 } 524 }
549 } 525 }
550 return result->raw(); 526 return result->raw();
551 } 527 }
552 528
553
554 void SnapshotReader::AddBackRef(intptr_t id, 529 void SnapshotReader::AddBackRef(intptr_t id,
555 Object* obj, 530 Object* obj,
556 DeserializeState state, 531 DeserializeState state,
557 bool defer_canonicalization) { 532 bool defer_canonicalization) {
558 intptr_t index = (id - kMaxPredefinedObjectIds); 533 intptr_t index = (id - kMaxPredefinedObjectIds);
559 ASSERT(index >= max_vm_isolate_object_id_); 534 ASSERT(index >= max_vm_isolate_object_id_);
560 index -= max_vm_isolate_object_id_; 535 index -= max_vm_isolate_object_id_;
561 ASSERT(index == backward_references_->length()); 536 ASSERT(index == backward_references_->length());
562 BackRefNode node(obj, state, defer_canonicalization); 537 BackRefNode node(obj, state, defer_canonicalization);
563 backward_references_->Add(node); 538 backward_references_->Add(node);
564 } 539 }
565 540
566
567 Object* SnapshotReader::GetBackRef(intptr_t id) { 541 Object* SnapshotReader::GetBackRef(intptr_t id) {
568 ASSERT(id >= kMaxPredefinedObjectIds); 542 ASSERT(id >= kMaxPredefinedObjectIds);
569 intptr_t index = (id - kMaxPredefinedObjectIds); 543 intptr_t index = (id - kMaxPredefinedObjectIds);
570 ASSERT(index >= max_vm_isolate_object_id_); 544 ASSERT(index >= max_vm_isolate_object_id_);
571 index -= max_vm_isolate_object_id_; 545 index -= max_vm_isolate_object_id_;
572 if (index < backward_references_->length()) { 546 if (index < backward_references_->length()) {
573 return (*backward_references_)[index].reference(); 547 return (*backward_references_)[index].reference();
574 } 548 }
575 return NULL; 549 return NULL;
576 } 550 }
577 551
578
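
The id-to-index arithmetic shared by AddBackRef, GetBackRef, and NextAvailableObjectId (strip off the predefined-object ids, then the VM-isolate snapshot table length) round-trips as in the small sketch below; the two constant values are made up purely for illustration.

    #include <cassert>
    #include <cstdint>

    // Assumed values, for illustration only.
    constexpr int64_t kMaxPredefinedObjectIds = 1024;
    constexpr int64_t kMaxVmIsolateObjectId = 16;

    // Mirrors AddBackRef/GetBackRef: ids below the predefined + VM-isolate
    // range never reach the backward-reference table.
    int64_t IndexFromObjectId(int64_t object_id) {
      assert(object_id >= kMaxPredefinedObjectIds + kMaxVmIsolateObjectId);
      return object_id - kMaxPredefinedObjectIds - kMaxVmIsolateObjectId;
    }

    // Mirrors NextAvailableObjectId: table index + predefined + VM-isolate ids.
    int64_t ObjectIdFromIndex(int64_t index) {
      return index + kMaxPredefinedObjectIds + kMaxVmIsolateObjectId;
    }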
579 class HeapLocker : public StackResource { 552 class HeapLocker : public StackResource {
580 public: 553 public:
581 HeapLocker(Thread* thread, PageSpace* page_space) 554 HeapLocker(Thread* thread, PageSpace* page_space)
582 : StackResource(thread), page_space_(page_space) { 555 : StackResource(thread), page_space_(page_space) {
583 page_space_->AcquireDataLock(); 556 page_space_->AcquireDataLock();
584 } 557 }
585 ~HeapLocker() { page_space_->ReleaseDataLock(); } 558 ~HeapLocker() { page_space_->ReleaseDataLock(); }
586 559
587 private: 560 private:
588 PageSpace* page_space_; 561 PageSpace* page_space_;
589 }; 562 };
590 563
591
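
HeapLocker is a plain RAII scope guard: the constructor takes the page-space data lock and the destructor releases it, so the lock is released on every exit from the scope. A generic sketch of the same pattern (the Lockable interface is hypothetical, not a VM type):

    // Generic RAII guard sketch: acquire in the constructor, release in the
    // destructor, forbid copying so lock ownership stays unambiguous.
    template <typename Lockable>
    class ScopedAcquire {
     public:
      explicit ScopedAcquire(Lockable* lock) : lock_(lock) { lock_->Acquire(); }
      ~ScopedAcquire() { lock_->Release(); }
      ScopedAcquire(const ScopedAcquire&) = delete;
      ScopedAcquire& operator=(const ScopedAcquire&) = delete;

     private:
      Lockable* lock_;
    };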
592 RawObject* SnapshotReader::ReadScriptSnapshot() { 564 RawObject* SnapshotReader::ReadScriptSnapshot() {
593 ASSERT(kind_ == Snapshot::kScript); 565 ASSERT(kind_ == Snapshot::kScript);
594 566
595 // First read the version string, and check that it matches. 567 // First read the version string, and check that it matches.
596 RawApiError* error = VerifyVersionAndFeatures(Isolate::Current()); 568 RawApiError* error = VerifyVersionAndFeatures(Isolate::Current());
597 if (error != ApiError::null()) { 569 if (error != ApiError::null()) {
598 return error; 570 return error;
599 } 571 }
600 572
601 // The version string matches. Read the rest of the snapshot. 573 // The version string matches. Read the rest of the snapshot.
602 obj_ = ReadObject(); 574 obj_ = ReadObject();
603 if (!obj_.IsLibrary()) { 575 if (!obj_.IsLibrary()) {
604 if (!obj_.IsError()) { 576 if (!obj_.IsError()) {
605 const intptr_t kMessageBufferSize = 128; 577 const intptr_t kMessageBufferSize = 128;
606 char message_buffer[kMessageBufferSize]; 578 char message_buffer[kMessageBufferSize];
607 OS::SNPrint(message_buffer, kMessageBufferSize, 579 OS::SNPrint(message_buffer, kMessageBufferSize,
608 "Invalid object %s found in script snapshot", 580 "Invalid object %s found in script snapshot",
609 obj_.ToCString()); 581 obj_.ToCString());
610 const String& msg = String::Handle(String::New(message_buffer)); 582 const String& msg = String::Handle(String::New(message_buffer));
611 obj_ = ApiError::New(msg); 583 obj_ = ApiError::New(msg);
612 } 584 }
613 } 585 }
614 return obj_.raw(); 586 return obj_.raw();
615 } 587 }
616 588
617
618 RawApiError* SnapshotReader::VerifyVersionAndFeatures(Isolate* isolate) { 589 RawApiError* SnapshotReader::VerifyVersionAndFeatures(Isolate* isolate) {
619 // If the version string doesn't match, return an error. 590 // If the version string doesn't match, return an error.
620 // Note: New things are allocated only if we're going to return an error. 591 // Note: New things are allocated only if we're going to return an error.
621 592
622 const char* expected_version = Version::SnapshotString(); 593 const char* expected_version = Version::SnapshotString();
623 ASSERT(expected_version != NULL); 594 ASSERT(expected_version != NULL);
624 const intptr_t version_len = strlen(expected_version); 595 const intptr_t version_len = strlen(expected_version);
625 if (PendingBytes() < version_len) { 596 if (PendingBytes() < version_len) {
626 const intptr_t kMessageBufferSize = 128; 597 const intptr_t kMessageBufferSize = 128;
627 char message_buffer[kMessageBufferSize]; 598 char message_buffer[kMessageBufferSize];
(...skipping 46 matching lines...)
674 // This can also fail while bringing up the VM isolate, so make sure to 645 // This can also fail while bringing up the VM isolate, so make sure to
675 // allocate the error message in old space. 646 // allocate the error message in old space.
676 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); 647 const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
677 return ApiError::New(msg, Heap::kOld); 648 return ApiError::New(msg, Heap::kOld);
678 } 649 }
679 free(const_cast<char*>(expected_features)); 650 free(const_cast<char*>(expected_features));
680 Advance(expected_len + 1); 651 Advance(expected_len + 1);
681 return ApiError::null(); 652 return ApiError::null();
682 } 653 }
683 654
684
685 RawObject* SnapshotReader::NewInteger(int64_t value) { 655 RawObject* SnapshotReader::NewInteger(int64_t value) {
686 ASSERT((value & kSmiTagMask) == kSmiTag); 656 ASSERT((value & kSmiTagMask) == kSmiTag);
687 value = value >> kSmiTagShift; 657 value = value >> kSmiTagShift;
688 if (Smi::IsValid(value)) { 658 if (Smi::IsValid(value)) {
689 return Smi::New(static_cast<intptr_t>(value)); 659 return Smi::New(static_cast<intptr_t>(value));
690 } 660 }
691 return Mint::NewCanonical(value); 661 return Mint::NewCanonical(value);
692 } 662 }
693 663
694
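
NewInteger strips the Smi tag before deciding between a Smi and a canonical Mint. The tag constants below (tag 0, mask 1, shift 1) are the conventional choices and are assumed here for a self-contained sketch rather than taken from this file:

    #include <cassert>
    #include <cstdint>

    // Assumed tag layout: low bit clear marks a Smi, payload in the upper bits.
    constexpr int64_t kSmiTag = 0;
    constexpr int64_t kSmiTagMask = 1;
    constexpr int64_t kSmiTagShift = 1;

    int64_t EncodeSmi(int64_t value) {
      return (value << kSmiTagShift) | kSmiTag;   // round-trips with DecodeSmi
    }

    int64_t DecodeSmi(int64_t tagged) {
      assert((tagged & kSmiTagMask) == kSmiTag);  // must carry the Smi tag
      return tagged >> kSmiTagShift;              // drop the tag bit
    }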
695 int32_t ImageWriter::GetTextOffsetFor(RawInstructions* instructions, 664 int32_t ImageWriter::GetTextOffsetFor(RawInstructions* instructions,
696 RawCode* code) { 665 RawCode* code) {
697 intptr_t heap_size = instructions->Size(); 666 intptr_t heap_size = instructions->Size();
698 intptr_t offset = next_offset_; 667 intptr_t offset = next_offset_;
699 next_offset_ += heap_size; 668 next_offset_ += heap_size;
700 instructions_.Add(InstructionsData(instructions, code, offset)); 669 instructions_.Add(InstructionsData(instructions, code, offset));
701 return offset; 670 return offset;
702 } 671 }
703 672
704
705 int32_t ImageWriter::GetDataOffsetFor(RawObject* raw_object) { 673 int32_t ImageWriter::GetDataOffsetFor(RawObject* raw_object) {
706 intptr_t heap_size = raw_object->Size(); 674 intptr_t heap_size = raw_object->Size();
707 intptr_t offset = next_object_offset_; 675 intptr_t offset = next_object_offset_;
708 next_object_offset_ += heap_size; 676 next_object_offset_ += heap_size;
709 objects_.Add(ObjectData(raw_object)); 677 objects_.Add(ObjectData(raw_object));
710 return offset; 678 return offset;
711 } 679 }
712 680
713
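
GetTextOffsetFor and GetDataOffsetFor both assign image offsets with a simple bump scheme: return the current cursor, advance it by the object's heap size, and queue the object for the later write pass. A stripped-down sketch of that scheme (type and names invented):

    #include <cstdint>
    #include <vector>

    // Bump-style offset assignment: the returned offset is where the payload
    // will land when the image is emitted.
    class OffsetAssigner {
     public:
      int64_t Reserve(int64_t size_in_bytes) {
        const int64_t offset = next_offset_;
        next_offset_ += size_in_bytes;
        pending_sizes_.push_back(size_in_bytes);  // remembered for the write pass
        return offset;
      }

     private:
      int64_t next_offset_ = 0;
      std::vector<int64_t> pending_sizes_;
    };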
714 void ImageWriter::Write(WriteStream* clustered_stream, bool vm) { 681 void ImageWriter::Write(WriteStream* clustered_stream, bool vm) {
715 Thread* thread = Thread::Current(); 682 Thread* thread = Thread::Current();
716 Zone* zone = thread->zone(); 683 Zone* zone = thread->zone();
717 NOT_IN_PRODUCT(TimelineDurationScope tds(thread, Timeline::GetIsolateStream(), 684 NOT_IN_PRODUCT(TimelineDurationScope tds(thread, Timeline::GetIsolateStream(),
718 "WriteInstructions")); 685 "WriteInstructions"));
719 686
720 // Handlify collected raw pointers as building the names below 687 // Handlify collected raw pointers as building the names below
721 // will allocate on the Dart heap. 688 // will allocate on the Dart heap.
722 for (intptr_t i = 0; i < instructions_.length(); i++) { 689 for (intptr_t i = 0; i < instructions_.length(); i++) {
723 InstructionsData& data = instructions_[i]; 690 InstructionsData& data = instructions_[i];
724 data.insns_ = &Instructions::Handle(zone, data.raw_insns_); 691 data.insns_ = &Instructions::Handle(zone, data.raw_insns_);
725 ASSERT(data.raw_code_ != NULL); 692 ASSERT(data.raw_code_ != NULL);
726 data.code_ = &Code::Handle(zone, data.raw_code_); 693 data.code_ = &Code::Handle(zone, data.raw_code_);
727 } 694 }
728 for (intptr_t i = 0; i < objects_.length(); i++) { 695 for (intptr_t i = 0; i < objects_.length(); i++) {
729 ObjectData& data = objects_[i]; 696 ObjectData& data = objects_[i];
730 data.obj_ = &Object::Handle(zone, data.raw_obj_); 697 data.obj_ = &Object::Handle(zone, data.raw_obj_);
731 } 698 }
732 699
733 // Append the direct-mapped RO data objects after the clustered snapshot. 700 // Append the direct-mapped RO data objects after the clustered snapshot.
734 WriteROData(clustered_stream); 701 WriteROData(clustered_stream);
735 702
736 WriteText(clustered_stream, vm); 703 WriteText(clustered_stream, vm);
737 } 704 }
738 705
739
740 void ImageWriter::WriteROData(WriteStream* stream) { 706 void ImageWriter::WriteROData(WriteStream* stream) {
741 stream->Align(OS::kMaxPreferredCodeAlignment); 707 stream->Align(OS::kMaxPreferredCodeAlignment);
742 708
743 // Heap page starts here. 709 // Heap page starts here.
744 710
745 stream->WriteWord(next_object_offset_); // Data length. 711 stream->WriteWord(next_object_offset_); // Data length.
746 COMPILE_ASSERT(OS::kMaxPreferredCodeAlignment >= kObjectAlignment); 712 COMPILE_ASSERT(OS::kMaxPreferredCodeAlignment >= kObjectAlignment);
747 stream->Align(OS::kMaxPreferredCodeAlignment); 713 stream->Align(OS::kMaxPreferredCodeAlignment);
748 714
749 // Heap page objects start here. 715 // Heap page objects start here.
(...skipping 14 matching lines...)
764 #endif 730 #endif
765 stream->WriteWord(marked_tags); 731 stream->WriteWord(marked_tags);
766 start += sizeof(uword); 732 start += sizeof(uword);
767 for (uword* cursor = reinterpret_cast<uword*>(start); 733 for (uword* cursor = reinterpret_cast<uword*>(start);
768 cursor < reinterpret_cast<uword*>(end); cursor++) { 734 cursor < reinterpret_cast<uword*>(end); cursor++) {
769 stream->WriteWord(*cursor); 735 stream->WriteWord(*cursor);
770 } 736 }
771 } 737 }
772 } 738 }
773 739
774
775 AssemblyImageWriter::AssemblyImageWriter(uint8_t** assembly_buffer, 740 AssemblyImageWriter::AssemblyImageWriter(uint8_t** assembly_buffer,
776 ReAlloc alloc, 741 ReAlloc alloc,
777 intptr_t initial_size) 742 intptr_t initial_size)
778 : ImageWriter(), 743 : ImageWriter(),
779 assembly_stream_(assembly_buffer, alloc, initial_size), 744 assembly_stream_(assembly_buffer, alloc, initial_size),
780 text_size_(0), 745 text_size_(0),
781 dwarf_(NULL) { 746 dwarf_(NULL) {
782 #if defined(DART_PRECOMPILER) 747 #if defined(DART_PRECOMPILER)
783 Zone* zone = Thread::Current()->zone(); 748 Zone* zone = Thread::Current()->zone();
784 dwarf_ = new (zone) Dwarf(zone, &assembly_stream_); 749 dwarf_ = new (zone) Dwarf(zone, &assembly_stream_);
785 #endif 750 #endif
786 } 751 }
787 752
788
789 void AssemblyImageWriter::Finalize() { 753 void AssemblyImageWriter::Finalize() {
790 #ifdef DART_PRECOMPILER 754 #ifdef DART_PRECOMPILER
791 dwarf_->Write(); 755 dwarf_->Write();
792 #endif 756 #endif
793 } 757 }
794 758
795
796 static void EnsureIdentifier(char* label) { 759 static void EnsureIdentifier(char* label) {
797 for (char c = *label; c != '\0'; c = *++label) { 760 for (char c = *label; c != '\0'; c = *++label) {
798 if (((c >= 'a') && (c <= 'z')) || ((c >= 'A') && (c <= 'Z')) || 761 if (((c >= 'a') && (c <= 'z')) || ((c >= 'A') && (c <= 'Z')) ||
799 ((c >= '0') && (c <= '9'))) { 762 ((c >= '0') && (c <= '9'))) {
800 continue; 763 continue;
801 } 764 }
802 *label = '_'; 765 *label = '_';
803 } 766 }
804 } 767 }
805 768
806
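
EnsureIdentifier rewrites every character outside [A-Za-z0-9] to '_' so a generated assembly label is a valid symbol name. A small self-contained usage sketch with an invented label:

    #include <cstdio>

    // Same sanitization idea as EnsureIdentifier: keep alphanumerics, replace
    // everything else (spaces, angle brackets, punctuation) with '_'.
    int main() {
      char label[] = "Precompiled_Stub_<anonymous closure>";
      for (char* p = label; *p != '\0'; ++p) {
        const char c = *p;
        const bool keep = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
                          (c >= '0' && c <= '9');
        if (!keep) {
          *p = '_';
        }
      }
      std::printf("%s\n", label);  // prints: Precompiled_Stub__anonymous_closure_
      return 0;
    }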
807 void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) { 769 void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
808 Zone* zone = Thread::Current()->zone(); 770 Zone* zone = Thread::Current()->zone();
809 771
810 const char* instructions_symbol = 772 const char* instructions_symbol =
811 vm ? "_kDartVmSnapshotInstructions" : "_kDartIsolateSnapshotInstructions"; 773 vm ? "_kDartVmSnapshotInstructions" : "_kDartIsolateSnapshotInstructions";
812 assembly_stream_.Print(".text\n"); 774 assembly_stream_.Print(".text\n");
813 assembly_stream_.Print(".globl %s\n", instructions_symbol); 775 assembly_stream_.Print(".globl %s\n", instructions_symbol);
814 776
815 // Start snapshot at page boundary. 777 // Start snapshot at page boundary.
816 ASSERT(VirtualMemory::PageSize() >= OS::kMaxPreferredCodeAlignment); 778 ASSERT(VirtualMemory::PageSize() >= OS::kMaxPreferredCodeAlignment);
(...skipping 101 matching lines...)
918 vm ? "_kDartVmSnapshotData" : "_kDartIsolateSnapshotData"; 880 vm ? "_kDartVmSnapshotData" : "_kDartIsolateSnapshotData";
919 assembly_stream_.Print(".globl %s\n", data_symbol); 881 assembly_stream_.Print(".globl %s\n", data_symbol);
920 assembly_stream_.Print(".balign %" Pd ", 0\n", 882 assembly_stream_.Print(".balign %" Pd ", 0\n",
921 OS::kMaxPreferredCodeAlignment); 883 OS::kMaxPreferredCodeAlignment);
922 assembly_stream_.Print("%s:\n", data_symbol); 884 assembly_stream_.Print("%s:\n", data_symbol);
923 uword buffer = reinterpret_cast<uword>(clustered_stream->buffer()); 885 uword buffer = reinterpret_cast<uword>(clustered_stream->buffer());
924 intptr_t length = clustered_stream->bytes_written(); 886 intptr_t length = clustered_stream->bytes_written();
925 WriteByteSequence(buffer, buffer + length); 887 WriteByteSequence(buffer, buffer + length);
926 } 888 }
927 889
928
929 void AssemblyImageWriter::FrameUnwindPrologue() { 890 void AssemblyImageWriter::FrameUnwindPrologue() {
930 // Creates DWARF's .debug_frame 891 // Creates DWARF's .debug_frame
931 // CFI = Call frame information 892 // CFI = Call frame information
932 // CFA = Canonical frame address 893 // CFA = Canonical frame address
933 assembly_stream_.Print(".cfi_startproc\n"); 894 assembly_stream_.Print(".cfi_startproc\n");
934 895
935 #if defined(TARGET_ARCH_X64) 896 #if defined(TARGET_ARCH_X64)
936 assembly_stream_.Print(".cfi_def_cfa rbp, 0\n"); // CFA is fp+0 897 assembly_stream_.Print(".cfi_def_cfa rbp, 0\n"); // CFA is fp+0
937 assembly_stream_.Print(".cfi_offset rbp, 0\n"); // saved fp is *(CFA+0) 898 assembly_stream_.Print(".cfi_offset rbp, 0\n"); // saved fp is *(CFA+0)
938 assembly_stream_.Print(".cfi_offset rip, 8\n"); // saved pc is *(CFA+8) 899 assembly_stream_.Print(".cfi_offset rip, 8\n"); // saved pc is *(CFA+8)
(...skipping 19 matching lines...)
958 // DW_CFA_expression 0x10 919 // DW_CFA_expression 0x10
959 // uleb128 register (x31) 31 920 // uleb128 register (x31) 31
960 // uleb128 size of operation 2 921 // uleb128 size of operation 2
961 // DW_OP_plus_uconst 0x23 922 // DW_OP_plus_uconst 0x23
962 // uleb128 addend 16 923 // uleb128 addend 16
963 assembly_stream_.Print(".cfi_escape 0x10, 31, 2, 0x23, 16\n"); 924 assembly_stream_.Print(".cfi_escape 0x10, 31, 2, 0x23, 16\n");
964 925
965 #elif defined(TARGET_ARCH_ARM) 926 #elif defined(TARGET_ARCH_ARM)
966 #if defined(TARGET_OS_MACOS) || defined(TARGET_OS_MACOS_IOS) 927 #if defined(TARGET_OS_MACOS) || defined(TARGET_OS_MACOS_IOS)
967 COMPILE_ASSERT(FP == R7); 928 COMPILE_ASSERT(FP == R7);
968 assembly_stream_.Print(".cfi_def_cfa r7, 0\n"); // CFA is fp+0 929 assembly_stream_.Print(".cfi_def_cfa r7, 0\n"); // CFA is fp+0
969 assembly_stream_.Print(".cfi_offset r7, 0\n"); // saved fp is *(CFA+0) 930 assembly_stream_.Print(".cfi_offset r7, 0\n"); // saved fp is *(CFA+0)
970 #else 931 #else
971 COMPILE_ASSERT(FP == R11); 932 COMPILE_ASSERT(FP == R11);
972 assembly_stream_.Print(".cfi_def_cfa r11, 0\n"); // CFA is fp+0 933 assembly_stream_.Print(".cfi_def_cfa r11, 0\n"); // CFA is fp+0
973 assembly_stream_.Print(".cfi_offset r11, 0\n"); // saved fp is *(CFA+0) 934 assembly_stream_.Print(".cfi_offset r11, 0\n"); // saved fp is *(CFA+0)
974 #endif 935 #endif
975 assembly_stream_.Print(".cfi_offset lr, 4\n"); // saved pc is *(CFA+4) 936 assembly_stream_.Print(".cfi_offset lr, 4\n"); // saved pc is *(CFA+4)
976 // saved sp is CFA+8 937 // saved sp is CFA+8
977 // Should be ".cfi_value_offset sp, 8", but requires gcc newer than late 938 // Should be ".cfi_value_offset sp, 8", but requires gcc newer than late
978 // 2016 and not supported by Android's libunwind. 939 // 2016 and not supported by Android's libunwind.
979 // DW_CFA_expression 0x10 940 // DW_CFA_expression 0x10
980 // uleb128 register (sp) 13 941 // uleb128 register (sp) 13
981 // uleb128 size of operation 2 942 // uleb128 size of operation 2
982 // DW_OP_plus_uconst 0x23 943 // DW_OP_plus_uconst 0x23
983 // uleb128 addend 8 944 // uleb128 addend 8
984 assembly_stream_.Print(".cfi_escape 0x10, 13, 2, 0x23, 8\n"); 945 assembly_stream_.Print(".cfi_escape 0x10, 13, 2, 0x23, 8\n");
985 946
986 // libunwind on ARM may use .ARM.exidx instead of .debug_frame 947 // libunwind on ARM may use .ARM.exidx instead of .debug_frame
987 #if defined(TARGET_OS_MACOS) || defined(TARGET_OS_MACOS_IOS) 948 #if defined(TARGET_OS_MACOS) || defined(TARGET_OS_MACOS_IOS)
988 COMPILE_ASSERT(FP == R7); 949 COMPILE_ASSERT(FP == R7);
989 assembly_stream_.Print(".fnstart\n"); 950 assembly_stream_.Print(".fnstart\n");
990 assembly_stream_.Print(".save {r7, lr}\n"); 951 assembly_stream_.Print(".save {r7, lr}\n");
991 assembly_stream_.Print(".setfp r7, sp, #0\n"); 952 assembly_stream_.Print(".setfp r7, sp, #0\n");
992 #else 953 #else
993 COMPILE_ASSERT(FP == R11); 954 COMPILE_ASSERT(FP == R11);
994 assembly_stream_.Print(".fnstart\n"); 955 assembly_stream_.Print(".fnstart\n");
995 assembly_stream_.Print(".save {r11, lr}\n"); 956 assembly_stream_.Print(".save {r11, lr}\n");
996 assembly_stream_.Print(".setfp r11, sp, #0\n"); 957 assembly_stream_.Print(".setfp r11, sp, #0\n");
997 #endif 958 #endif
998 959
999 #endif 960 #endif
1000 } 961 }
1001 962
1002
1003 void AssemblyImageWriter::FrameUnwindEpilogue() { 963 void AssemblyImageWriter::FrameUnwindEpilogue() {
1004 #if defined(TARGET_ARCH_ARM) 964 #if defined(TARGET_ARCH_ARM)
1005 assembly_stream_.Print(".fnend\n"); 965 assembly_stream_.Print(".fnend\n");
1006 #endif 966 #endif
1007 assembly_stream_.Print(".cfi_endproc\n"); 967 assembly_stream_.Print(".cfi_endproc\n");
1008 } 968 }
1009 969
1010
1011 void AssemblyImageWriter::WriteByteSequence(uword start, uword end) { 970 void AssemblyImageWriter::WriteByteSequence(uword start, uword end) {
1012 for (uword* cursor = reinterpret_cast<uword*>(start); 971 for (uword* cursor = reinterpret_cast<uword*>(start);
1013 cursor < reinterpret_cast<uword*>(end); cursor++) { 972 cursor < reinterpret_cast<uword*>(end); cursor++) {
1014 WriteWordLiteralText(*cursor); 973 WriteWordLiteralText(*cursor);
1015 } 974 }
1016 } 975 }
1017 976
1018
1019 void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) { 977 void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
1020 // This header provides the gap to make the instructions snapshot look like a 978 // This header provides the gap to make the instructions snapshot look like a
1021 // HeapPage. 979 // HeapPage.
1022 intptr_t instructions_length = next_offset_; 980 intptr_t instructions_length = next_offset_;
1023 instructions_blob_stream_.WriteWord(instructions_length); 981 instructions_blob_stream_.WriteWord(instructions_length);
1024 intptr_t header_words = Image::kHeaderSize / sizeof(uword); 982 intptr_t header_words = Image::kHeaderSize / sizeof(uword);
1025 for (intptr_t i = 1; i < header_words; i++) { 983 for (intptr_t i = 1; i < header_words; i++) {
1026 instructions_blob_stream_.WriteWord(0); 984 instructions_blob_stream_.WriteWord(0);
1027 } 985 }
1028 986
(...skipping 23 matching lines...)
1052 instructions_blob_stream_.WriteWord(marked_tags); 1010 instructions_blob_stream_.WriteWord(marked_tags);
1053 beginning += sizeof(uword); 1011 beginning += sizeof(uword);
1054 1012
1055 for (uword* cursor = reinterpret_cast<uword*>(beginning); 1013 for (uword* cursor = reinterpret_cast<uword*>(beginning);
1056 cursor < reinterpret_cast<uword*>(end); cursor++) { 1014 cursor < reinterpret_cast<uword*>(end); cursor++) {
1057 instructions_blob_stream_.WriteWord(*cursor); 1015 instructions_blob_stream_.WriteWord(*cursor);
1058 } 1016 }
1059 } 1017 }
1060 } 1018 }
1061 1019
1062
1063 RawInstructions* ImageReader::GetInstructionsAt(int32_t offset) { 1020 RawInstructions* ImageReader::GetInstructionsAt(int32_t offset) {
1064 ASSERT(Utils::IsAligned(offset, OS::PreferredCodeAlignment())); 1021 ASSERT(Utils::IsAligned(offset, OS::PreferredCodeAlignment()));
1065 1022
1066 RawInstructions* result = reinterpret_cast<RawInstructions*>( 1023 RawInstructions* result = reinterpret_cast<RawInstructions*>(
1067 reinterpret_cast<uword>(instructions_buffer_) + offset + kHeapObjectTag); 1024 reinterpret_cast<uword>(instructions_buffer_) + offset + kHeapObjectTag);
1068 ASSERT(result->IsInstructions()); 1025 ASSERT(result->IsInstructions());
1069 ASSERT(result->IsMarked()); 1026 ASSERT(result->IsMarked());
1070 1027
1071 return result; 1028 return result;
1072 } 1029 }
1073 1030
1074
1075 RawObject* ImageReader::GetObjectAt(int32_t offset) { 1031 RawObject* ImageReader::GetObjectAt(int32_t offset) {
1076 ASSERT(Utils::IsAligned(offset, kWordSize)); 1032 ASSERT(Utils::IsAligned(offset, kWordSize));
1077 1033
1078 RawObject* result = reinterpret_cast<RawObject*>( 1034 RawObject* result = reinterpret_cast<RawObject*>(
1079 reinterpret_cast<uword>(data_buffer_) + offset + kHeapObjectTag); 1035 reinterpret_cast<uword>(data_buffer_) + offset + kHeapObjectTag);
1080 ASSERT(result->IsMarked()); 1036 ASSERT(result->IsMarked());
1081 1037
1082 return result; 1038 return result;
1083 } 1039 }
1084 1040
1085
1086 intptr_t SnapshotReader::LookupInternalClass(intptr_t class_header) { 1041 intptr_t SnapshotReader::LookupInternalClass(intptr_t class_header) {
1087 // If the header is an object Id, lookup singleton VM classes or classes 1042 // If the header is an object Id, lookup singleton VM classes or classes
1088 // stored in the object store. 1043 // stored in the object store.
1089 if (IsVMIsolateObject(class_header)) { 1044 if (IsVMIsolateObject(class_header)) {
1090 intptr_t class_id = GetVMIsolateObjectId(class_header); 1045 intptr_t class_id = GetVMIsolateObjectId(class_header);
1091 ASSERT(IsSingletonClassId(class_id)); 1046 ASSERT(IsSingletonClassId(class_id));
1092 return class_id; 1047 return class_id;
1093 } 1048 }
1094 ASSERT(SerializedHeaderTag::decode(class_header) == kObjectId); 1049 ASSERT(SerializedHeaderTag::decode(class_header) == kObjectId);
1095 intptr_t class_id = SerializedHeaderData::decode(class_header); 1050 intptr_t class_id = SerializedHeaderData::decode(class_header);
1096 ASSERT(IsObjectStoreClassId(class_id) || IsSingletonClassId(class_id)); 1051 ASSERT(IsObjectStoreClassId(class_id) || IsSingletonClassId(class_id));
1097 return class_id; 1052 return class_id;
1098 } 1053 }
1099 1054
1100
1101 #define READ_VM_SINGLETON_OBJ(id, obj) \ 1055 #define READ_VM_SINGLETON_OBJ(id, obj) \
1102 if (object_id == id) { \ 1056 if (object_id == id) { \
1103 return obj; \ 1057 return obj; \
1104 } 1058 }
1105 1059
1106 RawObject* SnapshotReader::ReadVMIsolateObject(intptr_t header_value) { 1060 RawObject* SnapshotReader::ReadVMIsolateObject(intptr_t header_value) {
1107 intptr_t object_id = GetVMIsolateObjectId(header_value); 1061 intptr_t object_id = GetVMIsolateObjectId(header_value);
1108 1062
1109 // First check if it is one of the singleton objects. 1063 // First check if it is one of the singleton objects.
1110 READ_VM_SINGLETON_OBJ(kNullObject, Object::null()); 1064 READ_VM_SINGLETON_OBJ(kNullObject, Object::null());
(...skipping 43 matching lines...)
1154 for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) { 1108 for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) {
1155 if (object_id == (kCachedICDataArray0 + i)) { 1109 if (object_id == (kCachedICDataArray0 + i)) {
1156 return ICData::cached_icdata_arrays_[i]; 1110 return ICData::cached_icdata_arrays_[i];
1157 } 1111 }
1158 } 1112 }
1159 1113
1160 ASSERT(Symbols::IsPredefinedSymbolId(object_id)); 1114 ASSERT(Symbols::IsPredefinedSymbolId(object_id));
1161 return Symbols::GetPredefinedSymbol(object_id); // return VM symbol. 1115 return Symbols::GetPredefinedSymbol(object_id); // return VM symbol.
1162 } 1116 }
1163 1117
1164
1165 RawObject* SnapshotReader::ReadIndexedObject(intptr_t object_id, 1118 RawObject* SnapshotReader::ReadIndexedObject(intptr_t object_id,
1166 intptr_t patch_object_id, 1119 intptr_t patch_object_id,
1167 intptr_t patch_offset) { 1120 intptr_t patch_offset) {
1168 intptr_t class_id = ClassIdFromObjectId(object_id); 1121 intptr_t class_id = ClassIdFromObjectId(object_id);
1169 if (IsObjectStoreClassId(class_id)) { 1122 if (IsObjectStoreClassId(class_id)) {
1170 return isolate()->class_table()->At(class_id); // get singleton class. 1123 return isolate()->class_table()->At(class_id); // get singleton class.
1171 } 1124 }
1172 if (IsObjectStoreTypeId(object_id)) { 1125 if (IsObjectStoreTypeId(object_id)) {
1173 return GetType(object_store(), object_id); // return type obj. 1126 return GetType(object_store(), object_id); // return type obj.
1174 } 1127 }
1175 ASSERT(object_id >= kMaxPredefinedObjectIds); 1128 ASSERT(object_id >= kMaxPredefinedObjectIds);
1176 intptr_t index = (object_id - kMaxPredefinedObjectIds); 1129 intptr_t index = (object_id - kMaxPredefinedObjectIds);
1177 if (index < max_vm_isolate_object_id_) { 1130 if (index < max_vm_isolate_object_id_) {
1178 return VmIsolateSnapshotObject(index); 1131 return VmIsolateSnapshotObject(index);
1179 } 1132 }
1180 AddPatchRecord(object_id, patch_object_id, patch_offset); 1133 AddPatchRecord(object_id, patch_object_id, patch_offset);
1181 return GetBackRef(object_id)->raw(); 1134 return GetBackRef(object_id)->raw();
1182 } 1135 }
1183 1136
1184
1185 void SnapshotReader::AddPatchRecord(intptr_t object_id, 1137 void SnapshotReader::AddPatchRecord(intptr_t object_id,
1186 intptr_t patch_object_id, 1138 intptr_t patch_object_id,
1187 intptr_t patch_offset) { 1139 intptr_t patch_offset) {
1188 if (patch_object_id != kInvalidPatchIndex) { 1140 if (patch_object_id != kInvalidPatchIndex) {
1189 ASSERT(object_id >= kMaxPredefinedObjectIds); 1141 ASSERT(object_id >= kMaxPredefinedObjectIds);
1190 intptr_t index = (object_id - kMaxPredefinedObjectIds); 1142 intptr_t index = (object_id - kMaxPredefinedObjectIds);
1191 ASSERT(index >= max_vm_isolate_object_id_); 1143 ASSERT(index >= max_vm_isolate_object_id_);
1192 index -= max_vm_isolate_object_id_; 1144 index -= max_vm_isolate_object_id_;
1193 ASSERT(index < backward_references_->length()); 1145 ASSERT(index < backward_references_->length());
1194 BackRefNode& ref = (*backward_references_)[index]; 1146 BackRefNode& ref = (*backward_references_)[index];
1195 ref.AddPatchRecord(patch_object_id, patch_offset); 1147 ref.AddPatchRecord(patch_object_id, patch_offset);
1196 } 1148 }
1197 } 1149 }
1198 1150
1199
1200 void SnapshotReader::ProcessDeferredCanonicalizations() { 1151 void SnapshotReader::ProcessDeferredCanonicalizations() {
1201 Type& typeobj = Type::Handle(); 1152 Type& typeobj = Type::Handle();
1202 TypeArguments& typeargs = TypeArguments::Handle(); 1153 TypeArguments& typeargs = TypeArguments::Handle();
1203 Object& newobj = Object::Handle(); 1154 Object& newobj = Object::Handle();
1204 for (intptr_t i = 0; i < backward_references_->length(); i++) { 1155 for (intptr_t i = 0; i < backward_references_->length(); i++) {
1205 BackRefNode& backref = (*backward_references_)[i]; 1156 BackRefNode& backref = (*backward_references_)[i];
1206 if (backref.defer_canonicalization()) { 1157 if (backref.defer_canonicalization()) {
1207 Object* objref = backref.reference(); 1158 Object* objref = backref.reference();
1208 // Object should either be a type or a type argument. 1159 // Object should either be a type or a type argument.
1209 if (objref->IsType()) { 1160 if (objref->IsType()) {
(...skipping 24 matching lines...)
1234 } 1185 }
1235 } 1186 }
1236 } 1187 }
1237 } else { 1188 } else {
1238 ASSERT(objref->IsCanonical()); 1189 ASSERT(objref->IsCanonical());
1239 } 1190 }
1240 } 1191 }
1241 } 1192 }
1242 } 1193 }
1243 1194
1244
1245 void SnapshotReader::FixSubclassesAndImplementors() { 1195 void SnapshotReader::FixSubclassesAndImplementors() {
1246 Class& cls = Class::Handle(zone()); 1196 Class& cls = Class::Handle(zone());
1247 Class& supercls = Class::Handle(zone()); 1197 Class& supercls = Class::Handle(zone());
1248 Array& interfaces = Array::Handle(zone()); 1198 Array& interfaces = Array::Handle(zone());
1249 AbstractType& interface = AbstractType::Handle(zone()); 1199 AbstractType& interface = AbstractType::Handle(zone());
1250 Class& interface_cls = Class::Handle(zone()); 1200 Class& interface_cls = Class::Handle(zone());
1251 for (intptr_t i = 0; i < backward_references_->length(); i++) { 1201 for (intptr_t i = 0; i < backward_references_->length(); i++) {
1252 BackRefNode& backref = (*backward_references_)[i]; 1202 BackRefNode& backref = (*backward_references_)[i];
1253 Object* objref = backref.reference(); 1203 Object* objref = backref.reference();
1254 if (objref->IsClass()) { 1204 if (objref->IsClass()) {
(...skipping 10 matching lines...)
1265 interface ^= interfaces.At(i); 1215 interface ^= interfaces.At(i);
1266 interface_cls = interface.type_class(); 1216 interface_cls = interface.type_class();
1267 interface_cls.set_is_implemented(); 1217 interface_cls.set_is_implemented();
1268 interface_cls.DisableCHAOptimizedCode(cls); 1218 interface_cls.DisableCHAOptimizedCode(cls);
1269 } 1219 }
1270 } 1220 }
1271 } 1221 }
1272 } 1222 }
1273 } 1223 }
1274 1224
1275
1276 void SnapshotReader::ArrayReadFrom(intptr_t object_id, 1225 void SnapshotReader::ArrayReadFrom(intptr_t object_id,
1277 const Array& result, 1226 const Array& result,
1278 intptr_t len, 1227 intptr_t len,
1279 intptr_t tags) { 1228 intptr_t tags) {
1280 // Setup the object fields. 1229 // Setup the object fields.
1281 const intptr_t typeargs_offset = 1230 const intptr_t typeargs_offset =
1282 GrowableObjectArray::type_arguments_offset() / kWordSize; 1231 GrowableObjectArray::type_arguments_offset() / kWordSize;
1283 *TypeArgumentsHandle() ^= 1232 *TypeArgumentsHandle() ^=
1284 ReadObjectImpl(kAsInlinedObject, object_id, typeargs_offset); 1233 ReadObjectImpl(kAsInlinedObject, object_id, typeargs_offset);
1285 result.SetTypeArguments(*TypeArgumentsHandle()); 1234 result.SetTypeArguments(*TypeArgumentsHandle());
1286 1235
1287 bool as_reference = RawObject::IsCanonical(tags) ? false : true; 1236 bool as_reference = RawObject::IsCanonical(tags) ? false : true;
1288 intptr_t offset = result.raw_ptr()->data() - 1237 intptr_t offset = result.raw_ptr()->data() -
1289 reinterpret_cast<RawObject**>(result.raw()->ptr()); 1238 reinterpret_cast<RawObject**>(result.raw()->ptr());
1290 for (intptr_t i = 0; i < len; i++) { 1239 for (intptr_t i = 0; i < len; i++) {
1291 *PassiveObjectHandle() = 1240 *PassiveObjectHandle() =
1292 ReadObjectImpl(as_reference, object_id, (i + offset)); 1241 ReadObjectImpl(as_reference, object_id, (i + offset));
1293 result.SetAt(i, *PassiveObjectHandle()); 1242 result.SetAt(i, *PassiveObjectHandle());
1294 } 1243 }
1295 } 1244 }
1296 1245
1297
1298 ScriptSnapshotReader::ScriptSnapshotReader(const uint8_t* buffer, 1246 ScriptSnapshotReader::ScriptSnapshotReader(const uint8_t* buffer,
1299 intptr_t size, 1247 intptr_t size,
1300 Thread* thread) 1248 Thread* thread)
1301 : SnapshotReader(buffer, 1249 : SnapshotReader(buffer,
1302 size, 1250 size,
1303 Snapshot::kScript, 1251 Snapshot::kScript,
1304 new ZoneGrowableArray<BackRefNode>(kNumInitialReferences), 1252 new ZoneGrowableArray<BackRefNode>(kNumInitialReferences),
1305 thread) {} 1253 thread) {}
1306 1254
1307
1308 ScriptSnapshotReader::~ScriptSnapshotReader() { 1255 ScriptSnapshotReader::~ScriptSnapshotReader() {
1309 ResetBackwardReferenceTable(); 1256 ResetBackwardReferenceTable();
1310 } 1257 }
1311 1258
1312
1313 MessageSnapshotReader::MessageSnapshotReader(const uint8_t* buffer, 1259 MessageSnapshotReader::MessageSnapshotReader(const uint8_t* buffer,
1314 intptr_t size, 1260 intptr_t size,
1315 Thread* thread) 1261 Thread* thread)
1316 : SnapshotReader(buffer, 1262 : SnapshotReader(buffer,
1317 size, 1263 size,
1318 Snapshot::kMessage, 1264 Snapshot::kMessage,
1319 new ZoneGrowableArray<BackRefNode>(kNumInitialReferences), 1265 new ZoneGrowableArray<BackRefNode>(kNumInitialReferences),
1320 thread) {} 1266 thread) {}
1321 1267
1322
1323 MessageSnapshotReader::~MessageSnapshotReader() { 1268 MessageSnapshotReader::~MessageSnapshotReader() {
1324 ResetBackwardReferenceTable(); 1269 ResetBackwardReferenceTable();
1325 } 1270 }
1326 1271
1327
1328 SnapshotWriter::SnapshotWriter(Thread* thread, 1272 SnapshotWriter::SnapshotWriter(Thread* thread,
1329 Snapshot::Kind kind, 1273 Snapshot::Kind kind,
1330 uint8_t** buffer, 1274 uint8_t** buffer,
1331 ReAlloc alloc, 1275 ReAlloc alloc,
1332 DeAlloc dealloc, 1276 DeAlloc dealloc,
1333 intptr_t initial_size, 1277 intptr_t initial_size,
1334 ForwardList* forward_list, 1278 ForwardList* forward_list,
1335 bool can_send_any_object) 1279 bool can_send_any_object)
1336 : BaseWriter(buffer, alloc, dealloc, initial_size), 1280 : BaseWriter(buffer, alloc, dealloc, initial_size),
1337 thread_(thread), 1281 thread_(thread),
1338 kind_(kind), 1282 kind_(kind),
1339 object_store_(isolate()->object_store()), 1283 object_store_(isolate()->object_store()),
1340 class_table_(isolate()->class_table()), 1284 class_table_(isolate()->class_table()),
1341 forward_list_(forward_list), 1285 forward_list_(forward_list),
1342 exception_type_(Exceptions::kNone), 1286 exception_type_(Exceptions::kNone),
1343 exception_msg_(NULL), 1287 exception_msg_(NULL),
1344 can_send_any_object_(can_send_any_object) { 1288 can_send_any_object_(can_send_any_object) {
1345 ASSERT(forward_list_ != NULL); 1289 ASSERT(forward_list_ != NULL);
1346 } 1290 }
1347 1291
1348
1349 void SnapshotWriter::WriteObject(RawObject* rawobj) { 1292 void SnapshotWriter::WriteObject(RawObject* rawobj) {
1350 WriteObjectImpl(rawobj, kAsInlinedObject); 1293 WriteObjectImpl(rawobj, kAsInlinedObject);
1351 WriteForwardedObjects(); 1294 WriteForwardedObjects();
1352 } 1295 }
1353 1296
1354
1355 uint32_t SnapshotWriter::GetObjectTags(RawObject* raw) { 1297 uint32_t SnapshotWriter::GetObjectTags(RawObject* raw) {
1356 return raw->ptr()->tags_; 1298 return raw->ptr()->tags_;
1357 } 1299 }
1358 1300
1359
1360 uword SnapshotWriter::GetObjectTagsAndHash(RawObject* raw) { 1301 uword SnapshotWriter::GetObjectTagsAndHash(RawObject* raw) {
1361 uword result = raw->ptr()->tags_; 1302 uword result = raw->ptr()->tags_;
1362 #if defined(HASH_IN_OBJECT_HEADER) 1303 #if defined(HASH_IN_OBJECT_HEADER)
1363 result |= static_cast<uword>(raw->ptr()->hash_) << 32; 1304 result |= static_cast<uword>(raw->ptr()->hash_) << 32;
1364 #endif 1305 #endif
1365 return result; 1306 return result;
1366 } 1307 }
1367 1308
1368
1369 #define VM_OBJECT_CLASS_LIST(V) \ 1309 #define VM_OBJECT_CLASS_LIST(V) \
1370 V(OneByteString) \ 1310 V(OneByteString) \
1371 V(TwoByteString) \ 1311 V(TwoByteString) \
1372 V(Mint) \ 1312 V(Mint) \
1373 V(Bigint) \ 1313 V(Bigint) \
1374 V(Double) \ 1314 V(Double) \
1375 V(ImmutableArray) 1315 V(ImmutableArray)
1376 1316
1377 #define VM_OBJECT_WRITE(clazz) \ 1317 #define VM_OBJECT_WRITE(clazz) \
1378 case clazz::kClassId: { \ 1318 case clazz::kClassId: { \
(...skipping 86 matching lines...)
1465 } 1405 }
1466 } 1406 }
1467 1407
1468 const Object& obj = Object::Handle(rawobj); 1408 const Object& obj = Object::Handle(rawobj);
1469 FATAL1("Unexpected reference to object in VM isolate: %s\n", obj.ToCString()); 1409 FATAL1("Unexpected reference to object in VM isolate: %s\n", obj.ToCString());
1470 return false; 1410 return false;
1471 } 1411 }
1472 1412
1473 #undef VM_OBJECT_WRITE 1413 #undef VM_OBJECT_WRITE
1474 1414
1475
1476 ForwardList::ForwardList(Thread* thread, intptr_t first_object_id) 1415 ForwardList::ForwardList(Thread* thread, intptr_t first_object_id)
1477 : thread_(thread), 1416 : thread_(thread),
1478 first_object_id_(first_object_id), 1417 first_object_id_(first_object_id),
1479 nodes_(), 1418 nodes_(),
1480 first_unprocessed_object_id_(first_object_id) { 1419 first_unprocessed_object_id_(first_object_id) {
1481 ASSERT(first_object_id > 0); 1420 ASSERT(first_object_id > 0);
1482 } 1421 }
1483 1422
1484
1485 ForwardList::~ForwardList() { 1423 ForwardList::~ForwardList() {
1486 heap()->ResetObjectIdTable(); 1424 heap()->ResetObjectIdTable();
1487 } 1425 }
1488 1426
1489
1490 intptr_t ForwardList::AddObject(Zone* zone, 1427 intptr_t ForwardList::AddObject(Zone* zone,
1491 RawObject* raw, 1428 RawObject* raw,
1492 SerializeState state) { 1429 SerializeState state) {
1493 NoSafepointScope no_safepoint; 1430 NoSafepointScope no_safepoint;
1494 intptr_t object_id = next_object_id(); 1431 intptr_t object_id = next_object_id();
1495 ASSERT(object_id > 0 && object_id <= kMaxObjectId); 1432 ASSERT(object_id > 0 && object_id <= kMaxObjectId);
1496 const Object& obj = Object::ZoneHandle(zone, raw); 1433 const Object& obj = Object::ZoneHandle(zone, raw);
1497 Node* node = new Node(&obj, state); 1434 Node* node = new Node(&obj, state);
1498 ASSERT(node != NULL); 1435 ASSERT(node != NULL);
1499 nodes_.Add(node); 1436 nodes_.Add(node);
1500 ASSERT(object_id != 0); 1437 ASSERT(object_id != 0);
1501 heap()->SetObjectId(raw, object_id); 1438 heap()->SetObjectId(raw, object_id);
1502 return object_id; 1439 return object_id;
1503 } 1440 }
1504 1441
1505
1506 intptr_t ForwardList::FindObject(RawObject* raw) { 1442 intptr_t ForwardList::FindObject(RawObject* raw) {
1507 NoSafepointScope no_safepoint; 1443 NoSafepointScope no_safepoint;
1508 intptr_t id = heap()->GetObjectId(raw); 1444 intptr_t id = heap()->GetObjectId(raw);
1509 ASSERT(id == 0 || NodeForObjectId(id)->obj()->raw() == raw); 1445 ASSERT(id == 0 || NodeForObjectId(id)->obj()->raw() == raw);
1510 return (id == 0) ? static_cast<intptr_t>(kInvalidIndex) : id; 1446 return (id == 0) ? static_cast<intptr_t>(kInvalidIndex) : id;
1511 } 1447 }
1512 1448
1513
1514 bool SnapshotWriter::CheckAndWritePredefinedObject(RawObject* rawobj) { 1449 bool SnapshotWriter::CheckAndWritePredefinedObject(RawObject* rawobj) {
1515 // Check if object can be written in one of the following ways: 1450 // Check if object can be written in one of the following ways:
1516 // - Smi: the Smi value is written as is (last bit is not tagged). 1451 // - Smi: the Smi value is written as is (last bit is not tagged).
1517 // - VM internal class (from VM isolate): (index of class in vm isolate | 0x3) 1452 // - VM internal class (from VM isolate): (index of class in vm isolate | 0x3)
1518 // - Object that has already been written: (negative id in stream | 0x3) 1453 // - Object that has already been written: (negative id in stream | 0x3)
1519 1454
1520 NoSafepointScope no_safepoint; 1455 NoSafepointScope no_safepoint;
1521 1456
1522 // First check if it is a Smi (i.e. not a heap object). 1457 // First check if it is a Smi (i.e. not a heap object).
1523 if (!rawobj->IsHeapObject()) { 1458 if (!rawobj->IsHeapObject()) {
(...skipping 48 matching lines...)
1572 RawType* raw_type = reinterpret_cast<RawType*>(rawobj); 1507 RawType* raw_type = reinterpret_cast<RawType*>(rawobj);
1573 intptr_t index = GetTypeIndex(object_store(), raw_type); 1508 intptr_t index = GetTypeIndex(object_store(), raw_type);
1574 if (index != kInvalidIndex) { 1509 if (index != kInvalidIndex) {
1575 WriteIndexedObject(index); 1510 WriteIndexedObject(index);
1576 return true; 1511 return true;
1577 } 1512 }
1578 1513
1579 return false; 1514 return false;
1580 } 1515 }
1581 1516
1582
1583 void SnapshotWriter::WriteObjectImpl(RawObject* raw, bool as_reference) { 1517 void SnapshotWriter::WriteObjectImpl(RawObject* raw, bool as_reference) {
1584 // First check if object can be written as a simple predefined type. 1518 // First check if object can be written as a simple predefined type.
1585 if (CheckAndWritePredefinedObject(raw)) { 1519 if (CheckAndWritePredefinedObject(raw)) {
1586 return; 1520 return;
1587 } 1521 }
1588 1522
1589 // When we know that we are dealing with leaf or shallow objects we write 1523 // When we know that we are dealing with leaf or shallow objects we write
1590 // these objects inline even when 'as_reference' is true. 1524 // these objects inline even when 'as_reference' is true.
1591 const bool write_as_reference = as_reference && !raw->IsCanonical(); 1525 const bool write_as_reference = as_reference && !raw->IsCanonical();
1592 uintptr_t tags = GetObjectTagsAndHash(raw); 1526 uintptr_t tags = GetObjectTagsAndHash(raw);
1593 1527
1594 // Add object to the forward ref list and mark it so that future references 1528 // Add object to the forward ref list and mark it so that future references
1595 // to this object in the snapshot will use this object id. Mark the 1529 // to this object in the snapshot will use this object id. Mark the
1596 // serialization state so that we do the right thing when we go through 1530 // serialization state so that we do the right thing when we go through
1597 // the forward list. 1531 // the forward list.
1598 intptr_t class_id = raw->GetClassId(); 1532 intptr_t class_id = raw->GetClassId();
1599 intptr_t object_id; 1533 intptr_t object_id;
1600 if (write_as_reference && IsSplitClassId(class_id)) { 1534 if (write_as_reference && IsSplitClassId(class_id)) {
1601 object_id = forward_list_->AddObject(zone(), raw, kIsNotSerialized); 1535 object_id = forward_list_->AddObject(zone(), raw, kIsNotSerialized);
1602 } else { 1536 } else {
1603 object_id = forward_list_->AddObject(zone(), raw, kIsSerialized); 1537 object_id = forward_list_->AddObject(zone(), raw, kIsSerialized);
1604 } 1538 }
1605 if (write_as_reference || !IsSplitClassId(class_id)) { 1539 if (write_as_reference || !IsSplitClassId(class_id)) {
1606 object_id = kOmittedObjectId; 1540 object_id = kOmittedObjectId;
1607 } 1541 }
1608 WriteMarkedObjectImpl(raw, tags, object_id, write_as_reference); 1542 WriteMarkedObjectImpl(raw, tags, object_id, write_as_reference);
1609 } 1543 }
1610 1544
1611
1612 void SnapshotWriter::WriteMarkedObjectImpl(RawObject* raw, 1545 void SnapshotWriter::WriteMarkedObjectImpl(RawObject* raw,
1613 intptr_t tags, 1546 intptr_t tags,
1614 intptr_t object_id, 1547 intptr_t object_id,
1615 bool as_reference) { 1548 bool as_reference) {
1616 NoSafepointScope no_safepoint; 1549 NoSafepointScope no_safepoint;
1617 RawClass* cls = class_table_->At(RawObject::ClassIdTag::decode(tags)); 1550 RawClass* cls = class_table_->At(RawObject::ClassIdTag::decode(tags));
1618 intptr_t class_id = cls->ptr()->id_; 1551 intptr_t class_id = cls->ptr()->id_;
1619 ASSERT(class_id == RawObject::ClassIdTag::decode(tags)); 1552 ASSERT(class_id == RawObject::ClassIdTag::decode(tags));
1620 if (class_id >= kNumPredefinedCids || 1553 if (class_id >= kNumPredefinedCids ||
1621 RawObject::IsImplicitFieldClassId(class_id)) { 1554 RawObject::IsImplicitFieldClassId(class_id)) {
(...skipping 28 matching lines...)
1650 } 1583 }
1651 #undef SNAPSHOT_WRITE 1584 #undef SNAPSHOT_WRITE
1652 default: 1585 default:
1653 break; 1586 break;
1654 } 1587 }
1655 1588
1656 const Object& obj = Object::Handle(raw); 1589 const Object& obj = Object::Handle(raw);
1657 FATAL1("Unexpected object: %s\n", obj.ToCString()); 1590 FATAL1("Unexpected object: %s\n", obj.ToCString());
1658 } 1591 }
1659 1592
1660
1661 class WriteInlinedObjectVisitor : public ObjectVisitor { 1593 class WriteInlinedObjectVisitor : public ObjectVisitor {
1662 public: 1594 public:
1663 explicit WriteInlinedObjectVisitor(SnapshotWriter* writer) 1595 explicit WriteInlinedObjectVisitor(SnapshotWriter* writer)
1664 : writer_(writer) {} 1596 : writer_(writer) {}
1665 1597
1666 virtual void VisitObject(RawObject* obj) { 1598 virtual void VisitObject(RawObject* obj) {
1667 intptr_t object_id = writer_->forward_list_->FindObject(obj); 1599 intptr_t object_id = writer_->forward_list_->FindObject(obj);
1668 ASSERT(object_id != kInvalidIndex); 1600 ASSERT(object_id != kInvalidIndex);
1669 intptr_t tags = MessageWriter::GetObjectTagsAndHash(obj); 1601 intptr_t tags = MessageWriter::GetObjectTagsAndHash(obj);
1670 writer_->WriteMarkedObjectImpl(obj, tags, object_id, kAsInlinedObject); 1602 writer_->WriteMarkedObjectImpl(obj, tags, object_id, kAsInlinedObject);
1671 } 1603 }
1672 1604
1673 private: 1605 private:
1674 SnapshotWriter* writer_; 1606 SnapshotWriter* writer_;
1675 }; 1607 };
1676 1608
1677
1678 void SnapshotWriter::WriteForwardedObjects() { 1609 void SnapshotWriter::WriteForwardedObjects() {
1679 WriteInlinedObjectVisitor visitor(this); 1610 WriteInlinedObjectVisitor visitor(this);
1680 forward_list_->SerializeAll(&visitor); 1611 forward_list_->SerializeAll(&visitor);
1681 } 1612 }
1682 1613
1683
1684 void ForwardList::SerializeAll(ObjectVisitor* writer) { 1614 void ForwardList::SerializeAll(ObjectVisitor* writer) {
1685 // Write out all objects that were added to the forward list and have 1615 // Write out all objects that were added to the forward list and have
1686 // not been serialized yet. These would typically be fields of instance 1616 // not been serialized yet. These would typically be fields of instance
1687 // objects, arrays or immutable arrays (this is done in order to avoid 1617 // objects, arrays or immutable arrays (this is done in order to avoid
1688 // deep recursive calls to WriteObjectImpl). 1618 // deep recursive calls to WriteObjectImpl).
1689 // NOTE: The forward list might grow as we process the list. 1619 // NOTE: The forward list might grow as we process the list.
1690 #ifdef DEBUG 1620 #ifdef DEBUG
1691 for (intptr_t i = first_object_id(); i < first_unprocessed_object_id_; ++i) { 1621 for (intptr_t i = first_object_id(); i < first_unprocessed_object_id_; ++i) {
1692 ASSERT(NodeForObjectId(i)->is_serialized()); 1622 ASSERT(NodeForObjectId(i)->is_serialized());
1693 } 1623 }
1694 #endif // DEBUG 1624 #endif // DEBUG
1695 for (intptr_t id = first_unprocessed_object_id_; id < next_object_id(); 1625 for (intptr_t id = first_unprocessed_object_id_; id < next_object_id();
1696 ++id) { 1626 ++id) {
1697 if (!NodeForObjectId(id)->is_serialized()) { 1627 if (!NodeForObjectId(id)->is_serialized()) {
1698 // Write the object out in the stream. 1628 // Write the object out in the stream.
1699 RawObject* raw = NodeForObjectId(id)->obj()->raw(); 1629 RawObject* raw = NodeForObjectId(id)->obj()->raw();
1700 writer->VisitObject(raw); 1630 writer->VisitObject(raw);
1701 1631
1702 // Mark object as serialized. 1632 // Mark object as serialized.
1703 NodeForObjectId(id)->set_state(kIsSerialized); 1633 NodeForObjectId(id)->set_state(kIsSerialized);
1704 } 1634 }
1705 } 1635 }
1706 first_unprocessed_object_id_ = next_object_id(); 1636 first_unprocessed_object_id_ = next_object_id();
1707 } 1637 }
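
The comment in ForwardList::SerializeAll() is the key to how the writer avoids deep recursion: container contents are appended to the forward list rather than written recursively, and the list is drained with a simple index loop that tolerates the list growing underneath it. A minimal, standalone sketch of that worklist pattern follows; the Node type and helpers are made up for the example and are not the VM's.

// Sketch of the "drain a worklist that grows while you walk it" pattern
// used by ForwardList::SerializeAll(). Types and names are illustrative.
#include <cstdio>
#include <vector>

struct Node {
  int id;
  bool serialized;
  std::vector<int> children;  // ids discovered while writing this node
};

void SerializeAll(std::vector<Node>* worklist) {
  // Re-read size() each iteration: the list may grow as we process it.
  for (size_t i = 0; i < worklist->size(); ++i) {
    if ((*worklist)[i].serialized) continue;
    // Copy the children first; push_back below may reallocate the vector
    // and invalidate any reference into it.
    std::vector<int> children = (*worklist)[i].children;
    for (int child : children) {
      // Defer the child instead of recursing into it, keeping the stack flat.
      worklist->push_back(Node{child, false, {}});
    }
    (*worklist)[i].serialized = true;
  }
}

int main() {
  std::vector<Node> list = {{1, false, {2, 3}}, {4, false, {}}};
  SerializeAll(&list);
  std::printf("processed %zu nodes\n", list.size());
  return 0;
}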
1708 1638
1709
1710 void SnapshotWriter::WriteClassId(RawClass* cls) { 1639 void SnapshotWriter::WriteClassId(RawClass* cls) {
1711 ASSERT(!Snapshot::IsFull(kind_)); 1640 ASSERT(!Snapshot::IsFull(kind_));
1712 int class_id = cls->ptr()->id_; 1641 int class_id = cls->ptr()->id_;
1713 ASSERT(!IsSingletonClassId(class_id) && !IsObjectStoreClassId(class_id)); 1642 ASSERT(!IsSingletonClassId(class_id) && !IsObjectStoreClassId(class_id));
1714 1643
1715 // Write out the library url and class name. 1644 // Write out the library url and class name.
1716 RawLibrary* library = cls->ptr()->library_; 1645 RawLibrary* library = cls->ptr()->library_;
1717 ASSERT(library != Library::null()); 1646 ASSERT(library != Library::null());
1718 WriteObjectImpl(library->ptr()->url_, kAsInlinedObject); 1647 WriteObjectImpl(library->ptr()->url_, kAsInlinedObject);
1719 WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject); 1648 WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject);
1720 } 1649 }
1721 1650
1722
1723 void SnapshotWriter::WriteFunctionId(RawFunction* func, bool owner_is_class) { 1651 void SnapshotWriter::WriteFunctionId(RawFunction* func, bool owner_is_class) {
1724 ASSERT(kind_ == Snapshot::kScript); 1652 ASSERT(kind_ == Snapshot::kScript);
1725 RawClass* cls = (owner_is_class) 1653 RawClass* cls = (owner_is_class)
1726 ? reinterpret_cast<RawClass*>(func->ptr()->owner_) 1654 ? reinterpret_cast<RawClass*>(func->ptr()->owner_)
1727 : reinterpret_cast<RawPatchClass*>(func->ptr()->owner_) 1655 : reinterpret_cast<RawPatchClass*>(func->ptr()->owner_)
1728 ->ptr() 1656 ->ptr()
1729 ->patched_class_; 1657 ->patched_class_;
1730 1658
1731 // Write out the library url and class name. 1659 // Write out the library url and class name.
1732 RawLibrary* library = cls->ptr()->library_; 1660 RawLibrary* library = cls->ptr()->library_;
1733 ASSERT(library != Library::null()); 1661 ASSERT(library != Library::null());
1734 WriteObjectImpl(library->ptr()->url_, kAsInlinedObject); 1662 WriteObjectImpl(library->ptr()->url_, kAsInlinedObject);
1735 WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject); 1663 WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject);
1736 WriteObjectImpl(func->ptr()->name_, kAsInlinedObject); 1664 WriteObjectImpl(func->ptr()->name_, kAsInlinedObject);
1737 } 1665 }
1738 1666
1739
1740 void SnapshotWriter::WriteStaticImplicitClosure(intptr_t object_id, 1667 void SnapshotWriter::WriteStaticImplicitClosure(intptr_t object_id,
1741 RawFunction* func, 1668 RawFunction* func,
1742 intptr_t tags) { 1669 intptr_t tags) {
1743 // Write out the serialization header value for this object. 1670 // Write out the serialization header value for this object.
1744 WriteInlinedObjectHeader(object_id); 1671 WriteInlinedObjectHeader(object_id);
1745 1672
1746 // Indicate this is a static implicit closure object. 1673 // Indicate this is a static implicit closure object.
1747 Write<int32_t>(SerializedHeaderData::encode(kStaticImplicitClosureObjectId)); 1674 Write<int32_t>(SerializedHeaderData::encode(kStaticImplicitClosureObjectId));
1748 1675
1749 // Write out the tags. 1676 // Write out the tags.
1750 WriteTags(tags); 1677 WriteTags(tags);
1751 1678
1752 // Write out the library url, class name and signature function name. 1679 // Write out the library url, class name and signature function name.
1753 RawClass* cls = GetFunctionOwner(func); 1680 RawClass* cls = GetFunctionOwner(func);
1754 ASSERT(cls != Class::null()); 1681 ASSERT(cls != Class::null());
1755 RawLibrary* library = cls->ptr()->library_; 1682 RawLibrary* library = cls->ptr()->library_;
1756 ASSERT(library != Library::null()); 1683 ASSERT(library != Library::null());
1757 WriteObjectImpl(library->ptr()->url_, kAsInlinedObject); 1684 WriteObjectImpl(library->ptr()->url_, kAsInlinedObject);
1758 WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject); 1685 WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject);
1759 WriteObjectImpl(func->ptr()->name_, kAsInlinedObject); 1686 WriteObjectImpl(func->ptr()->name_, kAsInlinedObject);
1760 } 1687 }
1761 1688
1762
1763 void SnapshotWriter::ArrayWriteTo(intptr_t object_id, 1689 void SnapshotWriter::ArrayWriteTo(intptr_t object_id,
1764 intptr_t array_kind, 1690 intptr_t array_kind,
1765 intptr_t tags, 1691 intptr_t tags,
1766 RawSmi* length, 1692 RawSmi* length,
1767 RawTypeArguments* type_arguments, 1693 RawTypeArguments* type_arguments,
1768 RawObject* data[], 1694 RawObject* data[],
1769 bool as_reference) { 1695 bool as_reference) {
1770 if (as_reference) { 1696 if (as_reference) {
1771 // Write out the serialization header value for this object. 1697 // Write out the serialization header value for this object.
1772 WriteInlinedObjectHeader(kOmittedObjectId); 1698 WriteInlinedObjectHeader(kOmittedObjectId);
(...skipping 21 matching lines...)
1794 WriteObjectImpl(type_arguments, kAsInlinedObject); 1720 WriteObjectImpl(type_arguments, kAsInlinedObject);
1795 1721
1796 // Write out the individual object ids. 1722 // Write out the individual object ids.
1797 bool write_as_reference = RawObject::IsCanonical(tags) ? false : true; 1723 bool write_as_reference = RawObject::IsCanonical(tags) ? false : true;
1798 for (intptr_t i = 0; i < len; i++) { 1724 for (intptr_t i = 0; i < len; i++) {
1799 WriteObjectImpl(data[i], write_as_reference); 1725 WriteObjectImpl(data[i], write_as_reference);
1800 } 1726 }
1801 } 1727 }
1802 } 1728 }
1803 1729
1804
1805 RawFunction* SnapshotWriter::IsSerializableClosure(RawClosure* closure) { 1730 RawFunction* SnapshotWriter::IsSerializableClosure(RawClosure* closure) {
1806 // Extract the function object to check if this closure 1731 // Extract the function object to check if this closure
1807 // can be sent in an isolate message. 1732 // can be sent in an isolate message.
1808 RawFunction* func = closure->ptr()->function_; 1733 RawFunction* func = closure->ptr()->function_;
1809 // We only allow closure of top level methods or static functions in a 1734 // We only allow closure of top level methods or static functions in a
1810 // class to be sent in isolate messages. 1735 // class to be sent in isolate messages.
1811 if (can_send_any_object() && 1736 if (can_send_any_object() &&
1812 Function::IsImplicitStaticClosureFunction(func)) { 1737 Function::IsImplicitStaticClosureFunction(func)) {
1813 return func; 1738 return func;
1814 } 1739 }
1815 // Not a closure of a top level method or static function, throw an 1740 // Not a closure of a top level method or static function, throw an
1816 // exception as we do not allow these objects to be serialized. 1741 // exception as we do not allow these objects to be serialized.
1817 HANDLESCOPE(thread()); 1742 HANDLESCOPE(thread());
1818 1743
1819 const Function& errorFunc = Function::Handle(zone(), func); 1744 const Function& errorFunc = Function::Handle(zone(), func);
1820 ASSERT(!errorFunc.IsNull()); 1745 ASSERT(!errorFunc.IsNull());
1821 1746
1822 // All other closures are errors. 1747 // All other closures are errors.
1823 char* chars = OS::SCreate( 1748 char* chars = OS::SCreate(
1824 thread()->zone(), 1749 thread()->zone(),
1825 "Illegal argument in isolate message : (object is a closure - %s)", 1750 "Illegal argument in isolate message : (object is a closure - %s)",
1826 errorFunc.ToCString()); 1751 errorFunc.ToCString());
1827 SetWriteException(Exceptions::kArgument, chars); 1752 SetWriteException(Exceptions::kArgument, chars);
1828 return Function::null(); 1753 return Function::null();
1829 } 1754 }
1830 1755
1831
1832 RawClass* SnapshotWriter::GetFunctionOwner(RawFunction* func) { 1756 RawClass* SnapshotWriter::GetFunctionOwner(RawFunction* func) {
1833 RawObject* owner = func->ptr()->owner_; 1757 RawObject* owner = func->ptr()->owner_;
1834 uint32_t tags = GetObjectTags(owner); 1758 uint32_t tags = GetObjectTags(owner);
1835 intptr_t class_id = RawObject::ClassIdTag::decode(tags); 1759 intptr_t class_id = RawObject::ClassIdTag::decode(tags);
1836 if (class_id == kClassCid) { 1760 if (class_id == kClassCid) {
1837 return reinterpret_cast<RawClass*>(owner); 1761 return reinterpret_cast<RawClass*>(owner);
1838 } 1762 }
1839 ASSERT(class_id == kPatchClassCid); 1763 ASSERT(class_id == kPatchClassCid);
1840 return reinterpret_cast<RawPatchClass*>(owner)->ptr()->patched_class_; 1764 return reinterpret_cast<RawPatchClass*>(owner)->ptr()->patched_class_;
1841 } 1765 }
1842 1766
1843
1844 void SnapshotWriter::CheckForNativeFields(RawClass* cls) { 1767 void SnapshotWriter::CheckForNativeFields(RawClass* cls) {
1845 if (cls->ptr()->num_native_fields_ != 0) { 1768 if (cls->ptr()->num_native_fields_ != 0) {
1846 // We do not allow objects with native fields in an isolate message. 1769 // We do not allow objects with native fields in an isolate message.
1847 HANDLESCOPE(thread()); 1770 HANDLESCOPE(thread());
1848 const Class& clazz = Class::Handle(zone(), cls); 1771 const Class& clazz = Class::Handle(zone(), cls);
1849 char* chars = OS::SCreate(thread()->zone(), 1772 char* chars = OS::SCreate(thread()->zone(),
1850 "Illegal argument in isolate message" 1773 "Illegal argument in isolate message"
1851 " : (object extends NativeWrapper - %s)", 1774 " : (object extends NativeWrapper - %s)",
1852 clazz.ToCString()); 1775 clazz.ToCString());
1853 SetWriteException(Exceptions::kArgument, chars); 1776 SetWriteException(Exceptions::kArgument, chars);
1854 } 1777 }
1855 } 1778 }
1856 1779
1857
1858 void SnapshotWriter::SetWriteException(Exceptions::ExceptionType type, 1780 void SnapshotWriter::SetWriteException(Exceptions::ExceptionType type,
1859 const char* msg) { 1781 const char* msg) {
1860 set_exception_type(type); 1782 set_exception_type(type);
1861 set_exception_msg(msg); 1783 set_exception_msg(msg);
1862 // The more specific error is set up in SnapshotWriter::ThrowException(). 1784 // The more specific error is set up in SnapshotWriter::ThrowException().
1863 thread()->long_jump_base()->Jump(1, Object::snapshot_writer_error()); 1785 thread()->long_jump_base()->Jump(1, Object::snapshot_writer_error());
1864 } 1786 }
1865 1787
1866
1867 void SnapshotWriter::WriteInstance(RawObject* raw, 1788 void SnapshotWriter::WriteInstance(RawObject* raw,
1868 RawClass* cls, 1789 RawClass* cls,
1869 intptr_t tags, 1790 intptr_t tags,
1870 intptr_t object_id, 1791 intptr_t object_id,
1871 bool as_reference) { 1792 bool as_reference) {
1872 // Closure instances are handled by RawClosure::WriteTo(). 1793 // Closure instances are handled by RawClosure::WriteTo().
1873 ASSERT(!Class::IsClosureClass(cls)); 1794 ASSERT(!Class::IsClosureClass(cls));
1874 1795
1875 // Check if the instance has native fields and throw an exception if it does. 1796 // Check if the instance has native fields and throw an exception if it does.
1876 CheckForNativeFields(cls); 1797 CheckForNativeFields(cls);
(...skipping 34 matching lines...)
1911 while (offset < next_field_offset) { 1832 while (offset < next_field_offset) {
1912 RawObject* raw_obj = *reinterpret_cast<RawObject**>( 1833 RawObject* raw_obj = *reinterpret_cast<RawObject**>(
1913 reinterpret_cast<uword>(raw->ptr()) + offset); 1834 reinterpret_cast<uword>(raw->ptr()) + offset);
1914 WriteObjectImpl(raw_obj, write_as_reference); 1835 WriteObjectImpl(raw_obj, write_as_reference);
1915 offset += kWordSize; 1836 offset += kWordSize;
1916 } 1837 }
1917 } 1838 }
1918 return; 1839 return;
1919 } 1840 }
1920 1841
1921
1922 bool SnapshotWriter::AllowObjectsInDartLibrary(RawLibrary* library) { 1842 bool SnapshotWriter::AllowObjectsInDartLibrary(RawLibrary* library) {
1923 return (library == object_store()->collection_library() || 1843 return (library == object_store()->collection_library() ||
1924 library == object_store()->core_library() || 1844 library == object_store()->core_library() ||
1925 library == object_store()->typed_data_library()); 1845 library == object_store()->typed_data_library());
1926 } 1846 }
1927 1847
1928
1929 intptr_t SnapshotWriter::FindVmSnapshotObject(RawObject* rawobj) { 1848 intptr_t SnapshotWriter::FindVmSnapshotObject(RawObject* rawobj) {
1930 intptr_t length = Object::vm_isolate_snapshot_object_table().Length(); 1849 intptr_t length = Object::vm_isolate_snapshot_object_table().Length();
1931 for (intptr_t i = 0; i < length; i++) { 1850 for (intptr_t i = 0; i < length; i++) {
1932 if (Object::vm_isolate_snapshot_object_table().At(i) == rawobj) { 1851 if (Object::vm_isolate_snapshot_object_table().At(i) == rawobj) {
1933 return (i + kMaxPredefinedObjectIds); 1852 return (i + kMaxPredefinedObjectIds);
1934 } 1853 }
1935 } 1854 }
1936 return kInvalidIndex; 1855 return kInvalidIndex;
1937 } 1856 }
1938 1857
1939
1940 void SnapshotWriter::ThrowException(Exceptions::ExceptionType type, 1858 void SnapshotWriter::ThrowException(Exceptions::ExceptionType type,
1941 const char* msg) { 1859 const char* msg) {
1942 thread()->clear_sticky_error(); 1860 thread()->clear_sticky_error();
1943 if (msg != NULL) { 1861 if (msg != NULL) {
1944 const String& msg_obj = String::Handle(String::New(msg)); 1862 const String& msg_obj = String::Handle(String::New(msg));
1945 const Array& args = Array::Handle(Array::New(1)); 1863 const Array& args = Array::Handle(Array::New(1));
1946 args.SetAt(0, msg_obj); 1864 args.SetAt(0, msg_obj);
1947 Exceptions::ThrowByType(type, args); 1865 Exceptions::ThrowByType(type, args);
1948 } else { 1866 } else {
1949 Exceptions::ThrowByType(type, Object::empty_array()); 1867 Exceptions::ThrowByType(type, Object::empty_array());
1950 } 1868 }
1951 UNREACHABLE(); 1869 UNREACHABLE();
1952 } 1870 }
1953 1871
1954
1955 void SnapshotWriter::WriteVersionAndFeatures() { 1872 void SnapshotWriter::WriteVersionAndFeatures() {
1956 const char* expected_version = Version::SnapshotString(); 1873 const char* expected_version = Version::SnapshotString();
1957 ASSERT(expected_version != NULL); 1874 ASSERT(expected_version != NULL);
1958 const intptr_t version_len = strlen(expected_version); 1875 const intptr_t version_len = strlen(expected_version);
1959 WriteBytes(reinterpret_cast<const uint8_t*>(expected_version), version_len); 1876 WriteBytes(reinterpret_cast<const uint8_t*>(expected_version), version_len);
1960 1877
1961 const char* expected_features = 1878 const char* expected_features =
1962 Dart::FeaturesString(Isolate::Current(), kind_); 1879 Dart::FeaturesString(Isolate::Current(), kind_);
1963 ASSERT(expected_features != NULL); 1880 ASSERT(expected_features != NULL);
1964 const intptr_t features_len = strlen(expected_features); 1881 const intptr_t features_len = strlen(expected_features);
1965 WriteBytes(reinterpret_cast<const uint8_t*>(expected_features), 1882 WriteBytes(reinterpret_cast<const uint8_t*>(expected_features),
1966 features_len + 1); 1883 features_len + 1);
1967 free(const_cast<char*>(expected_features)); 1884 free(const_cast<char*>(expected_features));
1968 } 1885 }
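
WriteVersionAndFeatures() emits the raw version string with no terminator, followed by the features string including its trailing NUL, which lets a reader compare a fixed-length version prefix and then scan the features up to the terminator. The sketch below only illustrates that byte layout; the strings, helper names, and the checking side are hypothetical, not the VM's reader.

// Illustrative sketch of the "version bytes + NUL-terminated features" layout.
// All names and strings here are made up for the example.
#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

std::vector<uint8_t> WriteHeader(const std::string& version,
                                 const std::string& features) {
  std::vector<uint8_t> out;
  out.insert(out.end(), version.begin(), version.end());  // no terminator
  out.insert(out.end(), features.begin(), features.end());
  out.push_back('\0');                                     // trailing NUL
  return out;
}

bool CheckHeader(const std::vector<uint8_t>& buf,
                 const std::string& expected_version,
                 const std::string& expected_features) {
  if (buf.size() < expected_version.size() + expected_features.size() + 1) {
    return false;
  }
  if (std::memcmp(buf.data(), expected_version.data(),
                  expected_version.size()) != 0) {
    return false;
  }
  const char* feat =
      reinterpret_cast<const char*>(buf.data()) + expected_version.size();
  return expected_features == feat;  // comparison stops at the written NUL
}

int main() {
  auto buf = WriteHeader("2.0.0-example", "debug,asserts");
  return CheckHeader(buf, "2.0.0-example", "debug,asserts") ? 0 : 1;
}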
1969 1886
1970
1971 ScriptSnapshotWriter::ScriptSnapshotWriter(uint8_t** buffer, ReAlloc alloc) 1887 ScriptSnapshotWriter::ScriptSnapshotWriter(uint8_t** buffer, ReAlloc alloc)
1972 : SnapshotWriter(Thread::Current(), 1888 : SnapshotWriter(Thread::Current(),
1973 Snapshot::kScript, 1889 Snapshot::kScript,
1974 buffer, 1890 buffer,
1975 alloc, 1891 alloc,
1976 NULL, 1892 NULL,
1977 kInitialSize, 1893 kInitialSize,
1978 &forward_list_, 1894 &forward_list_,
1979 true /* can_send_any_object */), 1895 true /* can_send_any_object */),
1980 forward_list_(thread(), kMaxPredefinedObjectIds) { 1896 forward_list_(thread(), kMaxPredefinedObjectIds) {
1981 ASSERT(buffer != NULL); 1897 ASSERT(buffer != NULL);
1982 ASSERT(alloc != NULL); 1898 ASSERT(alloc != NULL);
1983 } 1899 }
1984 1900
1985
1986 void ScriptSnapshotWriter::WriteScriptSnapshot(const Library& lib) { 1901 void ScriptSnapshotWriter::WriteScriptSnapshot(const Library& lib) {
1987 ASSERT(kind() == Snapshot::kScript); 1902 ASSERT(kind() == Snapshot::kScript);
1988 ASSERT(isolate() != NULL); 1903 ASSERT(isolate() != NULL);
1989 ASSERT(ClassFinalizer::AllClassesFinalized()); 1904 ASSERT(ClassFinalizer::AllClassesFinalized());
1990 1905
1991 // Setup for long jump in case there is an exception while writing 1906 // Setup for long jump in case there is an exception while writing
1992 // the snapshot. 1907 // the snapshot.
1993 LongJumpScope jump; 1908 LongJumpScope jump;
1994 if (setjmp(*jump.Set()) == 0) { 1909 if (setjmp(*jump.Set()) == 0) {
1995 // Reserve space in the output buffer for a snapshot header. 1910 // Reserve space in the output buffer for a snapshot header.
1996 ReserveHeader(); 1911 ReserveHeader();
1997 1912
1998 // Write out the version string. 1913 // Write out the version string.
1999 WriteVersionAndFeatures(); 1914 WriteVersionAndFeatures();
2000 1915
2001 // Write out the library object. 1916 // Write out the library object.
2002 { 1917 {
2003 NoSafepointScope no_safepoint; 1918 NoSafepointScope no_safepoint;
2004 1919
2005 // Write out the library object. 1920 // Write out the library object.
2006 WriteObject(lib.raw()); 1921 WriteObject(lib.raw());
2007 1922
2008 FillHeader(kind()); 1923 FillHeader(kind());
2009 } 1924 }
2010 } else { 1925 } else {
2011 ThrowException(exception_type(), exception_msg()); 1926 ThrowException(exception_type(), exception_msg());
2012 } 1927 }
2013 } 1928 }
2014 1929
2015
2016 void SnapshotWriterVisitor::VisitPointers(RawObject** first, RawObject** last) { 1930 void SnapshotWriterVisitor::VisitPointers(RawObject** first, RawObject** last) {
2017 for (RawObject** current = first; current <= last; current++) { 1931 for (RawObject** current = first; current <= last; current++) {
2018 RawObject* raw_obj = *current; 1932 RawObject* raw_obj = *current;
2019 writer_->WriteObjectImpl(raw_obj, as_references_); 1933 writer_->WriteObjectImpl(raw_obj, as_references_);
2020 } 1934 }
2021 } 1935 }
2022 1936
2023
2024 MessageWriter::MessageWriter(uint8_t** buffer, 1937 MessageWriter::MessageWriter(uint8_t** buffer,
2025 ReAlloc alloc, 1938 ReAlloc alloc,
2026 DeAlloc dealloc, 1939 DeAlloc dealloc,
2027 bool can_send_any_object, 1940 bool can_send_any_object,
2028 intptr_t* buffer_len) 1941 intptr_t* buffer_len)
2029 : SnapshotWriter(Thread::Current(), 1942 : SnapshotWriter(Thread::Current(),
2030 Snapshot::kMessage, 1943 Snapshot::kMessage,
2031 buffer, 1944 buffer,
2032 alloc, 1945 alloc,
2033 dealloc, 1946 dealloc,
2034 kInitialSize, 1947 kInitialSize,
2035 &forward_list_, 1948 &forward_list_,
2036 can_send_any_object), 1949 can_send_any_object),
2037 forward_list_(thread(), kMaxPredefinedObjectIds), 1950 forward_list_(thread(), kMaxPredefinedObjectIds),
2038 buffer_len_(buffer_len) { 1951 buffer_len_(buffer_len) {
2039 ASSERT(buffer != NULL); 1952 ASSERT(buffer != NULL);
2040 ASSERT(alloc != NULL); 1953 ASSERT(alloc != NULL);
2041 } 1954 }
2042 1955
2043
2044 void MessageWriter::WriteMessage(const Object& obj) { 1956 void MessageWriter::WriteMessage(const Object& obj) {
2045 ASSERT(kind() == Snapshot::kMessage); 1957 ASSERT(kind() == Snapshot::kMessage);
2046 ASSERT(isolate() != NULL); 1958 ASSERT(isolate() != NULL);
2047 1959
2048 // Setup for long jump in case there is an exception while writing 1960 // Setup for long jump in case there is an exception while writing
2049 // the message. 1961 // the message.
2050 LongJumpScope jump; 1962 LongJumpScope jump;
2051 if (setjmp(*jump.Set()) == 0) { 1963 if (setjmp(*jump.Set()) == 0) {
2052 NoSafepointScope no_safepoint; 1964 NoSafepointScope no_safepoint;
2053 WriteObject(obj.raw()); 1965 WriteObject(obj.raw());
2054 if (buffer_len_ != NULL) { 1966 if (buffer_len_ != NULL) {
2055 *buffer_len_ = BytesWritten(); 1967 *buffer_len_ = BytesWritten();
2056 } 1968 }
2057 } else { 1969 } else {
2058 FreeBuffer(); 1970 FreeBuffer();
2059 ThrowException(exception_type(), exception_msg()); 1971 ThrowException(exception_type(), exception_msg());
2060 } 1972 }
2061 } 1973 }
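
Both WriteScriptSnapshot() and WriteMessage() wrap the write in a LongJumpScope: the setjmp(...) == 0 branch is the normal path, and SetWriteException() further down the call stack long-jumps back so the buffer can be freed and an exception thrown. The fragment below is a generic illustration of that error-handling shape using the standard <csetjmp> facilities; it is not the VM's LongJumpScope and the names are invented.

// Generic illustration of the setjmp/longjmp error-handling pattern used by
// the snapshot writers above. Names here are illustrative only.
#include <csetjmp>
#include <cstdio>

static std::jmp_buf g_error_jump;

void DeepWriteStep(bool fail) {
  if (fail) {
    // Analogous to SetWriteException(): record the error, then jump back to
    // the setjmp site instead of unwinding through every caller manually.
    std::longjmp(g_error_jump, 1);
  }
  std::puts("wrote one object");
}

int main() {
  if (setjmp(g_error_jump) == 0) {
    DeepWriteStep(false);  // normal path
    DeepWriteStep(true);   // triggers the long jump
    std::puts("finished without errors");
  } else {
    // Error path: release resources and report, mirroring FreeBuffer() +
    // ThrowException() in MessageWriter::WriteMessage().
    std::puts("write failed; cleaning up");
  }
  return 0;
}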
2062 1974
2063
2064 } // namespace dart 1975 } // namespace dart