Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file | |
| 2 // for details. All rights reserved. Use of this source code is governed by a | |
| 3 // BSD-style license that can be found in the LICENSE file. | |
| 4 | |
| 5 #include "vm/isolate_reload.h" | |
| 6 | |
| 7 #include "vm/become.h" | |
| 8 #include "vm/code_generator.h" | |
| 9 #include "vm/compiler.h" | |
| 10 #include "vm/dart_api_impl.h" | |
| 11 #include "vm/hash_table.h" | |
| 12 #include "vm/isolate.h" | |
| 13 #include "vm/log.h" | |
| 14 #include "vm/object.h" | |
| 15 #include "vm/object_store.h" | |
| 16 #include "vm/parser.h" | |
| 17 #include "vm/safepoint.h" | |
| 18 #include "vm/service_event.h" | |
| 19 #include "vm/stack_frame.h" | |
| 20 #include "vm/thread.h" | |
| 21 #include "vm/timeline.h" | |
| 22 #include "vm/visitor.h" | |
| 23 | |
| 24 namespace dart { | |
| 25 | |
| 26 DEFINE_FLAG(bool, trace_reload, false, "Trace isolate reloading"); | |
| 27 DEFINE_FLAG(bool, identity_reload, false, "Enable checks for identity reload."); | |
| 28 DEFINE_FLAG(int, reload_every, 0, "Reload every N stack overflow checks."); | |
| 29 DEFINE_FLAG(bool, reload_every_optimized, true, "Only from optimized code."); | |
| 30 | |
| 31 #ifndef PRODUCT | |
| 32 | |
| 33 #define I (isolate()) | |
| 34 #define Z (thread->zone()) | |
| 35 | |
| 36 #define TIMELINE_SCOPE(name) \ | |
| 37 TimelineDurationScope tds##name(Thread::Current(), \ | |
| 38 Timeline::GetIsolateStream(), \ | |
| 39 #name) | |
| 40 | |
| 41 | |
| 42 class ScriptUrlSetTraits { | |
| 43 public: | |
| 44 static bool ReportStats() { return false; } | |
| 45 static const char* Name() { return "ScriptUrlSetTraits"; } | |
| 46 | |
| 47 static bool IsMatch(const Object& a, const Object& b) { | |
| 48 if (!a.IsString() || !b.IsString()) { | |
| 49 return false; | |
| 50 } | |
| 51 | |
| 52 return String::Cast(a).Equals(String::Cast(b)); | |
| 53 } | |
| 54 | |
| 55 static uword Hash(const Object& obj) { | |
| 56 return String::Cast(obj).Hash(); | |
| 57 } | |
| 58 }; | |
| 59 | |
| 60 | |
| 61 class ClassMapTraits { | |
| 62 public: | |
| 63 static bool ReportStats() { return false; } | |
| 64 static const char* Name() { return "ClassMapTraits"; } | |
| 65 | |
| 66 static bool IsMatch(const Object& a, const Object& b) { | |
| 67 if (!a.IsClass() || !b.IsClass()) { | |
| 68 return false; | |
| 69 } | |
| 70 return IsolateReloadContext::IsSameClass(Class::Cast(a), Class::Cast(b)); | |
| 71 } | |
| 72 | |
| 73 static uword Hash(const Object& obj) { | |
| 74 return String::HashRawSymbol(Class::Cast(obj).Name()); | |
| 75 } | |
| 76 }; | |
| 77 | |
| 78 | |
| 79 class LibraryMapTraits { | |
| 80 public: | |
| 81 static bool ReportStats() { return false; } | |
| 82 static const char* Name() { return "LibraryMapTraits"; } | |
| 83 | |
| 84 static bool IsMatch(const Object& a, const Object& b) { | |
| 85 if (!a.IsLibrary() || !b.IsLibrary()) { | |
| 86 return false; | |
| 87 } | |
| 88 return IsolateReloadContext::IsSameLibrary( | |
| 89 Library::Cast(a), Library::Cast(b)); | |
| 90 } | |
| 91 | |
| 92 static uword Hash(const Object& obj) { | |
| 93 return Library::Cast(obj).UrlHash(); | |
| 94 } | |
| 95 }; | |
| 96 | |
| 97 | |
| 98 class BecomeMapTraits { | |
| 99 public: | |
| 100 static bool ReportStats() { return false; } | |
| 101 static const char* Name() { return "BecomeMapTraits"; } | |
| 102 | |
| 103 static bool IsMatch(const Object& a, const Object& b) { | |
| 104 return a.raw() == b.raw(); | |
| 105 } | |
| 106 | |
| 107 static uword Hash(const Object& obj) { | |
| 108 if (obj.IsLibrary()) { | |
| 109 return Library::Cast(obj).UrlHash(); | |
| 110 } else if (obj.IsClass()) { | |
| 111 if (Class::Cast(obj).id() == kFreeListElement) { | |
| 112 return 0; | |
| 113 } | |
| 114 return String::HashRawSymbol(Class::Cast(obj).Name()); | |
| 115 } else if (obj.IsField()) { | |
| 116 return String::HashRawSymbol(Field::Cast(obj).name()); | |
| 117 } | |
| 118 return 0; | |
| 119 } | |
| 120 }; | |
| 121 | |
| 122 | |
| 123 bool IsolateReloadContext::IsSameField(const Field& a, const Field& b) { | |
| 124 if (a.is_static() != b.is_static()) { | |
| 125 return false; | |
| 126 } | |
| 127 const Class& a_cls = Class::Handle(a.Owner()); | |
| 128 const Class& b_cls = Class::Handle(b.Owner()); | |
| 129 | |
| 130 if (!IsSameClass(a_cls, b_cls)) { | |
| 131 return false; | |
| 132 } | |
| 133 | |
| 134 const String& a_name = String::Handle(a.name()); | |
| 135 const String& b_name = String::Handle(b.name()); | |
| 136 | |
| 137 return a_name.Equals(b_name); | |
| 138 } | |
| 139 | |
| 140 | |
| 141 bool IsolateReloadContext::IsSameClass(const Class& a, const Class& b) { | |
| 142 if (a.is_patch() != b.is_patch()) { | |
| 143 // TODO(johnmccutchan): Should we just check the class kind bits? | |
| 144 return false; | |
| 145 } | |
| 146 | |
| 147 // TODO(turnidge): We need to look at generic type arguments for | |
| 148 // synthetic mixin classes. Their names are not necessarily unique | |
| 149 // currently. | |
| 150 const String& a_name = String::Handle(Class::Cast(a).Name()); | |
| 151 const String& b_name = String::Handle(Class::Cast(b).Name()); | |
| 152 | |
| 153 if (!a_name.Equals(b_name)) { | |
| 154 return false; | |
| 155 } | |
| 156 | |
| 157 const Library& a_lib = Library::Handle(Class::Cast(a).library()); | |
|
rmacnak
2016/05/11 19:56:18
What classes don't have a library?
Cutch
2016/05/12 15:50:14
Acknowledged.
| |
| 158 const String& a_lib_url = | |
| 159 String::Handle(a_lib.IsNull() ? String::null() : a_lib.url()); | |
| 160 | |
| 161 const Library& b_lib = Library::Handle(Class::Cast(b).library()); | |
| 162 const String& b_lib_url = | |
| 163 String::Handle(b_lib.IsNull() ? String::null() : b_lib.url()); | |
| 164 | |
| 165 return a_lib_url.Equals(b_lib_url); | |
|
rmacnak
2016/05/11 19:56:18
return IsSameLibrary(a_lib, b_lib)
Cutch
2016/05/12 15:50:14
Done.
| |
| 166 } | |
| 167 | |
| 168 | |
| 169 bool IsolateReloadContext::IsSameLibrary( | |
| 170 const Library& a_lib, const Library& b_lib) { | |
| 171 const String& a_lib_url = | |
| 172 String::Handle(a_lib.IsNull() ? String::null() : a_lib.url()); | |
| 173 const String& b_lib_url = | |
| 174 String::Handle(b_lib.IsNull() ? String::null() : b_lib.url()); | |
| 175 return a_lib_url.Equals(b_lib_url); | |
| 176 } | |
| 177 | |
| 178 | |
// Constructs a reload context for |isolate|. Records the reload start time
// and eagerly allocates backing storage for every hash map/set used during
// the reload so later phases never see null storage.
IsolateReloadContext::IsolateReloadContext(Isolate* isolate, bool test_mode)
    : start_time_micros_(OS::GetCurrentMonotonicMicros()),
      isolate_(isolate),
      test_mode_(test_mode),
      has_error_(false),
      saved_num_cids_(-1),       // Set by CheckpointClasses.
      saved_class_table_(NULL),  // Malloc'ed copy; set by CheckpointClasses.
      num_saved_libs_(-1),       // Set by CheckpointLibraries.
      script_uri_(String::null()),
      error_(Error::null()),
      clean_scripts_set_storage_(Array::null()),
      compile_time_constants_(Array::null()),
      old_classes_set_storage_(Array::null()),
      class_map_storage_(Array::null()),
      old_libraries_set_storage_(Array::null()),
      library_map_storage_(Array::null()),
      become_map_storage_(Array::null()),
      saved_root_library_(Library::null()),
      saved_libraries_(GrowableObjectArray::null()) {
  // Preallocate storage for maps.
  clean_scripts_set_storage_ =
      HashTables::New<UnorderedHashSet<ScriptUrlSetTraits> >(4);
  old_classes_set_storage_ =
      HashTables::New<UnorderedHashSet<ClassMapTraits> >(4);
  class_map_storage_ =
      HashTables::New<UnorderedHashMap<ClassMapTraits> >(4);
  old_libraries_set_storage_ =
      HashTables::New<UnorderedHashSet<LibraryMapTraits> >(4);
  library_map_storage_ =
      HashTables::New<UnorderedHashMap<LibraryMapTraits> >(4);
  become_map_storage_ =
      HashTables::New<UnorderedHashMap<BecomeMapTraits> >(4);
}
| 212 | |
| 213 | |
// NOTE(review): saved_class_table_ (malloc'ed in CheckpointClasses) is freed
// in RollbackClasses but not here -- confirm the commit path also releases
// it, otherwise a successful reload leaks the saved table.
IsolateReloadContext::~IsolateReloadContext() {
}
| 216 | |
| 217 | |
| 218 void IsolateReloadContext::ReportError(const Error& error) { | |
| 219 has_error_ = true; | |
| 220 error_ = error.raw(); | |
| 221 if (FLAG_trace_reload) { | |
| 222 THR_Print("ISO-RELOAD: Error: %s\n", error.ToErrorCString()); | |
| 223 } | |
| 224 ServiceEvent service_event(Isolate::Current(), ServiceEvent::kIsolateReload); | |
| 225 service_event.set_reload_error(&error); | |
| 226 Service::HandleEvent(&service_event); | |
| 227 } | |
| 228 | |
| 229 | |
| 230 void IsolateReloadContext::ReportError(const String& error_msg) { | |
| 231 ReportError(LanguageError::Handle(LanguageError::New(error_msg))); | |
| 232 } | |
| 233 | |
| 234 | |
| 235 void IsolateReloadContext::ReportSuccess() { | |
| 236 ServiceEvent service_event(Isolate::Current(), ServiceEvent::kIsolateReload); | |
| 237 Service::HandleEvent(&service_event); | |
| 238 } | |
| 239 | |
| 240 | |
// Kicks off a reload: forces unoptimized code for the running program,
// checkpoints the class table / libraries / constants so the reload can be
// rolled back, then calls the embedder's library tag handler to reload the
// root script.
void IsolateReloadContext::StartReload() {
  Thread* thread = Thread::Current();

  // Grab root library before calling CheckpointBeforeReload.
  // NOTE(review): root_lib is assumed non-null; per review discussion a
  // reload cannot be triggered before the root library is loaded -- confirm.
  const Library& root_lib = Library::Handle(object_store()->root_library());
  const String& root_lib_url = String::Handle(root_lib.url());

  // Switch all functions on the stack to compiled, unoptimized code.
  SwitchStackToUnoptimizedCode();
  // Deoptimize all code that had optimizing decisions that are dependent on
  // assumptions from field guards or CHA (and, per review, unloaded deferred
  // prefixes -- see the TODO in DeoptimizeDependentCode).
  // TODO(johnmccutchan): Deoptimizing dependent code here (before the reload)
  // is paranoid. This likely can be moved to the commit phase.
  DeoptimizeDependentCode();
  Checkpoint();

  // Block class finalization attempts when calling into the library
  // tag handler.
  I->BlockClassFinalization();
  Object& result = Object::Handle(thread->zone());
  {
    // The tag handler is embedder (native) code: leave VM state and open an
    // API scope for the handles crossing the boundary.
    TransitionVMToNative transition(thread);
    Api::Scope api_scope(thread);

    Dart_Handle retval =
        (I->library_tag_handler())(Dart_kScriptTag,
                                   Api::NewHandle(thread, Library::null()),
                                   Api::NewHandle(thread, root_lib_url.raw()));
    result = Api::UnwrapHandle(retval);
  }
  I->UnblockClassFinalization();
  if (result.IsError()) {
    ReportError(Error::Cast(result));
  }
}
| 276 | |
| 277 | |
// Pairs |new_cls| with its pre-reload counterpart, if any. A class with no
// old counterpart gets a fresh class id; otherwise the new class takes over
// the old class's id and canonical constants/types.
void IsolateReloadContext::RegisterClass(const Class& new_cls) {
  const Class& old_cls = Class::Handle(OldClassOrNull(new_cls));
  if (old_cls.IsNull()) {
    // Brand new class: allocate a class id for it.
    Isolate::Current()->class_table()->Register(new_cls);

    if (FLAG_identity_reload) {
      // An identity reload must map every class onto an old one.
      TIR_Print("Could not find replacement class for %s\n",
                new_cls.ToCString());
      UNREACHABLE();
    }

    // New class maps to itself.
    AddClassMapping(new_cls, new_cls);
    return;
  }
  // Reuse the old class id so existing instances keep their cids.
  new_cls.set_id(old_cls.id());
  isolate()->class_table()->SetAt(old_cls.id(), new_cls.raw());
  if (!old_cls.is_enum_class()) {
    // Enum classes are handled separately (see ReplaceEnum in Commit);
    // everything else carries its canonical constants over.
    new_cls.CopyCanonicalConstants(old_cls);
  }
  new_cls.CopyCanonicalTypes(old_cls);
  AddBecomeMapping(old_cls, new_cls);
  AddClassMapping(new_cls, old_cls);
}
| 302 | |
| 303 | |
// Completes the reload: validates the new program and either commits it or
// rolls back to the checkpointed state.
void IsolateReloadContext::FinishReload() {
  // Disable the background compiler while we are performing the reload.
  BackgroundCompiler::Disable();

  BuildLibraryMapping();
  TIR_Print("---- DONE FINALIZING\n");
  if (ValidateReload()) {
    Commit();
    PostCommit();
  } else {
    Rollback();
  }
  // ValidateReload mutates the direct subclass information and does
  // not remove dead subclasses. Rebuild the direct subclass
  // information from scratch.
  RebuildDirectSubclasses();

  BackgroundCompiler::Enable();
}
| 323 | |
| 324 | |
// Aborts an in-progress reload: records |error| and restores the
// checkpointed pre-reload state.
void IsolateReloadContext::AbortReload(const Error& error) {
  ReportError(error);
  Rollback();
}
| 329 | |
| 330 | |
| 331 void IsolateReloadContext::SwitchStackToUnoptimizedCode() { | |
|
rmacnak
2016/05/11 19:56:18
Doesn't actually switch the function's code. Consi
Cutch
2016/05/12 15:50:13
Done.
| |
| 332 TIMELINE_SCOPE(SwitchStackToUnoptimizedCode); | |
| 333 StackFrameIterator it(StackFrameIterator::kDontValidateFrames); | |
| 334 | |
| 335 Function& func = Function::Handle(); | |
| 336 while (it.HasNextFrame()) { | |
| 337 StackFrame* frame = it.NextFrame(); | |
| 338 if (frame->IsDartFrame()) { | |
| 339 func = frame->LookupDartFunction(); | |
| 340 ASSERT(!func.IsNull()); | |
| 341 func.EnsureHasCompiledUnoptimizedCode(); | |
| 342 } | |
| 343 } | |
| 344 } | |
| 345 | |
| 346 | |
// Throws away optimized code whose optimizing assumptions (CHA, field
// guards) may be invalidated by the reload.
void IsolateReloadContext::DeoptimizeDependentCode() {
  ClassTable* class_table = I->class_table();

  // Only walk classes owned by this isolate; VM-isolate classes are below
  // |bottom| and are not subject to reload.
  const intptr_t bottom = Dart::vm_isolate()->class_table()->NumCids();
  const intptr_t top = I->class_table()->NumCids();
  Class& cls = Class::Handle();
  Array& fields = Array::Handle();
  Field& field = Field::Handle();
  for (intptr_t cls_idx = bottom; cls_idx < top; cls_idx++) {
    if (!class_table->HasValidClassAt(cls_idx)) {
      // Skip.
      continue;
    }

    // Deoptimize CHA code.
    cls = class_table->At(cls_idx);
    ASSERT(!cls.IsNull());

    cls.DisableAllCHAOptimizedCode();

    // Deoptimize field guard code.
    fields = cls.fields();
    ASSERT(!fields.IsNull());
    for (intptr_t field_idx = 0; field_idx < fields.Length(); field_idx++) {
      field = Field::RawCast(fields.At(field_idx));
      ASSERT(!field.IsNull());
      field.DeoptimizeDependentCode();
    }
  }

  // TODO(johnmccutchan): Per review, library (deferred) prefixes also have
  // dependent code that should be deoptimized here.
}
| 377 | |
| 378 | |
// Saves a malloc'ed copy of the current class table (size and contents) and
// builds a set of the old classes so new classes can be paired with old ones.
void IsolateReloadContext::CheckpointClasses() {
  TIMELINE_SCOPE(CheckpointClasses);
  TIR_Print("---- CHECKPOINTING CLASSES\n");
  // Checkpoint classes before a reload. We need to copy the following:
  // 1) The size of the class table.
  // 2) The class table itself.
  // For efficiency, we build a set of classes before the reload. This set
  // is used to pair new classes with old classes.

  ClassTable* class_table = I->class_table();

  // Copy the size of the class table.
  saved_num_cids_ = I->class_table()->NumCids();

  // Copy of the class table.
  RawClass** local_saved_class_table =
      reinterpret_cast<RawClass**>(malloc(sizeof(RawClass*) * saved_num_cids_));

  Class& cls = Class::Handle();
  UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
  for (intptr_t i = 0; i < saved_num_cids_; i++) {
    if (class_table->IsValidIndex(i) &&
        class_table->HasValidClassAt(i)) {
      // Copy the class into the saved class table and add it to the set.
      local_saved_class_table[i] = class_table->At(i);
      // kFreeListElement is kept out of the set: per review discussion it
      // has no name, which ClassMapTraits::Hash requires.
      if (i != kFreeListElement) {
        cls = class_table->At(i);
        bool already_present = old_classes_set.Insert(cls);
        ASSERT(!already_present);
      }
    } else {
      // No class at this index, mark it as NULL.
      local_saved_class_table[i] = NULL;
    }
  }
  old_classes_set_storage_ = old_classes_set.Release().raw();
  // Assigning the field must be done after saving the class table, so a
  // concurrent reader never sees a partially filled saved table.
  saved_class_table_ = local_saved_class_table;
  TIR_Print("---- System had %" Pd " classes\n", saved_num_cids_);
}
| 419 | |
| 420 | |
// A "clean" library is preserved across the reload instead of being
// reloaded. Only dart: scheme libraries qualify.
bool IsolateReloadContext::IsCleanLibrary(const Library& lib) {
  return lib.is_dart_scheme();
}
| 424 | |
| 425 | |
// Saves the root library and libraries array for rollback, keeps "clean"
// libraries registered, and un-indexes the rest so they will be reloaded.
void IsolateReloadContext::CheckpointLibraries() {
  TIMELINE_SCOPE(CheckpointLibraries);

  // Save the root library in case we abort the reload.
  const Library& root_lib =
      Library::Handle(object_store()->root_library());
  set_saved_root_library(root_lib);

  // Save the old libraries array in case we abort the reload.
  const GrowableObjectArray& libs =
      GrowableObjectArray::Handle(object_store()->libraries());
  set_saved_libraries(libs);

  // Make a filtered copy of the old libraries array. Keep "clean" libraries
  // that we will use instead of reloading.
  const GrowableObjectArray& new_libs = GrowableObjectArray::Handle(
      GrowableObjectArray::New(Heap::kOld));
  Library& lib = Library::Handle();
  UnorderedHashSet<LibraryMapTraits>
      old_libraries_set(old_libraries_set_storage_);
  num_saved_libs_ = 0;
  for (intptr_t i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    if (IsCleanLibrary(lib)) {
      // We are preserving this library across the reload, assign its new index
      lib.set_index(new_libs.Length());
      new_libs.Add(lib, Heap::kOld);
      num_saved_libs_++;
    } else {
      // We are going to reload this library. Clear the index.
      lib.set_index(-1);
    }
    // Add old library to old libraries set.
    bool already_present = old_libraries_set.Insert(lib);
    ASSERT(!already_present);
  }
  old_libraries_set_storage_ = old_libraries_set.Release().raw();

  // Reset the registered libraries to the filtered array.
  Library::RegisterLibraries(Thread::Current(), new_libs);
  // Reset the root library to null.
  object_store()->set_root_library(Library::Handle());
}
| 469 | |
| 470 | |
// Builds the set of script URLs belonging to "clean" (preserved) libraries.
// Must run after CheckpointLibraries, when the registered libraries array
// contains only clean libraries (asserted below).
void IsolateReloadContext::BuildCleanScriptSet() {
  const GrowableObjectArray& libs =
      GrowableObjectArray::Handle(object_store()->libraries());

  UnorderedHashSet<ScriptUrlSetTraits>
      clean_scripts_set(clean_scripts_set_storage_);

  Library& lib = Library::Handle();
  Array& scripts = Array::Handle();
  Script& script = Script::Handle();
  String& script_url = String::Handle();
  for (intptr_t lib_idx = 0; lib_idx < libs.Length(); lib_idx++) {
    lib = Library::RawCast(libs.At(lib_idx));
    ASSERT(!lib.IsNull());
    ASSERT(IsCleanLibrary(lib));
    scripts = lib.LoadedScripts();
    ASSERT(!scripts.IsNull());
    for (intptr_t script_idx = 0; script_idx < scripts.Length(); script_idx++) {
      script = Script::RawCast(scripts.At(script_idx));
      ASSERT(!script.IsNull());
      script_url = script.url();
      ASSERT(!script_url.IsNull());
      // Script URLs are expected to be unique across clean libraries.
      bool already_present = clean_scripts_set.Insert(script_url);
      ASSERT(!already_present);
    }
  }

  clean_scripts_set_storage_ = clean_scripts_set.Release().raw();
}
| 500 | |
| 501 | |
// Saves the compile-time constants map, clears the live one, and re-inserts
// only those cached constants that originate from clean scripts (constants
// from reloaded scripts may be stale). Must run after BuildCleanScriptSet.
void IsolateReloadContext::FilterCompileTimeConstants() {
  // Save the compile time constants array.
  compile_time_constants_ = I->object_store()->compile_time_constants();
  // Clear the compile time constants array. This will be repopulated
  // in the loop below.
  I->object_store()->set_compile_time_constants(Array::Handle());

  if (compile_time_constants_ == Array::null()) {
    // Nothing to do.
    return;
  }

  // Iterate over the saved compile time constants map.
  ConstantsMap old_constants(compile_time_constants_);
  ConstantsMap::Iterator it(&old_constants);

  Array& key = Array::Handle();
  String& url = String::Handle();
  Smi& token_pos = Smi::Handle();
  Instance& value = Instance::Handle();

  // We filter the compile time constants map so that after it only contains
  // constants from scripts contained in this set.
  UnorderedHashSet<ScriptUrlSetTraits>
      clean_scripts_set(clean_scripts_set_storage_);

  while (it.MoveNext()) {
    const intptr_t entry = it.Current();
    ASSERT(entry != -1);
    // Each key is a (script url, token position) pair.
    key = Array::RawCast(old_constants.GetKey(entry));
    ASSERT(!key.IsNull());
    url = String::RawCast(key.At(0));
    ASSERT(!url.IsNull());
    if (clean_scripts_set.ContainsKey(url)) {
      // We've found a cached constant from a clean script, add it to the
      // compile time constants map again.
      token_pos = Smi::RawCast(key.At(1));
      TokenPosition tp(token_pos.Value());
      // Use ^= because this might be null.
      value ^= old_constants.GetPayload(entry, 0);
      Parser::InsertCachedConstantValue(url, tp, value);
    }
  }

  old_constants.Release();
  clean_scripts_set.Release();
}
| 549 | |
| 550 | |
// Saves all state needed to roll back the reload. Order matters:
// CheckpointLibraries must run before BuildCleanScriptSet (which reads the
// filtered libraries array), which must run before
// FilterCompileTimeConstants (which reads the clean-scripts set).
void IsolateReloadContext::Checkpoint() {
  TIMELINE_SCOPE(Checkpoint);
  CheckpointClasses();
  CheckpointLibraries();
  BuildCleanScriptSet();
  FilterCompileTimeConstants();
}
| 558 | |
| 559 | |
| 560 void IsolateReloadContext::RollbackClasses() { | |
| 561 TIR_Print("---- ROLLING BACK CLASS TABLE\n"); | |
| 562 ASSERT(saved_num_cids_ > 0); | |
| 563 ASSERT(saved_class_table_ != NULL); | |
| 564 ClassTable* class_table = I->class_table(); | |
| 565 class_table->SetNumCids(saved_num_cids_); | |
| 566 // Overwrite classes in class table with the saved classes. | |
| 567 for (intptr_t i = 0; i < saved_num_cids_; i++) { | |
| 568 if (class_table->IsValidIndex(i)) { | |
| 569 class_table->SetAt(i, saved_class_table_[i]); | |
| 570 } | |
| 571 } | |
| 572 free(saved_class_table_); | |
| 573 saved_class_table_ = NULL; | |
| 574 saved_num_cids_ = 0; | |
| 575 } | |
| 576 | |
| 577 | |
// Restores the libraries array and root library saved by
// CheckpointLibraries, repairing the per-library indexes it cleared.
void IsolateReloadContext::RollbackLibraries() {
  TIR_Print("---- ROLLING BACK LIBRARY CHANGES\n");
  Thread* thread = Thread::Current();  // Needed by the Z (zone) macro below.
  Library& lib = Library::Handle();
  GrowableObjectArray& saved_libs = GrowableObjectArray::Handle(
      Z, saved_libraries());
  if (!saved_libs.IsNull()) {
    for (intptr_t i = 0; i < saved_libs.Length(); i++) {
      lib = Library::RawCast(saved_libs.At(i));
      // Restore indexes that were modified in CheckpointLibraries.
      lib.set_index(i);
    }

    // Reset the registered libraries to the filtered array.
    Library::RegisterLibraries(Thread::Current(), saved_libs);
  }

  Library& saved_root_lib = Library::Handle(Z, saved_root_library());
  if (!saved_root_lib.IsNull()) {
    object_store()->set_root_library(saved_root_lib);
  }

  // Drop the rollback state now that it has been applied.
  set_saved_root_library(Library::Handle());
  set_saved_libraries(GrowableObjectArray::Handle());
}
| 603 | |
| 604 | |
// Restores the isolate to the state checkpointed before the reload began:
// constants map, class table, then libraries.
void IsolateReloadContext::Rollback() {
  I->object_store()->set_compile_time_constants(
      Array::Handle(compile_time_constants_));
  RollbackClasses();
  RollbackLibraries();
}
| 611 | |
| 612 | |
| 613 #ifdef DEBUG | |
| 614 void IsolateReloadContext::VerifyMaps() { | |
| 615 Class& cls = Class::Handle(); | |
| 616 Class& new_cls = Class::Handle(); | |
| 617 Class& cls2 = Class::Handle(); | |
| 618 Class& new_cls2 = Class::Handle(); | |
| 619 | |
| 620 // Verify that two old classes aren't both mapped to the same new | |
| 621 // class. This could happen is the IsSameClass function is broken. | |
| 622 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_); | |
| 623 { | |
| 624 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map); | |
| 625 while (it.MoveNext()) { | |
| 626 const intptr_t entry = it.Current(); | |
| 627 new_cls = Class::RawCast(class_map.GetKey(entry)); | |
| 628 cls = Class::RawCast(class_map.GetPayload(entry, 0)); | |
| 629 if (new_cls.raw() != cls.raw()) { | |
| 630 UnorderedHashMap<ClassMapTraits>::Iterator it2(&class_map); | |
| 631 while (it2.MoveNext()) { | |
| 632 new_cls2 = Class::RawCast(class_map.GetKey(entry)); | |
| 633 if (new_cls.raw() == new_cls2.raw()) { | |
| 634 cls2 = Class::RawCast(class_map.GetPayload(entry, 0)); | |
| 635 if (cls.raw() != cls2.raw()) { | |
| 636 OS::PrintErr( | |
| 637 "Classes '%s' and '%s' are distinct classes but both map to " | |
| 638 "class '%s'\n", | |
| 639 cls.ToCString(), cls2.ToCString(), new_cls.ToCString()); | |
| 640 UNREACHABLE(); | |
| 641 } | |
| 642 } | |
| 643 } | |
| 644 } | |
| 645 } | |
| 646 } | |
| 647 class_map.Release(); | |
| 648 } | |
| 649 | |
| 650 | |
// Debug-only scan of the canonical type-arguments table asserting that
// equal-but-distinct entries only occur for recursive types (which may
// compare equal yet hash differently).
void IsolateReloadContext::VerifyCanonicalTypeArguments() {
  Thread* thread = Thread::Current();
  const Array& table =
      Array::Handle(Z, I->object_store()->canonical_type_arguments());
  // The final array slot is not a table entry.
  // NOTE(review): presumably it stores the occupancy count -- confirm.
  const intptr_t table_size = table.Length() - 1;
  ASSERT(Utils::IsPowerOfTwo(table_size));
  TypeArguments& element = TypeArguments::Handle(Z);
  TypeArguments& other_element = TypeArguments::Handle();
  for (intptr_t i = 0; i < table_size; i++) {
    element ^= table.At(i);
    for (intptr_t j = 0; j < table_size; j++) {
      if ((i != j) && (table.At(j) != TypeArguments::null())) {
        other_element ^= table.At(j);
        if (element.Equals(other_element)) {
          // Recursive types may be equal, but have different hashes.
          ASSERT(element.IsRecursive());
          ASSERT(other_element.IsRecursive());
          ASSERT(element.Hash() != other_element.Hash());
        }
      }
    }
  }
}
| 674 #endif | |
| 675 | |
| 676 | |
// Applies the reload: copies state from old classes/libraries to their new
// counterparts, rebuilds the libraries array and side table, and finally
// forwards old heap objects to new ones via Become.
void IsolateReloadContext::Commit() {
  TIMELINE_SCOPE(Commit);
  TIR_Print("---- COMMITTING REVERSE MAP\n");

#ifdef DEBUG
  VerifyMaps();
#endif

  {
    TIMELINE_SCOPE(CopyStaticFieldsAndPatchFieldsAndFunctions);
    // Copy static field values from the old classes to the new classes.
    // Patch fields and functions in the old classes so that they retain
    // the old script.
    Class& cls = Class::Handle();
    Class& new_cls = Class::Handle();

    UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);

    {
      UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
      while (it.MoveNext()) {
        const intptr_t entry = it.Current();
        new_cls = Class::RawCast(class_map.GetKey(entry));
        cls = Class::RawCast(class_map.GetPayload(entry, 0));
        // Self-mapped (brand new) classes need no state copied.
        if (new_cls.raw() != cls.raw()) {
          ASSERT(new_cls.is_enum_class() == cls.is_enum_class());
          if (new_cls.is_enum_class() && new_cls.is_finalized()) {
            new_cls.ReplaceEnum(cls);
          }
          new_cls.CopyStaticFieldValues(cls);
          cls.PatchFieldsAndFunctions();
        }
      }
    }

    class_map.Release();
  }

  // Copy over certain properties of libraries, e.g. is the library
  // debuggable?
  {
    TIMELINE_SCOPE(CopyLibraryBits);
    Library& lib = Library::Handle();
    Library& new_lib = Library::Handle();

    UnorderedHashMap<LibraryMapTraits> lib_map(library_map_storage_);

    {
      // Reload existing libraries.
      UnorderedHashMap<LibraryMapTraits>::Iterator it(&lib_map);

      while (it.MoveNext()) {
        const intptr_t entry = it.Current();
        ASSERT(entry != -1);
        new_lib = Library::RawCast(lib_map.GetKey(entry));
        lib = Library::RawCast(lib_map.GetPayload(entry, 0));
        new_lib.set_debuggable(lib.IsDebuggable());
      }
    }

    // Release the library map.
    lib_map.Release();
  }

  {
    TIMELINE_SCOPE(UpdateLibrariesArray);
    // Update the libraries array.
    Library& lib = Library::Handle();
    const GrowableObjectArray& libs = GrowableObjectArray::Handle(
        I->object_store()->libraries());
    for (intptr_t i = 0; i < libs.Length(); i++) {
      lib = Library::RawCast(libs.At(i));
      TIR_Print("Lib '%s' at index %" Pd "\n", lib.ToCString(), i);
      lib.set_index(i);
    }

    // Initialize library side table.
    library_infos_.SetLength(libs.Length());
    for (intptr_t i = 0; i < libs.Length(); i++) {
      lib = Library::RawCast(libs.At(i));
      // Mark the library dirty if it comes after the libraries we saved.
      library_infos_[i].dirty = i >= num_saved_libs_;
    }
  }

  {
    // Flatten the become map into parallel before/after arrays and forward
    // all existing heap references from old objects to new ones.
    UnorderedHashMap<BecomeMapTraits> become_map(become_map_storage_);
    intptr_t replacement_count = become_map.NumOccupied();
    const Array& before =
        Array::Handle(Array::New(replacement_count, Heap::kOld));
    const Array& after =
        Array::Handle(Array::New(replacement_count, Heap::kOld));
    Object& obj = Object::Handle();
    intptr_t replacement_index = 0;
    UnorderedHashMap<BecomeMapTraits>::Iterator it(&become_map);
    while (it.MoveNext()) {
      const intptr_t entry = it.Current();
      obj = become_map.GetKey(entry);
      before.SetAt(replacement_index, obj);
      obj = become_map.GetPayload(entry, 0);
      after.SetAt(replacement_index, obj);
      replacement_index++;
    }
    ASSERT(replacement_index == replacement_count);
    become_map.Release();

    Become::ElementsForwardIdentity(before, after);
  }

  if (FLAG_identity_reload) {
    // An identity reload must leave the class and library counts unchanged.
    if (saved_num_cids_ != I->class_table()->NumCids()) {
      TIR_Print("Identity reload failed! B#C=%" Pd " A#C=%" Pd "\n",
                saved_num_cids_,
                I->class_table()->NumCids());
    }
    const GrowableObjectArray& saved_libs =
        GrowableObjectArray::Handle(saved_libraries());
    const GrowableObjectArray& libs =
        GrowableObjectArray::Handle(I->object_store()->libraries());
    if (saved_libs.Length() != libs.Length()) {
      TIR_Print("Identity reload failed! B#L=%" Pd " A#L=%" Pd "\n",
                saved_libs.Length(),
                libs.Length());
    }
  }

#ifdef DEBUG
  // TODO(turnidge): Remove before committing to main branch.
  VerifyCanonicalTypeArguments();
#endif
}
| 808 | |
| 809 | |
| 810 bool IsolateReloadContext::IsDirty(const Library& lib) { | |
| 811 const intptr_t index = lib.index(); | |
| 812 if (index == static_cast<classid_t>(-1)) { | |
| 813 // Treat deleted libraries as dirty. | |
| 814 return true; | |
| 815 } | |
| 816 ASSERT((index >= 0) && (index < library_infos_.length())); | |
| 817 return library_infos_[index].dirty; | |
| 818 } | |
| 819 | |
| 820 | |
// Final phase of a successful reload: releases the saved pre-reload state
// (root library and library list) for GC, then invalidates all compiled
// code so execution continues against the new program.
void IsolateReloadContext::PostCommit() {
  TIMELINE_SCOPE(PostCommit);
  // Clearing these handles drops the last references to the old libraries.
  set_saved_root_library(Library::Handle());
  set_saved_libraries(GrowableObjectArray::Handle());
  InvalidateWorld();
}
| 827 | |
| 828 | |
| 829 bool IsolateReloadContext::ValidateReload() { | |
| 830 TIMELINE_SCOPE(ValidateReload); | |
| 831 if (has_error_) { | |
| 832 return false; | |
| 833 } | |
| 834 | |
| 835 // Already built. | |
| 836 ASSERT(class_map_storage_ != Array::null()); | |
| 837 UnorderedHashMap<ClassMapTraits> map(class_map_storage_); | |
| 838 UnorderedHashMap<ClassMapTraits>::Iterator it(&map); | |
| 839 Class& cls = Class::Handle(); | |
| 840 Class& new_cls = Class::Handle(); | |
| 841 while (it.MoveNext()) { | |
| 842 const intptr_t entry = it.Current(); | |
| 843 new_cls = Class::RawCast(map.GetKey(entry)); | |
| 844 cls = Class::RawCast(map.GetPayload(entry, 0)); | |
| 845 if (new_cls.raw() != cls.raw()) { | |
| 846 if (!cls.CanReload(new_cls)) { | |
| 847 map.Release(); | |
| 848 return false; | |
| 849 } | |
| 850 } | |
| 851 } | |
| 852 map.Release(); | |
| 853 return true; | |
| 854 } | |
| 855 | |
| 856 | |
// Returns the pre-reload class that |cls| replaces, or null if |cls| has no
// entry in the class map. Thin alias over MappedClass.
RawClass* IsolateReloadContext::FindOriginalClass(const Class& cls) {
  return MappedClass(cls);
}
| 860 | |
| 861 | |
| 862 RawClass* IsolateReloadContext::GetClassForHeapWalkAt(intptr_t cid) { | |
| 863 if (saved_class_table_ != NULL) { | |
| 864 ASSERT(cid > 0); | |
| 865 ASSERT(cid < saved_num_cids_); | |
| 866 return saved_class_table_[cid]; | |
| 867 } else { | |
| 868 return isolate_->class_table()->At(cid); | |
| 869 } | |
| 870 } | |
| 871 | |
| 872 | |
// Accessor: the root library captured before the reload began.
RawLibrary* IsolateReloadContext::saved_root_library() const {
  return saved_root_library_;
}
| 876 | |
| 877 | |
// Stores the pre-reload root library (pass an empty handle to clear it).
void IsolateReloadContext::set_saved_root_library(const Library& value) {
  saved_root_library_ = value.raw();
}
| 881 | |
| 882 | |
// Accessor: the library list captured before the reload began.
RawGrowableObjectArray* IsolateReloadContext::saved_libraries() const {
  return saved_libraries_;
}
| 886 | |
| 887 | |
// Stores the pre-reload library list (pass an empty handle to clear it).
void IsolateReloadContext::set_saved_libraries(
    const GrowableObjectArray& value) {
  saved_libraries_ = value.raw();
}
| 892 | |
| 893 | |
// GC hook: visits every raw object pointer held by this context so the GC
// can mark and update them.
void IsolateReloadContext::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  visitor->VisitPointers(from(), to());
  if (saved_class_table_ != NULL) {
    // The saved class table lives outside the from()/to() pointer range and
    // must be visited explicitly, one RawClass* slot per cid.
    visitor->VisitPointers(
        reinterpret_cast<RawObject**>(&saved_class_table_[0]), saved_num_cids_);
  }
}
| 901 | |
| 902 | |
// Convenience accessor for the isolate's object store.
ObjectStore* IsolateReloadContext::object_store() {
  return isolate_->object_store();
}
| 906 | |
| 907 | |
// Resets the ICData entries recorded at |code|'s IC and unoptimized-static
// call sites, so |function| carries no stale type feedback (or stale method
// lookups) across the reload.
// TODO(review): for code currently active on the stack, ICs reachable only
// through the frame's code may also need to be found and reset -- confirm.
static void ResetICs(const Function& function, const Code& code) {
  if (function.ic_data_array() == Array::null()) {
    return;  // Already reset in an earlier round.
  }

  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // Rebuild the function's IC data map WITHOUT cloning, so the ICData
  // objects reset below are the very ones the code uses.
  ZoneGrowableArray<const ICData*>* ic_data_array =
      new(zone) ZoneGrowableArray<const ICData*>();
  function.RestoreICDataMap(ic_data_array, false /* clone ic-data */);
  const intptr_t ic_data_array_length = ic_data_array->length();
  if (ic_data_array_length == 0) {
    return;
  }
  // Walk the call sites recorded in the PC descriptors; each deopt id
  // indexes into the IC data array.
  const PcDescriptors& descriptors =
      PcDescriptors::Handle(code.pc_descriptors());
  PcDescriptors::Iterator iter(descriptors, RawPcDescriptors::kIcCall |
                                            RawPcDescriptors::kUnoptStaticCall);
  while (iter.MoveNext()) {
    const intptr_t index = iter.DeoptId();
    if (index >= ic_data_array_length) {
      // TODO(johnmccutchan): Investigate how this can happen.
      continue;
    }
    const ICData* ic_data = (*ic_data_array)[index];
    if (ic_data == NULL) {
      // TODO(johnmccutchan): Investigate how this can happen.
      continue;
    }
    // Static call sites are reset differently from instance IC sites.
    bool is_static_call = iter.Kind() == RawPcDescriptors::kUnoptStaticCall;
    ic_data->Reset(is_static_call);
  }
}
| 942 | |
| 943 | |
// Walks the Dart frames on the stack and resets the ICData reachable from
// each activation, so that no frame resumes with stale feedback after the
// reload.
void IsolateReloadContext::ResetUnoptimizedICsOnStack() {
  Code& code = Code::Handle();
  Function& function = Function::Handle();
  ObjectPool& object_table = ObjectPool::Handle();
  Object& object_table_entry = Object::Handle();
  DartFrameIterator iterator;
  StackFrame* frame = iterator.NextFrame();
  while (frame != NULL) {
    code = frame->LookupDartCode();
    if (code.is_optimized()) {
      // If this code is optimized, we need to reset the ICs in the
      // corresponding unoptimized code, which will be executed when the stack
      // unwinds to the optimized code. We must use the unoptimized code
      // referenced from the optimized code's deopt object table, because this
      // is the code that will be used to finish the activation after deopt. It
      // can be different from the function's current unoptimized code, which
      // may be null if we've already done an atomic install or different code
      // if the function has already been recompiled.
      // NOTE(review): this rationale may be out of date -- per review,
      // optimized code may no longer reference its unoptimized code this
      // way; confirm against the current deopt mechanism.
      function = code.function();
      // Scan the optimized code's object pool for Code objects owned by the
      // same function and reset ICs through each one found.
      object_table = code.object_pool();
      intptr_t reset_count = 0;
      for (intptr_t i = 0; i < object_table.Length(); i++) {
        if (object_table.InfoAt(i) != ObjectPool::kTaggedObject) {
          continue;
        }
        object_table_entry = object_table.ObjectAt(i);
        if (object_table_entry.IsCode()) {
          code ^= object_table_entry.raw();
          if (code.function() == function.raw()) {
            reset_count++;
            ResetICs(function, code);
          }
          // Why are other code objects in this table? Allocation stubs?
        }
      }
      // ASSERT(reset_count == 1);
      // vm shot itself in the foot: no reference to unopt code.
    } else {
      // Unoptimized frame: reset its ICs directly.
      function = code.function();
      ResetICs(function, code);
    }
    frame = iterator.NextFrame();
  }
}
| 988 | |
| 989 | |
// Drops the isolate's megamorphic cache table wholesale rather than
// clearing each cache individually.
void IsolateReloadContext::ResetMegamorphicCaches() {
  object_store()->set_megamorphic_cache_table(GrowableObjectArray::Handle());
  // Since any current optimized code will not make any more calls, it may be
  // better to clear the table instead of clearing each of the caches, allow
  // the current megamorphic caches get GC'd and any new optimized code allocate
  // new ones.
}
| 997 | |
| 998 | |
// Heap visitor that prepares every Function object for recompilation after
// a reload: switches it off optimized code, scrubs stale type feedback, and
// (for functions owned by dirty libraries) drops code entirely.
class MarkFunctionsForRecompilation : public ObjectVisitor {
 public:
  // |isolate| is currently unused; |reload_context| supplies the dirty-
  // library information and is not owned by the visitor.
  MarkFunctionsForRecompilation(Isolate* isolate,
                                IsolateReloadContext* reload_context)
    : ObjectVisitor(),
      handle_(Object::Handle()),
      owning_class_(Class::Handle()),
      owning_lib_(Library::Handle()),
      code_(Code::Handle()),
      reload_context_(reload_context) {
  }

  // Called once per heap object; only Function objects are acted on.
  virtual void VisitObject(RawObject* obj) {
    // Free-list elements cannot even be wrapped in handles.
    if (obj->IsFreeListElement()) {
      return;
    }
    handle_ = obj;
    if (handle_.IsFunction()) {
      const Function& func = Function::Cast(handle_);

      // Switch to unoptimized code or the lazy compilation stub.
      func.SwitchToLazyCompiledUnoptimizedCode();

      // Grab the current code.
      code_ = func.CurrentCode();
      ASSERT(!code_.IsNull());
      const bool clear_code = IsFromDirtyLibrary(func);
      const bool stub_code = code_.IsStubCode();

      // Zero edge counters.
      // NOTE(review): per review feedback this should only be needed on the
      // PreserveUnoptimizedCode path -- confirm before narrowing.
      func.ZeroEdgeCounters();

      if (!stub_code) {
        if (clear_code) {
          // Dirty library: throw away the code and IC data completely.
          ClearAllCode(func);
        } else {
          // Clean library: keep unoptimized code but scrub its feedback.
          PreserveUnoptimizedCode(func);
        }
      }

      // Clear counters.
      func.set_usage_counter(0);
      func.set_deoptimization_counter(0);
      func.set_optimized_instruction_count(0);
      func.set_optimized_call_site_count(0);
    }
  }

 private:
  // Drops the function's code and IC data; it will be compiled lazily the
  // next time it is invoked.
  void ClearAllCode(const Function& func) {
    // Null out the ICData array and code.
    func.ClearICDataArray();
    func.ClearCode();
    func.set_was_compiled(false);
  }

  // Keeps the unoptimized code but resets feedback so no stale type
  // feedback -- or, more importantly, stale method lookups -- survive.
  void PreserveUnoptimizedCode(const Function& func) {
    ASSERT(!code_.IsNull());
    // We are preserving the unoptimized code, fill all ICData arrays with
    // the sentinel values so that we have no stale type feedback.
    func.FillICDataWithSentinels(code_);
  }

  // Returns true if the function's owning library was changed by the reload.
  bool IsFromDirtyLibrary(const Function& func) {
    owning_class_ = func.Owner();
    owning_lib_ = owning_class_.library();
    return reload_context_->IsDirty(owning_lib_);
  }

  Object& handle_;       // Scratch handle for the visited object.
  Class& owning_class_;  // Scratch: owner class of the current function.
  Library& owning_lib_;  // Scratch: owner library of the current function.
  Code& code_;           // Scratch: current code of the visited function.
  IsolateReloadContext* reload_context_;  // Not owned.
};
| 1075 | |
| 1076 | |
// Visits every object in the heap, marking each Function for
// recompilation (see MarkFunctionsForRecompilation above).
void IsolateReloadContext::MarkAllFunctionsForRecompilation() {
  TIMELINE_SCOPE(MarkAllFunctionsForRecompilation);
  MarkFunctionsForRecompilation visitor(isolate_, this);
  isolate_->heap()->VisitObjects(&visitor);
}
| 1082 | |
| 1083 | |
// Invalidates all compiled state in the isolate after a reload:
// megamorphic caches, optimized frames on the stack, unoptimized ICs, and
// every function's code/feedback.
void IsolateReloadContext::InvalidateWorld() {
  ResetMegamorphicCaches();

  DeoptimizeFunctionsOnStack();

  {
    NoSafepointScope no_safepoint;
    HeapIterationScope heap_iteration_scope;
    // NOTE(review): ResetUnoptimizedICsOnStack is a stack walk, not a heap
    // walk, and could be hoisted out of the heap-iteration scope -- confirm.
    ResetUnoptimizedICsOnStack();
    MarkAllFunctionsForRecompilation();
  }
}
| 1097 | |
| 1098 | |
| 1099 RawClass* IsolateReloadContext::MappedClass(const Class& replacement_or_new) { | |
| 1100 UnorderedHashMap<ClassMapTraits> map(class_map_storage_); | |
| 1101 Class& cls = Class::Handle(); | |
| 1102 cls ^= map.GetOrNull(replacement_or_new); | |
| 1103 // No need to update storage address because no mutation occurred. | |
| 1104 map.Release(); | |
| 1105 return cls.raw(); | |
| 1106 } | |
| 1107 | |
| 1108 | |
// Stub: library mapping lookup is not implemented; always returns null.
RawLibrary* IsolateReloadContext::MappedLibrary(
    const Library& replacement_or_new) {
  return Library::null();
}
| 1113 | |
| 1114 | |
| 1115 RawClass* IsolateReloadContext::OldClassOrNull( | |
| 1116 const Class& replacement_or_new) { | |
| 1117 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_); | |
| 1118 Class& cls = Class::Handle(); | |
| 1119 cls ^= old_classes_set.GetOrNull(replacement_or_new); | |
| 1120 old_classes_set_storage_ = old_classes_set.Release().raw(); | |
| 1121 return cls.raw(); | |
| 1122 } | |
| 1123 | |
| 1124 | |
| 1125 RawLibrary* IsolateReloadContext::OldLibraryOrNull( | |
| 1126 const Library& replacement_or_new) { | |
| 1127 UnorderedHashSet<LibraryMapTraits> | |
| 1128 old_libraries_set(old_libraries_set_storage_); | |
| 1129 Library& lib = Library::Handle(); | |
| 1130 lib ^= old_libraries_set.GetOrNull(replacement_or_new); | |
| 1131 old_libraries_set_storage_ = old_libraries_set.Release().raw(); | |
| 1132 return lib.raw(); | |
| 1133 } | |
| 1134 | |
| 1135 | |
// Populates the library map: each post-reload library maps either to itself
// (a brand-new library) or to the pre-reload library it replaces. Replaced
// libraries are additionally scheduled for become-forwarding.
void IsolateReloadContext::BuildLibraryMapping() {
  const GrowableObjectArray& libs =
      GrowableObjectArray::Handle(object_store()->libraries());

  Library& replacement_or_new = Library::Handle();
  Library& old = Library::Handle();
  for (intptr_t i = 0; i < libs.Length(); i++) {
    replacement_or_new = Library::RawCast(libs.At(i));
    if (IsCleanLibrary(replacement_or_new)) {
      // Untouched libraries need no mapping.
      continue;
    }
    old ^= OldLibraryOrNull(replacement_or_new);
    if (old.IsNull()) {
      // New library.
      AddLibraryMapping(replacement_or_new, replacement_or_new);
    } else {
      ASSERT(!replacement_or_new.is_dart_scheme());
      // Replaced library.
      AddLibraryMapping(replacement_or_new, old);

      // Forward all references from the old library to the new one.
      AddBecomeMapping(old, replacement_or_new);
    }
  }
}
| 1160 | |
| 1161 | |
// Records that |replacement_or_new| replaces |original| in the class map.
// Each replacement class may only be inserted once (the ASSERT checks that
// no existing entry was updated).
void IsolateReloadContext::AddClassMapping(const Class& replacement_or_new,
                                           const Class& original) {
  UnorderedHashMap<ClassMapTraits> map(class_map_storage_);
  bool update = map.UpdateOrInsert(replacement_or_new, original);
  ASSERT(!update);
  // The storage given to the map may have been reallocated, remember the new
  // address.
  class_map_storage_ = map.Release().raw();
}
| 1171 | |
| 1172 | |
// Records that |replacement_or_new| replaces |original| in the library map.
// Each replacement library may only be inserted once.
void IsolateReloadContext::AddLibraryMapping(const Library& replacement_or_new,
                                             const Library& original) {
  UnorderedHashMap<LibraryMapTraits> map(library_map_storage_);
  bool update = map.UpdateOrInsert(replacement_or_new, original);
  ASSERT(!update);
  // The storage given to the map may have been reallocated, remember the new
  // address.
  library_map_storage_ = map.Release().raw();
}
| 1182 | |
| 1183 | |
// Schedules a become-forwarding from an old static field to its
// replacement. Both fields must be static.
void IsolateReloadContext::AddStaticFieldMapping(
    const Field& old_field, const Field& new_field) {
  ASSERT(old_field.is_static());
  ASSERT(new_field.is_static());

  AddBecomeMapping(old_field, new_field);
}
| 1191 | |
| 1192 | |
// Records an (old -> new) pair for Become::ElementsForwardIdentity, which
// will forward all heap references from |old| to |neu| at commit time.
// Each old object may only be inserted once.
void IsolateReloadContext::AddBecomeMapping(const Object& old,
                                            const Object& neu) {
  ASSERT(become_map_storage_ != Array::null());
  UnorderedHashMap<BecomeMapTraits> become_map(become_map_storage_);
  bool update = become_map.UpdateOrInsert(old, neu);
  ASSERT(!update);
  // The storage may have been reallocated; remember the new address.
  become_map_storage_ = become_map.Release().raw();
}
| 1201 | |
| 1202 | |
| 1203 void IsolateReloadContext::RebuildDirectSubclasses() { | |
| 1204 ClassTable* class_table = I->class_table(); | |
| 1205 intptr_t num_cids = class_table->NumCids(); | |
| 1206 | |
| 1207 // Clear the direct subclasses for all classes. | |
| 1208 Class& cls = Class::Handle(); | |
| 1209 GrowableObjectArray& subclasses = GrowableObjectArray::Handle(); | |
| 1210 for (intptr_t i = 1; i < num_cids; i++) { | |
| 1211 if (class_table->HasValidClassAt(i)) { | |
| 1212 cls = class_table->At(i); | |
| 1213 subclasses = cls.direct_subclasses(); | |
| 1214 if (!subclasses.IsNull()) { | |
| 1215 subclasses.SetLength(0); | |
| 1216 } | |
| 1217 } | |
| 1218 } | |
| 1219 | |
| 1220 // Recompute the direct subclasses. | |
| 1221 AbstractType& super_type = AbstractType::Handle(); | |
| 1222 Class& super_cls = Class::Handle(); | |
| 1223 for (intptr_t i = 1; i < num_cids; i++) { | |
| 1224 if (class_table->HasValidClassAt(i)) { | |
| 1225 cls = class_table->At(i); | |
| 1226 super_type = cls.super_type(); | |
| 1227 if (!super_type.IsNull() && !super_type.IsObjectType()) { | |
| 1228 super_cls = cls.SuperClass(); | |
| 1229 ASSERT(!super_cls.IsNull()); | |
| 1230 super_cls.AddDirectSubclass(cls); | |
| 1231 } | |
| 1232 } | |
| 1233 } | |
| 1234 } | |
| 1235 | |
| 1236 #endif // !PRODUCT | |
| 1237 | |
| 1238 } // namespace dart | |
| OLD | NEW |