| OLD | NEW |
| (Empty) |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | |
| 2 // for details. All rights reserved. Use of this source code is governed by a | |
| 3 // BSD-style license that can be found in the LICENSE file. | |
| 4 | |
| 5 #include "vm/code_generator.h" | |
| 6 | |
| 7 #include "vm/assembler.h" | |
| 8 #include "vm/ast.h" | |
| 9 #include "vm/code_patcher.h" | |
| 10 #include "vm/compiler.h" | |
| 11 #include "vm/dart_api_impl.h" | |
| 12 #include "vm/dart_entry.h" | |
| 13 #include "vm/debugger.h" | |
| 14 #include "vm/deopt_instructions.h" | |
| 15 #include "vm/exceptions.h" | |
| 16 #include "vm/flags.h" | |
| 17 #include "vm/object_store.h" | |
| 18 #include "vm/message.h" | |
| 19 #include "vm/message_handler.h" | |
| 20 #include "vm/parser.h" | |
| 21 #include "vm/resolver.h" | |
| 22 #include "vm/runtime_entry.h" | |
| 23 #include "vm/service_isolate.h" | |
| 24 #include "vm/stack_frame.h" | |
| 25 #include "vm/symbols.h" | |
| 26 #include "vm/thread_registry.h" | |
| 27 #include "vm/verifier.h" | |
| 28 | |
| 29 namespace dart { | |
| 30 | |
// --- Tuning limits and counter thresholds ---
DEFINE_FLAG(
    int,
    max_subtype_cache_entries,
    100,
    "Maximum number of subtype cache entries (number of checks cached).");
DEFINE_FLAG(
    int,
    regexp_optimization_counter_threshold,
    1000,
    "RegExp's usage-counter value before it is optimized, -1 means never");
DEFINE_FLAG(int,
            reoptimization_counter_threshold,
            4000,
            "Counter threshold before a function gets reoptimized.");
// --- Tracing flags (diagnostic output; all default off) ---
DEFINE_FLAG(bool, trace_deoptimization, false, "Trace deoptimization");
DEFINE_FLAG(bool,
            trace_deoptimization_verbose,
            false,
            "Trace deoptimization verbose");
DEFINE_FLAG(bool, trace_ic, false, "Trace IC handling");
DEFINE_FLAG(bool,
            trace_ic_miss_in_optimized,
            false,
            "Trace IC miss in optimized code");
DEFINE_FLAG(bool,
            trace_optimized_ic_calls,
            false,
            "Trace IC calls in optimized code.");
DEFINE_FLAG(bool, trace_patching, false, "Trace patching of code.");
DEFINE_FLAG(bool, trace_runtime_calls, false, "Trace runtime calls");
DEFINE_FLAG(bool, trace_type_checks, false, "Trace runtime type checks.");

// Flags defined in other compilation units but consulted here.
DECLARE_FLAG(int, max_deoptimization_counter_threshold);
DECLARE_FLAG(bool, enable_inlining_annotations);
DECLARE_FLAG(bool, trace_compiler);
DECLARE_FLAG(bool, trace_optimizing_compiler);
DECLARE_FLAG(int, max_polymorphic_checks);

DEFINE_FLAG(bool, trace_osr, false, "Trace attempts at on-stack replacement.");

// --- Debug hooks triggered from stack-overflow checks ---
DEFINE_FLAG(int,
            stacktrace_every,
            0,
            "Compute debugger stacktrace on every N stack overflow checks");
DEFINE_FLAG(charp,
            stacktrace_filter,
            NULL,
            "Compute stacktrace in named function on stack overflow checks");
DEFINE_FLAG(charp,
            deoptimize_filter,
            NULL,
            "Deoptimize in named function on stack overflow checks");

DECLARE_FLAG(int, reload_every);
DECLARE_FLAG(bool, reload_every_optimized);
DECLARE_FLAG(bool, reload_every_back_off);

#ifdef DEBUG
// Debug-only: force a full GC right before allocating an instance of the
// named class (see the AllocateObject runtime entry).
DEFINE_FLAG(charp,
            gc_at_instance_allocation,
            NULL,
            "Perform a GC before allocation of instances of "
            "the specified class");
#endif
| 95 | |
| 96 DEFINE_RUNTIME_ENTRY(TraceFunctionEntry, 1) { | |
| 97 const Function& function = Function::CheckedHandle(arguments.ArgAt(0)); | |
| 98 const String& function_name = String::Handle(function.name()); | |
| 99 const String& class_name = | |
| 100 String::Handle(Class::Handle(function.Owner()).Name()); | |
| 101 OS::PrintErr("> Entering '%s.%s'\n", class_name.ToCString(), | |
| 102 function_name.ToCString()); | |
| 103 } | |
| 104 | |
| 105 | |
| 106 DEFINE_RUNTIME_ENTRY(TraceFunctionExit, 1) { | |
| 107 const Function& function = Function::CheckedHandle(arguments.ArgAt(0)); | |
| 108 const String& function_name = String::Handle(function.name()); | |
| 109 const String& class_name = | |
| 110 String::Handle(Class::Handle(function.Owner()).Name()); | |
| 111 OS::PrintErr("< Exiting '%s.%s'\n", class_name.ToCString(), | |
| 112 function_name.ToCString()); | |
| 113 } | |
| 114 | |
| 115 | |
| 116 DEFINE_RUNTIME_ENTRY(RangeError, 2) { | |
| 117 const Instance& length = Instance::CheckedHandle(arguments.ArgAt(0)); | |
| 118 const Instance& index = Instance::CheckedHandle(arguments.ArgAt(1)); | |
| 119 if (!length.IsInteger()) { | |
| 120 // Throw: new ArgumentError.value(length, "length", "is not an integer"); | |
| 121 const Array& args = Array::Handle(Array::New(3)); | |
| 122 args.SetAt(0, length); | |
| 123 args.SetAt(1, Symbols::Length()); | |
| 124 args.SetAt(2, String::Handle(String::New("is not an integer"))); | |
| 125 Exceptions::ThrowByType(Exceptions::kArgumentValue, args); | |
| 126 } | |
| 127 if (!index.IsInteger()) { | |
| 128 // Throw: new ArgumentError.value(index, "index", "is not an integer"); | |
| 129 const Array& args = Array::Handle(Array::New(3)); | |
| 130 args.SetAt(0, index); | |
| 131 args.SetAt(1, Symbols::Index()); | |
| 132 args.SetAt(2, String::Handle(String::New("is not an integer"))); | |
| 133 Exceptions::ThrowByType(Exceptions::kArgumentValue, args); | |
| 134 } | |
| 135 // Throw: new RangeError.range(index, 0, length, "length"); | |
| 136 const Array& args = Array::Handle(Array::New(4)); | |
| 137 args.SetAt(0, index); | |
| 138 args.SetAt(1, Integer::Handle(Integer::New(0))); | |
| 139 args.SetAt(2, length); | |
| 140 args.SetAt(3, Symbols::Length()); | |
| 141 Exceptions::ThrowByType(Exceptions::kRange, args); | |
| 142 } | |
| 143 | |
| 144 | |
| 145 // Allocation of a fixed length array of given element type. | |
| 146 // This runtime entry is never called for allocating a List of a generic type, | |
| 147 // because a prior run time call instantiates the element type if necessary. | |
| 148 // Arg0: array length. | |
| 149 // Arg1: array type arguments, i.e. vector of 1 type, the element type. | |
| 150 // Return value: newly allocated array of length arg0. | |
| 151 DEFINE_RUNTIME_ENTRY(AllocateArray, 2) { | |
| 152 const Instance& length = Instance::CheckedHandle(arguments.ArgAt(0)); | |
| 153 if (!length.IsInteger()) { | |
| 154 // Throw: new ArgumentError.value(length, "length", "is not an integer"); | |
| 155 const Array& args = Array::Handle(Array::New(3)); | |
| 156 args.SetAt(0, length); | |
| 157 args.SetAt(1, Symbols::Length()); | |
| 158 args.SetAt(2, String::Handle(String::New("is not an integer"))); | |
| 159 Exceptions::ThrowByType(Exceptions::kArgumentValue, args); | |
| 160 } | |
| 161 if (length.IsSmi()) { | |
| 162 const intptr_t len = Smi::Cast(length).Value(); | |
| 163 if ((len >= 0) && (len <= Array::kMaxElements)) { | |
| 164 const Array& array = Array::Handle(Array::New(len, Heap::kNew)); | |
| 165 arguments.SetReturn(array); | |
| 166 TypeArguments& element_type = | |
| 167 TypeArguments::CheckedHandle(arguments.ArgAt(1)); | |
| 168 // An Array is raw or takes one type argument. However, its type argument | |
| 169 // vector may be longer than 1 due to a type optimization reusing the type | |
| 170 // argument vector of the instantiator. | |
| 171 ASSERT(element_type.IsNull() || | |
| 172 ((element_type.Length() >= 1) && element_type.IsInstantiated())); | |
| 173 array.SetTypeArguments(element_type); // May be null. | |
| 174 return; | |
| 175 } | |
| 176 } | |
| 177 // Throw: new RangeError.range(length, 0, Array::kMaxElements, "length"); | |
| 178 const Array& args = Array::Handle(Array::New(4)); | |
| 179 args.SetAt(0, length); | |
| 180 args.SetAt(1, Integer::Handle(Integer::New(0))); | |
| 181 args.SetAt(2, Integer::Handle(Integer::New(Array::kMaxElements))); | |
| 182 args.SetAt(3, Symbols::Length()); | |
| 183 Exceptions::ThrowByType(Exceptions::kRange, args); | |
| 184 } | |
| 185 | |
| 186 | |
| 187 // Helper returning the token position of the Dart caller. | |
| 188 static TokenPosition GetCallerLocation() { | |
| 189 DartFrameIterator iterator; | |
| 190 StackFrame* caller_frame = iterator.NextFrame(); | |
| 191 ASSERT(caller_frame != NULL); | |
| 192 return caller_frame->GetTokenPos(); | |
| 193 } | |
| 194 | |
| 195 | |
| 196 // Allocate a new object. | |
| 197 // Arg0: class of the object that needs to be allocated. | |
| 198 // Arg1: type arguments of the object that needs to be allocated. | |
| 199 // Return value: newly allocated object. | |
| 200 DEFINE_RUNTIME_ENTRY(AllocateObject, 2) { | |
| 201 const Class& cls = Class::CheckedHandle(arguments.ArgAt(0)); | |
| 202 | |
| 203 #ifdef DEBUG | |
| 204 if (FLAG_gc_at_instance_allocation != NULL) { | |
| 205 const String& name = String::Handle(cls.Name()); | |
| 206 if (String::EqualsIgnoringPrivateKey( | |
| 207 name, | |
| 208 String::Handle(String::New(FLAG_gc_at_instance_allocation)))) { | |
| 209 Isolate::Current()->heap()->CollectAllGarbage(); | |
| 210 } | |
| 211 } | |
| 212 #endif | |
| 213 Heap::Space space = Heap::kNew; | |
| 214 const Instance& instance = Instance::Handle(Instance::New(cls, space)); | |
| 215 | |
| 216 arguments.SetReturn(instance); | |
| 217 if (cls.NumTypeArguments() == 0) { | |
| 218 // No type arguments required for a non-parameterized type. | |
| 219 ASSERT(Instance::CheckedHandle(arguments.ArgAt(1)).IsNull()); | |
| 220 return; | |
| 221 } | |
| 222 TypeArguments& type_arguments = | |
| 223 TypeArguments::CheckedHandle(arguments.ArgAt(1)); | |
| 224 // Unless null (for a raw type), the type argument vector may be longer than | |
| 225 // necessary due to a type optimization reusing the type argument vector of | |
| 226 // the instantiator. | |
| 227 ASSERT(type_arguments.IsNull() || | |
| 228 (type_arguments.IsInstantiated() && | |
| 229 (type_arguments.Length() >= cls.NumTypeArguments()))); | |
| 230 instance.SetTypeArguments(type_arguments); | |
| 231 } | |
| 232 | |
| 233 | |
// Instantiate type.
// Arg0: uninstantiated type.
// Arg1: instantiator type arguments.
// Arg2: function type arguments.
// Return value: instantiated type.
// Throws a dynamic type error (at the Dart caller's location) if
// instantiation produces a bound error.
DEFINE_RUNTIME_ENTRY(InstantiateType, 3) {
  AbstractType& type = AbstractType::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  // The caller only invokes this entry for types still needing instantiation;
  // both incoming type argument vectors must themselves be instantiated (or
  // null).
  ASSERT(!type.IsNull() && !type.IsInstantiated());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsInstantiated());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsInstantiated());
  Error& bound_error = Error::Handle(zone);
  // Heap::kOld — presumably because the instantiated type may be retained
  // past this call; TODO confirm against AbstractType::InstantiateFrom.
  type =
      type.InstantiateFrom(instantiator_type_arguments, function_type_arguments,
                           &bound_error, NULL, NULL, Heap::kOld);
  if (!bound_error.IsNull()) {
    // Throw a dynamic type error carrying the bound error message; the
    // src/dst type slots are intentionally left null.
    const TokenPosition location = GetCallerLocation();
    String& bound_error_message =
        String::Handle(zone, String::New(bound_error.ToErrorCString()));
    Exceptions::CreateAndThrowTypeError(location, AbstractType::Handle(zone),
                                        AbstractType::Handle(zone),
                                        Symbols::Empty(), bound_error_message);
    UNREACHABLE();
  }
  if (type.IsTypeRef()) {
    // Unwrap a TypeRef to its underlying type; the asserts guarantee the
    // result is canonical and not itself a TypeRef.
    type = TypeRef::Cast(type).type();
    ASSERT(!type.IsTypeRef());
    ASSERT(type.IsCanonical());
  }
  ASSERT(!type.IsNull() && type.IsInstantiated());
  arguments.SetReturn(type);
}
| 272 | |
| 273 | |
// Instantiate type arguments.
// Arg0: uninstantiated type arguments.
// Arg1: instantiator type arguments.
// Arg2: function type arguments.
// Return value: instantiated type arguments.
// In checked mode a bound error during instantiation is reported as a dynamic
// type error at the Dart caller's location.
DEFINE_RUNTIME_ENTRY(InstantiateTypeArguments, 3) {
  TypeArguments& type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  // Only called for vectors that still need instantiation; the incoming
  // instantiator/function vectors must be instantiated (or null).
  ASSERT(!type_arguments.IsNull() && !type_arguments.IsInstantiated());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsInstantiated());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsInstantiated());
  // Code inlined in the caller should have optimized the case where the
  // instantiator can be reused as type argument vector.
  ASSERT(!type_arguments.IsUninstantiatedIdentity());
  if (isolate->type_checks()) {
    // Checked mode: collect a possible bound error and report it.
    Error& bound_error = Error::Handle(zone);
    type_arguments = type_arguments.InstantiateAndCanonicalizeFrom(
        instantiator_type_arguments, function_type_arguments, &bound_error);
    if (!bound_error.IsNull()) {
      // Throw a dynamic type error.
      const TokenPosition location = GetCallerLocation();
      String& bound_error_message =
          String::Handle(zone, String::New(bound_error.ToErrorCString()));
      Exceptions::CreateAndThrowTypeError(
          location, AbstractType::Handle(zone), AbstractType::Handle(zone),
          Symbols::Empty(), bound_error_message);
      UNREACHABLE();
    }
  } else {
    // Production mode: bound errors are ignored (NULL out-parameter).
    type_arguments = type_arguments.InstantiateAndCanonicalizeFrom(
        instantiator_type_arguments, function_type_arguments, NULL);
  }
  ASSERT(type_arguments.IsNull() || type_arguments.IsInstantiated());
  arguments.SetReturn(type_arguments);
}
| 315 | |
| 316 | |
| 317 // Allocate a new context large enough to hold the given number of variables. | |
| 318 // Arg0: number of variables. | |
| 319 // Return value: newly allocated context. | |
| 320 DEFINE_RUNTIME_ENTRY(AllocateContext, 1) { | |
| 321 const Smi& num_variables = Smi::CheckedHandle(zone, arguments.ArgAt(0)); | |
| 322 arguments.SetReturn(Context::Handle(Context::New(num_variables.Value()))); | |
| 323 } | |
| 324 | |
| 325 | |
| 326 // Make a copy of the given context, including the values of the captured | |
| 327 // variables. | |
| 328 // Arg0: the context to be cloned. | |
| 329 // Return value: newly allocated context. | |
| 330 DEFINE_RUNTIME_ENTRY(CloneContext, 1) { | |
| 331 const Context& ctx = Context::CheckedHandle(zone, arguments.ArgAt(0)); | |
| 332 Context& cloned_ctx = | |
| 333 Context::Handle(zone, Context::New(ctx.num_variables())); | |
| 334 cloned_ctx.set_parent(Context::Handle(ctx.parent())); | |
| 335 Object& inst = Object::Handle(zone); | |
| 336 for (int i = 0; i < ctx.num_variables(); i++) { | |
| 337 inst = ctx.At(i); | |
| 338 cloned_ctx.SetAt(i, inst); | |
| 339 } | |
| 340 arguments.SetReturn(cloned_ctx); | |
| 341 } | |
| 342 | |
| 343 | |
| 344 // Helper routine for tracing a type check. | |
| 345 static void PrintTypeCheck(const char* message, | |
| 346 const Instance& instance, | |
| 347 const AbstractType& type, | |
| 348 const TypeArguments& instantiator_type_arguments, | |
| 349 const TypeArguments& function_type_arguments, | |
| 350 const Bool& result) { | |
| 351 DartFrameIterator iterator; | |
| 352 StackFrame* caller_frame = iterator.NextFrame(); | |
| 353 ASSERT(caller_frame != NULL); | |
| 354 | |
| 355 const AbstractType& instance_type = | |
| 356 AbstractType::Handle(instance.GetType(Heap::kNew)); | |
| 357 ASSERT(instance_type.IsInstantiated()); | |
| 358 if (type.IsInstantiated()) { | |
| 359 OS::PrintErr("%s: '%s' %" Pd " %s '%s' %" Pd " (pc: %#" Px ").\n", message, | |
| 360 String::Handle(instance_type.Name()).ToCString(), | |
| 361 Class::Handle(instance_type.type_class()).id(), | |
| 362 (result.raw() == Bool::True().raw()) ? "is" : "is !", | |
| 363 String::Handle(type.Name()).ToCString(), | |
| 364 Class::Handle(type.type_class()).id(), caller_frame->pc()); | |
| 365 } else { | |
| 366 // Instantiate type before printing. | |
| 367 Error& bound_error = Error::Handle(); | |
| 368 const AbstractType& instantiated_type = | |
| 369 AbstractType::Handle(type.InstantiateFrom( | |
| 370 instantiator_type_arguments, function_type_arguments, &bound_error, | |
| 371 NULL, NULL, Heap::kOld)); | |
| 372 OS::PrintErr("%s: '%s' %s '%s' instantiated from '%s' (pc: %#" Px ").\n", | |
| 373 message, String::Handle(instance_type.Name()).ToCString(), | |
| 374 (result.raw() == Bool::True().raw()) ? "is" : "is !", | |
| 375 String::Handle(instantiated_type.Name()).ToCString(), | |
| 376 String::Handle(type.Name()).ToCString(), caller_frame->pc()); | |
| 377 if (!bound_error.IsNull()) { | |
| 378 OS::Print(" bound error: %s\n", bound_error.ToErrorCString()); | |
| 379 } | |
| 380 } | |
| 381 const Function& function = | |
| 382 Function::Handle(caller_frame->LookupDartFunction()); | |
| 383 OS::PrintErr(" -> Function %s\n", function.ToFullyQualifiedCString()); | |
| 384 } | |
| 385 | |
| 386 | |
| 387 // This updates the type test cache, an array containing 5-value elements | |
| 388 // (instance class (or function if the instance is a closure), instance type | |
| 389 // arguments, instantiator type arguments, function type arguments, | |
| 390 // and test_result). It can be applied to classes with type arguments in which | |
| 391 // case it contains just the result of the class subtype test, not including the | |
| 392 // evaluation of type arguments. | |
| 393 // This operation is currently very slow (lookup of code is not efficient yet). | |
| 394 static void UpdateTypeTestCache( | |
| 395 const Instance& instance, | |
| 396 const AbstractType& type, | |
| 397 const TypeArguments& instantiator_type_arguments, | |
| 398 const TypeArguments& function_type_arguments, | |
| 399 const Bool& result, | |
| 400 const SubtypeTestCache& new_cache) { | |
| 401 // Since the test is expensive, don't do it unless necessary. | |
| 402 // The list of disallowed cases will decrease as they are implemented in | |
| 403 // inlined assembly. | |
| 404 if (new_cache.IsNull()) { | |
| 405 if (FLAG_trace_type_checks) { | |
| 406 OS::Print("UpdateTypeTestCache: cache is null\n"); | |
| 407 } | |
| 408 return; | |
| 409 } | |
| 410 if (instance.IsSmi()) { | |
| 411 if (FLAG_trace_type_checks) { | |
| 412 OS::Print("UpdateTypeTestCache: instance is Smi\n"); | |
| 413 } | |
| 414 return; | |
| 415 } | |
| 416 // If the type is uninstantiated and refers to parent function type | |
| 417 // parameters, the function_type_arguments may not have been canonicalized | |
| 418 // when concatenated. The optimization still works, but the cache could grow | |
| 419 // uncontrollably. For now, do not update the cache in this case. | |
| 420 // TODO(regis): Revisit. | |
| 421 if (!function_type_arguments.IsNull() && | |
| 422 !function_type_arguments.IsCanonical()) { | |
| 423 if (FLAG_trace_type_checks) { | |
| 424 OS::Print( | |
| 425 "UpdateTypeTestCache: function_type_arguments is not canonical\n"); | |
| 426 } | |
| 427 return; | |
| 428 } | |
| 429 const Class& instance_class = Class::Handle(instance.clazz()); | |
| 430 Object& instance_class_id_or_function = Object::Handle(); | |
| 431 TypeArguments& instance_type_arguments = TypeArguments::Handle(); | |
| 432 if (instance_class.IsClosureClass()) { | |
| 433 // If the closure instance is generic, we cannot perform the optimization, | |
| 434 // because one more input (function_type_arguments) would need to be | |
| 435 // considered. For now, only perform the optimization if the closure's | |
| 436 // function_type_arguments is null, meaning the closure function is not | |
| 437 // generic. | |
| 438 // TODO(regis): In addition to null (non-generic closure), we should also | |
| 439 // accept Object::empty_type_arguments() (non-nested generic closure). | |
| 440 // In that case, update stubs and simulator_dbc accordingly. | |
| 441 if (Closure::Cast(instance).function_type_arguments() != | |
| 442 TypeArguments::null()) { | |
| 443 if (FLAG_trace_type_checks) { | |
| 444 OS::Print( | |
| 445 "UpdateTypeTestCache: closure function_type_arguments is " | |
| 446 "not null\n"); | |
| 447 } | |
| 448 return; | |
| 449 } | |
| 450 instance_class_id_or_function = Closure::Cast(instance).function(); | |
| 451 instance_type_arguments = | |
| 452 Closure::Cast(instance).instantiator_type_arguments(); | |
| 453 } else { | |
| 454 instance_class_id_or_function = Smi::New(instance_class.id()); | |
| 455 if (instance_class.NumTypeArguments() > 0) { | |
| 456 instance_type_arguments = instance.GetTypeArguments(); | |
| 457 } | |
| 458 } | |
| 459 const intptr_t len = new_cache.NumberOfChecks(); | |
| 460 if (len >= FLAG_max_subtype_cache_entries) { | |
| 461 return; | |
| 462 } | |
| 463 #if defined(DEBUG) | |
| 464 ASSERT(instance_type_arguments.IsNull() || | |
| 465 instance_type_arguments.IsCanonical()); | |
| 466 ASSERT(instantiator_type_arguments.IsNull() || | |
| 467 instantiator_type_arguments.IsCanonical()); | |
| 468 ASSERT(function_type_arguments.IsNull() || | |
| 469 function_type_arguments.IsCanonical()); | |
| 470 Object& last_instance_class_id_or_function = Object::Handle(); | |
| 471 TypeArguments& last_instance_type_arguments = TypeArguments::Handle(); | |
| 472 TypeArguments& last_instantiator_type_arguments = TypeArguments::Handle(); | |
| 473 TypeArguments& last_function_type_arguments = TypeArguments::Handle(); | |
| 474 Bool& last_result = Bool::Handle(); | |
| 475 for (intptr_t i = 0; i < len; ++i) { | |
| 476 new_cache.GetCheck(i, &last_instance_class_id_or_function, | |
| 477 &last_instance_type_arguments, | |
| 478 &last_instantiator_type_arguments, | |
| 479 &last_function_type_arguments, &last_result); | |
| 480 if ((last_instance_class_id_or_function.raw() == | |
| 481 instance_class_id_or_function.raw()) && | |
| 482 (last_instance_type_arguments.raw() == instance_type_arguments.raw()) && | |
| 483 (last_instantiator_type_arguments.raw() == | |
| 484 instantiator_type_arguments.raw()) && | |
| 485 (last_function_type_arguments.raw() == | |
| 486 last_function_type_arguments.raw())) { | |
| 487 OS::PrintErr(" Error in test cache %p ix: %" Pd ",", new_cache.raw(), i); | |
| 488 PrintTypeCheck(" duplicate cache entry", instance, type, | |
| 489 instantiator_type_arguments, function_type_arguments, | |
| 490 result); | |
| 491 UNREACHABLE(); | |
| 492 return; | |
| 493 } | |
| 494 } | |
| 495 #endif | |
| 496 new_cache.AddCheck(instance_class_id_or_function, instance_type_arguments, | |
| 497 instantiator_type_arguments, function_type_arguments, | |
| 498 result); | |
| 499 if (FLAG_trace_type_checks) { | |
| 500 AbstractType& test_type = AbstractType::Handle(type.raw()); | |
| 501 if (!test_type.IsInstantiated()) { | |
| 502 Error& bound_error = Error::Handle(); | |
| 503 test_type = type.InstantiateFrom(instantiator_type_arguments, | |
| 504 function_type_arguments, &bound_error, | |
| 505 NULL, NULL, Heap::kNew); | |
| 506 ASSERT(bound_error.IsNull()); // Malbounded types are not optimized. | |
| 507 } | |
| 508 OS::PrintErr( | |
| 509 " Updated test cache %p ix: %" Pd | |
| 510 " with " | |
| 511 "(cid-or-fun: %p, type-args: %p, i-type-args: %p, f-type-args: %p, " | |
| 512 "result: %s)\n" | |
| 513 " instance [class: (%p '%s' cid: %" Pd | |
| 514 "), type-args: %p %s]\n" | |
| 515 " test-type [class: (%p '%s' cid: %" Pd | |
| 516 "), i-type-args: %p %s, " | |
| 517 ", f-type-args: %p %s]\n", | |
| 518 new_cache.raw(), len, | |
| 519 | |
| 520 instance_class_id_or_function.raw(), instance_type_arguments.raw(), | |
| 521 instantiator_type_arguments.raw(), instantiator_type_arguments.raw(), | |
| 522 result.ToCString(), | |
| 523 | |
| 524 instance_class.raw(), String::Handle(instance_class.Name()).ToCString(), | |
| 525 instance_class.id(), instance_type_arguments.raw(), | |
| 526 instance_type_arguments.ToCString(), | |
| 527 | |
| 528 test_type.type_class(), | |
| 529 String::Handle(Class::Handle(test_type.type_class()).Name()) | |
| 530 .ToCString(), | |
| 531 Class::Handle(test_type.type_class()).id(), | |
| 532 instantiator_type_arguments.raw(), | |
| 533 instantiator_type_arguments.ToCString(), | |
| 534 instantiator_type_arguments.raw(), | |
| 535 instantiator_type_arguments.ToCString()); | |
| 536 } | |
| 537 } | |
| 538 | |
| 539 | |
// Check that the given instance is an instance of the given type.
// Tested instance may not be null, because the null test is inlined.
// Arg0: instance being checked.
// Arg1: type.
// Arg2: type arguments of the instantiator of the type.
// Arg3: type arguments of the function of the type.
// Arg4: SubtypeTestCache.
// Return value: true or false, or may throw a type error in checked mode.
DEFINE_RUNTIME_ENTRY(Instanceof, 5) {
  const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const AbstractType& type =
      AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
  const SubtypeTestCache& cache =
      SubtypeTestCache::CheckedHandle(zone, arguments.ArgAt(4));
  ASSERT(type.IsFinalized());
  ASSERT(!type.IsMalformed());   // Already checked in code generator.
  ASSERT(!type.IsMalbounded());  // Already checked in code generator.
  ASSERT(!type.IsDynamicType());  // No need to check assignment.
  Error& bound_error = Error::Handle(zone);
  const Bool& result =
      Bool::Get(instance.IsInstanceOf(type, instantiator_type_arguments,
                                      function_type_arguments, &bound_error));
  if (FLAG_trace_type_checks) {
    PrintTypeCheck("InstanceOf", instance, type, instantiator_type_arguments,
                   function_type_arguments, result);
  }
  if (!result.value() && !bound_error.IsNull()) {
    // Throw a dynamic type error only if the instanceof test fails; a
    // successful test is not affected by a bound error. The src/dst type
    // slots are intentionally left null.
    const TokenPosition location = GetCallerLocation();
    String& bound_error_message =
        String::Handle(zone, String::New(bound_error.ToErrorCString()));
    Exceptions::CreateAndThrowTypeError(location, AbstractType::Handle(zone),
                                        AbstractType::Handle(zone),
                                        Symbols::Empty(), bound_error_message);
    UNREACHABLE();
  }
  // Record the outcome in the SubtypeTestCache for future checks.
  UpdateTypeTestCache(instance, type, instantiator_type_arguments,
                      function_type_arguments, result, cache);
  arguments.SetReturn(result);
}
| 584 | |
| 585 | |
// Check that the type of the given instance is a subtype of the given type and
// can therefore be assigned.
// Arg0: instance being assigned.
// Arg1: type being assigned to.
// Arg2: type arguments of the instantiator of the type being assigned to.
// Arg3: type arguments of the function of the type being assigned to.
// Arg4: name of variable being assigned to.
// Arg5: SubtypeTestCache.
// Return value: instance if a subtype, otherwise throw a TypeError.
DEFINE_RUNTIME_ENTRY(TypeCheck, 6) {
  const Instance& src_instance =
      Instance::CheckedHandle(zone, arguments.ArgAt(0));
  AbstractType& dst_type =
      AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
  const String& dst_name = String::CheckedHandle(zone, arguments.ArgAt(4));
  const SubtypeTestCache& cache =
      SubtypeTestCache::CheckedHandle(zone, arguments.ArgAt(5));
  ASSERT(!dst_type.IsMalformed());   // Already checked in code generator.
  ASSERT(!dst_type.IsMalbounded());  // Already checked in code generator.
  ASSERT(!dst_type.IsDynamicType());  // No need to check assignment.
  ASSERT(!src_instance.IsNull());  // Already checked in inlined code.

  Error& bound_error = Error::Handle(zone);
  const bool is_instance_of =
      src_instance.IsInstanceOf(dst_type, instantiator_type_arguments,
                                function_type_arguments, &bound_error);

  if (FLAG_trace_type_checks) {
    PrintTypeCheck("TypeCheck", src_instance, dst_type,
                   instantiator_type_arguments, function_type_arguments,
                   Bool::Get(is_instance_of));
  }
  if (!is_instance_of) {
    // Throw a dynamic type error.
    const TokenPosition location = GetCallerLocation();
    const AbstractType& src_type =
        AbstractType::Handle(zone, src_instance.GetType(Heap::kNew));
    if (!dst_type.IsInstantiated()) {
      // Instantiate dst_type before reporting the error; bound errors from
      // this instantiation are ignored (NULL out-parameter) since we are
      // already throwing.
      dst_type = dst_type.InstantiateFrom(instantiator_type_arguments,
                                          function_type_arguments, NULL, NULL,
                                          NULL, Heap::kNew);
      // Note that instantiated dst_type may be malbounded.
    }
    String& bound_error_message = String::Handle(zone);
    if (!bound_error.IsNull()) {
      // Bound errors are only produced in checked mode.
      ASSERT(isolate->type_checks());
      bound_error_message = String::New(bound_error.ToErrorCString());
    }
    Exceptions::CreateAndThrowTypeError(location, src_type, dst_type, dst_name,
                                        bound_error_message);
    UNREACHABLE();
  }
  // Only successful checks reach here, so the cached result is always true.
  UpdateTypeTestCache(src_instance, dst_type, instantiator_type_arguments,
                      function_type_arguments, Bool::True(), cache);
  arguments.SetReturn(src_instance);
}
| 647 | |
| 648 | |
// Report that the type of the given object is not bool in conditional context.
// Throw assertion error if the object is null. (cf. Boolean Conversion
// in language Spec.)
// Arg0: bad object.
// Return value: none, throws TypeError or AssertionError.
DEFINE_RUNTIME_ENTRY(NonBoolTypeError, 1) {
  const TokenPosition location = GetCallerLocation();
  const Instance& src_instance =
      Instance::CheckedHandle(zone, arguments.ArgAt(0));

  if (src_instance.IsNull()) {
    // Null in a boolean context: throw an AssertionError. The args layout is
    // presumably (message, script url, line, column, error message) as
    // expected by Exceptions::kAssertion — confirm against exceptions.cc.
    const Array& args = Array::Handle(zone, Array::New(5));
    args.SetAt(
        0, String::Handle(
               zone,
               String::New(
                   "Failed assertion: boolean expression must not be null")));

    // No source code for this assertion, set url to null.
    args.SetAt(1, String::Handle(zone, String::null()));
    args.SetAt(2, Smi::Handle(zone, Smi::New(0)));
    args.SetAt(3, Smi::Handle(zone, Smi::New(0)));
    args.SetAt(4, String::Handle(zone, String::null()));

    Exceptions::ThrowByType(Exceptions::kAssertion, args);
    UNREACHABLE();
  }

  // Non-null, non-bool value: throw a TypeError naming the boolean
  // expression as the destination.
  ASSERT(!src_instance.IsBool());
  const Type& bool_interface = Type::Handle(Type::BoolType());
  const AbstractType& src_type =
      AbstractType::Handle(zone, src_instance.GetType(Heap::kNew));
  const String& no_bound_error = String::Handle(zone);
  Exceptions::CreateAndThrowTypeError(location, src_type, bool_interface,
                                      Symbols::BooleanExpression(),
                                      no_bound_error);
  UNREACHABLE();
}
| 687 | |
| 688 | |
| 689 // Report that the type of the type check is malformed or malbounded. | |
| 690 // Arg0: src value. | |
| 691 // Arg1: name of destination being assigned to. | |
| 692 // Arg2: type of destination being assigned to. | |
| 693 // Return value: none, throws an exception. | |
| 694 DEFINE_RUNTIME_ENTRY(BadTypeError, 3) { | |
| 695 const TokenPosition location = GetCallerLocation(); | |
| 696 const Instance& src_value = Instance::CheckedHandle(zone, arguments.ArgAt(0)); | |
| 697 const String& dst_name = String::CheckedHandle(zone, arguments.ArgAt(1)); | |
| 698 const AbstractType& dst_type = | |
| 699 AbstractType::CheckedHandle(zone, arguments.ArgAt(2)); | |
| 700 const AbstractType& src_type = | |
| 701 AbstractType::Handle(zone, src_value.GetType(Heap::kNew)); | |
| 702 Exceptions::CreateAndThrowTypeError(location, src_type, dst_type, dst_name, | |
| 703 String::Handle(zone)); | |
| 704 UNREACHABLE(); | |
| 705 } | |
| 706 | |
| 707 | |
// Throw the given exception object.
// Arg0: exception instance to throw.
// Return value: none; control transfers to an exception handler.
DEFINE_RUNTIME_ENTRY(Throw, 1) {
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  Exceptions::Throw(thread, exception);
}
| 712 | |
| 713 | |
// Rethrow the given exception with an already-captured stack trace.
// Arg0: exception instance.
// Arg1: stacktrace instance.
// Return value: none; control transfers to an exception handler.
DEFINE_RUNTIME_ENTRY(ReThrow, 2) {
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& stacktrace =
      Instance::CheckedHandle(zone, arguments.ArgAt(1));
  Exceptions::ReThrow(thread, exception, stacktrace);
}
| 720 | |
| 721 | |
// Patches static call in optimized code with the target's entry point.
// Compiles target if necessary.
// Returns: the (possibly newly compiled) target code.
DEFINE_RUNTIME_ENTRY(PatchStaticCall, 0) {
  // The call instruction to patch lives in the caller's frame.
  DartFrameIterator iterator;
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != NULL);
  const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
  ASSERT(!caller_code.IsNull());
  ASSERT(caller_code.is_optimized());
  // Look up the statically known target function recorded for this call pc.
  const Function& target_function = Function::Handle(
      zone, caller_code.GetStaticCallTargetFunctionAt(caller_frame->pc()));
  // Compile the target lazily if it has no code yet.
  const Code& target_code = Code::Handle(zone, target_function.EnsureHasCode());
  // Before patching verify that we are not repeatedly patching to the same
  // target.
  ASSERT(target_code.raw() !=
         CodePatcher::GetStaticCallTargetAt(caller_frame->pc(), caller_code));
  CodePatcher::PatchStaticCallAt(caller_frame->pc(), caller_code, target_code);
  // Record the patched target so later lookups see the new code.
  caller_code.SetStaticCallTargetCodeAt(caller_frame->pc(), target_code);
  if (FLAG_trace_patching) {
    THR_Print("PatchStaticCall: patching caller pc %#" Px
              ""
              " to '%s' new entry point %#" Px " (%s)\n",
              caller_frame->pc(), target_function.ToFullyQualifiedCString(),
              target_code.UncheckedEntryPoint(),
              target_code.is_optimized() ? "optimized" : "unoptimized");
  }
  arguments.SetReturn(target_code);
}
| 750 | |
| 751 | |
| 752 // Result of an invoke may be an unhandled exception, in which case we | |
| 753 // rethrow it. | |
| 754 static void CheckResultError(const Object& result) { | |
| 755 if (result.IsError()) { | |
| 756 Exceptions::PropagateError(Error::Cast(result)); | |
| 757 } | |
| 758 } | |
| 759 | |
| 760 | |
#if !defined(TARGET_ARCH_DBC)
// Gets called from debug stub when code reaches a breakpoint
// set on a runtime stub call.
// Returns: the original stub that was replaced by the breakpoint patch, so
// the caller can resume execution through it.
DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
  if (!FLAG_support_debugger) {
    UNREACHABLE();
    return;
  }
  // Locate the frame that hit the breakpoint and recover the stub that the
  // breakpoint patch displaced.
  DartFrameIterator iterator;
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != NULL);
  const Code& orig_stub = Code::Handle(
      zone, isolate->debugger()->GetPatchedStubAddress(caller_frame->pc()));
  // Notify the debugger of the pause; an error here (e.g. from an evaluated
  // expression) is propagated as an exception.
  const Error& error =
      Error::Handle(zone, isolate->debugger()->PauseBreakpoint());
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
    UNREACHABLE();
  }
  arguments.SetReturn(orig_stub);
}
#else
// Gets called from the simulator when the breakpoint is reached.
DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
  if (!FLAG_support_debugger) {
    UNREACHABLE();
    return;
  }
  const Error& error = Error::Handle(isolate->debugger()->PauseBreakpoint());
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
    UNREACHABLE();
  }
}
#endif  // !defined(TARGET_ARCH_DBC)
| 796 | |
| 797 | |
// Gives the debugger a chance to pause the isolate while single-stepping.
// Return value: none; propagates any error raised while paused.
DEFINE_RUNTIME_ENTRY(SingleStepHandler, 0) {
  if (!FLAG_support_debugger) {
    UNREACHABLE();
    return;
  }
  const Error& error =
      Error::Handle(zone, isolate->debugger()->PauseStepping());
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
    UNREACHABLE();
  }
}
| 810 | |
| 811 | |
// An instance call of the form o.f(...) could not be resolved. Check if
// there is a getter with the same name. If so, invoke it. If the value is
// a closure, invoke it with the given arguments. If the value is a
// non-closure, attempt to invoke "call" on it.
// Returns true and sets *result (possibly to null under
// --no-lazy-dispatchers) when a suitable getter exists; false otherwise.
static bool ResolveCallThroughGetter(const Instance& receiver,
                                     const Class& receiver_class,
                                     const String& target_name,
                                     const Array& arguments_descriptor,
                                     Function* result) {
  // 1. Check if there is a getter with the same name.
  const String& getter_name = String::Handle(Field::GetterName(target_name));
  const int kNumArguments = 1;  // A getter takes only the receiver.
  ArgumentsDescriptor args_desc(
      Array::Handle(ArgumentsDescriptor::New(kNumArguments)));
  const Function& getter =
      Function::Handle(Resolver::ResolveDynamicForReceiverClass(
          receiver_class, getter_name, args_desc));
  // Method extractors are skipped: extracting the method as a closure is not
  // the same as calling through a field getter.
  if (getter.IsNull() || getter.IsMethodExtractor()) {
    return false;
  }
  // 2. A getter exists: route the call through an invoke-field dispatcher.
  const Function& target_function =
      Function::Handle(receiver_class.GetInvocationDispatcher(
          target_name, arguments_descriptor,
          RawFunction::kInvokeFieldDispatcher, FLAG_lazy_dispatchers));
  // The dispatcher may be null when running with --no-lazy-dispatchers.
  ASSERT(!target_function.IsNull() || !FLAG_lazy_dispatchers);
  if (FLAG_trace_ic) {
    OS::PrintErr(
        "InvokeField IC miss: adding <%s> id:%" Pd " -> <%s>\n",
        Class::Handle(receiver.clazz()).ToCString(), receiver.GetClassId(),
        target_function.IsNull() ? "null" : target_function.ToCString());
  }
  *result = target_function.raw();
  return true;
}
| 846 | |
| 847 | |
// Handle other invocations (implicit closures, noSuchMethod).
// Resolves 'target_name' on 'receiver' through a getter
// (call-through-getter) or, failing that, through a noSuchMethod
// dispatcher. May return null when --no-lazy-dispatchers.
RawFunction* InlineCacheMissHelper(const Instance& receiver,
                                   const Array& args_descriptor,
                                   const String& target_name) {
  const Class& receiver_class = Class::Handle(receiver.clazz());

  Function& result = Function::Handle();
  if (!ResolveCallThroughGetter(receiver, receiver_class, target_name,
                                args_descriptor, &result)) {
    // No getter of that name: fall back to a noSuchMethod dispatcher.
    ArgumentsDescriptor desc(args_descriptor);
    const Function& target_function =
        Function::Handle(receiver_class.GetInvocationDispatcher(
            target_name, args_descriptor, RawFunction::kNoSuchMethodDispatcher,
            FLAG_lazy_dispatchers));
    if (FLAG_trace_ic) {
      OS::PrintErr(
          "NoSuchMethod IC miss: adding <%s> id:%" Pd " -> <%s>\n",
          Class::Handle(receiver.clazz()).ToCString(), receiver.GetClassId(),
          target_function.IsNull() ? "null" : target_function.ToCString());
    }
    result = target_function.raw();
  }
  // May be null if --no-lazy-dispatchers, in which case dispatch will be
  // handled by InvokeNoSuchMethodDispatcher.
  ASSERT(!result.IsNull() || !FLAG_lazy_dispatchers);
  return result.raw();
}
| 875 | |
| 876 | |
| 877 // Perform the subtype and return constant function based on the result. | |
| 878 static RawFunction* ComputeTypeCheckTarget(const Instance& receiver, | |
| 879 const AbstractType& type, | |
| 880 const ArgumentsDescriptor& desc) { | |
| 881 Error& error = Error::Handle(); | |
| 882 bool result = receiver.IsInstanceOf(type, Object::null_type_arguments(), | |
| 883 Object::null_type_arguments(), &error); | |
| 884 ASSERT(error.IsNull()); | |
| 885 ObjectStore* store = Isolate::Current()->object_store(); | |
| 886 const Function& target = | |
| 887 Function::Handle(result ? store->simple_instance_of_true_function() | |
| 888 : store->simple_instance_of_false_function()); | |
| 889 ASSERT(!target.IsNull()); | |
| 890 return target.raw(); | |
| 891 } | |
| 892 | |
| 893 | |
// Resolve the target of an instance call from the actual argument classes
// and the call site's ICData, record the outcome in the ICData, and return
// the target (null when --no-lazy-dispatchers and resolution failed).
static RawFunction* InlineCacheMissHandler(
    const GrowableArray<const Instance*>& args,
    const ICData& ic_data) {
  const Instance& receiver = *args[0];
  ArgumentsDescriptor arguments_descriptor(
      Array::Handle(ic_data.arguments_descriptor()));
  String& function_name = String::Handle(ic_data.target_name());
  ASSERT(function_name.IsSymbol());

  Function& target_function = Function::Handle(
      Resolver::ResolveDynamic(receiver, function_name, arguments_descriptor));

  ObjectStore* store = Isolate::Current()->object_store();
  if (target_function.raw() == store->simple_instance_of_function()) {
    // Replace the target function with constant function.
    const AbstractType& type = AbstractType::Cast(*args[1]);
    target_function =
        ComputeTypeCheckTarget(receiver, type, arguments_descriptor);
  }
  if (target_function.IsNull()) {
    if (FLAG_trace_ic) {
      OS::PrintErr("InlineCacheMissHandler NULL function for %s receiver: %s\n",
                   String::Handle(ic_data.target_name()).ToCString(),
                   receiver.ToCString());
    }
    // Normal resolution failed: try getter-based dispatch or noSuchMethod.
    const Array& args_descriptor =
        Array::Handle(ic_data.arguments_descriptor());
    const String& target_name = String::Handle(ic_data.target_name());
    target_function =
        InlineCacheMissHelper(receiver, args_descriptor, target_name);
  }
  if (target_function.IsNull()) {
    // Only possible with --no-lazy-dispatchers; caller handles the null.
    ASSERT(!FLAG_lazy_dispatchers);
    return target_function.raw();
  }
  // Record the observed class id(s) together with the resolved target.
  if (args.length() == 1) {
    ic_data.AddReceiverCheck(args[0]->GetClassId(), target_function);
  } else {
    GrowableArray<intptr_t> class_ids(args.length());
    ASSERT(ic_data.NumArgsTested() == args.length());
    for (intptr_t i = 0; i < args.length(); i++) {
      class_ids.Add(args[i]->GetClassId());
    }
    ic_data.AddCheck(class_ids, target_function);
  }
  if (FLAG_trace_ic_miss_in_optimized || FLAG_trace_ic) {
    DartFrameIterator iterator;
    StackFrame* caller_frame = iterator.NextFrame();
    ASSERT(caller_frame != NULL);
    if (FLAG_trace_ic_miss_in_optimized) {
      const Code& caller = Code::Handle(Code::LookupCode(caller_frame->pc()));
      if (caller.is_optimized()) {
        OS::PrintErr("IC miss in optimized code; call %s -> %s\n",
                     Function::Handle(caller.function()).ToCString(),
                     target_function.ToCString());
      }
    }
    if (FLAG_trace_ic) {
      OS::PrintErr("InlineCacheMissHandler %" Pd " call at %#" Px
                   "' "
                   "adding <%s> id:%" Pd " -> <%s>\n",
                   args.length(), caller_frame->pc(),
                   Class::Handle(receiver.clazz()).ToCString(),
                   receiver.GetClassId(), target_function.ToCString());
    }
  }
  return target_function.raw();
}
| 962 | |
| 963 | |
| 964 // Handles inline cache misses by updating the IC data array of the call site. | |
| 965 // Arg0: Receiver object. | |
| 966 // Arg1: IC data object. | |
| 967 // Returns: target function with compiled code or null. | |
| 968 // Modifies the instance call to hold the updated IC data array. | |
| 969 DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerOneArg, 2) { | |
| 970 const Instance& receiver = Instance::CheckedHandle(arguments.ArgAt(0)); | |
| 971 const ICData& ic_data = ICData::CheckedHandle(arguments.ArgAt(1)); | |
| 972 GrowableArray<const Instance*> args(1); | |
| 973 args.Add(&receiver); | |
| 974 const Function& result = | |
| 975 Function::Handle(InlineCacheMissHandler(args, ic_data)); | |
| 976 arguments.SetReturn(result); | |
| 977 } | |
| 978 | |
| 979 | |
| 980 // Handles inline cache misses by updating the IC data array of the call site. | |
| 981 // Arg0: Receiver object. | |
| 982 // Arg1: Argument after receiver. | |
| 983 // Arg2: IC data object. | |
| 984 // Returns: target function with compiled code or null. | |
| 985 // Modifies the instance call to hold the updated IC data array. | |
| 986 DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerTwoArgs, 3) { | |
| 987 const Instance& receiver = Instance::CheckedHandle(arguments.ArgAt(0)); | |
| 988 const Instance& other = Instance::CheckedHandle(arguments.ArgAt(1)); | |
| 989 const ICData& ic_data = ICData::CheckedHandle(arguments.ArgAt(2)); | |
| 990 GrowableArray<const Instance*> args(2); | |
| 991 args.Add(&receiver); | |
| 992 args.Add(&other); | |
| 993 const Function& result = | |
| 994 Function::Handle(InlineCacheMissHandler(args, ic_data)); | |
| 995 arguments.SetReturn(result); | |
| 996 } | |
| 997 | |
| 998 | |
// Handles a static call in unoptimized code that has one argument type not
// seen before. Compile the target if necessary and update the ICData.
// Arg0: argument.
// Arg1: IC data object.
// Returns: the (single, statically known) target function.
DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerOneArg, 2) {
  const Instance& arg = Instance::CheckedHandle(arguments.ArgAt(0));
  const ICData& ic_data = ICData::CheckedHandle(arguments.ArgAt(1));
  // IC data for static call is prepopulated with the statically known target.
  ASSERT(ic_data.NumberOfChecksIs(1));
  const Function& target = Function::Handle(ic_data.GetTargetAt(0));
  target.EnsureHasCode();
  ASSERT(!target.IsNull() && target.HasCode());
  // Record the new argument class id; the target never changes.
  ic_data.AddReceiverCheck(arg.GetClassId(), target, 1);
  if (FLAG_trace_ic) {
    DartFrameIterator iterator;
    StackFrame* caller_frame = iterator.NextFrame();
    ASSERT(caller_frame != NULL);
    OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ")\n",
                 caller_frame->pc(), target.ToCString(), arg.GetClassId());
  }
  arguments.SetReturn(target);
}
| 1021 | |
| 1022 | |
// Handles a static call in unoptimized code that has two argument types not
// seen before. Compile the target if necessary and update the ICData.
// Arg0: argument 0.
// Arg1: argument 1.
// Arg2: IC data object.
// Returns: the (statically known) target function.
DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerTwoArgs, 3) {
  const Instance& arg0 = Instance::CheckedHandle(arguments.ArgAt(0));
  const Instance& arg1 = Instance::CheckedHandle(arguments.ArgAt(1));
  const ICData& ic_data = ICData::CheckedHandle(arguments.ArgAt(2));
  // IC data for static call is prepopulated with the statically known target.
  ASSERT(!ic_data.NumberOfChecksIs(0));
  const Function& target = Function::Handle(ic_data.GetTargetAt(0));
  target.EnsureHasCode();
  // Record the new pair of argument class ids; the target never changes.
  GrowableArray<intptr_t> cids(2);
  cids.Add(arg0.GetClassId());
  cids.Add(arg1.GetClassId());
  ic_data.AddCheck(cids, target);
  if (FLAG_trace_ic) {
    DartFrameIterator iterator;
    StackFrame* caller_frame = iterator.NextFrame();
    ASSERT(caller_frame != NULL);
    OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ", %" Pd
                 ")\n",
                 caller_frame->pc(), target.ToCString(), cids[0], cids[1]);
  }
  arguments.SetReturn(target);
}
| 1050 | |
| 1051 | |
| 1052 #if !defined(TARGET_ARCH_DBC) | |
| 1053 static bool IsSingleTarget(Isolate* isolate, | |
| 1054 Zone* zone, | |
| 1055 intptr_t lower_cid, | |
| 1056 intptr_t upper_cid, | |
| 1057 const Function& target, | |
| 1058 const String& name) { | |
| 1059 Class& cls = Class::Handle(zone); | |
| 1060 ClassTable* table = isolate->class_table(); | |
| 1061 Function& other_target = Function::Handle(zone); | |
| 1062 for (intptr_t cid = lower_cid; cid <= upper_cid; cid++) { | |
| 1063 if (!table->HasValidClassAt(cid)) continue; | |
| 1064 cls = table->At(cid); | |
| 1065 if (cls.is_abstract()) continue; | |
| 1066 if (!cls.is_allocated()) continue; | |
| 1067 other_target = | |
| 1068 Resolver::ResolveDynamicAnyArgs(zone, cls, name, false /* allow_add */); | |
| 1069 if (other_target.raw() != target.raw()) { | |
| 1070 return false; | |
| 1071 } | |
| 1072 } | |
| 1073 return true; | |
| 1074 } | |
| 1075 #endif | |
| 1076 | |
| 1077 | |
// Handle a miss of a single target cache.
// Arg0: Receiver.
// Returns: the ICData used to continue with a polymorphic call.
DEFINE_RUNTIME_ENTRY(SingleTargetMiss, 1) {
#if defined(TARGET_ARCH_DBC)
  // DBC does not use switchable calls.
  UNREACHABLE();
#else
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));

  DartFrameIterator iterator;
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame->IsDartFrame());
  const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
  const Function& caller_function =
      Function::Handle(zone, caller_frame->LookupDartFunction());

  // Recover the single-target cache patched into the call site and the
  // function it currently targets.
  SingleTargetCache& cache = SingleTargetCache::Handle(zone);
  cache ^=
      CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(), caller_code);
  Code& old_target_code = Code::Handle(zone, cache.target());
  Function& old_target = Function::Handle(zone);
  old_target ^= old_target_code.owner();

  // We lost the original ICData when we patched to the monomorphic case.
  const String& name = String::Handle(zone, old_target.name());
  ASSERT(!old_target.HasOptionalParameters());
  const Array& descriptor = Array::Handle(
      zone, ArgumentsDescriptor::New(old_target.num_fixed_parameters()));
  const ICData& ic_data =
      ICData::Handle(zone, ICData::New(caller_function, name, descriptor,
                                       Thread::kNoDeoptId, 1, /* args_tested */
                                       false /* static_call */));

  // Maybe add the new target.
  Class& cls = Class::Handle(zone, receiver.clazz());
  ArgumentsDescriptor args_desc(descriptor);
  Function& target_function = Function::Handle(
      zone, Resolver::ResolveDynamicForReceiverClass(cls, name, args_desc));
  if (target_function.IsNull()) {
    target_function = InlineCacheMissHelper(receiver, descriptor, name);
  }
  if (target_function.IsNull()) {
    ASSERT(!FLAG_lazy_dispatchers);
  } else {
    ic_data.AddReceiverCheck(receiver.GetClassId(), target_function);
  }

  if (old_target.raw() == target_function.raw()) {
    // Same target: try widening the cache's cid range to include the new
    // receiver class instead of abandoning the single-target fast path.
    intptr_t lower, upper, unchecked_lower, unchecked_upper;
    if (receiver.GetClassId() < cache.lower_limit()) {
      lower = receiver.GetClassId();
      unchecked_lower = receiver.GetClassId();
      upper = cache.upper_limit();
      unchecked_upper = cache.lower_limit() - 1;
    } else {
      lower = cache.lower_limit();
      unchecked_lower = cache.upper_limit() + 1;
      upper = receiver.GetClassId();
      unchecked_upper = receiver.GetClassId();
    }

    // Only the newly added portion of the cid range needs verification.
    if (IsSingleTarget(isolate, zone, unchecked_lower, unchecked_upper,
                       target_function, name)) {
      cache.set_lower_limit(lower);
      cache.set_upper_limit(upper);
      // Return the ICData. The single target stub will jump to continue in the
      // IC call stub.
      arguments.SetReturn(ic_data);
      return;
    }
  }

  // Call site is not single target, switch to call using ICData.
  const Code& stub =
      Code::Handle(zone, StubCode::ICCallThroughCode_entry()->code());
  ASSERT(!Isolate::Current()->compilation_allowed());
  CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, ic_data,
                                     stub);

  // Return the ICData. The single target stub will jump to continue in the
  // IC call stub.
  arguments.SetReturn(ic_data);
#endif
}
| 1163 | |
| 1164 | |
// Handle the first call through an unlinked switchable call site: resolve
// the target, build an ICData, and patch the site to a monomorphic call
// when possible, otherwise to the IC-data-based call stub.
// Arg0: Receiver.
// Arg1: UnlinkedCall object holding the selector and arguments descriptor.
// Returns: the ICData used to continue in the IC call stub.
DEFINE_RUNTIME_ENTRY(UnlinkedCall, 2) {
#if defined(TARGET_ARCH_DBC)
  // DBC does not use switchable calls.
  UNREACHABLE();
#else
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const UnlinkedCall& unlinked =
      UnlinkedCall::CheckedHandle(zone, arguments.ArgAt(1));

  DartFrameIterator iterator;
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame->IsDartFrame());
  const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
  const Function& caller_function =
      Function::Handle(zone, caller_frame->LookupDartFunction());

  const String& name = String::Handle(zone, unlinked.target_name());
  const Array& descriptor = Array::Handle(zone, unlinked.args_descriptor());
  const ICData& ic_data =
      ICData::Handle(zone, ICData::New(caller_function, name, descriptor,
                                       Thread::kNoDeoptId, 1, /* args_tested */
                                       false /* static_call */));

  // Resolve the selector for the receiver's class; fall back to
  // getter/noSuchMethod dispatch.
  Class& cls = Class::Handle(zone, receiver.clazz());
  ArgumentsDescriptor args_desc(descriptor);
  Function& target_function = Function::Handle(
      zone, Resolver::ResolveDynamicForReceiverClass(cls, name, args_desc));
  if (target_function.IsNull()) {
    target_function = InlineCacheMissHelper(receiver, descriptor, name);
  }
  if (target_function.IsNull()) {
    ASSERT(!FLAG_lazy_dispatchers);
  } else {
    ic_data.AddReceiverCheck(receiver.GetClassId(), target_function);
  }

  if (!target_function.IsNull() && !target_function.HasOptionalParameters()) {
    // Patch to monomorphic call.
    ASSERT(target_function.HasCode());
    const Code& target_code = Code::Handle(zone, target_function.CurrentCode());
    const Smi& expected_cid =
        Smi::Handle(zone, Smi::New(receiver.GetClassId()));
    CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code,
                                       expected_cid, target_code);

    // Return the ICData. The miss stub will jump to continue in the IC call
    // stub.
    arguments.SetReturn(ic_data);
    return;
  }

  // Patch to call through stub.
  const Code& stub =
      Code::Handle(zone, StubCode::ICCallThroughCode_entry()->code());
  ASSERT(!Isolate::Current()->compilation_allowed());
  CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, ic_data,
                                     stub);

  // Return the ICData. The miss stub will jump to continue in the IC lookup
  // stub.
  arguments.SetReturn(ic_data);
#endif  // !DBC
}
| 1228 | |
| 1229 | |
// Handle a miss of a monomorphic call site: the receiver's class id differs
// from the single expected class id patched into the call.
// (The previous comment said "megamorphic cache"; this entry handles
// monomorphic misses — see MegamorphicCacheMissHandler below.)
// Arg0: Receiver.
// Returns: the ICData used to continue with a polymorphic call.
DEFINE_RUNTIME_ENTRY(MonomorphicMiss, 1) {
#if defined(TARGET_ARCH_DBC)
  // DBC does not use switchable calls.
  UNREACHABLE();
#else
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));

  DartFrameIterator iterator;
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame->IsDartFrame());
  const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
  const Function& caller_function =
      Function::Handle(zone, caller_frame->LookupDartFunction());

  // Recover the expected cid and target that were patched into the site.
  Smi& old_expected_cid = Smi::Handle(zone);
  old_expected_cid ^=
      CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(), caller_code);
  const Code& old_target_code = Code::Handle(
      CodePatcher::GetSwitchableCallTargetAt(caller_frame->pc(), caller_code));
  Function& old_target = Function::Handle(zone);
  old_target ^= old_target_code.owner();

  // We lost the original ICData when we patched to the monomorphic case.
  const String& name = String::Handle(zone, old_target.name());
  ASSERT(!old_target.HasOptionalParameters());
  const Array& descriptor = Array::Handle(
      zone, ArgumentsDescriptor::New(old_target.num_fixed_parameters()));
  const ICData& ic_data =
      ICData::Handle(zone, ICData::New(caller_function, name, descriptor,
                                       Thread::kNoDeoptId, 1, /* args_tested */
                                       false /* static_call */));

  // Add the first target.
  ic_data.AddReceiverCheck(old_expected_cid.Value(), old_target);

  // Maybe add the new target.
  Class& cls = Class::Handle(zone, receiver.clazz());
  ArgumentsDescriptor args_desc(descriptor);
  Function& target_function = Function::Handle(
      zone, Resolver::ResolveDynamicForReceiverClass(cls, name, args_desc));
  if (target_function.IsNull()) {
    target_function = InlineCacheMissHelper(receiver, descriptor, name);
  }
  if (target_function.IsNull()) {
    ASSERT(!FLAG_lazy_dispatchers);
  } else {
    ic_data.AddReceiverCheck(receiver.GetClassId(), target_function);
  }

  if (old_target.raw() == target_function.raw()) {
    // Both cids hit the same target: try upgrading to a single-target cache
    // covering the cid range instead of a generic IC call.
    intptr_t lower, upper;
    if (old_expected_cid.Value() < receiver.GetClassId()) {
      lower = old_expected_cid.Value();
      upper = receiver.GetClassId();
    } else {
      lower = receiver.GetClassId();
      upper = old_expected_cid.Value();
    }

    if (IsSingleTarget(isolate, zone, lower, upper, target_function, name)) {
      const SingleTargetCache& cache =
          SingleTargetCache::Handle(SingleTargetCache::New());
      const Code& code = Code::Handle(target_function.CurrentCode());
      cache.set_target(code);
      cache.set_entry_point(code.UncheckedEntryPoint());
      cache.set_lower_limit(lower);
      cache.set_upper_limit(upper);
      const Code& stub =
          Code::Handle(zone, StubCode::SingleTargetCall_entry()->code());
      CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, cache,
                                         stub);
      // Return the ICData. The miss stub will jump to continue in the IC call
      // stub.
      arguments.SetReturn(ic_data);
      return;
    }
  }

  // Patch to call through stub.
  const Code& stub =
      Code::Handle(zone, StubCode::ICCallThroughCode_entry()->code());
  ASSERT(!Isolate::Current()->compilation_allowed());
  CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, ic_data,
                                     stub);

  // Return the ICData. The miss stub will jump to continue in the IC lookup
  // stub.
  arguments.SetReturn(ic_data);
#endif  // !defined(TARGET_ARCH_DBC)
}
| 1323 | |
| 1324 | |
// Handle a miss of a megamorphic cache.
// Arg0: Receiver.
// Arg1: ICData or MegamorphicCache.
// Arg2: Arguments descriptor array.
// Returns: target function to call.
DEFINE_RUNTIME_ENTRY(MegamorphicCacheMissHandler, 3) {
#if defined(TARGET_ARCH_DBC)
  // DBC does not use megamorphic calls right now.
  UNREACHABLE();
#else
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Object& ic_data_or_cache = Object::Handle(zone, arguments.ArgAt(1));
  const Array& descriptor = Array::CheckedHandle(zone, arguments.ArgAt(2));
  // The selector is stored on whichever dispatch structure we were given.
  String& name = String::Handle(zone);
  if (ic_data_or_cache.IsICData()) {
    name = ICData::Cast(ic_data_or_cache).target_name();
  } else {
    ASSERT(ic_data_or_cache.IsMegamorphicCache());
    name = MegamorphicCache::Cast(ic_data_or_cache).target_name();
  }
  Class& cls = Class::Handle(zone, receiver.clazz());
  ASSERT(!cls.IsNull());
  if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
    OS::PrintErr("Megamorphic IC miss, class=%s, function=%s\n",
                 cls.ToCString(), name.ToCString());
  }

  // Resolve the selector; fall back to getter/noSuchMethod dispatch.
  ArgumentsDescriptor args_desc(descriptor);
  Function& target_function = Function::Handle(
      zone, Resolver::ResolveDynamicForReceiverClass(cls, name, args_desc));
  if (target_function.IsNull()) {
    target_function = InlineCacheMissHelper(receiver, descriptor, name);
  }
  if (target_function.IsNull()) {
    // Unresolved; the null return routes dispatch to
    // InvokeNoSuchMethodDispatcher.
    ASSERT(!FLAG_lazy_dispatchers);
    arguments.SetReturn(target_function);
    return;
  }

  if (ic_data_or_cache.IsICData()) {
    const ICData& ic_data = ICData::Cast(ic_data_or_cache);
    const intptr_t number_of_checks = ic_data.NumberOfChecks();

    if (number_of_checks == 0 && !target_function.HasOptionalParameters() &&
        !Isolate::Current()->compilation_allowed()) {
      // This call site is unlinked: transition to a monomorphic direct call.
      // Note we cannot do this if the target has optional parameters because
      // the monomorphic direct call does not load the arguments descriptor.
      // We cannot do this if we are still in the middle of precompiling because
      // the monomorphic case hides a live instance selector from the
      // treeshaker.

      const Code& target_code =
          Code::Handle(zone, target_function.EnsureHasCode());

      // Skip this runtime entry's own frame to find the Dart caller to patch.
      DartFrameIterator iterator;
      StackFrame* miss_function_frame = iterator.NextFrame();
      ASSERT(miss_function_frame->IsDartFrame());
      StackFrame* caller_frame = iterator.NextFrame();
      ASSERT(caller_frame->IsDartFrame());
      const Code& caller_code =
          Code::Handle(zone, caller_frame->LookupDartCode());
      const Smi& expected_cid =
          Smi::Handle(zone, Smi::New(receiver.GetClassId()));

      CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code,
                                         expected_cid, target_code);
    } else {
      ic_data.AddReceiverCheck(receiver.GetClassId(), target_function);
      if (number_of_checks > FLAG_max_polymorphic_checks) {
        // Switch to megamorphic call.
        const MegamorphicCache& cache = MegamorphicCache::Handle(
            zone, MegamorphicCacheTable::Lookup(isolate, name, descriptor));
        DartFrameIterator iterator;
        StackFrame* miss_function_frame = iterator.NextFrame();
        ASSERT(miss_function_frame->IsDartFrame());
        StackFrame* caller_frame = iterator.NextFrame();
        ASSERT(caller_frame->IsDartFrame());
        const Code& caller_code =
            Code::Handle(zone, caller_frame->LookupDartCode());
        const Code& stub =
            Code::Handle(zone, StubCode::MegamorphicCall_entry()->code());

        CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code,
                                           cache, stub);
      }
    }
  } else {
    const MegamorphicCache& cache = MegamorphicCache::Cast(ic_data_or_cache);
    // Insert function found into cache and return it.
    cache.EnsureCapacity();
    const Smi& class_id = Smi::Handle(zone, Smi::New(cls.id()));
    cache.Insert(class_id, target_function);
  }
  arguments.SetReturn(target_function);
#endif  // !defined(TARGET_ARCH_DBC)
}
| 1422 | |
| 1423 | |
// Invoke appropriate noSuchMethod or closure from getter.
// Arg0: receiver
// Arg1: ICData or MegamorphicCache
// Arg2: arguments descriptor array
// Arg3: arguments array
DEFINE_RUNTIME_ENTRY(InvokeNoSuchMethodDispatcher, 4) {
  ASSERT(!FLAG_lazy_dispatchers);
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Object& ic_data_or_cache = Object::Handle(zone, arguments.ArgAt(1));
  const Array& orig_arguments_desc =
      Array::CheckedHandle(zone, arguments.ArgAt(2));
  const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
  // The failed selector is recorded on the ICData/MegamorphicCache that
  // dispatched this call.
  String& target_name = String::Handle(zone);
  if (ic_data_or_cache.IsICData()) {
    target_name = ICData::Cast(ic_data_or_cache).target_name();
  } else {
    ASSERT(ic_data_or_cache.IsMegamorphicCache());
    target_name = MegamorphicCache::Cast(ic_data_or_cache).target_name();
  }

  Class& cls = Class::Handle(zone, receiver.clazz());
  Function& function = Function::Handle(zone);

  // Dart distinguishes getters and regular methods and allows their calls
  // to mix with conversions, and its selectors are independent of arity. So do
  // a zigzagged lookup to see if this call failed because of an arity mismatch,
  // need for conversion, or there really is no such method.

// Invokes noSuchMethod with the original selector and arguments, and returns
// its result to the caller of this runtime entry.
#define NO_SUCH_METHOD()                                                       \
  const Object& result = Object::Handle(                                       \
      zone, DartEntry::InvokeNoSuchMethod(                                     \
                receiver, target_name, orig_arguments, orig_arguments_desc));  \
  CheckResultError(result);                                                    \
  arguments.SetReturn(result);

// Returns an implicit instance closure binding |receiver| to |some_function|
// to the caller of this runtime entry.
#define CLOSURIZE(some_function)                                               \
  const Function& closure_function =                                           \
      Function::Handle(zone, some_function.ImplicitClosureFunction());         \
  const Object& result = Object::Handle(                                       \
      zone, closure_function.ImplicitInstanceClosure(receiver));               \
  arguments.SetReturn(result);

  const bool is_getter = Field::IsGetterName(target_name);
  if (is_getter) {
    // o.foo (o.get:foo) failed, closurize o.foo() if it exists. Or,
    // o#foo (o.get:#foo) failed, closurize o.foo or o.foo(), whichever is
    // encountered first on the inheritance chain. Or,
    // o#foo= (o.get:#set:foo) failed, closurize o.foo= if it exists.
    String& field_name =
        String::Handle(zone, Field::NameFromGetter(target_name));

    // A leading '#' marks an extractor selector (o#foo / o#foo=).
    const bool is_extractor = field_name.CharAt(0) == '#';
    if (is_extractor) {
      field_name = String::SubString(field_name, 1);
      ASSERT(!Field::IsGetterName(field_name));
      // Re-canonicalize the stripped name so it can be used in lookups.
      field_name = Symbols::New(thread, field_name);

      if (!Field::IsSetterName(field_name)) {
        const String& getter_name =
            String::Handle(Field::GetterName(field_name));

        // Zigzagged lookup: closurize either a regular method or a getter.
        while (!cls.IsNull()) {
          function ^= cls.LookupDynamicFunction(field_name);
          if (!function.IsNull()) {
            CLOSURIZE(function);
            return;
          }
          function ^= cls.LookupDynamicFunction(getter_name);
          if (!function.IsNull()) {
            CLOSURIZE(function);
            return;
          }
          cls = cls.SuperClass();
        }
        NO_SUCH_METHOD();
        return;
      } else {
        // Fall through for non-zigzagged lookup for o#foo=.
      }
    }

    while (!cls.IsNull()) {
      function ^= cls.LookupDynamicFunction(field_name);
      if (!function.IsNull()) {
        CLOSURIZE(function);
        return;
      }
      cls = cls.SuperClass();
    }

    // Fall through for noSuchMethod
  } else {
    // o.foo(...) failed; invoke noSuchMethod if foo exists but has the wrong
    // number of arguments, or try (o.foo).call(...)

    if ((target_name.raw() == Symbols::Call().raw()) && receiver.IsClosure()) {
      // Special case: closures are implemented with a call getter instead of a
      // call method and with lazy dispatchers the field-invocation-dispatcher
      // would perform the closure call.
      const Object& result = Object::Handle(
          zone, DartEntry::InvokeClosure(orig_arguments, orig_arguments_desc));
      CheckResultError(result);
      arguments.SetReturn(result);
      return;
    }

    const String& getter_name =
        String::Handle(zone, Field::GetterName(target_name));
    while (!cls.IsNull()) {
      function ^= cls.LookupDynamicFunction(target_name);
      if (!function.IsNull()) {
        ArgumentsDescriptor args_desc(orig_arguments_desc);
        ASSERT(!function.AreValidArguments(args_desc, NULL));
        break;  // mismatch, invoke noSuchMethod
      }
      function ^= cls.LookupDynamicFunction(getter_name);
      if (!function.IsNull()) {
        // A getter of the same name exists: evaluate it, then invoke 'call'
        // on its result with the original arguments.
        const Array& getter_arguments = Array::Handle(Array::New(1));
        getter_arguments.SetAt(0, receiver);
        const Object& getter_result = Object::Handle(
            zone, DartEntry::InvokeFunction(function, getter_arguments));
        CheckResultError(getter_result);
        ASSERT(getter_result.IsNull() || getter_result.IsInstance());

        // Replace the receiver slot with the getter's result and perform a
        // closure call.
        orig_arguments.SetAt(0, getter_result);
        const Object& call_result = Object::Handle(
            zone,
            DartEntry::InvokeClosure(orig_arguments, orig_arguments_desc));
        CheckResultError(call_result);
        arguments.SetReturn(call_result);
        return;
      }
      cls = cls.SuperClass();
    }
  }

  NO_SUCH_METHOD();

#undef NO_SUCH_METHOD
#undef CLOSURIZE
}
| 1566 | |
| 1567 | |
| 1568 // Invoke appropriate noSuchMethod function. | |
| 1569 // Arg0: receiver (closure object) | |
| 1570 // Arg1: arguments descriptor array. | |
| 1571 // Arg2: arguments array. | |
| 1572 DEFINE_RUNTIME_ENTRY(InvokeClosureNoSuchMethod, 3) { | |
| 1573 const Closure& receiver = Closure::CheckedHandle(arguments.ArgAt(0)); | |
| 1574 const Array& orig_arguments_desc = Array::CheckedHandle(arguments.ArgAt(1)); | |
| 1575 const Array& orig_arguments = Array::CheckedHandle(arguments.ArgAt(2)); | |
| 1576 | |
| 1577 // For closure the function name is always 'call'. Replace it with the | |
| 1578 // name of the closurized function so that exception contains more | |
| 1579 // relevant information. | |
| 1580 const Function& function = Function::Handle(receiver.function()); | |
| 1581 const String& original_function_name = | |
| 1582 String::Handle(function.QualifiedUserVisibleName()); | |
| 1583 const Object& result = Object::Handle(DartEntry::InvokeNoSuchMethod( | |
| 1584 receiver, original_function_name, orig_arguments, orig_arguments_desc)); | |
| 1585 CheckResultError(result); | |
| 1586 arguments.SetReturn(result); | |
| 1587 } | |
| 1588 | |
| 1589 | |
// Runtime entry for the slow path of a stack-overflow check. Throws the
// preallocated StackOverflow exception when the stack limit is really
// exceeded; otherwise services the stress-testing flags (forced
// deoptimization, stack-trace collection, isolate reload), pending
// interrupts, and OSR requests.
DEFINE_RUNTIME_ENTRY(StackOverflow, 0) {
#if defined(USING_SIMULATOR)
  uword stack_pos = Simulator::Current()->get_sp();
#else
  uword stack_pos = Thread::GetCurrentStackPointer();
#endif
  // Always clear the stack overflow flags. They are meant for this
  // particular stack overflow runtime call and are not meant to
  // persist.
  uword stack_overflow_flags = thread->GetAndClearStackOverflowFlags();

  // If an interrupt happens at the same time as a stack overflow, we
  // process the stack overflow now and leave the interrupt for next
  // time.
  if (IsCalleeFrameOf(thread->saved_stack_limit(), stack_pos)) {
    // Use the preallocated stack overflow exception to avoid calling
    // into dart code.
    const Instance& exception =
        Instance::Handle(isolate->object_store()->stack_overflow());
    Exceptions::Throw(thread, exception);
    UNREACHABLE();
  }

  // The following code is used to stress test deoptimization and
  // debugger stack tracing.
  bool do_deopt = false;
  bool do_stacktrace = false;
  bool do_reload = false;
  const intptr_t isolate_reload_every =
      isolate->reload_every_n_stack_overflow_checks();
  if ((FLAG_deoptimize_every > 0) || (FLAG_stacktrace_every > 0) ||
      (isolate_reload_every > 0)) {
    // TODO(turnidge): To make --deoptimize_every and
    // --stacktrace-every faster we could move this increment/test to
    // the generated code.
    int32_t count = thread->IncrementAndGetStackOverflowCount();
    if (FLAG_deoptimize_every > 0 && (count % FLAG_deoptimize_every) == 0) {
      do_deopt = true;
    }
    if (FLAG_stacktrace_every > 0 && (count % FLAG_stacktrace_every) == 0) {
      do_stacktrace = true;
    }
    if ((isolate_reload_every > 0) && (count % isolate_reload_every) == 0) {
      do_reload = isolate->CanReload();
    }
  }
  // The filter flags restrict the stress actions to functions whose fully
  // qualified name matches the filter substring.
  if ((FLAG_deoptimize_filter != NULL) || (FLAG_stacktrace_filter != NULL) ||
      FLAG_reload_every_optimized) {
    DartFrameIterator iterator;
    StackFrame* frame = iterator.NextFrame();
    ASSERT(frame != NULL);
    const Code& code = Code::Handle(frame->LookupDartCode());
    ASSERT(!code.IsNull());
    const Function& function = Function::Handle(code.function());
    ASSERT(!function.IsNull());
    const char* function_name = function.ToFullyQualifiedCString();
    ASSERT(function_name != NULL);
    if (!code.is_optimized() && FLAG_reload_every_optimized) {
      // Don't do the reload if we aren't inside optimized code.
      do_reload = false;
    }
    if (code.is_optimized() && FLAG_deoptimize_filter != NULL &&
        strstr(function_name, FLAG_deoptimize_filter) != NULL) {
      OS::PrintErr("*** Forcing deoptimization (%s)\n",
                   function.ToFullyQualifiedCString());
      do_deopt = true;
    }
    if (FLAG_stacktrace_filter != NULL &&
        strstr(function_name, FLAG_stacktrace_filter) != NULL) {
      OS::PrintErr("*** Computing stacktrace (%s)\n",
                   function.ToFullyQualifiedCString());
      do_stacktrace = true;
    }
  }
  if (do_deopt) {
    // TODO(turnidge): Consider using DeoptimizeAt instead.
    DeoptimizeFunctionsOnStack();
  }
  if (do_reload) {
#ifndef PRODUCT
    JSONStream js;
    // Maybe adjust the rate of future reloads.
    isolate->MaybeIncreaseReloadEveryNStackOverflowChecks();
    // Issue a reload.
    bool success = isolate->ReloadSources(&js, true /* force_reload */);
    if (!success) {
      FATAL1("*** Isolate reload failed:\n%s\n", js.ToCString());
    }
#endif
  }
  if (FLAG_support_debugger && do_stacktrace) {
    String& var_name = String::Handle();
    Instance& var_value = Instance::Handle();
    // Collecting the stack trace and accessing local variables
    // of frames may trigger parsing of functions to compute
    // variable descriptors of functions. Parsing may trigger
    // code execution, e.g. to compute compile-time constants. Thus,
    // disable FLAG_stacktrace_every during trace collection to prevent
    // recursive stack trace collection.
    intptr_t saved_stacktrace_every = FLAG_stacktrace_every;
    FLAG_stacktrace_every = 0;
    DebuggerStackTrace* stack = isolate->debugger()->StackTrace();
    intptr_t num_frames = stack->Length();
    for (intptr_t i = 0; i < num_frames; i++) {
      ActivationFrame* frame = stack->FrameAt(i);
#ifndef DART_PRECOMPILED_RUNTIME
      // Ensure that we have unoptimized code.
      frame->function().EnsureHasCompiledUnoptimizedCode();
#endif
      // Variable locations and number are unknown when precompiling.
      const int num_vars =
          FLAG_precompiled_runtime ? 0 : frame->NumLocalVariables();
      TokenPosition unused = TokenPosition::kNoSource;
      // Read every local variable to stress the variable-descriptor paths;
      // the values themselves are discarded.
      for (intptr_t v = 0; v < num_vars; v++) {
        frame->VariableAt(v, &var_name, &unused, &unused, &unused, &var_value);
      }
    }
    if (FLAG_stress_async_stacks) {
      Debugger::CollectAwaiterReturnStackTrace();
    }
    FLAG_stacktrace_every = saved_stacktrace_every;
  }

  // Service any pending interrupts (this entry also doubles as the
  // interrupt check slow path).
  const Error& error = Error::Handle(thread->HandleInterrupts());
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
    UNREACHABLE();
  }

  // On-stack replacement: the interrupted unoptimized frame requested a
  // switch to optimized code at the current position.
  if ((stack_overflow_flags & Thread::kOsrRequest) != 0) {
    ASSERT(isolate->use_osr());
    DartFrameIterator iterator;
    StackFrame* frame = iterator.NextFrame();
    ASSERT(frame != NULL);
    const Code& code = Code::ZoneHandle(frame->LookupDartCode());
    ASSERT(!code.IsNull());
    ASSERT(!code.is_optimized());
    const Function& function = Function::Handle(code.function());
    ASSERT(!function.IsNull());

    // If the code of the frame does not match the function's unoptimized code,
    // we bail out since the code was reset by an isolate reload.
    if (code.raw() != function.unoptimized_code()) {
      return;
    }

    // Since the code is referenced from the frame and the ZoneHandle,
    // it cannot have been removed from the function.
    ASSERT(function.HasCode());
    // Don't do OSR on intrinsified functions: The intrinsic code expects to be
    // called like a regular function and can't be entered via OSR.
    if (!Compiler::CanOptimizeFunction(thread, function) ||
        function.is_intrinsic()) {
      return;
    }

    // The unoptimized code is on the stack and should never be detached from
    // the function at this point.
    ASSERT(function.unoptimized_code() != Object::null());
    intptr_t osr_id =
        Code::Handle(function.unoptimized_code()).GetDeoptIdForOsr(frame->pc());
    ASSERT(osr_id != Compiler::kNoOSRDeoptId);
    if (FLAG_trace_osr) {
      OS::Print("Attempting OSR for %s at id=%" Pd ", count=%" Pd "\n",
                function.ToFullyQualifiedCString(), osr_id,
                function.usage_counter());
    }

    // Since the code is referenced from the frame and the ZoneHandle,
    // it cannot have been removed from the function.
    const Object& result = Object::Handle(
        Compiler::CompileOptimizedFunction(thread, function, osr_id));
    if (result.IsError()) {
      Exceptions::PropagateError(Error::Cast(result));
    }

    // On success, retarget the interrupted frame at the OSR-compiled code.
    if (!result.IsNull()) {
      const Code& code = Code::Cast(result);
      uword optimized_entry =
          Instructions::UncheckedEntryPoint(code.instructions());
      frame->set_pc(optimized_entry);
      frame->set_pc_marker(code.raw());
    }
  }
}
| 1775 | |
| 1776 | |
| 1777 DEFINE_RUNTIME_ENTRY(TraceICCall, 2) { | |
| 1778 const ICData& ic_data = ICData::CheckedHandle(arguments.ArgAt(0)); | |
| 1779 const Function& function = Function::CheckedHandle(arguments.ArgAt(1)); | |
| 1780 DartFrameIterator iterator; | |
| 1781 StackFrame* frame = iterator.NextFrame(); | |
| 1782 ASSERT(frame != NULL); | |
| 1783 OS::PrintErr("IC call @%#" Px ": ICData: %p cnt:%" Pd " nchecks: %" Pd | |
| 1784 " %s\n", | |
| 1785 frame->pc(), ic_data.raw(), function.usage_counter(), | |
| 1786 ic_data.NumberOfChecks(), function.ToFullyQualifiedCString()); | |
| 1787 } | |
| 1788 | |
| 1789 | |
// This is called from a function that needs to be optimized.
// The requesting function can be already optimized (reoptimization).
// Returns the Code object where to continue execution.
DEFINE_RUNTIME_ENTRY(OptimizeInvokedFunction, 1) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
  ASSERT(!function.IsNull());
  ASSERT(function.HasCode());

  if (Compiler::CanOptimizeFunction(thread, function)) {
    if (FLAG_background_compilation) {
      // Drain the isolate's queue of fields whose unboxing must be disabled:
      // mark each as no longer an unboxing candidate and deoptimize any code
      // that depends on it.
      Field& field = Field::Handle(zone, isolate->GetDeoptimizingBoxedField());
      while (!field.IsNull()) {
        if (FLAG_trace_optimization || FLAG_trace_field_guards) {
          THR_Print("Lazy disabling unboxing of %s\n", field.ToCString());
        }
        field.set_is_unboxing_candidate(false);
        field.DeoptimizeDependentCode();
        // Get next field.
        field = isolate->GetDeoptimizingBoxedField();
      }
    }
    // TODO(srdjan): Fix background compilation of regular expressions.
    if (FLAG_background_compilation) {
      if (FLAG_enable_inlining_annotations) {
        FATAL("Cannot enable inlining annotations and background compilation");
      }
      if (!BackgroundCompiler::IsDisabled()) {
        if (FLAG_background_compilation_stop_alot) {
          BackgroundCompiler::Stop(isolate);
        }
        // Reduce the chance of triggering optimization while the function is
        // being optimized in the background. INT_MIN should ensure that it
        // takes long time to trigger optimization.
        // Note that the background compilation queue rejects duplicate entries.
        function.set_usage_counter(INT_MIN);
        BackgroundCompiler::EnsureInit(thread);
        ASSERT(isolate->background_compiler() != NULL);
        isolate->background_compiler()->CompileOptimized(function);
        // Continue in the same code.
        arguments.SetReturn(function);
        return;
      }
    }

    // Reset usage counter for reoptimization before calling optimizer to
    // prevent recursive triggering of function optimization.
    function.set_usage_counter(0);
    if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) {
      if (function.HasOptimizedCode()) {
        THR_Print("ReCompiling function: '%s' \n",
                  function.ToFullyQualifiedCString());
      }
    }
    // Compile synchronously on this thread; propagate any compile-time error.
    const Object& result = Object::Handle(
        zone, Compiler::CompileOptimizedFunction(thread, function));
    if (result.IsError()) {
      Exceptions::PropagateError(Error::Cast(result));
    }
  }
  arguments.SetReturn(function);
#else
  UNREACHABLE();
#endif  // !DART_PRECOMPILED_RUNTIME
}
| 1855 | |
| 1856 | |
// The caller must be a static call in a Dart frame, or an entry frame.
// Patch static call to point to valid code's entry point.
DEFINE_RUNTIME_ENTRY(FixCallersTarget, 0) {
  // Walk past any stub/exit frames to reach the Dart frame containing the
  // static call that must be repaired.
  StackFrameIterator iterator(StackFrameIterator::kDontValidateFrames);
  StackFrame* frame = iterator.NextFrame();
  ASSERT(frame != NULL);
  while (frame->IsStubFrame() || frame->IsExitFrame()) {
    frame = iterator.NextFrame();
    ASSERT(frame != NULL);
  }
  if (frame->IsEntryFrame()) {
    // Since function's current code is always unpatched, the entry frame always
    // calls to unpatched code.
    UNREACHABLE();
  }
  ASSERT(frame->IsDartFrame());
  const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
  ASSERT(caller_code.is_optimized());
  const Function& target_function = Function::Handle(
      zone, caller_code.GetStaticCallTargetFunctionAt(frame->pc()));

  // Make sure the target has code, then redirect both the call instruction
  // and the caller's static-call table entry at it.
  const Code& current_target_code =
      Code::Handle(zone, target_function.EnsureHasCode());
  CodePatcher::PatchStaticCallAt(frame->pc(), caller_code, current_target_code);
  caller_code.SetStaticCallTargetCodeAt(frame->pc(), current_target_code);
  if (FLAG_trace_patching) {
    OS::PrintErr("FixCallersTarget: caller %#" Px
                 " "
                 "target '%s' -> %#" Px "\n",
                 frame->pc(), target_function.ToFullyQualifiedCString(),
                 current_target_code.UncheckedEntryPoint());
  }
  ASSERT(!current_target_code.IsDisabled());
  // Return the freshly patched-in target code to the caller.
  arguments.SetReturn(current_target_code);
}
| 1892 | |
| 1893 | |
// The caller tried to allocate an instance via an invalidated allocation
// stub.
DEFINE_RUNTIME_ENTRY(FixAllocationStubTarget, 0) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Walk past any stub/exit frames to reach the Dart frame whose allocation
  // stub call must be repaired.
  StackFrameIterator iterator(StackFrameIterator::kDontValidateFrames);
  StackFrame* frame = iterator.NextFrame();
  ASSERT(frame != NULL);
  while (frame->IsStubFrame() || frame->IsExitFrame()) {
    frame = iterator.NextFrame();
    ASSERT(frame != NULL);
  }
  if (frame->IsEntryFrame()) {
    // There must be a valid Dart frame.
    UNREACHABLE();
  }
  ASSERT(frame->IsDartFrame());
  const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
  ASSERT(!caller_code.IsNull());
  // The invalidated stub the caller went through; its owner identifies the
  // class being allocated.
  const Code& stub = Code::Handle(
      CodePatcher::GetStaticCallTargetAt(frame->pc(), caller_code));
  Class& alloc_class = Class::ZoneHandle(zone);
  alloc_class ^= stub.owner();
  // Reuse the class's current allocation stub, or generate a fresh one if
  // none is attached.
  Code& alloc_stub = Code::Handle(zone, alloc_class.allocation_stub());
  if (alloc_stub.IsNull()) {
    alloc_stub = StubCode::GetAllocationStubForClass(alloc_class);
    ASSERT(!alloc_stub.IsDisabled());
  }
  // Redirect the call instruction and the caller's stub-call table entry.
  CodePatcher::PatchStaticCallAt(frame->pc(), caller_code, alloc_stub);
  caller_code.SetStubCallTargetCodeAt(frame->pc(), alloc_stub);
  if (FLAG_trace_patching) {
    OS::PrintErr("FixAllocationStubTarget: caller %#" Px
                 " alloc-class %s "
                 " -> %#" Px "\n",
                 frame->pc(), alloc_class.ToCString(),
                 alloc_stub.UncheckedEntryPoint());
  }
  arguments.SetReturn(alloc_stub);
#else
  UNREACHABLE();
#endif
}
| 1935 | |
| 1936 | |
// Returns the human-readable name of a deoptimization reason, for use in
// tracing output.
const char* DeoptReasonToCString(ICData::DeoptReasonId deopt_reason) {
  switch (deopt_reason) {
// Expands to one case per entry of the DEOPT_REASONS list, returning the
// stringized reason name.
#define DEOPT_REASON_TO_TEXT(name)                                             \
  case ICData::kDeopt##name:                                                   \
    return #name;
    DEOPT_REASONS(DEOPT_REASON_TO_TEXT)
#undef DEOPT_REASON_TO_TEXT
    default:
      UNREACHABLE();
      return "";
  }
}
| 1949 | |
| 1950 | |
// Prepares the given optimized code/frame pair for deoptimization: ensures
// the function has unoptimized code to fall back to, switches the function
// over to it, and schedules the frame itself for deoptimization. On DBC the
// frame is patched eagerly with deoptimization calls; on other targets it is
// marked for lazy deoptimization via the isolate's pending-deopt table.
void DeoptimizeAt(const Code& optimized_code, StackFrame* frame) {
  ASSERT(optimized_code.is_optimized());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Function& function = Function::Handle(zone, optimized_code.function());
  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, function));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
  const Code& unoptimized_code =
      Code::Handle(zone, function.unoptimized_code());
  ASSERT(!unoptimized_code.IsNull());
  // The switch to unoptimized code may have already occurred.
  if (function.HasOptimizedCode()) {
    function.SwitchToUnoptimizedCode();
  }

#if defined(TARGET_ARCH_DBC)
  const Instructions& instrs =
      Instructions::Handle(zone, optimized_code.instructions());
  {
    WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size());
    CodePatcher::InsertDeoptimizationCallAt(frame->pc());
    if (FLAG_trace_patching) {
      const String& name = String::Handle(function.name());
      OS::PrintErr("InsertDeoptimizationCallAt: 0x%" Px " for %s\n",
                   frame->pc(), name.ToCString());
    }
    // Also patch every exception handler entry point, so a throw into this
    // frame deoptimizes as well.
    const ExceptionHandlers& handlers =
        ExceptionHandlers::Handle(zone, optimized_code.exception_handlers());
    ExceptionHandlerInfo info;
    for (intptr_t i = 0; i < handlers.num_entries(); ++i) {
      handlers.GetHandlerInfo(i, &info);
      const uword patch_pc = instrs.PayloadStart() + info.handler_pc_offset;
      CodePatcher::InsertDeoptimizationCallAt(patch_pc);
      if (FLAG_trace_patching) {
        OS::PrintErr("  at handler 0x%" Px "\n", patch_pc);
      }
    }
  }
#else  // !DBC
  if (frame->IsMarkedForLazyDeopt()) {
    // Deopt already scheduled.
    if (FLAG_trace_deoptimization) {
      THR_Print("Lazy deopt already scheduled for fp=%" Pp "\n", frame->fp());
    }
  } else {
    uword deopt_pc = frame->pc();
    ASSERT(optimized_code.ContainsInstructionAt(deopt_pc));

#if defined(DEBUG)
    ValidateFrames();
#endif

    // N.B.: Update the pending deopt table before updating the frame. The
    // profiler may attempt a stack walk in between.
    thread->isolate()->AddPendingDeopt(frame->fp(), deopt_pc);
    frame->MarkForLazyDeopt();

    if (FLAG_trace_deoptimization) {
      THR_Print("Lazy deopt scheduled for fp=%" Pp ", pc=%" Pp "\n",
                frame->fp(), deopt_pc);
    }
  }
#endif  // !DBC

  // Mark code as dead (do not GC its embedded objects).
  optimized_code.set_is_alive(false);
}
| 2021 | |
| 2022 | |
| 2023 // Currently checks only that all optimized frames have kDeoptIndex | |
| 2024 // and unoptimized code has the kDeoptAfter. | |
| 2025 void DeoptimizeFunctionsOnStack() { | |
| 2026 DartFrameIterator iterator; | |
| 2027 StackFrame* frame = iterator.NextFrame(); | |
| 2028 Code& optimized_code = Code::Handle(); | |
| 2029 while (frame != NULL) { | |
| 2030 optimized_code = frame->LookupDartCode(); | |
| 2031 if (optimized_code.is_optimized()) { | |
| 2032 DeoptimizeAt(optimized_code, frame); | |
| 2033 } | |
| 2034 frame = iterator.NextFrame(); | |
| 2035 } | |
| 2036 } | |
| 2037 | |
| 2038 #if !defined(DART_PRECOMPILED_RUNTIME) | |
| 2039 #if !defined(TARGET_ARCH_DBC) | |
| 2040 static const intptr_t kNumberOfSavedCpuRegisters = kNumberOfCpuRegisters; | |
| 2041 static const intptr_t kNumberOfSavedFpuRegisters = kNumberOfFpuRegisters; | |
| 2042 #else | |
| 2043 static const intptr_t kNumberOfSavedCpuRegisters = 0; | |
| 2044 static const intptr_t kNumberOfSavedFpuRegisters = 0; | |
| 2045 #endif | |
| 2046 | |
| 2047 static void CopySavedRegisters(uword saved_registers_address, | |
| 2048 fpu_register_t** fpu_registers, | |
| 2049 intptr_t** cpu_registers) { | |
| 2050 ASSERT(sizeof(fpu_register_t) == kFpuRegisterSize); | |
| 2051 fpu_register_t* fpu_registers_copy = | |
| 2052 new fpu_register_t[kNumberOfSavedFpuRegisters]; | |
| 2053 ASSERT(fpu_registers_copy != NULL); | |
| 2054 for (intptr_t i = 0; i < kNumberOfSavedFpuRegisters; i++) { | |
| 2055 fpu_registers_copy[i] = | |
| 2056 *reinterpret_cast<fpu_register_t*>(saved_registers_address); | |
| 2057 saved_registers_address += kFpuRegisterSize; | |
| 2058 } | |
| 2059 *fpu_registers = fpu_registers_copy; | |
| 2060 | |
| 2061 ASSERT(sizeof(intptr_t) == kWordSize); | |
| 2062 intptr_t* cpu_registers_copy = new intptr_t[kNumberOfSavedCpuRegisters]; | |
| 2063 ASSERT(cpu_registers_copy != NULL); | |
| 2064 for (intptr_t i = 0; i < kNumberOfSavedCpuRegisters; i++) { | |
| 2065 cpu_registers_copy[i] = | |
| 2066 *reinterpret_cast<intptr_t*>(saved_registers_address); | |
| 2067 saved_registers_address += kWordSize; | |
| 2068 } | |
| 2069 *cpu_registers = cpu_registers_copy; | |
| 2070 } | |
| 2071 #endif | |
| 2072 | |
| 2073 | |
| 2074 // Copies saved registers and caller's frame into temporary buffers. | |
| 2075 // Returns the stack size of unoptimized frame. | |
// The calling code must be optimized, but its function may not have
// optimized code if the code is OSR code, or if the code was invalidated
| 2078 // through class loading/finalization or field guard. | |
| 2079 DEFINE_LEAF_RUNTIME_ENTRY(intptr_t, | |
| 2080 DeoptimizeCopyFrame, | |
| 2081 2, | |
| 2082 uword saved_registers_address, | |
| 2083 uword is_lazy_deopt) { | |
| 2084 #if !defined(DART_PRECOMPILED_RUNTIME) | |
| 2085 Thread* thread = Thread::Current(); | |
| 2086 Isolate* isolate = thread->isolate(); | |
| 2087 StackZone zone(thread); | |
| 2088 HANDLESCOPE(thread); | |
| 2089 | |
| 2090 // All registers have been saved below last-fp as if they were locals. | |
| 2091 const uword last_fp = saved_registers_address + | |
| 2092 (kNumberOfSavedCpuRegisters * kWordSize) + | |
| 2093 (kNumberOfSavedFpuRegisters * kFpuRegisterSize) - | |
| 2094 ((kFirstLocalSlotFromFp + 1) * kWordSize); | |
| 2095 | |
| 2096 // Get optimized code and frame that need to be deoptimized. | |
| 2097 DartFrameIterator iterator(last_fp); | |
| 2098 | |
| 2099 StackFrame* caller_frame = iterator.NextFrame(); | |
| 2100 ASSERT(caller_frame != NULL); | |
| 2101 const Code& optimized_code = Code::Handle(caller_frame->LookupDartCode()); | |
| 2102 ASSERT(optimized_code.is_optimized()); | |
| 2103 const Function& top_function = | |
| 2104 Function::Handle(thread->zone(), optimized_code.function()); | |
| 2105 const bool deoptimizing_code = top_function.HasOptimizedCode(); | |
| 2106 if (FLAG_trace_deoptimization) { | |
| 2107 const Function& function = Function::Handle(optimized_code.function()); | |
| 2108 THR_Print("== Deoptimizing code for '%s', %s, %s\n", | |
| 2109 function.ToFullyQualifiedCString(), | |
| 2110 deoptimizing_code ? "code & frame" : "frame", | |
| 2111 is_lazy_deopt ? "lazy-deopt" : ""); | |
| 2112 } | |
| 2113 | |
| 2114 #if !defined(TARGET_ARCH_DBC) | |
| 2115 if (is_lazy_deopt) { | |
| 2116 uword deopt_pc = isolate->FindPendingDeopt(caller_frame->fp()); | |
| 2117 if (FLAG_trace_deoptimization) { | |
| 2118 THR_Print("Lazy deopt fp=%" Pp " pc=%" Pp "\n", caller_frame->fp(), | |
| 2119 deopt_pc); | |
| 2120 } | |
| 2121 | |
| 2122 // N.B.: Update frame before updating pending deopt table. The profiler | |
| 2123 // may attempt a stack walk in between. | |
| 2124 caller_frame->set_pc(deopt_pc); | |
| 2125 ASSERT(caller_frame->pc() == deopt_pc); | |
| 2126 ASSERT(optimized_code.ContainsInstructionAt(caller_frame->pc())); | |
| 2127 isolate->ClearPendingDeoptsAtOrBelow(caller_frame->fp()); | |
| 2128 } else { | |
| 2129 if (FLAG_trace_deoptimization) { | |
| 2130 THR_Print("Eager deopt fp=%" Pp " pc=%" Pp "\n", caller_frame->fp(), | |
| 2131 caller_frame->pc()); | |
| 2132 } | |
| 2133 } | |
| 2134 #endif // !DBC | |
| 2135 | |
| 2136 // Copy the saved registers from the stack. | |
| 2137 fpu_register_t* fpu_registers; | |
| 2138 intptr_t* cpu_registers; | |
| 2139 CopySavedRegisters(saved_registers_address, &fpu_registers, &cpu_registers); | |
| 2140 | |
| 2141 // Create the DeoptContext. | |
| 2142 DeoptContext* deopt_context = new DeoptContext( | |
| 2143 caller_frame, optimized_code, DeoptContext::kDestIsOriginalFrame, | |
| 2144 fpu_registers, cpu_registers, is_lazy_deopt != 0, deoptimizing_code); | |
| 2145 isolate->set_deopt_context(deopt_context); | |
| 2146 | |
| 2147 // Stack size (FP - SP) in bytes. | |
| 2148 return deopt_context->DestStackAdjustment() * kWordSize; | |
| 2149 #else | |
| 2150 UNREACHABLE(); | |
| 2151 return 0; | |
| 2152 #endif // !DART_PRECOMPILED_RUNTIME | |
| 2153 } | |
| 2154 END_LEAF_RUNTIME_ENTRY | |
| 2155 | |
| 2156 | |
// The stack has been adjusted to fit all values for unoptimized frame.
// Fill the unoptimized frame.
DEFINE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, 1, uword last_fp) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  StackZone zone(thread);
  HANDLESCOPE(thread);

  // The DeoptContext was created by the preceding DeoptimizeCopyFrame call.
  DeoptContext* deopt_context = isolate->deopt_context();
  DartFrameIterator iterator(last_fp);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != NULL);

#if defined(DEBUG)
  {
    // The code from the deopt_context.
    const Code& code = Code::Handle(deopt_context->code());

    // The code from our frame.
    const Code& optimized_code = Code::Handle(caller_frame->LookupDartCode());
    const Function& function = Function::Handle(optimized_code.function());
    ASSERT(!function.IsNull());

    // The code will be the same as before.
    ASSERT(code.raw() == optimized_code.raw());

    // Some sanity checking of the optimized code.
    ASSERT(!optimized_code.IsNull() && optimized_code.is_optimized());
  }
#endif

  // Write the unoptimized frame's values into the stack space that the
  // deoptimization stub reserved based on DeoptimizeCopyFrame's return value.
  deopt_context->set_dest_frame(caller_frame);
  deopt_context->FillDestFrame();

#else
  UNREACHABLE();
#endif  // !DART_PRECOMPILED_RUNTIME
}
END_LEAF_RUNTIME_ENTRY
| 2197 | |
| 2198 | |
// This is the last step in the deoptimization, GC can occur.
// Returns number of bytes to remove from the expression stack of the
// bottom-most deoptimized frame. Those arguments were artificially injected
// under return address to keep them discoverable by GC that can occur during
// materialization phase.
DEFINE_RUNTIME_ENTRY(DeoptimizeMaterialize, 0) {
#if !defined(DART_PRECOMPILED_RUNTIME)
#if defined(DEBUG)
  {
    // We may rendezvous for a safepoint at entry or GC from the allocations
    // below. Check the stack is walkable.
    ValidateFrames();
  }
#endif
  DeoptContext* deopt_context = isolate->deopt_context();
  // Materialization may allocate, hence the GC warning above.
  intptr_t deopt_arg_count = deopt_context->MaterializeDeferredObjects();
  // The deopt context is single-use: detach it from the isolate before
  // deleting so the isolate never points at freed memory.
  isolate->set_deopt_context(NULL);
  delete deopt_context;

  // Return value tells deoptimization stub to remove the given number of bytes
  // from the stack.
  arguments.SetReturn(Smi::Handle(Smi::New(deopt_arg_count * kWordSize)));
#else
  UNREACHABLE();
#endif  // !DART_PRECOMPILED_RUNTIME
}
| 2225 | |
| 2226 | |
// Hands control to the debugger after a deoptimization performed for a
// frame-rewind request. Only available in non-product, JIT builds; the
// debugger call is expected to transfer control and not return.
DEFINE_RUNTIME_ENTRY(RewindPostDeopt, 0) {
#if !defined(DART_PRECOMPILED_RUNTIME)
#if !defined(PRODUCT)
  isolate->debugger()->RewindPostDeopt();
#endif  // !PRODUCT
#endif  // !DART_PRECOMPILED_RUNTIME
  // Reaching this point means the rewind did not take over (or the entry was
  // hit in a configuration that should never invoke it).
  UNREACHABLE();
}
| 2235 | |
| 2236 DEFINE_LEAF_RUNTIME_ENTRY(intptr_t, | |
| 2237 BigintCompare, | |
| 2238 2, | |
| 2239 RawBigint* left, | |
| 2240 RawBigint* right) { | |
| 2241 Thread* thread = Thread::Current(); | |
| 2242 StackZone zone(thread); | |
| 2243 HANDLESCOPE(thread); | |
| 2244 const Bigint& big_left = Bigint::Handle(left); | |
| 2245 const Bigint& big_right = Bigint::Handle(right); | |
| 2246 return big_left.CompareWith(big_right); | |
| 2247 } | |
| 2248 END_LEAF_RUNTIME_ENTRY | |
| 2249 | |
| 2250 | |
| 2251 double DartModulo(double left, double right) { | |
| 2252 double remainder = fmod_ieee(left, right); | |
| 2253 if (remainder == 0.0) { | |
| 2254 // We explicitely switch to the positive 0.0 (just in case it was negative). | |
| 2255 remainder = +0.0; | |
| 2256 } else if (remainder < 0.0) { | |
| 2257 if (right < 0) { | |
| 2258 remainder -= right; | |
| 2259 } else { | |
| 2260 remainder += right; | |
| 2261 } | |
| 2262 } | |
| 2263 return remainder; | |
| 2264 } | |
| 2265 | |
| 2266 | |
| 2267 // Update global type feedback recorded for a field recording the assignment | |
| 2268 // of the given value. | |
| 2269 // Arg0: Field object; | |
| 2270 // Arg1: Value that is being stored. | |
| 2271 DEFINE_RUNTIME_ENTRY(UpdateFieldCid, 2) { | |
| 2272 const Field& field = Field::CheckedHandle(arguments.ArgAt(0)); | |
| 2273 const Object& value = Object::Handle(arguments.ArgAt(1)); | |
| 2274 field.RecordStore(value); | |
| 2275 } | |
| 2276 | |
| 2277 | |
| 2278 DEFINE_RUNTIME_ENTRY(InitStaticField, 1) { | |
| 2279 const Field& field = Field::CheckedHandle(arguments.ArgAt(0)); | |
| 2280 field.EvaluateInitializer(); | |
| 2281 } | |
| 2282 | |
| 2283 | |
| 2284 DEFINE_RUNTIME_ENTRY(GrowRegExpStack, 1) { | |
| 2285 const Array& typed_data_cell = Array::CheckedHandle(arguments.ArgAt(0)); | |
| 2286 ASSERT(!typed_data_cell.IsNull() && typed_data_cell.Length() == 1); | |
| 2287 const TypedData& old_data = TypedData::CheckedHandle(typed_data_cell.At(0)); | |
| 2288 ASSERT(!old_data.IsNull()); | |
| 2289 const intptr_t cid = old_data.GetClassId(); | |
| 2290 const intptr_t old_size = old_data.Length(); | |
| 2291 const intptr_t new_size = 2 * old_size; | |
| 2292 const intptr_t elm_size = old_data.ElementSizeInBytes(); | |
| 2293 const TypedData& new_data = | |
| 2294 TypedData::Handle(TypedData::New(cid, new_size, Heap::kOld)); | |
| 2295 TypedData::Copy(new_data, 0, old_data, 0, old_size * elm_size); | |
| 2296 typed_data_cell.SetAt(0, new_data); | |
| 2297 arguments.SetReturn(new_data); | |
| 2298 } | |
| 2299 | |
| 2300 | |
| 2301 } // namespace dart | |
| OLD | NEW |