OLD | NEW |
1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/runtime_entry.h" | 5 #include "vm/runtime_entry.h" |
6 | 6 |
7 #include "vm/object.h" | 7 #include "vm/assembler.h" |
| 8 #include "vm/ast.h" |
| 9 #include "vm/code_patcher.h" |
| 10 #include "vm/compiler.h" |
| 11 #include "vm/dart_api_impl.h" |
| 12 #include "vm/dart_entry.h" |
| 13 #include "vm/debugger.h" |
| 14 #include "vm/deopt_instructions.h" |
| 15 #include "vm/exceptions.h" |
| 16 #include "vm/flags.h" |
| 17 #include "vm/object_store.h" |
| 18 #include "vm/message.h" |
| 19 #include "vm/message_handler.h" |
| 20 #include "vm/parser.h" |
| 21 #include "vm/resolver.h" |
| 22 #include "vm/service_isolate.h" |
| 23 #include "vm/stack_frame.h" |
8 #include "vm/symbols.h" | 24 #include "vm/symbols.h" |
| 25 #include "vm/thread_registry.h" |
9 #include "vm/verifier.h" | 26 #include "vm/verifier.h" |
10 | 27 |
11 namespace dart { | 28 namespace dart { |
12 | 29 |
| 30 DEFINE_FLAG( |
| 31 int, |
| 32 max_subtype_cache_entries, |
| 33 100, |
| 34 "Maximum number of subtype cache entries (number of checks cached)."); |
| 35 DEFINE_FLAG( |
| 36 int, |
| 37 regexp_optimization_counter_threshold, |
| 38 1000, |
| 39 "RegExp's usage-counter value before it is optimized, -1 means never"); |
| 40 DEFINE_FLAG(int, |
| 41 reoptimization_counter_threshold, |
| 42 4000, |
| 43 "Counter threshold before a function gets reoptimized."); |
| 44 DEFINE_FLAG(bool, trace_deoptimization, false, "Trace deoptimization"); |
| 45 DEFINE_FLAG(bool, |
| 46 trace_deoptimization_verbose, |
| 47 false, |
| 48 "Trace deoptimization verbose"); |
| 49 DEFINE_FLAG(bool, trace_ic, false, "Trace IC handling"); |
| 50 DEFINE_FLAG(bool, |
| 51 trace_ic_miss_in_optimized, |
| 52 false, |
| 53 "Trace IC miss in optimized code"); |
| 54 DEFINE_FLAG(bool, |
| 55 trace_optimized_ic_calls, |
| 56 false, |
| 57 "Trace IC calls in optimized code."); |
| 58 DEFINE_FLAG(bool, trace_patching, false, "Trace patching of code."); |
| 59 DEFINE_FLAG(bool, trace_runtime_calls, false, "Trace runtime calls"); |
| 60 DEFINE_FLAG(bool, trace_type_checks, false, "Trace runtime type checks."); |
| 61 |
| 62 DECLARE_FLAG(int, max_deoptimization_counter_threshold); |
| 63 DECLARE_FLAG(bool, enable_inlining_annotations); |
| 64 DECLARE_FLAG(bool, trace_compiler); |
| 65 DECLARE_FLAG(bool, trace_optimizing_compiler); |
| 66 DECLARE_FLAG(int, max_polymorphic_checks); |
| 67 |
| 68 DEFINE_FLAG(bool, trace_osr, false, "Trace attempts at on-stack replacement."); |
| 69 |
| 70 DEFINE_FLAG(int, |
| 71 stacktrace_every, |
| 72 0, |
| 73 "Compute debugger stacktrace on every N stack overflow checks"); |
| 74 DEFINE_FLAG(charp, |
| 75 stacktrace_filter, |
| 76 NULL, |
| 77 "Compute stacktrace in named function on stack overflow checks"); |
| 78 DEFINE_FLAG(charp, |
| 79 deoptimize_filter, |
| 80 NULL, |
| 81 "Deoptimize in named function on stack overflow checks"); |
| 82 |
| 83 DECLARE_FLAG(int, reload_every); |
| 84 DECLARE_FLAG(bool, reload_every_optimized); |
| 85 DECLARE_FLAG(bool, reload_every_back_off); |
| 86 |
| 87 #ifdef DEBUG |
| 88 DEFINE_FLAG(charp, |
| 89 gc_at_instance_allocation, |
| 90 NULL, |
| 91 "Perform a GC before allocation of instances of " |
| 92 "the specified class"); |
| 93 #endif |
| 94 |
| 95 |
13 #if defined(TESTING) || defined(DEBUG) | 96 #if defined(TESTING) || defined(DEBUG) |
14 void VerifyOnTransition() { | 97 void VerifyOnTransition() { |
15 Thread* thread = Thread::Current(); | 98 Thread* thread = Thread::Current(); |
16 TransitionGeneratedToVM transition(thread); | 99 TransitionGeneratedToVM transition(thread); |
17 thread->isolate()->heap()->WaitForSweeperTasks(thread); | 100 thread->isolate()->heap()->WaitForSweeperTasks(thread); |
18 SafepointOperationScope safepoint_scope(thread); | 101 SafepointOperationScope safepoint_scope(thread); |
19 VerifyPointersVisitor::VerifyPointers(); | 102 VerifyPointersVisitor::VerifyPointers(); |
20 thread->isolate()->heap()->Verify(); | 103 thread->isolate()->heap()->Verify(); |
21 } | 104 } |
22 #endif | 105 #endif |
(...skipping 13 matching lines...) |
36 function_name, RawFunction::kRegularFunction, true, false, false, false, | 119 function_name, RawFunction::kRegularFunction, true, false, false, false, |
37 false, owner_class, TokenPosition::kMinSource)); | 120 false, owner_class, TokenPosition::kMinSource)); |
38 const Array& functions = Array::Handle(Array::New(1)); | 121 const Array& functions = Array::Handle(Array::New(1)); |
39 functions.SetAt(0, function); | 122 functions.SetAt(0, function); |
40 owner_class.SetFunctions(functions); | 123 owner_class.SetFunctions(functions); |
41 lib.AddClass(owner_class); | 124 lib.AddClass(owner_class); |
42 function.AttachCode(code); | 125 function.AttachCode(code); |
43 return function; | 126 return function; |
44 } | 127 } |
45 | 128 |
| 129 |
| 130 DEFINE_RUNTIME_ENTRY(TraceFunctionEntry, 1) { |
| 131 const Function& function = Function::CheckedHandle(arguments.ArgAt(0)); |
| 132 const String& function_name = String::Handle(function.name()); |
| 133 const String& class_name = |
| 134 String::Handle(Class::Handle(function.Owner()).Name()); |
| 135 OS::PrintErr("> Entering '%s.%s'\n", class_name.ToCString(), |
| 136 function_name.ToCString()); |
| 137 } |
| 138 |
| 139 |
| 140 DEFINE_RUNTIME_ENTRY(TraceFunctionExit, 1) { |
| 141 const Function& function = Function::CheckedHandle(arguments.ArgAt(0)); |
| 142 const String& function_name = String::Handle(function.name()); |
| 143 const String& class_name = |
| 144 String::Handle(Class::Handle(function.Owner()).Name()); |
| 145 OS::PrintErr("< Exiting '%s.%s'\n", class_name.ToCString(), |
| 146 function_name.ToCString()); |
| 147 } |
| 148 |
| 149 |
| 150 DEFINE_RUNTIME_ENTRY(RangeError, 2) { |
| 151 const Instance& length = Instance::CheckedHandle(arguments.ArgAt(0)); |
| 152 const Instance& index = Instance::CheckedHandle(arguments.ArgAt(1)); |
| 153 if (!length.IsInteger()) { |
| 154 // Throw: new ArgumentError.value(length, "length", "is not an integer"); |
| 155 const Array& args = Array::Handle(Array::New(3)); |
| 156 args.SetAt(0, length); |
| 157 args.SetAt(1, Symbols::Length()); |
| 158 args.SetAt(2, String::Handle(String::New("is not an integer"))); |
| 159 Exceptions::ThrowByType(Exceptions::kArgumentValue, args); |
| 160 } |
| 161 if (!index.IsInteger()) { |
| 162 // Throw: new ArgumentError.value(index, "index", "is not an integer"); |
| 163 const Array& args = Array::Handle(Array::New(3)); |
| 164 args.SetAt(0, index); |
| 165 args.SetAt(1, Symbols::Index()); |
| 166 args.SetAt(2, String::Handle(String::New("is not an integer"))); |
| 167 Exceptions::ThrowByType(Exceptions::kArgumentValue, args); |
| 168 } |
| 169 // Throw: new RangeError.range(index, 0, length, "length"); |
| 170 const Array& args = Array::Handle(Array::New(4)); |
| 171 args.SetAt(0, index); |
| 172 args.SetAt(1, Integer::Handle(Integer::New(0))); |
| 173 args.SetAt(2, length); |
| 174 args.SetAt(3, Symbols::Length()); |
| 175 Exceptions::ThrowByType(Exceptions::kRange, args); |
| 176 } |
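As a point of reference, a minimal hypothetical Dart snippet (not from this file) showing the user-level failure this entry reports; in optimized code with inlined bounds checks, an out-of-range index on a fixed-length list ends up on a path like the one above.

    main() {
      var list = new List(3);   // fixed-length list, valid indices 0..2
      try {
        print(list[5]);         // out of range
      } catch (e) {
        print(e);               // RangeError naming the index and the valid range
      }
    }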
| 177 |
| 178 |
 | 179 // Allocation of a fixed-length array of the given element type. |
 | 180 // This runtime entry is never called for allocating a List of a generic type, |
 | 181 // because a prior runtime call instantiates the element type if necessary. |
| 182 // Arg0: array length. |
| 183 // Arg1: array type arguments, i.e. vector of 1 type, the element type. |
| 184 // Return value: newly allocated array of length arg0. |
| 185 DEFINE_RUNTIME_ENTRY(AllocateArray, 2) { |
| 186 const Instance& length = Instance::CheckedHandle(arguments.ArgAt(0)); |
| 187 if (!length.IsInteger()) { |
| 188 // Throw: new ArgumentError.value(length, "length", "is not an integer"); |
| 189 const Array& args = Array::Handle(Array::New(3)); |
| 190 args.SetAt(0, length); |
| 191 args.SetAt(1, Symbols::Length()); |
| 192 args.SetAt(2, String::Handle(String::New("is not an integer"))); |
| 193 Exceptions::ThrowByType(Exceptions::kArgumentValue, args); |
| 194 } |
| 195 if (length.IsSmi()) { |
| 196 const intptr_t len = Smi::Cast(length).Value(); |
| 197 if ((len >= 0) && (len <= Array::kMaxElements)) { |
| 198 const Array& array = Array::Handle(Array::New(len, Heap::kNew)); |
| 199 arguments.SetReturn(array); |
| 200 TypeArguments& element_type = |
| 201 TypeArguments::CheckedHandle(arguments.ArgAt(1)); |
| 202 // An Array is raw or takes one type argument. However, its type argument |
| 203 // vector may be longer than 1 due to a type optimization reusing the type |
| 204 // argument vector of the instantiator. |
| 205 ASSERT(element_type.IsNull() || |
| 206 ((element_type.Length() >= 1) && element_type.IsInstantiated())); |
| 207 array.SetTypeArguments(element_type); // May be null. |
| 208 return; |
| 209 } |
| 210 } |
| 211 // Throw: new RangeError.range(length, 0, Array::kMaxElements, "length"); |
| 212 const Array& args = Array::Handle(Array::New(4)); |
| 213 args.SetAt(0, length); |
| 214 args.SetAt(1, Integer::Handle(Integer::New(0))); |
| 215 args.SetAt(2, Integer::Handle(Integer::New(Array::kMaxElements))); |
| 216 args.SetAt(3, Symbols::Length()); |
| 217 Exceptions::ThrowByType(Exceptions::kRange, args); |
| 218 } |
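For illustration only, a hypothetical Dart snippet of allocations matching this entry's two arguments (the length and the one-element type argument vector); the exact lowering is the VM's concern, so treat this as a sketch.

    main() {
      var untyped = new List(3);     // element type dynamic (null type arguments)
      var typed = new List<int>(3);  // <int> as the one-element type argument vector
      print(untyped.length + typed.length);  // 6
      // A length outside 0..Array::kMaxElements is rejected with a RangeError
      // rather than allocated.
    }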
| 219 |
| 220 |
| 221 // Helper returning the token position of the Dart caller. |
| 222 static TokenPosition GetCallerLocation() { |
| 223 DartFrameIterator iterator; |
| 224 StackFrame* caller_frame = iterator.NextFrame(); |
| 225 ASSERT(caller_frame != NULL); |
| 226 return caller_frame->GetTokenPos(); |
| 227 } |
| 228 |
| 229 |
| 230 // Allocate a new object. |
| 231 // Arg0: class of the object that needs to be allocated. |
| 232 // Arg1: type arguments of the object that needs to be allocated. |
| 233 // Return value: newly allocated object. |
| 234 DEFINE_RUNTIME_ENTRY(AllocateObject, 2) { |
| 235 const Class& cls = Class::CheckedHandle(arguments.ArgAt(0)); |
| 236 |
| 237 #ifdef DEBUG |
| 238 if (FLAG_gc_at_instance_allocation != NULL) { |
| 239 const String& name = String::Handle(cls.Name()); |
| 240 if (String::EqualsIgnoringPrivateKey( |
| 241 name, |
| 242 String::Handle(String::New(FLAG_gc_at_instance_allocation)))) { |
| 243 Isolate::Current()->heap()->CollectAllGarbage(); |
| 244 } |
| 245 } |
| 246 #endif |
| 247 Heap::Space space = Heap::kNew; |
| 248 const Instance& instance = Instance::Handle(Instance::New(cls, space)); |
| 249 |
| 250 arguments.SetReturn(instance); |
| 251 if (cls.NumTypeArguments() == 0) { |
| 252 // No type arguments required for a non-parameterized type. |
| 253 ASSERT(Instance::CheckedHandle(arguments.ArgAt(1)).IsNull()); |
| 254 return; |
| 255 } |
| 256 TypeArguments& type_arguments = |
| 257 TypeArguments::CheckedHandle(arguments.ArgAt(1)); |
| 258 // Unless null (for a raw type), the type argument vector may be longer than |
| 259 // necessary due to a type optimization reusing the type argument vector of |
| 260 // the instantiator. |
| 261 ASSERT(type_arguments.IsNull() || |
| 262 (type_arguments.IsInstantiated() && |
| 263 (type_arguments.Length() >= cls.NumTypeArguments()))); |
| 264 instance.SetTypeArguments(type_arguments); |
| 265 } |
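A hypothetical Dart sketch of the two cases this entry distinguishes: a non-parameterized class (null type arguments) and a parameterized one (instantiated type argument vector). The class names are invented for illustration.

    class Point {
      int x = 0, y = 0;
    }
    class Box<T> {
      T value;
    }
    main() {
      var p = new Point();     // no type arguments required
      var b = new Box<int>();  // carries the instantiated <int> vector
      print('${p.x} ${b.value}');  // 0 null
    }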
| 266 |
| 267 |
| 268 // Instantiate type. |
| 269 // Arg0: uninstantiated type. |
| 270 // Arg1: instantiator type arguments. |
| 271 // Arg2: function type arguments. |
| 272 // Return value: instantiated type. |
| 273 DEFINE_RUNTIME_ENTRY(InstantiateType, 3) { |
| 274 AbstractType& type = AbstractType::CheckedHandle(zone, arguments.ArgAt(0)); |
| 275 const TypeArguments& instantiator_type_arguments = |
| 276 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1)); |
| 277 const TypeArguments& function_type_arguments = |
| 278 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2)); |
| 279 ASSERT(!type.IsNull() && !type.IsInstantiated()); |
| 280 ASSERT(instantiator_type_arguments.IsNull() || |
| 281 instantiator_type_arguments.IsInstantiated()); |
| 282 ASSERT(function_type_arguments.IsNull() || |
| 283 function_type_arguments.IsInstantiated()); |
| 284 Error& bound_error = Error::Handle(zone); |
| 285 type = |
| 286 type.InstantiateFrom(instantiator_type_arguments, function_type_arguments, |
| 287 &bound_error, NULL, NULL, Heap::kOld); |
| 288 if (!bound_error.IsNull()) { |
| 289 // Throw a dynamic type error. |
| 290 const TokenPosition location = GetCallerLocation(); |
| 291 String& bound_error_message = |
| 292 String::Handle(zone, String::New(bound_error.ToErrorCString())); |
| 293 Exceptions::CreateAndThrowTypeError(location, AbstractType::Handle(zone), |
| 294 AbstractType::Handle(zone), |
| 295 Symbols::Empty(), bound_error_message); |
| 296 UNREACHABLE(); |
| 297 } |
| 298 if (type.IsTypeRef()) { |
| 299 type = TypeRef::Cast(type).type(); |
| 300 ASSERT(!type.IsTypeRef()); |
| 301 ASSERT(type.IsCanonical()); |
| 302 } |
| 303 ASSERT(!type.IsNull() && type.IsInstantiated()); |
| 304 arguments.SetReturn(type); |
| 305 } |
| 306 |
| 307 |
| 308 // Instantiate type arguments. |
| 309 // Arg0: uninstantiated type arguments. |
| 310 // Arg1: instantiator type arguments. |
| 311 // Arg2: function type arguments. |
| 312 // Return value: instantiated type arguments. |
| 313 DEFINE_RUNTIME_ENTRY(InstantiateTypeArguments, 3) { |
| 314 TypeArguments& type_arguments = |
| 315 TypeArguments::CheckedHandle(zone, arguments.ArgAt(0)); |
| 316 const TypeArguments& instantiator_type_arguments = |
| 317 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1)); |
| 318 const TypeArguments& function_type_arguments = |
| 319 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2)); |
| 320 ASSERT(!type_arguments.IsNull() && !type_arguments.IsInstantiated()); |
| 321 ASSERT(instantiator_type_arguments.IsNull() || |
| 322 instantiator_type_arguments.IsInstantiated()); |
| 323 ASSERT(function_type_arguments.IsNull() || |
| 324 function_type_arguments.IsInstantiated()); |
| 325 // Code inlined in the caller should have optimized the case where the |
| 326 // instantiator can be reused as type argument vector. |
| 327 ASSERT(!type_arguments.IsUninstantiatedIdentity()); |
| 328 if (isolate->type_checks()) { |
| 329 Error& bound_error = Error::Handle(zone); |
| 330 type_arguments = type_arguments.InstantiateAndCanonicalizeFrom( |
| 331 instantiator_type_arguments, function_type_arguments, &bound_error); |
| 332 if (!bound_error.IsNull()) { |
| 333 // Throw a dynamic type error. |
| 334 const TokenPosition location = GetCallerLocation(); |
| 335 String& bound_error_message = |
| 336 String::Handle(zone, String::New(bound_error.ToErrorCString())); |
| 337 Exceptions::CreateAndThrowTypeError( |
| 338 location, AbstractType::Handle(zone), AbstractType::Handle(zone), |
| 339 Symbols::Empty(), bound_error_message); |
| 340 UNREACHABLE(); |
| 341 } |
| 342 } else { |
| 343 type_arguments = type_arguments.InstantiateAndCanonicalizeFrom( |
| 344 instantiator_type_arguments, function_type_arguments, NULL); |
| 345 } |
| 346 ASSERT(type_arguments.IsNull() || type_arguments.IsInstantiated()); |
| 347 arguments.SetReturn(type_arguments); |
| 348 } |
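A hypothetical Dart sketch of code that needs run-time instantiation: inside a generic class, T is only known per instance, so building a List<T> (and testing against T) works off the receiver's instantiator type arguments, roughly what the two entries above provide.

    class Box<T> {
      List<T> emptyList() => new List<T>();  // needs <T> instantiated at run time
      bool holds(Object o) => o is T;        // type parameter used in a type test
    }
    main() {
      print(new Box<int>().emptyList());  // []
      print(new Box<int>().holds(3));     // true
    }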
| 349 |
| 350 |
| 351 // Allocate a new context large enough to hold the given number of variables. |
| 352 // Arg0: number of variables. |
| 353 // Return value: newly allocated context. |
| 354 DEFINE_RUNTIME_ENTRY(AllocateContext, 1) { |
| 355 const Smi& num_variables = Smi::CheckedHandle(zone, arguments.ArgAt(0)); |
| 356 arguments.SetReturn(Context::Handle(Context::New(num_variables.Value()))); |
| 357 } |
| 358 |
| 359 |
| 360 // Make a copy of the given context, including the values of the captured |
| 361 // variables. |
| 362 // Arg0: the context to be cloned. |
| 363 // Return value: newly allocated context. |
| 364 DEFINE_RUNTIME_ENTRY(CloneContext, 1) { |
| 365 const Context& ctx = Context::CheckedHandle(zone, arguments.ArgAt(0)); |
| 366 Context& cloned_ctx = |
| 367 Context::Handle(zone, Context::New(ctx.num_variables())); |
| 368 cloned_ctx.set_parent(Context::Handle(ctx.parent())); |
| 369 Object& inst = Object::Handle(zone); |
| 370 for (int i = 0; i < ctx.num_variables(); i++) { |
| 371 inst = ctx.At(i); |
| 372 cloned_ctx.SetAt(i, inst); |
| 373 } |
| 374 arguments.SetReturn(cloned_ctx); |
| 375 } |
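A hypothetical Dart sketch of where cloned contexts surface for users: a loop variable captured by closures gets a fresh copy per iteration, which is (loosely) the cloning performed above.

    main() {
      var closures = [];
      for (var i = 0; i < 3; i++) {
        closures.add(() => i);  // captures i; each iteration keeps its own value
      }
      print(closures.map((f) => f()).toList());  // [0, 1, 2]
    }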
| 376 |
| 377 |
| 378 // Helper routine for tracing a type check. |
| 379 static void PrintTypeCheck(const char* message, |
| 380 const Instance& instance, |
| 381 const AbstractType& type, |
| 382 const TypeArguments& instantiator_type_arguments, |
| 383 const TypeArguments& function_type_arguments, |
| 384 const Bool& result) { |
| 385 DartFrameIterator iterator; |
| 386 StackFrame* caller_frame = iterator.NextFrame(); |
| 387 ASSERT(caller_frame != NULL); |
| 388 |
| 389 const AbstractType& instance_type = |
| 390 AbstractType::Handle(instance.GetType(Heap::kNew)); |
| 391 ASSERT(instance_type.IsInstantiated()); |
| 392 if (type.IsInstantiated()) { |
| 393 OS::PrintErr("%s: '%s' %" Pd " %s '%s' %" Pd " (pc: %#" Px ").\n", message, |
| 394 String::Handle(instance_type.Name()).ToCString(), |
| 395 Class::Handle(instance_type.type_class()).id(), |
| 396 (result.raw() == Bool::True().raw()) ? "is" : "is !", |
| 397 String::Handle(type.Name()).ToCString(), |
| 398 Class::Handle(type.type_class()).id(), caller_frame->pc()); |
| 399 } else { |
| 400 // Instantiate type before printing. |
| 401 Error& bound_error = Error::Handle(); |
| 402 const AbstractType& instantiated_type = |
| 403 AbstractType::Handle(type.InstantiateFrom( |
| 404 instantiator_type_arguments, function_type_arguments, &bound_error, |
| 405 NULL, NULL, Heap::kOld)); |
| 406 OS::PrintErr("%s: '%s' %s '%s' instantiated from '%s' (pc: %#" Px ").\n", |
| 407 message, String::Handle(instance_type.Name()).ToCString(), |
| 408 (result.raw() == Bool::True().raw()) ? "is" : "is !", |
| 409 String::Handle(instantiated_type.Name()).ToCString(), |
| 410 String::Handle(type.Name()).ToCString(), caller_frame->pc()); |
| 411 if (!bound_error.IsNull()) { |
| 412 OS::Print(" bound error: %s\n", bound_error.ToErrorCString()); |
| 413 } |
| 414 } |
| 415 const Function& function = |
| 416 Function::Handle(caller_frame->LookupDartFunction()); |
| 417 OS::PrintErr(" -> Function %s\n", function.ToFullyQualifiedCString()); |
| 418 } |
| 419 |
| 420 |
| 421 // This updates the type test cache, an array containing 5-value elements |
| 422 // (instance class (or function if the instance is a closure), instance type |
| 423 // arguments, instantiator type arguments, function type arguments, |
| 424 // and test_result). It can be applied to classes with type arguments in which |
| 425 // case it contains just the result of the class subtype test, not including the |
| 426 // evaluation of type arguments. |
| 427 // This operation is currently very slow (lookup of code is not efficient yet). |
| 428 static void UpdateTypeTestCache( |
| 429 const Instance& instance, |
| 430 const AbstractType& type, |
| 431 const TypeArguments& instantiator_type_arguments, |
| 432 const TypeArguments& function_type_arguments, |
| 433 const Bool& result, |
| 434 const SubtypeTestCache& new_cache) { |
| 435 // Since the test is expensive, don't do it unless necessary. |
| 436 // The list of disallowed cases will decrease as they are implemented in |
| 437 // inlined assembly. |
| 438 if (new_cache.IsNull()) { |
| 439 if (FLAG_trace_type_checks) { |
| 440 OS::Print("UpdateTypeTestCache: cache is null\n"); |
| 441 } |
| 442 return; |
| 443 } |
| 444 if (instance.IsSmi()) { |
| 445 if (FLAG_trace_type_checks) { |
| 446 OS::Print("UpdateTypeTestCache: instance is Smi\n"); |
| 447 } |
| 448 return; |
| 449 } |
| 450 // If the type is uninstantiated and refers to parent function type |
| 451 // parameters, the function_type_arguments may not have been canonicalized |
| 452 // when concatenated. The optimization still works, but the cache could grow |
| 453 // uncontrollably. For now, do not update the cache in this case. |
| 454 // TODO(regis): Revisit. |
| 455 if (!function_type_arguments.IsNull() && |
| 456 !function_type_arguments.IsCanonical()) { |
| 457 if (FLAG_trace_type_checks) { |
| 458 OS::Print( |
| 459 "UpdateTypeTestCache: function_type_arguments is not canonical\n"); |
| 460 } |
| 461 return; |
| 462 } |
| 463 const Class& instance_class = Class::Handle(instance.clazz()); |
| 464 Object& instance_class_id_or_function = Object::Handle(); |
| 465 TypeArguments& instance_type_arguments = TypeArguments::Handle(); |
| 466 if (instance_class.IsClosureClass()) { |
| 467 // If the closure instance is generic, we cannot perform the optimization, |
| 468 // because one more input (function_type_arguments) would need to be |
| 469 // considered. For now, only perform the optimization if the closure's |
| 470 // function_type_arguments is null, meaning the closure function is not |
| 471 // generic. |
| 472 // TODO(regis): In addition to null (non-generic closure), we should also |
| 473 // accept Object::empty_type_arguments() (non-nested generic closure). |
| 474 // In that case, update stubs and simulator_dbc accordingly. |
| 475 if (Closure::Cast(instance).function_type_arguments() != |
| 476 TypeArguments::null()) { |
| 477 if (FLAG_trace_type_checks) { |
| 478 OS::Print( |
| 479 "UpdateTypeTestCache: closure function_type_arguments is " |
| 480 "not null\n"); |
| 481 } |
| 482 return; |
| 483 } |
| 484 instance_class_id_or_function = Closure::Cast(instance).function(); |
| 485 instance_type_arguments = |
| 486 Closure::Cast(instance).instantiator_type_arguments(); |
| 487 } else { |
| 488 instance_class_id_or_function = Smi::New(instance_class.id()); |
| 489 if (instance_class.NumTypeArguments() > 0) { |
| 490 instance_type_arguments = instance.GetTypeArguments(); |
| 491 } |
| 492 } |
| 493 const intptr_t len = new_cache.NumberOfChecks(); |
| 494 if (len >= FLAG_max_subtype_cache_entries) { |
| 495 return; |
| 496 } |
| 497 #if defined(DEBUG) |
| 498 ASSERT(instance_type_arguments.IsNull() || |
| 499 instance_type_arguments.IsCanonical()); |
| 500 ASSERT(instantiator_type_arguments.IsNull() || |
| 501 instantiator_type_arguments.IsCanonical()); |
| 502 ASSERT(function_type_arguments.IsNull() || |
| 503 function_type_arguments.IsCanonical()); |
| 504 Object& last_instance_class_id_or_function = Object::Handle(); |
| 505 TypeArguments& last_instance_type_arguments = TypeArguments::Handle(); |
| 506 TypeArguments& last_instantiator_type_arguments = TypeArguments::Handle(); |
| 507 TypeArguments& last_function_type_arguments = TypeArguments::Handle(); |
| 508 Bool& last_result = Bool::Handle(); |
| 509 for (intptr_t i = 0; i < len; ++i) { |
| 510 new_cache.GetCheck(i, &last_instance_class_id_or_function, |
| 511 &last_instance_type_arguments, |
| 512 &last_instantiator_type_arguments, |
| 513 &last_function_type_arguments, &last_result); |
| 514 if ((last_instance_class_id_or_function.raw() == |
| 515 instance_class_id_or_function.raw()) && |
| 516 (last_instance_type_arguments.raw() == instance_type_arguments.raw()) && |
| 517 (last_instantiator_type_arguments.raw() == |
| 518 instantiator_type_arguments.raw()) && |
| 519 (last_function_type_arguments.raw() == |
 | 520 function_type_arguments.raw())) { |
| 521 OS::PrintErr(" Error in test cache %p ix: %" Pd ",", new_cache.raw(), i); |
| 522 PrintTypeCheck(" duplicate cache entry", instance, type, |
| 523 instantiator_type_arguments, function_type_arguments, |
| 524 result); |
| 525 UNREACHABLE(); |
| 526 return; |
| 527 } |
| 528 } |
| 529 #endif |
| 530 new_cache.AddCheck(instance_class_id_or_function, instance_type_arguments, |
| 531 instantiator_type_arguments, function_type_arguments, |
| 532 result); |
| 533 if (FLAG_trace_type_checks) { |
| 534 AbstractType& test_type = AbstractType::Handle(type.raw()); |
| 535 if (!test_type.IsInstantiated()) { |
| 536 Error& bound_error = Error::Handle(); |
| 537 test_type = type.InstantiateFrom(instantiator_type_arguments, |
| 538 function_type_arguments, &bound_error, |
| 539 NULL, NULL, Heap::kNew); |
| 540 ASSERT(bound_error.IsNull()); // Malbounded types are not optimized. |
| 541 } |
| 542 OS::PrintErr( |
| 543 " Updated test cache %p ix: %" Pd |
| 544 " with " |
| 545 "(cid-or-fun: %p, type-args: %p, i-type-args: %p, f-type-args: %p, " |
| 546 "result: %s)\n" |
| 547 " instance [class: (%p '%s' cid: %" Pd |
| 548 "), type-args: %p %s]\n" |
| 549 " test-type [class: (%p '%s' cid: %" Pd |
| 550 "), i-type-args: %p %s, " |
| 551 ", f-type-args: %p %s]\n", |
| 552 new_cache.raw(), len, |
| 553 |
| 554 instance_class_id_or_function.raw(), instance_type_arguments.raw(), |
 | 555 instantiator_type_arguments.raw(), function_type_arguments.raw(), |
| 556 result.ToCString(), |
| 557 |
| 558 instance_class.raw(), String::Handle(instance_class.Name()).ToCString(), |
| 559 instance_class.id(), instance_type_arguments.raw(), |
| 560 instance_type_arguments.ToCString(), |
| 561 |
| 562 test_type.type_class(), |
| 563 String::Handle(Class::Handle(test_type.type_class()).Name()) |
| 564 .ToCString(), |
| 565 Class::Handle(test_type.type_class()).id(), |
| 566 instantiator_type_arguments.raw(), |
| 567 instantiator_type_arguments.ToCString(), |
 | 568 function_type_arguments.raw(), |
 | 569 function_type_arguments.ToCString()); |
| 570 } |
| 571 } |
| 572 |
| 573 |
| 574 // Check that the given instance is an instance of the given type. |
| 575 // Tested instance may not be null, because the null test is inlined. |
| 576 // Arg0: instance being checked. |
| 577 // Arg1: type. |
| 578 // Arg2: type arguments of the instantiator of the type. |
| 579 // Arg3: type arguments of the function of the type. |
| 580 // Arg4: SubtypeTestCache. |
| 581 // Return value: true or false, or may throw a type error in checked mode. |
| 582 DEFINE_RUNTIME_ENTRY(Instanceof, 5) { |
| 583 const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 584 const AbstractType& type = |
| 585 AbstractType::CheckedHandle(zone, arguments.ArgAt(1)); |
| 586 const TypeArguments& instantiator_type_arguments = |
| 587 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2)); |
| 588 const TypeArguments& function_type_arguments = |
| 589 TypeArguments::CheckedHandle(zone, arguments.ArgAt(3)); |
| 590 const SubtypeTestCache& cache = |
| 591 SubtypeTestCache::CheckedHandle(zone, arguments.ArgAt(4)); |
| 592 ASSERT(type.IsFinalized()); |
| 593 ASSERT(!type.IsMalformed()); // Already checked in code generator. |
| 594 ASSERT(!type.IsMalbounded()); // Already checked in code generator. |
| 595 ASSERT(!type.IsDynamicType()); // No need to check assignment. |
| 596 Error& bound_error = Error::Handle(zone); |
| 597 const Bool& result = |
| 598 Bool::Get(instance.IsInstanceOf(type, instantiator_type_arguments, |
| 599 function_type_arguments, &bound_error)); |
| 600 if (FLAG_trace_type_checks) { |
| 601 PrintTypeCheck("InstanceOf", instance, type, instantiator_type_arguments, |
| 602 function_type_arguments, result); |
| 603 } |
| 604 if (!result.value() && !bound_error.IsNull()) { |
| 605 // Throw a dynamic type error only if the instanceof test fails. |
| 606 const TokenPosition location = GetCallerLocation(); |
| 607 String& bound_error_message = |
| 608 String::Handle(zone, String::New(bound_error.ToErrorCString())); |
| 609 Exceptions::CreateAndThrowTypeError(location, AbstractType::Handle(zone), |
| 610 AbstractType::Handle(zone), |
| 611 Symbols::Empty(), bound_error_message); |
| 612 UNREACHABLE(); |
| 613 } |
| 614 UpdateTypeTestCache(instance, type, instantiator_type_arguments, |
| 615 function_type_arguments, result, cache); |
| 616 arguments.SetReturn(result); |
| 617 } |
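A hypothetical Dart sketch of the 'is' tests that end up here once the inlined fast paths and the SubtypeTestCache miss; note that the receiver is never null on this path, per the comment above.

    bool isScores(Object o) => o is Map<String, int>;
    main() {
      print(isScores(<String, int>{'a': 1}));  // true
      print(isScores(<int, int>{1: 1}));       // false
    }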
| 618 |
| 619 |
| 620 // Check that the type of the given instance is a subtype of the given type and |
| 621 // can therefore be assigned. |
| 622 // Arg0: instance being assigned. |
| 623 // Arg1: type being assigned to. |
| 624 // Arg2: type arguments of the instantiator of the type being assigned to. |
| 625 // Arg3: type arguments of the function of the type being assigned to. |
| 626 // Arg4: name of variable being assigned to. |
| 627 // Arg5: SubtypeTestCache. |
| 628 // Return value: instance if a subtype, otherwise throw a TypeError. |
| 629 DEFINE_RUNTIME_ENTRY(TypeCheck, 6) { |
| 630 const Instance& src_instance = |
| 631 Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 632 AbstractType& dst_type = |
| 633 AbstractType::CheckedHandle(zone, arguments.ArgAt(1)); |
| 634 const TypeArguments& instantiator_type_arguments = |
| 635 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2)); |
| 636 const TypeArguments& function_type_arguments = |
| 637 TypeArguments::CheckedHandle(zone, arguments.ArgAt(3)); |
| 638 const String& dst_name = String::CheckedHandle(zone, arguments.ArgAt(4)); |
| 639 const SubtypeTestCache& cache = |
| 640 SubtypeTestCache::CheckedHandle(zone, arguments.ArgAt(5)); |
| 641 ASSERT(!dst_type.IsMalformed()); // Already checked in code generator. |
| 642 ASSERT(!dst_type.IsMalbounded()); // Already checked in code generator. |
| 643 ASSERT(!dst_type.IsDynamicType()); // No need to check assignment. |
| 644 ASSERT(!src_instance.IsNull()); // Already checked in inlined code. |
| 645 |
| 646 Error& bound_error = Error::Handle(zone); |
| 647 const bool is_instance_of = |
| 648 src_instance.IsInstanceOf(dst_type, instantiator_type_arguments, |
| 649 function_type_arguments, &bound_error); |
| 650 |
| 651 if (FLAG_trace_type_checks) { |
| 652 PrintTypeCheck("TypeCheck", src_instance, dst_type, |
| 653 instantiator_type_arguments, function_type_arguments, |
| 654 Bool::Get(is_instance_of)); |
| 655 } |
| 656 if (!is_instance_of) { |
| 657 // Throw a dynamic type error. |
| 658 const TokenPosition location = GetCallerLocation(); |
| 659 const AbstractType& src_type = |
| 660 AbstractType::Handle(zone, src_instance.GetType(Heap::kNew)); |
| 661 if (!dst_type.IsInstantiated()) { |
| 662 // Instantiate dst_type before reporting the error. |
| 663 dst_type = dst_type.InstantiateFrom(instantiator_type_arguments, |
| 664 function_type_arguments, NULL, NULL, |
| 665 NULL, Heap::kNew); |
| 666 // Note that instantiated dst_type may be malbounded. |
| 667 } |
| 668 String& bound_error_message = String::Handle(zone); |
| 669 if (!bound_error.IsNull()) { |
| 670 ASSERT(isolate->type_checks()); |
| 671 bound_error_message = String::New(bound_error.ToErrorCString()); |
| 672 } |
| 673 Exceptions::CreateAndThrowTypeError(location, src_type, dst_type, dst_name, |
| 674 bound_error_message); |
| 675 UNREACHABLE(); |
| 676 } |
| 677 UpdateTypeTestCache(src_instance, dst_type, instantiator_type_arguments, |
| 678 function_type_arguments, Bool::True(), cache); |
| 679 arguments.SetReturn(src_instance); |
| 680 } |
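A hypothetical Dart sketch (checked mode) of the assignment failure this entry turns into a TypeError; the variable 'x' below plays the role of dst_name.

    main() {
      dynamic value = 'not an int';
      try {
        int x = value;  // checked mode: fails the int check
        print(x);
      } catch (e) {
        print(e);  // type 'String' is not a subtype of type 'int' of 'x'
      }
    }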
| 681 |
| 682 |
 | 683 // Report that the type of the given object is not bool in a conditional |
 | 684 // context. Throws an AssertionError if the object is null. (cf. Boolean |
 | 685 // Conversion in the language spec.) |
| 686 // Arg0: bad object. |
| 687 // Return value: none, throws TypeError or AssertionError. |
| 688 DEFINE_RUNTIME_ENTRY(NonBoolTypeError, 1) { |
| 689 const TokenPosition location = GetCallerLocation(); |
| 690 const Instance& src_instance = |
| 691 Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 692 |
| 693 if (src_instance.IsNull()) { |
| 694 const Array& args = Array::Handle(zone, Array::New(5)); |
| 695 args.SetAt( |
| 696 0, String::Handle( |
| 697 zone, |
| 698 String::New( |
| 699 "Failed assertion: boolean expression must not be null"))); |
| 700 |
| 701 // No source code for this assertion, set url to null. |
| 702 args.SetAt(1, String::Handle(zone, String::null())); |
| 703 args.SetAt(2, Smi::Handle(zone, Smi::New(0))); |
| 704 args.SetAt(3, Smi::Handle(zone, Smi::New(0))); |
| 705 args.SetAt(4, String::Handle(zone, String::null())); |
| 706 |
| 707 Exceptions::ThrowByType(Exceptions::kAssertion, args); |
| 708 UNREACHABLE(); |
| 709 } |
| 710 |
| 711 ASSERT(!src_instance.IsBool()); |
| 712 const Type& bool_interface = Type::Handle(Type::BoolType()); |
| 713 const AbstractType& src_type = |
| 714 AbstractType::Handle(zone, src_instance.GetType(Heap::kNew)); |
| 715 const String& no_bound_error = String::Handle(zone); |
| 716 Exceptions::CreateAndThrowTypeError(location, src_type, bool_interface, |
| 717 Symbols::BooleanExpression(), |
| 718 no_bound_error); |
| 719 UNREACHABLE(); |
| 720 } |
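A hypothetical Dart sketch (checked mode) of the two outcomes above: null in a condition raises the AssertionError, and any other non-bool raises a TypeError.

    tryCondition(condition) {
      try {
        if (condition) print('taken');
      } catch (e) {
        print(e);
      }
    }
    main() {
      tryCondition(null);   // AssertionError: boolean expression must not be null
      tryCondition('yes');  // TypeError: not a bool in a conditional context
      tryCondition(true);   // prints 'taken'
    }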
| 721 |
| 722 |
| 723 // Report that the type of the type check is malformed or malbounded. |
| 724 // Arg0: src value. |
| 725 // Arg1: name of destination being assigned to. |
| 726 // Arg2: type of destination being assigned to. |
| 727 // Return value: none, throws an exception. |
| 728 DEFINE_RUNTIME_ENTRY(BadTypeError, 3) { |
| 729 const TokenPosition location = GetCallerLocation(); |
| 730 const Instance& src_value = Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 731 const String& dst_name = String::CheckedHandle(zone, arguments.ArgAt(1)); |
| 732 const AbstractType& dst_type = |
| 733 AbstractType::CheckedHandle(zone, arguments.ArgAt(2)); |
| 734 const AbstractType& src_type = |
| 735 AbstractType::Handle(zone, src_value.GetType(Heap::kNew)); |
| 736 Exceptions::CreateAndThrowTypeError(location, src_type, dst_type, dst_name, |
| 737 String::Handle(zone)); |
| 738 UNREACHABLE(); |
| 739 } |
| 740 |
| 741 |
| 742 DEFINE_RUNTIME_ENTRY(Throw, 1) { |
| 743 const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 744 Exceptions::Throw(thread, exception); |
| 745 } |
| 746 |
| 747 |
| 748 DEFINE_RUNTIME_ENTRY(ReThrow, 2) { |
| 749 const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 750 const Instance& stacktrace = |
| 751 Instance::CheckedHandle(zone, arguments.ArgAt(1)); |
| 752 Exceptions::ReThrow(thread, exception, stacktrace); |
| 753 } |
| 754 |
| 755 |
| 756 // Patches static call in optimized code with the target's entry point. |
| 757 // Compiles target if necessary. |
| 758 DEFINE_RUNTIME_ENTRY(PatchStaticCall, 0) { |
| 759 DartFrameIterator iterator; |
| 760 StackFrame* caller_frame = iterator.NextFrame(); |
| 761 ASSERT(caller_frame != NULL); |
| 762 const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode()); |
| 763 ASSERT(!caller_code.IsNull()); |
| 764 ASSERT(caller_code.is_optimized()); |
| 765 const Function& target_function = Function::Handle( |
| 766 zone, caller_code.GetStaticCallTargetFunctionAt(caller_frame->pc())); |
| 767 const Code& target_code = Code::Handle(zone, target_function.EnsureHasCode()); |
| 768 // Before patching verify that we are not repeatedly patching to the same |
| 769 // target. |
| 770 ASSERT(target_code.raw() != |
| 771 CodePatcher::GetStaticCallTargetAt(caller_frame->pc(), caller_code)); |
| 772 CodePatcher::PatchStaticCallAt(caller_frame->pc(), caller_code, target_code); |
| 773 caller_code.SetStaticCallTargetCodeAt(caller_frame->pc(), target_code); |
| 774 if (FLAG_trace_patching) { |
| 775 THR_Print("PatchStaticCall: patching caller pc %#" Px |
| 776 "" |
| 777 " to '%s' new entry point %#" Px " (%s)\n", |
| 778 caller_frame->pc(), target_function.ToFullyQualifiedCString(), |
| 779 target_code.UncheckedEntryPoint(), |
| 780 target_code.is_optimized() ? "optimized" : "unoptimized"); |
| 781 } |
| 782 arguments.SetReturn(target_code); |
| 783 } |
| 784 |
| 785 |
| 786 // Result of an invoke may be an unhandled exception, in which case we |
| 787 // rethrow it. |
| 788 static void CheckResultError(const Object& result) { |
| 789 if (result.IsError()) { |
| 790 Exceptions::PropagateError(Error::Cast(result)); |
| 791 } |
| 792 } |
| 793 |
| 794 |
| 795 #if !defined(TARGET_ARCH_DBC) |
| 796 // Gets called from debug stub when code reaches a breakpoint |
| 797 // set on a runtime stub call. |
| 798 DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) { |
| 799 if (!FLAG_support_debugger) { |
| 800 UNREACHABLE(); |
| 801 return; |
| 802 } |
| 803 DartFrameIterator iterator; |
| 804 StackFrame* caller_frame = iterator.NextFrame(); |
| 805 ASSERT(caller_frame != NULL); |
| 806 const Code& orig_stub = Code::Handle( |
| 807 zone, isolate->debugger()->GetPatchedStubAddress(caller_frame->pc())); |
| 808 const Error& error = |
| 809 Error::Handle(zone, isolate->debugger()->PauseBreakpoint()); |
| 810 if (!error.IsNull()) { |
| 811 Exceptions::PropagateError(error); |
| 812 UNREACHABLE(); |
| 813 } |
| 814 arguments.SetReturn(orig_stub); |
| 815 } |
| 816 #else |
| 817 // Gets called from the simulator when the breakpoint is reached. |
| 818 DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) { |
| 819 if (!FLAG_support_debugger) { |
| 820 UNREACHABLE(); |
| 821 return; |
| 822 } |
| 823 const Error& error = Error::Handle(isolate->debugger()->PauseBreakpoint()); |
| 824 if (!error.IsNull()) { |
| 825 Exceptions::PropagateError(error); |
| 826 UNREACHABLE(); |
| 827 } |
| 828 } |
| 829 #endif // !defined(TARGET_ARCH_DBC) |
| 830 |
| 831 |
| 832 DEFINE_RUNTIME_ENTRY(SingleStepHandler, 0) { |
| 833 if (!FLAG_support_debugger) { |
| 834 UNREACHABLE(); |
| 835 return; |
| 836 } |
| 837 const Error& error = |
| 838 Error::Handle(zone, isolate->debugger()->PauseStepping()); |
| 839 if (!error.IsNull()) { |
| 840 Exceptions::PropagateError(error); |
| 841 UNREACHABLE(); |
| 842 } |
| 843 } |
| 844 |
| 845 |
| 846 // An instance call of the form o.f(...) could not be resolved. Check if |
| 847 // there is a getter with the same name. If so, invoke it. If the value is |
| 848 // a closure, invoke it with the given arguments. If the value is a |
| 849 // non-closure, attempt to invoke "call" on it. |
| 850 static bool ResolveCallThroughGetter(const Instance& receiver, |
| 851 const Class& receiver_class, |
| 852 const String& target_name, |
| 853 const Array& arguments_descriptor, |
| 854 Function* result) { |
| 855 // 1. Check if there is a getter with the same name. |
| 856 const String& getter_name = String::Handle(Field::GetterName(target_name)); |
| 857 const int kNumArguments = 1; |
| 858 ArgumentsDescriptor args_desc( |
| 859 Array::Handle(ArgumentsDescriptor::New(kNumArguments))); |
| 860 const Function& getter = |
| 861 Function::Handle(Resolver::ResolveDynamicForReceiverClass( |
| 862 receiver_class, getter_name, args_desc)); |
| 863 if (getter.IsNull() || getter.IsMethodExtractor()) { |
| 864 return false; |
| 865 } |
| 866 const Function& target_function = |
| 867 Function::Handle(receiver_class.GetInvocationDispatcher( |
| 868 target_name, arguments_descriptor, |
| 869 RawFunction::kInvokeFieldDispatcher, FLAG_lazy_dispatchers)); |
| 870 ASSERT(!target_function.IsNull() || !FLAG_lazy_dispatchers); |
| 871 if (FLAG_trace_ic) { |
| 872 OS::PrintErr( |
| 873 "InvokeField IC miss: adding <%s> id:%" Pd " -> <%s>\n", |
| 874 Class::Handle(receiver.clazz()).ToCString(), receiver.GetClassId(), |
| 875 target_function.IsNull() ? "null" : target_function.ToCString()); |
| 876 } |
| 877 *result = target_function.raw(); |
| 878 return true; |
| 879 } |
| 880 |
| 881 |
| 882 // Handle other invocations (implicit closures, noSuchMethod). |
| 883 RawFunction* InlineCacheMissHelper(const Instance& receiver, |
| 884 const Array& args_descriptor, |
| 885 const String& target_name) { |
| 886 const Class& receiver_class = Class::Handle(receiver.clazz()); |
| 887 |
| 888 Function& result = Function::Handle(); |
| 889 if (!ResolveCallThroughGetter(receiver, receiver_class, target_name, |
| 890 args_descriptor, &result)) { |
| 891 ArgumentsDescriptor desc(args_descriptor); |
| 892 const Function& target_function = |
| 893 Function::Handle(receiver_class.GetInvocationDispatcher( |
| 894 target_name, args_descriptor, RawFunction::kNoSuchMethodDispatcher, |
| 895 FLAG_lazy_dispatchers)); |
| 896 if (FLAG_trace_ic) { |
| 897 OS::PrintErr( |
| 898 "NoSuchMethod IC miss: adding <%s> id:%" Pd " -> <%s>\n", |
| 899 Class::Handle(receiver.clazz()).ToCString(), receiver.GetClassId(), |
| 900 target_function.IsNull() ? "null" : target_function.ToCString()); |
| 901 } |
| 902 result = target_function.raw(); |
| 903 } |
| 904 // May be null if --no-lazy-dispatchers, in which case dispatch will be |
| 905 // handled by InvokeNoSuchMethodDispatcher. |
| 906 ASSERT(!result.IsNull() || !FLAG_lazy_dispatchers); |
| 907 return result.raw(); |
| 908 } |
| 909 |
| 910 |
 | 911 // Perform the subtype test and return the constant function for the result. |
| 912 static RawFunction* ComputeTypeCheckTarget(const Instance& receiver, |
| 913 const AbstractType& type, |
| 914 const ArgumentsDescriptor& desc) { |
| 915 Error& error = Error::Handle(); |
| 916 bool result = receiver.IsInstanceOf(type, Object::null_type_arguments(), |
| 917 Object::null_type_arguments(), &error); |
| 918 ASSERT(error.IsNull()); |
| 919 ObjectStore* store = Isolate::Current()->object_store(); |
| 920 const Function& target = |
| 921 Function::Handle(result ? store->simple_instance_of_true_function() |
| 922 : store->simple_instance_of_false_function()); |
| 923 ASSERT(!target.IsNull()); |
| 924 return target.raw(); |
| 925 } |
| 926 |
| 927 |
| 928 static RawFunction* InlineCacheMissHandler( |
| 929 const GrowableArray<const Instance*>& args, |
| 930 const ICData& ic_data) { |
| 931 const Instance& receiver = *args[0]; |
| 932 ArgumentsDescriptor arguments_descriptor( |
| 933 Array::Handle(ic_data.arguments_descriptor())); |
| 934 String& function_name = String::Handle(ic_data.target_name()); |
| 935 ASSERT(function_name.IsSymbol()); |
| 936 |
| 937 Function& target_function = Function::Handle( |
| 938 Resolver::ResolveDynamic(receiver, function_name, arguments_descriptor)); |
| 939 |
| 940 ObjectStore* store = Isolate::Current()->object_store(); |
| 941 if (target_function.raw() == store->simple_instance_of_function()) { |
| 942 // Replace the target function with constant function. |
| 943 const AbstractType& type = AbstractType::Cast(*args[1]); |
| 944 target_function = |
| 945 ComputeTypeCheckTarget(receiver, type, arguments_descriptor); |
| 946 } |
| 947 if (target_function.IsNull()) { |
| 948 if (FLAG_trace_ic) { |
| 949 OS::PrintErr("InlineCacheMissHandler NULL function for %s receiver: %s\n", |
| 950 String::Handle(ic_data.target_name()).ToCString(), |
| 951 receiver.ToCString()); |
| 952 } |
| 953 const Array& args_descriptor = |
| 954 Array::Handle(ic_data.arguments_descriptor()); |
| 955 const String& target_name = String::Handle(ic_data.target_name()); |
| 956 target_function = |
| 957 InlineCacheMissHelper(receiver, args_descriptor, target_name); |
| 958 } |
| 959 if (target_function.IsNull()) { |
| 960 ASSERT(!FLAG_lazy_dispatchers); |
| 961 return target_function.raw(); |
| 962 } |
| 963 if (args.length() == 1) { |
| 964 ic_data.AddReceiverCheck(args[0]->GetClassId(), target_function); |
| 965 } else { |
| 966 GrowableArray<intptr_t> class_ids(args.length()); |
| 967 ASSERT(ic_data.NumArgsTested() == args.length()); |
| 968 for (intptr_t i = 0; i < args.length(); i++) { |
| 969 class_ids.Add(args[i]->GetClassId()); |
| 970 } |
| 971 ic_data.AddCheck(class_ids, target_function); |
| 972 } |
| 973 if (FLAG_trace_ic_miss_in_optimized || FLAG_trace_ic) { |
| 974 DartFrameIterator iterator; |
| 975 StackFrame* caller_frame = iterator.NextFrame(); |
| 976 ASSERT(caller_frame != NULL); |
| 977 if (FLAG_trace_ic_miss_in_optimized) { |
| 978 const Code& caller = Code::Handle(Code::LookupCode(caller_frame->pc())); |
| 979 if (caller.is_optimized()) { |
| 980 OS::PrintErr("IC miss in optimized code; call %s -> %s\n", |
| 981 Function::Handle(caller.function()).ToCString(), |
| 982 target_function.ToCString()); |
| 983 } |
| 984 } |
| 985 if (FLAG_trace_ic) { |
| 986 OS::PrintErr("InlineCacheMissHandler %" Pd " call at %#" Px |
| 987 "' " |
| 988 "adding <%s> id:%" Pd " -> <%s>\n", |
| 989 args.length(), caller_frame->pc(), |
| 990 Class::Handle(receiver.clazz()).ToCString(), |
| 991 receiver.GetClassId(), target_function.ToCString()); |
| 992 } |
| 993 } |
| 994 return target_function.raw(); |
| 995 } |
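A hypothetical Dart sketch of an instance call site whose IC grows one (class id, target) check per new receiver class, which is the bookkeeping the handlers above perform on a miss.

    String describe(Object o) => o.toString();  // o.toString() is the IC'd call site
    main() {
      print(describe(1));     // first receiver class recorded
      print(describe('hi'));  // second class: another check is added
      print(describe(1.5));   // third class, and so on
    }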
| 996 |
| 997 |
| 998 // Handles inline cache misses by updating the IC data array of the call site. |
| 999 // Arg0: Receiver object. |
| 1000 // Arg1: IC data object. |
| 1001 // Returns: target function with compiled code or null. |
| 1002 // Modifies the instance call to hold the updated IC data array. |
| 1003 DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerOneArg, 2) { |
| 1004 const Instance& receiver = Instance::CheckedHandle(arguments.ArgAt(0)); |
| 1005 const ICData& ic_data = ICData::CheckedHandle(arguments.ArgAt(1)); |
| 1006 GrowableArray<const Instance*> args(1); |
| 1007 args.Add(&receiver); |
| 1008 const Function& result = |
| 1009 Function::Handle(InlineCacheMissHandler(args, ic_data)); |
| 1010 arguments.SetReturn(result); |
| 1011 } |
| 1012 |
| 1013 |
| 1014 // Handles inline cache misses by updating the IC data array of the call site. |
| 1015 // Arg0: Receiver object. |
| 1016 // Arg1: Argument after receiver. |
| 1017 // Arg2: IC data object. |
| 1018 // Returns: target function with compiled code or null. |
| 1019 // Modifies the instance call to hold the updated IC data array. |
| 1020 DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerTwoArgs, 3) { |
| 1021 const Instance& receiver = Instance::CheckedHandle(arguments.ArgAt(0)); |
| 1022 const Instance& other = Instance::CheckedHandle(arguments.ArgAt(1)); |
| 1023 const ICData& ic_data = ICData::CheckedHandle(arguments.ArgAt(2)); |
| 1024 GrowableArray<const Instance*> args(2); |
| 1025 args.Add(&receiver); |
| 1026 args.Add(&other); |
| 1027 const Function& result = |
| 1028 Function::Handle(InlineCacheMissHandler(args, ic_data)); |
| 1029 arguments.SetReturn(result); |
| 1030 } |
| 1031 |
| 1032 |
| 1033 // Handles a static call in unoptimized code that has one argument type not |
| 1034 // seen before. Compile the target if necessary and update the ICData. |
| 1035 // Arg0: argument. |
| 1036 // Arg1: IC data object. |
| 1037 DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerOneArg, 2) { |
| 1038 const Instance& arg = Instance::CheckedHandle(arguments.ArgAt(0)); |
| 1039 const ICData& ic_data = ICData::CheckedHandle(arguments.ArgAt(1)); |
| 1040 // IC data for static call is prepopulated with the statically known target. |
| 1041 ASSERT(ic_data.NumberOfChecksIs(1)); |
| 1042 const Function& target = Function::Handle(ic_data.GetTargetAt(0)); |
| 1043 target.EnsureHasCode(); |
| 1044 ASSERT(!target.IsNull() && target.HasCode()); |
| 1045 ic_data.AddReceiverCheck(arg.GetClassId(), target, 1); |
| 1046 if (FLAG_trace_ic) { |
| 1047 DartFrameIterator iterator; |
| 1048 StackFrame* caller_frame = iterator.NextFrame(); |
| 1049 ASSERT(caller_frame != NULL); |
| 1050 OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ")\n", |
| 1051 caller_frame->pc(), target.ToCString(), arg.GetClassId()); |
| 1052 } |
| 1053 arguments.SetReturn(target); |
| 1054 } |
| 1055 |
| 1056 |
| 1057 // Handles a static call in unoptimized code that has two argument types not |
| 1058 // seen before. Compile the target if necessary and update the ICData. |
| 1059 // Arg0: argument 0. |
| 1060 // Arg1: argument 1. |
| 1061 // Arg2: IC data object. |
| 1062 DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerTwoArgs, 3) { |
| 1063 const Instance& arg0 = Instance::CheckedHandle(arguments.ArgAt(0)); |
| 1064 const Instance& arg1 = Instance::CheckedHandle(arguments.ArgAt(1)); |
| 1065 const ICData& ic_data = ICData::CheckedHandle(arguments.ArgAt(2)); |
| 1066 // IC data for static call is prepopulated with the statically known target. |
| 1067 ASSERT(!ic_data.NumberOfChecksIs(0)); |
| 1068 const Function& target = Function::Handle(ic_data.GetTargetAt(0)); |
| 1069 target.EnsureHasCode(); |
| 1070 GrowableArray<intptr_t> cids(2); |
| 1071 cids.Add(arg0.GetClassId()); |
| 1072 cids.Add(arg1.GetClassId()); |
| 1073 ic_data.AddCheck(cids, target); |
| 1074 if (FLAG_trace_ic) { |
| 1075 DartFrameIterator iterator; |
| 1076 StackFrame* caller_frame = iterator.NextFrame(); |
| 1077 ASSERT(caller_frame != NULL); |
| 1078 OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ", %" Pd |
| 1079 ")\n", |
| 1080 caller_frame->pc(), target.ToCString(), cids[0], cids[1]); |
| 1081 } |
| 1082 arguments.SetReturn(target); |
| 1083 } |
| 1084 |
| 1085 |
| 1086 #if !defined(TARGET_ARCH_DBC) |
| 1087 static bool IsSingleTarget(Isolate* isolate, |
| 1088 Zone* zone, |
| 1089 intptr_t lower_cid, |
| 1090 intptr_t upper_cid, |
| 1091 const Function& target, |
| 1092 const String& name) { |
| 1093 Class& cls = Class::Handle(zone); |
| 1094 ClassTable* table = isolate->class_table(); |
| 1095 Function& other_target = Function::Handle(zone); |
| 1096 for (intptr_t cid = lower_cid; cid <= upper_cid; cid++) { |
| 1097 if (!table->HasValidClassAt(cid)) continue; |
| 1098 cls = table->At(cid); |
| 1099 if (cls.is_abstract()) continue; |
| 1100 if (!cls.is_allocated()) continue; |
| 1101 other_target = |
| 1102 Resolver::ResolveDynamicAnyArgs(zone, cls, name, false /* allow_add */); |
| 1103 if (other_target.raw() != target.raw()) { |
| 1104 return false; |
| 1105 } |
| 1106 } |
| 1107 return true; |
| 1108 } |
| 1109 #endif |
| 1110 |
| 1111 |
| 1112 // Handle a miss of a single target cache. |
| 1113 // Arg0: Receiver. |
| 1114 // Returns: the ICData used to continue with a polymorphic call. |
| 1115 DEFINE_RUNTIME_ENTRY(SingleTargetMiss, 1) { |
| 1116 #if defined(TARGET_ARCH_DBC) |
| 1117 // DBC does not use switchable calls. |
| 1118 UNREACHABLE(); |
| 1119 #else |
| 1120 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 1121 |
| 1122 DartFrameIterator iterator; |
| 1123 StackFrame* caller_frame = iterator.NextFrame(); |
| 1124 ASSERT(caller_frame->IsDartFrame()); |
| 1125 const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode()); |
| 1126 const Function& caller_function = |
| 1127 Function::Handle(zone, caller_frame->LookupDartFunction()); |
| 1128 |
| 1129 SingleTargetCache& cache = SingleTargetCache::Handle(zone); |
| 1130 cache ^= |
| 1131 CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(), caller_code); |
| 1132 Code& old_target_code = Code::Handle(zone, cache.target()); |
| 1133 Function& old_target = Function::Handle(zone); |
| 1134 old_target ^= old_target_code.owner(); |
| 1135 |
| 1136 // We lost the original ICData when we patched to the monomorphic case. |
| 1137 const String& name = String::Handle(zone, old_target.name()); |
| 1138 ASSERT(!old_target.HasOptionalParameters()); |
| 1139 const Array& descriptor = Array::Handle( |
| 1140 zone, ArgumentsDescriptor::New(old_target.num_fixed_parameters())); |
| 1141 const ICData& ic_data = |
| 1142 ICData::Handle(zone, ICData::New(caller_function, name, descriptor, |
| 1143 Thread::kNoDeoptId, 1, /* args_tested */ |
| 1144 false /* static_call */)); |
| 1145 |
| 1146 // Maybe add the new target. |
| 1147 Class& cls = Class::Handle(zone, receiver.clazz()); |
| 1148 ArgumentsDescriptor args_desc(descriptor); |
| 1149 Function& target_function = Function::Handle( |
| 1150 zone, Resolver::ResolveDynamicForReceiverClass(cls, name, args_desc)); |
| 1151 if (target_function.IsNull()) { |
| 1152 target_function = InlineCacheMissHelper(receiver, descriptor, name); |
| 1153 } |
| 1154 if (target_function.IsNull()) { |
| 1155 ASSERT(!FLAG_lazy_dispatchers); |
| 1156 } else { |
| 1157 ic_data.AddReceiverCheck(receiver.GetClassId(), target_function); |
| 1158 } |
| 1159 |
| 1160 if (old_target.raw() == target_function.raw()) { |
| 1161 intptr_t lower, upper, unchecked_lower, unchecked_upper; |
| 1162 if (receiver.GetClassId() < cache.lower_limit()) { |
| 1163 lower = receiver.GetClassId(); |
| 1164 unchecked_lower = receiver.GetClassId(); |
| 1165 upper = cache.upper_limit(); |
| 1166 unchecked_upper = cache.lower_limit() - 1; |
| 1167 } else { |
| 1168 lower = cache.lower_limit(); |
| 1169 unchecked_lower = cache.upper_limit() + 1; |
| 1170 upper = receiver.GetClassId(); |
| 1171 unchecked_upper = receiver.GetClassId(); |
| 1172 } |
| 1173 |
| 1174 if (IsSingleTarget(isolate, zone, unchecked_lower, unchecked_upper, |
| 1175 target_function, name)) { |
| 1176 cache.set_lower_limit(lower); |
| 1177 cache.set_upper_limit(upper); |
| 1178 // Return the ICData. The single target stub will jump to continue in the |
| 1179 // IC call stub. |
| 1180 arguments.SetReturn(ic_data); |
| 1181 return; |
| 1182 } |
| 1183 } |
| 1184 |
| 1185 // Call site is not single target, switch to call using ICData. |
| 1186 const Code& stub = |
| 1187 Code::Handle(zone, StubCode::ICCallThroughCode_entry()->code()); |
| 1188 ASSERT(!Isolate::Current()->compilation_allowed()); |
| 1189 CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, ic_data, |
| 1190 stub); |
| 1191 |
| 1192 // Return the ICData. The single target stub will jump to continue in the |
| 1193 // IC call stub. |
| 1194 arguments.SetReturn(ic_data); |
| 1195 #endif |
| 1196 } |
| 1197 |
| 1198 |
| 1199 DEFINE_RUNTIME_ENTRY(UnlinkedCall, 2) { |
| 1200 #if defined(TARGET_ARCH_DBC) |
| 1201 // DBC does not use switchable calls. |
| 1202 UNREACHABLE(); |
| 1203 #else |
| 1204 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 1205 const UnlinkedCall& unlinked = |
| 1206 UnlinkedCall::CheckedHandle(zone, arguments.ArgAt(1)); |
| 1207 |
| 1208 DartFrameIterator iterator; |
| 1209 StackFrame* caller_frame = iterator.NextFrame(); |
| 1210 ASSERT(caller_frame->IsDartFrame()); |
| 1211 const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode()); |
| 1212 const Function& caller_function = |
| 1213 Function::Handle(zone, caller_frame->LookupDartFunction()); |
| 1214 |
| 1215 const String& name = String::Handle(zone, unlinked.target_name()); |
| 1216 const Array& descriptor = Array::Handle(zone, unlinked.args_descriptor()); |
| 1217 const ICData& ic_data = |
| 1218 ICData::Handle(zone, ICData::New(caller_function, name, descriptor, |
| 1219 Thread::kNoDeoptId, 1, /* args_tested */ |
| 1220 false /* static_call */)); |
| 1221 |
| 1222 Class& cls = Class::Handle(zone, receiver.clazz()); |
| 1223 ArgumentsDescriptor args_desc(descriptor); |
| 1224 Function& target_function = Function::Handle( |
| 1225 zone, Resolver::ResolveDynamicForReceiverClass(cls, name, args_desc)); |
| 1226 if (target_function.IsNull()) { |
| 1227 target_function = InlineCacheMissHelper(receiver, descriptor, name); |
| 1228 } |
| 1229 if (target_function.IsNull()) { |
| 1230 ASSERT(!FLAG_lazy_dispatchers); |
| 1231 } else { |
| 1232 ic_data.AddReceiverCheck(receiver.GetClassId(), target_function); |
| 1233 } |
| 1234 |
| 1235 if (!target_function.IsNull() && !target_function.HasOptionalParameters()) { |
| 1236 // Patch to monomorphic call. |
| 1237 ASSERT(target_function.HasCode()); |
| 1238 const Code& target_code = Code::Handle(zone, target_function.CurrentCode()); |
| 1239 const Smi& expected_cid = |
| 1240 Smi::Handle(zone, Smi::New(receiver.GetClassId())); |
| 1241 CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, |
| 1242 expected_cid, target_code); |
| 1243 |
| 1244 // Return the ICData. The miss stub will jump to continue in the IC call |
| 1245 // stub. |
| 1246 arguments.SetReturn(ic_data); |
| 1247 return; |
| 1248 } |
| 1249 |
| 1250 // Patch to call through stub. |
| 1251 const Code& stub = |
| 1252 Code::Handle(zone, StubCode::ICCallThroughCode_entry()->code()); |
| 1253 ASSERT(!Isolate::Current()->compilation_allowed()); |
| 1254 CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, ic_data, |
| 1255 stub); |
| 1256 |
| 1257 // Return the ICData. The miss stub will jump to continue in the IC lookup |
| 1258 // stub. |
| 1259 arguments.SetReturn(ic_data); |
| 1260 #endif // !DBC |
| 1261 } |
| 1262 |
| 1263 |
 | 1264 // Handle a miss of a monomorphic call. |
| 1265 // Arg0: Receiver. |
| 1266 // Returns: the ICData used to continue with a polymorphic call. |
| 1267 DEFINE_RUNTIME_ENTRY(MonomorphicMiss, 1) { |
| 1268 #if defined(TARGET_ARCH_DBC) |
| 1269 // DBC does not use switchable calls. |
| 1270 UNREACHABLE(); |
| 1271 #else |
| 1272 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 1273 |
| 1274 DartFrameIterator iterator; |
| 1275 StackFrame* caller_frame = iterator.NextFrame(); |
| 1276 ASSERT(caller_frame->IsDartFrame()); |
| 1277 const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode()); |
| 1278 const Function& caller_function = |
| 1279 Function::Handle(zone, caller_frame->LookupDartFunction()); |
| 1280 |
| 1281 Smi& old_expected_cid = Smi::Handle(zone); |
| 1282 old_expected_cid ^= |
| 1283 CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(), caller_code); |
| 1284 const Code& old_target_code = Code::Handle( |
| 1285 CodePatcher::GetSwitchableCallTargetAt(caller_frame->pc(), caller_code)); |
| 1286 Function& old_target = Function::Handle(zone); |
| 1287 old_target ^= old_target_code.owner(); |
| 1288 |
| 1289 // We lost the original ICData when we patched to the monomorphic case. |
| 1290 const String& name = String::Handle(zone, old_target.name()); |
| 1291 ASSERT(!old_target.HasOptionalParameters()); |
| 1292 const Array& descriptor = Array::Handle( |
| 1293 zone, ArgumentsDescriptor::New(old_target.num_fixed_parameters())); |
| 1294 const ICData& ic_data = |
| 1295 ICData::Handle(zone, ICData::New(caller_function, name, descriptor, |
| 1296 Thread::kNoDeoptId, 1, /* args_tested */ |
| 1297 false /* static_call */)); |
| 1298 |
| 1299 // Add the first target. |
| 1300 ic_data.AddReceiverCheck(old_expected_cid.Value(), old_target); |
| 1301 |
| 1302 // Maybe add the new target. |
| 1303 Class& cls = Class::Handle(zone, receiver.clazz()); |
| 1304 ArgumentsDescriptor args_desc(descriptor); |
| 1305 Function& target_function = Function::Handle( |
| 1306 zone, Resolver::ResolveDynamicForReceiverClass(cls, name, args_desc)); |
| 1307 if (target_function.IsNull()) { |
| 1308 target_function = InlineCacheMissHelper(receiver, descriptor, name); |
| 1309 } |
| 1310 if (target_function.IsNull()) { |
| 1311 ASSERT(!FLAG_lazy_dispatchers); |
| 1312 } else { |
| 1313 ic_data.AddReceiverCheck(receiver.GetClassId(), target_function); |
| 1314 } |
| 1315 |
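| // If the old and the new receiver resolve to the same target, try to widen |
| // the monomorphic check into a contiguous class-id range served by the |
| // SingleTargetCall stub before falling back to the IC lookup stub. |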
| 1316 if (old_target.raw() == target_function.raw()) { |
| 1317 intptr_t lower, upper; |
| 1318 if (old_expected_cid.Value() < receiver.GetClassId()) { |
| 1319 lower = old_expected_cid.Value(); |
| 1320 upper = receiver.GetClassId(); |
| 1321 } else { |
| 1322 lower = receiver.GetClassId(); |
| 1323 upper = old_expected_cid.Value(); |
| 1324 } |
| 1325 |
| 1326 if (IsSingleTarget(isolate, zone, lower, upper, target_function, name)) { |
| 1327 const SingleTargetCache& cache = |
| 1328 SingleTargetCache::Handle(SingleTargetCache::New()); |
| 1329 const Code& code = Code::Handle(target_function.CurrentCode()); |
| 1330 cache.set_target(code); |
| 1331 cache.set_entry_point(code.UncheckedEntryPoint()); |
| 1332 cache.set_lower_limit(lower); |
| 1333 cache.set_upper_limit(upper); |
| 1334 const Code& stub = |
| 1335 Code::Handle(zone, StubCode::SingleTargetCall_entry()->code()); |
| 1336 CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, cache, |
| 1337 stub); |
| 1338 // Return the ICData. The miss stub will jump to continue in the IC call |
| 1339 // stub. |
| 1340 arguments.SetReturn(ic_data); |
| 1341 return; |
| 1342 } |
| 1343 } |
| 1344 |
| 1345 // Patch to call through stub. |
| 1346 const Code& stub = |
| 1347 Code::Handle(zone, StubCode::ICCallThroughCode_entry()->code()); |
| 1348 ASSERT(!Isolate::Current()->compilation_allowed()); |
| 1349 CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, ic_data, |
| 1350 stub); |
| 1351 |
| 1352 // Return the ICData. The miss stub will jump to continue in the IC lookup |
| 1353 // stub. |
| 1354 arguments.SetReturn(ic_data); |
| 1355 #endif // !defined(TARGET_ARCH_DBC) |
| 1356 } |
| 1357 |
| 1358 |
| 1359 // Handle a miss of a megamorphic cache. |
| 1360 // Arg0: Receiver. |
| 1361 // Arg1: ICData or MegamorphicCache. |
| 1362 // Arg2: Arguments descriptor array. |
| 1363 // Returns: target function to call. |
| 1364 DEFINE_RUNTIME_ENTRY(MegamorphicCacheMissHandler, 3) { |
| 1365 #if defined(TARGET_ARCH_DBC) |
| 1366 // DBC does not use megamorphic calls right now. |
| 1367 UNREACHABLE(); |
| 1368 #else |
| 1369 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 1370 const Object& ic_data_or_cache = Object::Handle(zone, arguments.ArgAt(1)); |
| 1371 const Array& descriptor = Array::CheckedHandle(zone, arguments.ArgAt(2)); |
| 1372 String& name = String::Handle(zone); |
| 1373 if (ic_data_or_cache.IsICData()) { |
| 1374 name = ICData::Cast(ic_data_or_cache).target_name(); |
| 1375 } else { |
| 1376 ASSERT(ic_data_or_cache.IsMegamorphicCache()); |
| 1377 name = MegamorphicCache::Cast(ic_data_or_cache).target_name(); |
| 1378 } |
| 1379 Class& cls = Class::Handle(zone, receiver.clazz()); |
| 1380 ASSERT(!cls.IsNull()); |
| 1381 if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) { |
| 1382 OS::PrintErr("Megamorphic IC miss, class=%s, function=%s\n", |
| 1383 cls.ToCString(), name.ToCString()); |
| 1384 } |
| 1385 |
| 1386 ArgumentsDescriptor args_desc(descriptor); |
| 1387 Function& target_function = Function::Handle( |
| 1388 zone, Resolver::ResolveDynamicForReceiverClass(cls, name, args_desc)); |
| 1389 if (target_function.IsNull()) { |
| 1390 target_function = InlineCacheMissHelper(receiver, descriptor, name); |
| 1391 } |
| 1392 if (target_function.IsNull()) { |
| 1393 ASSERT(!FLAG_lazy_dispatchers); |
| 1394 arguments.SetReturn(target_function); |
| 1395 return; |
| 1396 } |
| 1397 |
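| // An ICData argument means the call site has not yet gone megamorphic: it |
| // may still be transitioned to a monomorphic direct call or, once past |
| // FLAG_max_polymorphic_checks, to a MegamorphicCache. A MegamorphicCache |
| // argument only needs the resolved target inserted. |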
| 1398 if (ic_data_or_cache.IsICData()) { |
| 1399 const ICData& ic_data = ICData::Cast(ic_data_or_cache); |
| 1400 const intptr_t number_of_checks = ic_data.NumberOfChecks(); |
| 1401 |
| 1402 if (number_of_checks == 0 && !target_function.HasOptionalParameters() && |
| 1403 !Isolate::Current()->compilation_allowed()) { |
| 1404 // This call site is unlinked: transition to a monomorphic direct call. |
| 1405 // Note we cannot do this if the target has optional parameters because |
| 1406 // the monomorphic direct call does not load the arguments descriptor. |
| 1407 // We cannot do this if we are still in the middle of precompiling because |
| 1408 // the monomorphic case hides a live instance selector from the |
| 1409 // treeshaker. |
| 1410 |
| 1411 const Code& target_code = |
| 1412 Code::Handle(zone, target_function.EnsureHasCode()); |
| 1413 |
| 1414 DartFrameIterator iterator; |
| 1415 StackFrame* miss_function_frame = iterator.NextFrame(); |
| 1416 ASSERT(miss_function_frame->IsDartFrame()); |
| 1417 StackFrame* caller_frame = iterator.NextFrame(); |
| 1418 ASSERT(caller_frame->IsDartFrame()); |
| 1419 const Code& caller_code = |
| 1420 Code::Handle(zone, caller_frame->LookupDartCode()); |
| 1421 const Smi& expected_cid = |
| 1422 Smi::Handle(zone, Smi::New(receiver.GetClassId())); |
| 1423 |
| 1424 CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, |
| 1425 expected_cid, target_code); |
| 1426 } else { |
| 1427 ic_data.AddReceiverCheck(receiver.GetClassId(), target_function); |
| 1428 if (number_of_checks > FLAG_max_polymorphic_checks) { |
| 1429 // Switch to megamorphic call. |
| 1430 const MegamorphicCache& cache = MegamorphicCache::Handle( |
| 1431 zone, MegamorphicCacheTable::Lookup(isolate, name, descriptor)); |
| 1432 DartFrameIterator iterator; |
| 1433 StackFrame* miss_function_frame = iterator.NextFrame(); |
| 1434 ASSERT(miss_function_frame->IsDartFrame()); |
| 1435 StackFrame* caller_frame = iterator.NextFrame(); |
| 1436 ASSERT(caller_frame->IsDartFrame()); |
| 1437 const Code& caller_code = |
| 1438 Code::Handle(zone, caller_frame->LookupDartCode()); |
| 1439 const Code& stub = |
| 1440 Code::Handle(zone, StubCode::MegamorphicCall_entry()->code()); |
| 1441 |
| 1442 CodePatcher::PatchSwitchableCallAt(caller_frame->pc(), caller_code, |
| 1443 cache, stub); |
| 1444 } |
| 1445 } |
| 1446 } else { |
| 1447 const MegamorphicCache& cache = MegamorphicCache::Cast(ic_data_or_cache); |
| 1448 // Insert function found into cache and return it. |
| 1449 cache.EnsureCapacity(); |
| 1450 const Smi& class_id = Smi::Handle(zone, Smi::New(cls.id())); |
| 1451 cache.Insert(class_id, target_function); |
| 1452 } |
| 1453 arguments.SetReturn(target_function); |
| 1454 #endif // !defined(TARGET_ARCH_DBC) |
| 1455 } |
| 1456 |
| 1457 |
| 1458 // Invoke appropriate noSuchMethod or closure from getter. |
| 1459 // Arg0: receiver |
| 1460 // Arg1: ICData or MegamorphicCache |
| 1461 // Arg2: arguments descriptor array |
| 1462 // Arg3: arguments array |
| 1463 DEFINE_RUNTIME_ENTRY(InvokeNoSuchMethodDispatcher, 4) { |
| 1464 ASSERT(!FLAG_lazy_dispatchers); |
| 1465 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0)); |
| 1466 const Object& ic_data_or_cache = Object::Handle(zone, arguments.ArgAt(1)); |
| 1467 const Array& orig_arguments_desc = |
| 1468 Array::CheckedHandle(zone, arguments.ArgAt(2)); |
| 1469 const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3)); |
| 1470 String& target_name = String::Handle(zone); |
| 1471 if (ic_data_or_cache.IsICData()) { |
| 1472 target_name = ICData::Cast(ic_data_or_cache).target_name(); |
| 1473 } else { |
| 1474 ASSERT(ic_data_or_cache.IsMegamorphicCache()); |
| 1475 target_name = MegamorphicCache::Cast(ic_data_or_cache).target_name(); |
| 1476 } |
| 1477 |
| 1478 Class& cls = Class::Handle(zone, receiver.clazz()); |
| 1479 Function& function = Function::Handle(zone); |
| 1480 |
| 1481 // Dart distinguishes getters from regular methods and allows their calls |
| 1482 // to mix with conversions, and its selectors are independent of arity. So do |
| 1483 // a zigzagged lookup to see whether this call failed because of an arity |
| 1484 // mismatch, a need for conversion, or because there really is no such method. |
| 1485 |
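| // NO_SUCH_METHOD() forwards the original call to noSuchMethod; CLOSURIZE(f) |
| // returns an implicit instance closure of f bound to the receiver. |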
| 1486 #define NO_SUCH_METHOD() \ |
| 1487 const Object& result = Object::Handle( \ |
| 1488 zone, DartEntry::InvokeNoSuchMethod( \ |
| 1489 receiver, target_name, orig_arguments, orig_arguments_desc)); \ |
| 1490 CheckResultError(result); \ |
| 1491 arguments.SetReturn(result); |
| 1492 |
| 1493 #define CLOSURIZE(some_function) \ |
| 1494 const Function& closure_function = \ |
| 1495 Function::Handle(zone, some_function.ImplicitClosureFunction()); \ |
| 1496 const Object& result = Object::Handle( \ |
| 1497 zone, closure_function.ImplicitInstanceClosure(receiver)); \ |
| 1498 arguments.SetReturn(result); |
| 1499 |
| 1500 const bool is_getter = Field::IsGetterName(target_name); |
| 1501 if (is_getter) { |
| 1502 // o.foo (o.get:foo) failed, closurize o.foo() if it exists. Or, |
| 1503 // o#foo (o.get:#foo) failed, closurize o.foo or o.foo(), whichever is |
| 1504 // encountered first on the inheritance chain. Or, |
| 1505 // o#foo= (o.get:#set:foo) failed, closurize o.foo= if it exists. |
| 1506 String& field_name = |
| 1507 String::Handle(zone, Field::NameFromGetter(target_name)); |
| 1508 |
| 1509 const bool is_extractor = field_name.CharAt(0) == '#'; |
| 1510 if (is_extractor) { |
| 1511 field_name = String::SubString(field_name, 1); |
| 1512 ASSERT(!Field::IsGetterName(field_name)); |
| 1513 field_name = Symbols::New(thread, field_name); |
| 1514 |
| 1515 if (!Field::IsSetterName(field_name)) { |
| 1516 const String& getter_name = |
| 1517 String::Handle(Field::GetterName(field_name)); |
| 1518 |
| 1519 // Zigzagged lookup: closurize either a regular method or a getter. |
| 1520 while (!cls.IsNull()) { |
| 1521 function ^= cls.LookupDynamicFunction(field_name); |
| 1522 if (!function.IsNull()) { |
| 1523 CLOSURIZE(function); |
| 1524 return; |
| 1525 } |
| 1526 function ^= cls.LookupDynamicFunction(getter_name); |
| 1527 if (!function.IsNull()) { |
| 1528 CLOSURIZE(function); |
| 1529 return; |
| 1530 } |
| 1531 cls = cls.SuperClass(); |
| 1532 } |
| 1533 NO_SUCH_METHOD(); |
| 1534 return; |
| 1535 } else { |
| 1536 // Fall through to the non-zigzagged lookup for o#foo=. |
| 1537 } |
| 1538 } |
| 1539 |
| 1540 while (!cls.IsNull()) { |
| 1541 function ^= cls.LookupDynamicFunction(field_name); |
| 1542 if (!function.IsNull()) { |
| 1543 CLOSURIZE(function); |
| 1544 return; |
| 1545 } |
| 1546 cls = cls.SuperClass(); |
| 1547 } |
| 1548 |
| 1549 // Fall through for noSuchMethod |
| 1550 } else { |
| 1551 // o.foo(...) failed: invoke noSuchMethod if foo exists but has the wrong |
| 1552 // number of arguments, or try (o.foo).call(...). |
| 1553 |
| 1554 if ((target_name.raw() == Symbols::Call().raw()) && receiver.IsClosure()) { |
| 1555 // Special case: closures are implemented with a call getter instead of a |
| 1556 // call method, and with lazy dispatchers the field-invocation-dispatcher |
| 1557 // would perform the closure call. |
| 1558 const Object& result = Object::Handle( |
| 1559 zone, DartEntry::InvokeClosure(orig_arguments, orig_arguments_desc)); |
| 1560 CheckResultError(result); |
| 1561 arguments.SetReturn(result); |
| 1562 return; |
| 1563 } |
| 1564 |
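| // Walk the superclass chain: finding a method with the right name but an |
| // incompatible arity means noSuchMethod; finding a getter means routing |
| // the call through (o.foo).call(...). |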
| 1565 const String& getter_name = |
| 1566 String::Handle(zone, Field::GetterName(target_name)); |
| 1567 while (!cls.IsNull()) { |
| 1568 function ^= cls.LookupDynamicFunction(target_name); |
| 1569 if (!function.IsNull()) { |
| 1570 ArgumentsDescriptor args_desc(orig_arguments_desc); |
| 1571 ASSERT(!function.AreValidArguments(args_desc, NULL)); |
| 1572 break; // mismatch, invoke noSuchMethod |
| 1573 } |
| 1574 function ^= cls.LookupDynamicFunction(getter_name); |
| 1575 if (!function.IsNull()) { |
| 1576 const Array& getter_arguments = Array::Handle(Array::New(1)); |
| 1577 getter_arguments.SetAt(0, receiver); |
| 1578 const Object& getter_result = Object::Handle( |
| 1579 zone, DartEntry::InvokeFunction(function, getter_arguments)); |
| 1580 CheckResultError(getter_result); |
| 1581 ASSERT(getter_result.IsNull() || getter_result.IsInstance()); |
| 1582 |
| 1583 orig_arguments.SetAt(0, getter_result); |
| 1584 const Object& call_result = Object::Handle( |
| 1585 zone, |
| 1586 DartEntry::InvokeClosure(orig_arguments, orig_arguments_desc)); |
| 1587 CheckResultError(call_result); |
| 1588 arguments.SetReturn(call_result); |
| 1589 return; |
| 1590 } |
| 1591 cls = cls.SuperClass(); |
| 1592 } |
| 1593 } |
| 1594 |
| 1595 NO_SUCH_METHOD(); |
| 1596 |
| 1597 #undef NO_SUCH_METHOD |
| 1598 #undef CLOSURIZE |
| 1599 } |
| 1600 |
| 1601 |
| 1602 // Invoke appropriate noSuchMethod function. |
| 1603 // Arg0: receiver (closure object) |
| 1604 // Arg1: arguments descriptor array. |
| 1605 // Arg2: arguments array. |
| 1606 DEFINE_RUNTIME_ENTRY(InvokeClosureNoSuchMethod, 3) { |
| 1607 const Closure& receiver = Closure::CheckedHandle(arguments.ArgAt(0)); |
| 1608 const Array& orig_arguments_desc = Array::CheckedHandle(arguments.ArgAt(1)); |
| 1609 const Array& orig_arguments = Array::CheckedHandle(arguments.ArgAt(2)); |
| 1610 |
| 1611 // For closures the function name is always 'call'. Replace it with the |
| 1612 // name of the closurized function so that the exception contains more |
| 1613 // relevant information. |
| 1614 const Function& function = Function::Handle(receiver.function()); |
| 1615 const String& original_function_name = |
| 1616 String::Handle(function.QualifiedUserVisibleName()); |
| 1617 const Object& result = Object::Handle(DartEntry::InvokeNoSuchMethod( |
| 1618 receiver, original_function_name, orig_arguments, orig_arguments_desc)); |
| 1619 CheckResultError(result); |
| 1620 arguments.SetReturn(result); |
| 1621 } |
| 1622 |
| 1623 |
| 1624 DEFINE_RUNTIME_ENTRY(StackOverflow, 0) { |
| 1625 #if defined(USING_SIMULATOR) |
| 1626 uword stack_pos = Simulator::Current()->get_sp(); |
| 1627 #else |
| 1628 uword stack_pos = Thread::GetCurrentStackPointer(); |
| 1629 #endif |
| 1630 // Always clear the stack overflow flags. They are meant for this |
| 1631 // particular stack overflow runtime call and are not meant to |
| 1632 // persist. |
| 1633 uword stack_overflow_flags = thread->GetAndClearStackOverflowFlags(); |
| 1634 |
| 1635 // If an interrupt happens at the same time as a stack overflow, we |
| 1636 // process the stack overflow now and leave the interrupt for next |
| 1637 // time. |
| 1638 if (IsCalleeFrameOf(thread->saved_stack_limit(), stack_pos)) { |
| 1639 // Use the preallocated stack overflow exception to avoid calling |
| 1640 // into dart code. |
| 1641 const Instance& exception = |
| 1642 Instance::Handle(isolate->object_store()->stack_overflow()); |
| 1643 Exceptions::Throw(thread, exception); |
| 1644 UNREACHABLE(); |
| 1645 } |
| 1646 |
| 1647 // The following code is used to stress test deoptimization and |
| 1648 // debugger stack tracing. |
| 1649 bool do_deopt = false; |
| 1650 bool do_stacktrace = false; |
| 1651 bool do_reload = false; |
| 1652 const intptr_t isolate_reload_every = |
| 1653 isolate->reload_every_n_stack_overflow_checks(); |
| 1654 if ((FLAG_deoptimize_every > 0) || (FLAG_stacktrace_every > 0) || |
| 1655 (isolate_reload_every > 0)) { |
| 1656 // TODO(turnidge): To make --deoptimize_every and |
| 1657 // --stacktrace-every faster we could move this increment/test to |
| 1658 // the generated code. |
| 1659 int32_t count = thread->IncrementAndGetStackOverflowCount(); |
| 1660 if (FLAG_deoptimize_every > 0 && (count % FLAG_deoptimize_every) == 0) { |
| 1661 do_deopt = true; |
| 1662 } |
| 1663 if (FLAG_stacktrace_every > 0 && (count % FLAG_stacktrace_every) == 0) { |
| 1664 do_stacktrace = true; |
| 1665 } |
| 1666 if ((isolate_reload_every > 0) && (count % isolate_reload_every) == 0) { |
| 1667 do_reload = isolate->CanReload(); |
| 1668 } |
| 1669 } |
| 1670 if ((FLAG_deoptimize_filter != NULL) || (FLAG_stacktrace_filter != NULL) || |
| 1671 FLAG_reload_every_optimized) { |
| 1672 DartFrameIterator iterator; |
| 1673 StackFrame* frame = iterator.NextFrame(); |
| 1674 ASSERT(frame != NULL); |
| 1675 const Code& code = Code::Handle(frame->LookupDartCode()); |
| 1676 ASSERT(!code.IsNull()); |
| 1677 const Function& function = Function::Handle(code.function()); |
| 1678 ASSERT(!function.IsNull()); |
| 1679 const char* function_name = function.ToFullyQualifiedCString(); |
| 1680 ASSERT(function_name != NULL); |
| 1681 if (!code.is_optimized() && FLAG_reload_every_optimized) { |
| 1682 // Don't do the reload if we aren't inside optimized code. |
| 1683 do_reload = false; |
| 1684 } |
| 1685 if (code.is_optimized() && FLAG_deoptimize_filter != NULL && |
| 1686 strstr(function_name, FLAG_deoptimize_filter) != NULL) { |
| 1687 OS::PrintErr("*** Forcing deoptimization (%s)\n", |
| 1688 function.ToFullyQualifiedCString()); |
| 1689 do_deopt = true; |
| 1690 } |
| 1691 if (FLAG_stacktrace_filter != NULL && |
| 1692 strstr(function_name, FLAG_stacktrace_filter) != NULL) { |
| 1693 OS::PrintErr("*** Computing stacktrace (%s)\n", |
| 1694 function.ToFullyQualifiedCString()); |
| 1695 do_stacktrace = true; |
| 1696 } |
| 1697 } |
| 1698 if (do_deopt) { |
| 1699 // TODO(turnidge): Consider using DeoptimizeAt instead. |
| 1700 DeoptimizeFunctionsOnStack(); |
| 1701 } |
| 1702 if (do_reload) { |
| 1703 #ifndef PRODUCT |
| 1704 JSONStream js; |
| 1705 // Maybe adjust the rate of future reloads. |
| 1706 isolate->MaybeIncreaseReloadEveryNStackOverflowChecks(); |
| 1707 // Issue a reload. |
| 1708 bool success = isolate->ReloadSources(&js, true /* force_reload */); |
| 1709 if (!success) { |
| 1710 FATAL1("*** Isolate reload failed:\n%s\n", js.ToCString()); |
| 1711 } |
| 1712 #endif |
| 1713 } |
| 1714 if (FLAG_support_debugger && do_stacktrace) { |
| 1715 String& var_name = String::Handle(); |
| 1716 Instance& var_value = Instance::Handle(); |
| 1717 // Collecting the stack trace and accessing local variables |
| 1718 // of frames may trigger parsing of functions to compute |
| 1719 // variable descriptors of functions. Parsing may trigger |
| 1720 // code execution, e.g. to compute compile-time constants. Thus, |
| 1721 // disable FLAG_stacktrace_every during trace collection to prevent |
| 1722 // recursive stack trace collection. |
| 1723 intptr_t saved_stacktrace_every = FLAG_stacktrace_every; |
| 1724 FLAG_stacktrace_every = 0; |
| 1725 DebuggerStackTrace* stack = isolate->debugger()->StackTrace(); |
| 1726 intptr_t num_frames = stack->Length(); |
| 1727 for (intptr_t i = 0; i < num_frames; i++) { |
| 1728 ActivationFrame* frame = stack->FrameAt(i); |
| 1729 #ifndef DART_PRECOMPILED_RUNTIME |
| 1730 // Ensure that we have unoptimized code. |
| 1731 frame->function().EnsureHasCompiledUnoptimizedCode(); |
| 1732 #endif |
| 1733 // Variable locations and number are unknown when precompiling. |
| 1734 const int num_vars = |
| 1735 FLAG_precompiled_runtime ? 0 : frame->NumLocalVariables(); |
| 1736 TokenPosition unused = TokenPosition::kNoSource; |
| 1737 for (intptr_t v = 0; v < num_vars; v++) { |
| 1738 frame->VariableAt(v, &var_name, &unused, &unused, &unused, &var_value); |
| 1739 } |
| 1740 } |
| 1741 if (FLAG_stress_async_stacks) { |
| 1742 Debugger::CollectAwaiterReturnStackTrace(); |
| 1743 } |
| 1744 FLAG_stacktrace_every = saved_stacktrace_every; |
| 1745 } |
| 1746 |
| 1747 const Error& error = Error::Handle(thread->HandleInterrupts()); |
| 1748 if (!error.IsNull()) { |
| 1749 Exceptions::PropagateError(error); |
| 1750 UNREACHABLE(); |
| 1751 } |
| 1752 |
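| // A stack-overflow check is also used as the trigger for on-stack |
| // replacement: if the unoptimized code requested OSR, compile an optimized |
| // version at the current deopt id and resume execution in it. |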
| 1753 if ((stack_overflow_flags & Thread::kOsrRequest) != 0) { |
| 1754 ASSERT(isolate->use_osr()); |
| 1755 DartFrameIterator iterator; |
| 1756 StackFrame* frame = iterator.NextFrame(); |
| 1757 ASSERT(frame != NULL); |
| 1758 const Code& code = Code::ZoneHandle(frame->LookupDartCode()); |
| 1759 ASSERT(!code.IsNull()); |
| 1760 ASSERT(!code.is_optimized()); |
| 1761 const Function& function = Function::Handle(code.function()); |
| 1762 ASSERT(!function.IsNull()); |
| 1763 |
| 1764 // If the code of the frame does not match the function's unoptimized code, |
| 1765 // we bail out since the code was reset by an isolate reload. |
| 1766 if (code.raw() != function.unoptimized_code()) { |
| 1767 return; |
| 1768 } |
| 1769 |
| 1770 // Since the code is referenced from the frame and the ZoneHandle, |
| 1771 // it cannot have been removed from the function. |
| 1772 ASSERT(function.HasCode()); |
| 1773 // Don't do OSR on intrinsified functions: The intrinsic code expects to be |
| 1774 // called like a regular function and can't be entered via OSR. |
| 1775 if (!Compiler::CanOptimizeFunction(thread, function) || |
| 1776 function.is_intrinsic()) { |
| 1777 return; |
| 1778 } |
| 1779 |
| 1780 // The unoptimized code is on the stack and should never be detached from |
| 1781 // the function at this point. |
| 1782 ASSERT(function.unoptimized_code() != Object::null()); |
| 1783 intptr_t osr_id = |
| 1784 Code::Handle(function.unoptimized_code()).GetDeoptIdForOsr(frame->pc()); |
| 1785 ASSERT(osr_id != Compiler::kNoOSRDeoptId); |
| 1786 if (FLAG_trace_osr) { |
| 1787 OS::Print("Attempting OSR for %s at id=%" Pd ", count=%" Pd "\n", |
| 1788 function.ToFullyQualifiedCString(), osr_id, |
| 1789 function.usage_counter()); |
| 1790 } |
| 1791 |
| 1792 // Since the code is referenced from the frame and the ZoneHandle, |
| 1793 // it cannot have been removed from the function. |
| 1794 const Object& result = Object::Handle( |
| 1795 Compiler::CompileOptimizedFunction(thread, function, osr_id)); |
| 1796 if (result.IsError()) { |
| 1797 Exceptions::PropagateError(Error::Cast(result)); |
| 1798 } |
| 1799 |
| 1800 if (!result.IsNull()) { |
| 1801 const Code& code = Code::Cast(result); |
| 1802 uword optimized_entry = |
| 1803 Instructions::UncheckedEntryPoint(code.instructions()); |
| 1804 frame->set_pc(optimized_entry); |
| 1805 frame->set_pc_marker(code.raw()); |
| 1806 } |
| 1807 } |
| 1808 } |
| 1809 |
| 1810 |
| 1811 DEFINE_RUNTIME_ENTRY(TraceICCall, 2) { |
| 1812 const ICData& ic_data = ICData::CheckedHandle(arguments.ArgAt(0)); |
| 1813 const Function& function = Function::CheckedHandle(arguments.ArgAt(1)); |
| 1814 DartFrameIterator iterator; |
| 1815 StackFrame* frame = iterator.NextFrame(); |
| 1816 ASSERT(frame != NULL); |
| 1817 OS::PrintErr("IC call @%#" Px ": ICData: %p cnt:%" Pd " nchecks: %" Pd |
| 1818 " %s\n", |
| 1819 frame->pc(), ic_data.raw(), function.usage_counter(), |
| 1820 ic_data.NumberOfChecks(), function.ToFullyQualifiedCString()); |
| 1821 } |
| 1822 |
| 1823 |
| 1824 // This is called from function that needs to be optimized. |
| 1825 // The requesting function can be already optimized (reoptimization). |
| 1826 // Returns the Code object where to continue execution. |
| 1827 DEFINE_RUNTIME_ENTRY(OptimizeInvokedFunction, 1) { |
| 1828 #if !defined(DART_PRECOMPILED_RUNTIME) |
| 1829 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0)); |
| 1830 ASSERT(!function.IsNull()); |
| 1831 ASSERT(function.HasCode()); |
| 1832 |
| 1833 if (Compiler::CanOptimizeFunction(thread, function)) { |
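| // With background compilation enabled, first drain the isolate's queue of |
| // fields whose unboxing was lazily disabled and deoptimize their dependent |
| // code before requesting a compile. |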
| 1834 if (FLAG_background_compilation) { |
| 1835 Field& field = Field::Handle(zone, isolate->GetDeoptimizingBoxedField()); |
| 1836 while (!field.IsNull()) { |
| 1837 if (FLAG_trace_optimization || FLAG_trace_field_guards) { |
| 1838 THR_Print("Lazy disabling unboxing of %s\n", field.ToCString()); |
| 1839 } |
| 1840 field.set_is_unboxing_candidate(false); |
| 1841 field.DeoptimizeDependentCode(); |
| 1842 // Get next field. |
| 1843 field = isolate->GetDeoptimizingBoxedField(); |
| 1844 } |
| 1845 } |
| 1846 // TODO(srdjan): Fix background compilation of regular expressions. |
| 1847 if (FLAG_background_compilation) { |
| 1848 if (FLAG_enable_inlining_annotations) { |
| 1849 FATAL("Cannot enable inlining annotations and background compilation"); |
| 1850 } |
| 1851 if (!BackgroundCompiler::IsDisabled()) { |
| 1852 if (FLAG_background_compilation_stop_alot) { |
| 1853 BackgroundCompiler::Stop(isolate); |
| 1854 } |
| 1855 // Reduce the chance of triggering optimization while the function is |
| 1856 // being optimized in the background. INT_MIN should ensure that it |
| 1857 // takes a long time to trigger optimization. |
| 1858 // Note that the background compilation queue rejects duplicate entries. |
| 1859 function.set_usage_counter(INT_MIN); |
| 1860 BackgroundCompiler::EnsureInit(thread); |
| 1861 ASSERT(isolate->background_compiler() != NULL); |
| 1862 isolate->background_compiler()->CompileOptimized(function); |
| 1863 // Continue in the same code. |
| 1864 arguments.SetReturn(function); |
| 1865 return; |
| 1866 } |
| 1867 } |
| 1868 |
| 1869 // Reset usage counter for reoptimization before calling optimizer to |
| 1870 // prevent recursive triggering of function optimization. |
| 1871 function.set_usage_counter(0); |
| 1872 if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) { |
| 1873 if (function.HasOptimizedCode()) { |
| 1874 THR_Print("ReCompiling function: '%s' \n", |
| 1875 function.ToFullyQualifiedCString()); |
| 1876 } |
| 1877 } |
| 1878 const Object& result = Object::Handle( |
| 1879 zone, Compiler::CompileOptimizedFunction(thread, function)); |
| 1880 if (result.IsError()) { |
| 1881 Exceptions::PropagateError(Error::Cast(result)); |
| 1882 } |
| 1883 } |
| 1884 arguments.SetReturn(function); |
| 1885 #else |
| 1886 UNREACHABLE(); |
| 1887 #endif // !DART_PRECOMPILED_RUNTIME |
| 1888 } |
| 1889 |
| 1890 |
| 1891 // The caller must be a static call in a Dart frame, or an entry frame. |
| 1892 // Patch static call to point to valid code's entry point. |
| 1893 DEFINE_RUNTIME_ENTRY(FixCallersTarget, 0) { |
| 1894 StackFrameIterator iterator(StackFrameIterator::kDontValidateFrames); |
| 1895 StackFrame* frame = iterator.NextFrame(); |
| 1896 ASSERT(frame != NULL); |
| 1897 while (frame->IsStubFrame() || frame->IsExitFrame()) { |
| 1898 frame = iterator.NextFrame(); |
| 1899 ASSERT(frame != NULL); |
| 1900 } |
| 1901 if (frame->IsEntryFrame()) { |
| 1902 // Since a function's current code is always unpatched, the entry frame |
| 1903 // always calls into unpatched code. |
| 1904 UNREACHABLE(); |
| 1905 } |
| 1906 ASSERT(frame->IsDartFrame()); |
| 1907 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode()); |
| 1908 ASSERT(caller_code.is_optimized()); |
| 1909 const Function& target_function = Function::Handle( |
| 1910 zone, caller_code.GetStaticCallTargetFunctionAt(frame->pc())); |
| 1911 |
| 1912 const Code& current_target_code = |
| 1913 Code::Handle(zone, target_function.EnsureHasCode()); |
| 1914 CodePatcher::PatchStaticCallAt(frame->pc(), caller_code, current_target_code); |
| 1915 caller_code.SetStaticCallTargetCodeAt(frame->pc(), current_target_code); |
| 1916 if (FLAG_trace_patching) { |
| 1917 OS::PrintErr("FixCallersTarget: caller %#" Px |
| 1918 " " |
| 1919 "target '%s' -> %#" Px "\n", |
| 1920 frame->pc(), target_function.ToFullyQualifiedCString(), |
| 1921 current_target_code.UncheckedEntryPoint()); |
| 1922 } |
| 1923 ASSERT(!current_target_code.IsDisabled()); |
| 1924 arguments.SetReturn(current_target_code); |
| 1925 } |
| 1926 |
| 1927 |
| 1928 // The caller tried to allocate an instance via an invalidated allocation |
| 1929 // stub. |
| 1930 DEFINE_RUNTIME_ENTRY(FixAllocationStubTarget, 0) { |
| 1931 #if !defined(DART_PRECOMPILED_RUNTIME) |
| 1932 StackFrameIterator iterator(StackFrameIterator::kDontValidateFrames); |
| 1933 StackFrame* frame = iterator.NextFrame(); |
| 1934 ASSERT(frame != NULL); |
| 1935 while (frame->IsStubFrame() || frame->IsExitFrame()) { |
| 1936 frame = iterator.NextFrame(); |
| 1937 ASSERT(frame != NULL); |
| 1938 } |
| 1939 if (frame->IsEntryFrame()) { |
| 1940 // There must be a valid Dart frame. |
| 1941 UNREACHABLE(); |
| 1942 } |
| 1943 ASSERT(frame->IsDartFrame()); |
| 1944 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode()); |
| 1945 ASSERT(!caller_code.IsNull()); |
| 1946 const Code& stub = Code::Handle( |
| 1947 CodePatcher::GetStaticCallTargetAt(frame->pc(), caller_code)); |
| 1948 Class& alloc_class = Class::ZoneHandle(zone); |
| 1949 alloc_class ^= stub.owner(); |
| 1950 Code& alloc_stub = Code::Handle(zone, alloc_class.allocation_stub()); |
| 1951 if (alloc_stub.IsNull()) { |
| 1952 alloc_stub = StubCode::GetAllocationStubForClass(alloc_class); |
| 1953 ASSERT(!alloc_stub.IsDisabled()); |
| 1954 } |
| 1955 CodePatcher::PatchStaticCallAt(frame->pc(), caller_code, alloc_stub); |
| 1956 caller_code.SetStubCallTargetCodeAt(frame->pc(), alloc_stub); |
| 1957 if (FLAG_trace_patching) { |
| 1958 OS::PrintErr("FixAllocationStubTarget: caller %#" Px |
| 1959 " alloc-class %s " |
| 1960 " -> %#" Px "\n", |
| 1961 frame->pc(), alloc_class.ToCString(), |
| 1962 alloc_stub.UncheckedEntryPoint()); |
| 1963 } |
| 1964 arguments.SetReturn(alloc_stub); |
| 1965 #else |
| 1966 UNREACHABLE(); |
| 1967 #endif |
| 1968 } |
| 1969 |
| 1970 |
| 1971 const char* DeoptReasonToCString(ICData::DeoptReasonId deopt_reason) { |
| 1972 switch (deopt_reason) { |
| 1973 #define DEOPT_REASON_TO_TEXT(name) \ |
| 1974 case ICData::kDeopt##name: \ |
| 1975 return #name; |
| 1976 DEOPT_REASONS(DEOPT_REASON_TO_TEXT) |
| 1977 #undef DEOPT_REASON_TO_TEXT |
| 1978 default: |
| 1979 UNREACHABLE(); |
| 1980 return ""; |
| 1981 } |
| 1982 } |
| 1983 |
| 1984 |
| 1985 void DeoptimizeAt(const Code& optimized_code, StackFrame* frame) { |
| 1986 ASSERT(optimized_code.is_optimized()); |
| 1987 Thread* thread = Thread::Current(); |
| 1988 Zone* zone = thread->zone(); |
| 1989 const Function& function = Function::Handle(zone, optimized_code.function()); |
| 1990 const Error& error = |
| 1991 Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, function)); |
| 1992 if (!error.IsNull()) { |
| 1993 Exceptions::PropagateError(error); |
| 1994 } |
| 1995 const Code& unoptimized_code = |
| 1996 Code::Handle(zone, function.unoptimized_code()); |
| 1997 ASSERT(!unoptimized_code.IsNull()); |
| 1998 // The switch to unoptimized code may have already occurred. |
| 1999 if (function.HasOptimizedCode()) { |
| 2000 function.SwitchToUnoptimizedCode(); |
| 2001 } |
| 2002 |
| 2003 #if defined(TARGET_ARCH_DBC) |
| 2004 const Instructions& instrs = |
| 2005 Instructions::Handle(zone, optimized_code.instructions()); |
| 2006 { |
| 2007 WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size()); |
| 2008 CodePatcher::InsertDeoptimizationCallAt(frame->pc()); |
| 2009 if (FLAG_trace_patching) { |
| 2010 const String& name = String::Handle(function.name()); |
| 2011 OS::PrintErr("InsertDeoptimizationCallAt: 0x%" Px " for %s\n", |
| 2012 frame->pc(), name.ToCString()); |
| 2013 } |
| 2014 const ExceptionHandlers& handlers = |
| 2015 ExceptionHandlers::Handle(zone, optimized_code.exception_handlers()); |
| 2016 ExceptionHandlerInfo info; |
| 2017 for (intptr_t i = 0; i < handlers.num_entries(); ++i) { |
| 2018 handlers.GetHandlerInfo(i, &info); |
| 2019 const uword patch_pc = instrs.PayloadStart() + info.handler_pc_offset; |
| 2020 CodePatcher::InsertDeoptimizationCallAt(patch_pc); |
| 2021 if (FLAG_trace_patching) { |
| 2022 OS::PrintErr(" at handler 0x%" Px "\n", patch_pc); |
| 2023 } |
| 2024 } |
| 2025 } |
| 2026 #else // !DBC |
| 2027 if (frame->IsMarkedForLazyDeopt()) { |
| 2028 // Deopt already scheduled. |
| 2029 if (FLAG_trace_deoptimization) { |
| 2030 THR_Print("Lazy deopt already scheduled for fp=%" Pp "\n", frame->fp()); |
| 2031 } |
| 2032 } else { |
| 2033 uword deopt_pc = frame->pc(); |
| 2034 ASSERT(optimized_code.ContainsInstructionAt(deopt_pc)); |
| 2035 |
| 2036 #if defined(DEBUG) |
| 2037 ValidateFrames(); |
| 2038 #endif |
| 2039 |
| 2040 // N.B.: Update the pending deopt table before updating the frame. The |
| 2041 // profiler may attempt a stack walk in between. |
| 2042 thread->isolate()->AddPendingDeopt(frame->fp(), deopt_pc); |
| 2043 frame->MarkForLazyDeopt(); |
| 2044 |
| 2045 if (FLAG_trace_deoptimization) { |
| 2046 THR_Print("Lazy deopt scheduled for fp=%" Pp ", pc=%" Pp "\n", |
| 2047 frame->fp(), deopt_pc); |
| 2048 } |
| 2049 } |
| 2050 #endif // !DBC |
| 2051 |
| 2052 // Mark code as dead (do not GC its embedded objects). |
| 2053 optimized_code.set_is_alive(false); |
| 2054 } |
| 2055 |
| 2056 |
| 2057 // Walks all Dart frames on the current stack and schedules deoptimization |
| 2058 // for every optimized frame found. |
| 2059 void DeoptimizeFunctionsOnStack() { |
| 2060 DartFrameIterator iterator; |
| 2061 StackFrame* frame = iterator.NextFrame(); |
| 2062 Code& optimized_code = Code::Handle(); |
| 2063 while (frame != NULL) { |
| 2064 optimized_code = frame->LookupDartCode(); |
| 2065 if (optimized_code.is_optimized()) { |
| 2066 DeoptimizeAt(optimized_code, frame); |
| 2067 } |
| 2068 frame = iterator.NextFrame(); |
| 2069 } |
| 2070 } |
| 2071 |
| 2072 #if !defined(DART_PRECOMPILED_RUNTIME) |
| 2073 #if !defined(TARGET_ARCH_DBC) |
| 2074 static const intptr_t kNumberOfSavedCpuRegisters = kNumberOfCpuRegisters; |
| 2075 static const intptr_t kNumberOfSavedFpuRegisters = kNumberOfFpuRegisters; |
| 2076 #else |
| 2077 static const intptr_t kNumberOfSavedCpuRegisters = 0; |
| 2078 static const intptr_t kNumberOfSavedFpuRegisters = 0; |
| 2079 #endif |
| 2080 |
| 2081 static void CopySavedRegisters(uword saved_registers_address, |
| 2082 fpu_register_t** fpu_registers, |
| 2083 intptr_t** cpu_registers) { |
| 2084 ASSERT(sizeof(fpu_register_t) == kFpuRegisterSize); |
| 2085 fpu_register_t* fpu_registers_copy = |
| 2086 new fpu_register_t[kNumberOfSavedFpuRegisters]; |
| 2087 ASSERT(fpu_registers_copy != NULL); |
| 2088 for (intptr_t i = 0; i < kNumberOfSavedFpuRegisters; i++) { |
| 2089 fpu_registers_copy[i] = |
| 2090 *reinterpret_cast<fpu_register_t*>(saved_registers_address); |
| 2091 saved_registers_address += kFpuRegisterSize; |
| 2092 } |
| 2093 *fpu_registers = fpu_registers_copy; |
| 2094 |
| 2095 ASSERT(sizeof(intptr_t) == kWordSize); |
| 2096 intptr_t* cpu_registers_copy = new intptr_t[kNumberOfSavedCpuRegisters]; |
| 2097 ASSERT(cpu_registers_copy != NULL); |
| 2098 for (intptr_t i = 0; i < kNumberOfSavedCpuRegisters; i++) { |
| 2099 cpu_registers_copy[i] = |
| 2100 *reinterpret_cast<intptr_t*>(saved_registers_address); |
| 2101 saved_registers_address += kWordSize; |
| 2102 } |
| 2103 *cpu_registers = cpu_registers_copy; |
| 2104 } |
| 2105 #endif |
| 2106 |
| 2107 |
| 2108 // Copies saved registers and the caller's frame into temporary buffers. |
| 2109 // Returns the stack size of the unoptimized frame. |
| 2110 // The calling code must be optimized, but its function may not have |
| 2111 // optimized code if the code is OSR code, or if the code was invalidated |
| 2112 // through class loading/finalization or a field guard. |
| 2113 DEFINE_LEAF_RUNTIME_ENTRY(intptr_t, |
| 2114 DeoptimizeCopyFrame, |
| 2115 2, |
| 2116 uword saved_registers_address, |
| 2117 uword is_lazy_deopt) { |
| 2118 #if !defined(DART_PRECOMPILED_RUNTIME) |
| 2119 Thread* thread = Thread::Current(); |
| 2120 Isolate* isolate = thread->isolate(); |
| 2121 StackZone zone(thread); |
| 2122 HANDLESCOPE(thread); |
| 2123 |
| 2124 // All registers have been saved below last-fp as if they were locals. |
| 2125 const uword last_fp = saved_registers_address + |
| 2126 (kNumberOfSavedCpuRegisters * kWordSize) + |
| 2127 (kNumberOfSavedFpuRegisters * kFpuRegisterSize) - |
| 2128 ((kFirstLocalSlotFromFp + 1) * kWordSize); |
| 2129 |
| 2130 // Get optimized code and frame that need to be deoptimized. |
| 2131 DartFrameIterator iterator(last_fp); |
| 2132 |
| 2133 StackFrame* caller_frame = iterator.NextFrame(); |
| 2134 ASSERT(caller_frame != NULL); |
| 2135 const Code& optimized_code = Code::Handle(caller_frame->LookupDartCode()); |
| 2136 ASSERT(optimized_code.is_optimized()); |
| 2137 const Function& top_function = |
| 2138 Function::Handle(thread->zone(), optimized_code.function()); |
| 2139 const bool deoptimizing_code = top_function.HasOptimizedCode(); |
| 2140 if (FLAG_trace_deoptimization) { |
| 2141 const Function& function = Function::Handle(optimized_code.function()); |
| 2142 THR_Print("== Deoptimizing code for '%s', %s, %s\n", |
| 2143 function.ToFullyQualifiedCString(), |
| 2144 deoptimizing_code ? "code & frame" : "frame", |
| 2145 is_lazy_deopt ? "lazy-deopt" : ""); |
| 2146 } |
| 2147 |
| 2148 #if !defined(TARGET_ARCH_DBC) |
| 2149 if (is_lazy_deopt) { |
| 2150 uword deopt_pc = isolate->FindPendingDeopt(caller_frame->fp()); |
| 2151 if (FLAG_trace_deoptimization) { |
| 2152 THR_Print("Lazy deopt fp=%" Pp " pc=%" Pp "\n", caller_frame->fp(), |
| 2153 deopt_pc); |
| 2154 } |
| 2155 |
| 2156 // N.B.: Update frame before updating pending deopt table. The profiler |
| 2157 // may attempt a stack walk in between. |
| 2158 caller_frame->set_pc(deopt_pc); |
| 2159 ASSERT(caller_frame->pc() == deopt_pc); |
| 2160 ASSERT(optimized_code.ContainsInstructionAt(caller_frame->pc())); |
| 2161 isolate->ClearPendingDeoptsAtOrBelow(caller_frame->fp()); |
| 2162 } else { |
| 2163 if (FLAG_trace_deoptimization) { |
| 2164 THR_Print("Eager deopt fp=%" Pp " pc=%" Pp "\n", caller_frame->fp(), |
| 2165 caller_frame->pc()); |
| 2166 } |
| 2167 } |
| 2168 #endif // !DBC |
| 2169 |
| 2170 // Copy the saved registers from the stack. |
| 2171 fpu_register_t* fpu_registers; |
| 2172 intptr_t* cpu_registers; |
| 2173 CopySavedRegisters(saved_registers_address, &fpu_registers, &cpu_registers); |
| 2174 |
| 2175 // Create the DeoptContext. |
| 2176 DeoptContext* deopt_context = new DeoptContext( |
| 2177 caller_frame, optimized_code, DeoptContext::kDestIsOriginalFrame, |
| 2178 fpu_registers, cpu_registers, is_lazy_deopt != 0, deoptimizing_code); |
| 2179 isolate->set_deopt_context(deopt_context); |
| 2180 |
| 2181 // Stack size (FP - SP) in bytes. |
| 2182 return deopt_context->DestStackAdjustment() * kWordSize; |
| 2183 #else |
| 2184 UNREACHABLE(); |
| 2185 return 0; |
| 2186 #endif // !DART_PRECOMPILED_RUNTIME |
| 2187 } |
| 2188 END_LEAF_RUNTIME_ENTRY |
| 2189 |
| 2190 |
| 2191 // The stack has been adjusted to fit all values for the unoptimized frame. |
| 2192 // Fill the unoptimized frame. |
| 2193 DEFINE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, 1, uword last_fp) { |
| 2194 #if !defined(DART_PRECOMPILED_RUNTIME) |
| 2195 Thread* thread = Thread::Current(); |
| 2196 Isolate* isolate = thread->isolate(); |
| 2197 StackZone zone(thread); |
| 2198 HANDLESCOPE(thread); |
| 2199 |
| 2200 DeoptContext* deopt_context = isolate->deopt_context(); |
| 2201 DartFrameIterator iterator(last_fp); |
| 2202 StackFrame* caller_frame = iterator.NextFrame(); |
| 2203 ASSERT(caller_frame != NULL); |
| 2204 |
| 2205 #if defined(DEBUG) |
| 2206 { |
| 2207 // The code from the deopt_context. |
| 2208 const Code& code = Code::Handle(deopt_context->code()); |
| 2209 |
| 2210 // The code from our frame. |
| 2211 const Code& optimized_code = Code::Handle(caller_frame->LookupDartCode()); |
| 2212 const Function& function = Function::Handle(optimized_code.function()); |
| 2213 ASSERT(!function.IsNull()); |
| 2214 |
| 2215 // The code will be the same as before. |
| 2216 ASSERT(code.raw() == optimized_code.raw()); |
| 2217 |
| 2218 // Some sanity checking of the optimized code. |
| 2219 ASSERT(!optimized_code.IsNull() && optimized_code.is_optimized()); |
| 2220 } |
| 2221 #endif |
| 2222 |
| 2223 deopt_context->set_dest_frame(caller_frame); |
| 2224 deopt_context->FillDestFrame(); |
| 2225 |
| 2226 #else |
| 2227 UNREACHABLE(); |
| 2228 #endif // !DART_PRECOMPILED_RUNTIME |
| 2229 } |
| 2230 END_LEAF_RUNTIME_ENTRY |
| 2231 |
| 2232 |
| 2233 // This is the last step in the deoptimization; a GC can occur here. |
| 2234 // Returns the number of bytes to remove from the expression stack of the |
| 2235 // bottom-most deoptimized frame. Those arguments were artificially injected |
| 2236 // under the return address to keep them discoverable by a GC that can occur |
| 2237 // during the materialization phase. |
| 2238 DEFINE_RUNTIME_ENTRY(DeoptimizeMaterialize, 0) { |
| 2239 #if !defined(DART_PRECOMPILED_RUNTIME) |
| 2240 #if defined(DEBUG) |
| 2241 { |
| 2242 // We may rendezvous for a safepoint at entry or GC from the allocations |
| 2243 // below. Check the stack is walkable. |
| 2244 ValidateFrames(); |
| 2245 } |
| 2246 #endif |
| 2247 DeoptContext* deopt_context = isolate->deopt_context(); |
| 2248 intptr_t deopt_arg_count = deopt_context->MaterializeDeferredObjects(); |
| 2249 isolate->set_deopt_context(NULL); |
| 2250 delete deopt_context; |
| 2251 |
| 2252 // Return value tells deoptimization stub to remove the given number of bytes |
| 2253 // from the stack. |
| 2254 arguments.SetReturn(Smi::Handle(Smi::New(deopt_arg_count * kWordSize))); |
| 2255 #else |
| 2256 UNREACHABLE(); |
| 2257 #endif // !DART_PRECOMPILED_RUNTIME |
| 2258 } |
| 2259 |
| 2260 |
| 2261 DEFINE_RUNTIME_ENTRY(RewindPostDeopt, 0) { |
| 2262 #if !defined(DART_PRECOMPILED_RUNTIME) |
| 2263 #if !defined(PRODUCT) |
| 2264 isolate->debugger()->RewindPostDeopt(); |
| 2265 #endif // !PRODUCT |
| 2266 #endif // !DART_PRECOMPILED_RUNTIME |
| 2267 UNREACHABLE(); |
| 2268 } |
| 2269 |
| 2270 DEFINE_LEAF_RUNTIME_ENTRY(intptr_t, |
| 2271 BigintCompare, |
| 2272 2, |
| 2273 RawBigint* left, |
| 2274 RawBigint* right) { |
| 2275 Thread* thread = Thread::Current(); |
| 2276 StackZone zone(thread); |
| 2277 HANDLESCOPE(thread); |
| 2278 const Bigint& big_left = Bigint::Handle(left); |
| 2279 const Bigint& big_right = Bigint::Handle(right); |
| 2280 return big_left.CompareWith(big_right); |
| 2281 } |
| 2282 END_LEAF_RUNTIME_ENTRY |
| 2283 |
| 2284 |
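| // Implements Dart's '%' on doubles: a negative fmod remainder is adjusted |
| // to be non-negative, e.g. DartModulo(-5.0, 3.0) == 1.0 and |
| // DartModulo(5.0, -3.0) == 2.0. |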
| 2285 double DartModulo(double left, double right) { |
| 2286 double remainder = fmod_ieee(left, right); |
| 2287 if (remainder == 0.0) { |
| 2288 // We explicitly switch to the positive 0.0 (just in case it was negative). |
| 2289 remainder = +0.0; |
| 2290 } else if (remainder < 0.0) { |
| 2291 if (right < 0) { |
| 2292 remainder -= right; |
| 2293 } else { |
| 2294 remainder += right; |
| 2295 } |
| 2296 } |
| 2297 return remainder; |
| 2298 } |
| 2299 |
| 2300 |
| 2301 // Update the global type feedback recorded for a field to reflect the |
| 2302 // assignment of the given value. |
| 2303 // Arg0: Field object. |
| 2304 // Arg1: Value that is being stored. |
| 2305 DEFINE_RUNTIME_ENTRY(UpdateFieldCid, 2) { |
| 2306 const Field& field = Field::CheckedHandle(arguments.ArgAt(0)); |
| 2307 const Object& value = Object::Handle(arguments.ArgAt(1)); |
| 2308 field.RecordStore(value); |
| 2309 } |
| 2310 |
| 2311 |
| 2312 DEFINE_RUNTIME_ENTRY(InitStaticField, 1) { |
| 2313 const Field& field = Field::CheckedHandle(arguments.ArgAt(0)); |
| 2314 field.EvaluateInitializer(); |
| 2315 } |
| 2316 |
| 2317 |
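| // Doubles the TypedData backing a RegExp's growable stack; the one-element |
| // cell (Arg0) is updated in place and the new data is also returned. |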
| 2318 DEFINE_RUNTIME_ENTRY(GrowRegExpStack, 1) { |
| 2319 const Array& typed_data_cell = Array::CheckedHandle(arguments.ArgAt(0)); |
| 2320 ASSERT(!typed_data_cell.IsNull() && typed_data_cell.Length() == 1); |
| 2321 const TypedData& old_data = TypedData::CheckedHandle(typed_data_cell.At(0)); |
| 2322 ASSERT(!old_data.IsNull()); |
| 2323 const intptr_t cid = old_data.GetClassId(); |
| 2324 const intptr_t old_size = old_data.Length(); |
| 2325 const intptr_t new_size = 2 * old_size; |
| 2326 const intptr_t elm_size = old_data.ElementSizeInBytes(); |
| 2327 const TypedData& new_data = |
| 2328 TypedData::Handle(TypedData::New(cid, new_size, Heap::kOld)); |
| 2329 TypedData::Copy(new_data, 0, old_data, 0, old_size * elm_size); |
| 2330 typed_data_cell.SetAt(0, new_data); |
| 2331 arguments.SetReturn(new_data); |
| 2332 } |
| 2333 |
46 } // namespace dart | 2334 } // namespace dart |