Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_compiler_arm64.cc

Issue 2481873005: clang-format runtime/vm (Closed)
Patch Set: Merge (created 4 years, 1 month ago)
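The CL is mechanical: clang-format repacks wrapped argument lists up to the 80-column limit instead of giving each argument its own line, and nearly every hunk below is that one transformation. A representative before/after, taken directly from this file's diff:

  // Before: one argument per line, aligned under the open paren.
  return GenerateCallSubtypeTestStub(kTestTypeTwoArgs,
                                     kInstanceReg,
                                     kTypeArgumentsReg,
                                     kTempReg,
                                     is_instance_lbl,
                                     is_not_instance_lbl);

  // After: arguments packed onto as few lines as fit in 80 columns.
  return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, kInstanceReg,
                                     kTypeArgumentsReg, kTempReg,
                                     is_instance_lbl, is_not_instance_lbl);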
 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.

 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_ARM64.
 #if defined(TARGET_ARCH_ARM64)

 #include "vm/flow_graph_compiler.h"

 #include "vm/ast_printer.h"
(...skipping 98 matching lines...)
   builder->AddCallerFp(slot_ix++);
   builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++);

   // Emit all values that are needed for materialization as a part of the
   // expression stack for the bottom-most frame. This guarantees that GC
   // will be able to find them during materialization.
   slot_ix = builder->EmitMaterializationArguments(slot_ix);

   // For the innermost environment, set outgoing arguments and the locals.
   for (intptr_t i = current->Length() - 1;
-       i >= current->fixed_parameter_count();
-       i--) {
+       i >= current->fixed_parameter_count(); i--) {
     builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
   }

   Environment* previous = current;
   current = current->outer();
   while (current != NULL) {
     builder->AddPp(current->function(), slot_ix++);
     builder->AddPcMarker(previous->function(), slot_ix++);
     builder->AddCallerFp(slot_ix++);

     // For any outer environment the deopt id is that of the call instruction
     // which is recorded in the outer environment.
-    builder->AddReturnAddress(
-        current->function(),
-        Thread::ToDeoptAfter(current->deopt_id()),
-        slot_ix++);
+    builder->AddReturnAddress(current->function(),
+                              Thread::ToDeoptAfter(current->deopt_id()),
+                              slot_ix++);

     // The values of outgoing arguments can be changed from the inlined call so
     // we must read them from the previous environment.
     for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) {
-      builder->AddCopy(previous->ValueAt(i),
-                       previous->LocationAt(i),
-                       slot_ix++);
+      builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i),
+                       slot_ix++);
     }

     // Set the locals, note that outgoing arguments are not in the environment.
     for (intptr_t i = current->Length() - 1;
-         i >= current->fixed_parameter_count();
-         i--) {
-      builder->AddCopy(current->ValueAt(i),
-                       current->LocationAt(i),
-                       slot_ix++);
+         i >= current->fixed_parameter_count(); i--) {
+      builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
     }

     // Iterate on the outer environment.
     previous = current;
     current = current->outer();
   }
   // The previous pointer is now the outermost environment.
   ASSERT(previous != NULL);

   // Add slots for the outermost environment.
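The hunk above is the innermost-to-outermost walk that materializes a deoptimization environment, emitting one slot group per inlined frame. A minimal sketch of that traversal pattern, using a toy Environment type (hypothetical names; only the previous/current two-pointer walk mirrors the code above):

#include <cstdio>

// Toy stand-in for the chain of inlined-call environments: each frame points
// to the environment of the frame that inlined it (NULL at the top).
struct Environment {
  const char* function_name;
  Environment* outer;
};

// Handle the innermost frame first, then emit caller-side slots for each
// enclosing frame until the chain ends, as the builder loop above does.
void WalkEnvironments(Environment* innermost) {
  Environment* previous = innermost;
  Environment* current = innermost->outer;
  while (current != NULL) {
    // Slots for 'current' (the caller) are emitted while argument values are
    // still read from 'previous' (the callee), exactly as in the code above.
    std::printf("frame %s inlined into %s\n", previous->function_name,
                current->function_name);
    previous = current;
    current = current->outer;
  }
  // 'previous' now names the outermost, non-inlined frame.
  std::printf("outermost frame: %s\n", previous->function_name);
}

int main() {
  Environment outer_env = {"main", NULL};
  Environment inner_env = {"leaf", &outer_env};
  WalkEnvironments(&inner_env);
  return 0;
}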
(...skipping 111 matching lines...)
     __ b(is_not_instance_lbl, EQ);
   }
   // A function type test requires checking the function signature.
   if (!type.IsFunctionType()) {
     const intptr_t num_type_args = type_class.NumTypeArguments();
     const intptr_t num_type_params = type_class.NumTypeParameters();
     const intptr_t from_index = num_type_args - num_type_params;
     const TypeArguments& type_arguments =
         TypeArguments::ZoneHandle(zone(), type.arguments());
     const bool is_raw_type = type_arguments.IsNull() ||
                              type_arguments.IsRaw(from_index, num_type_params);
     if (is_raw_type) {
       const Register kClassIdReg = R2;
       // dynamic type argument, check only classes.
       __ LoadClassId(kClassIdReg, kInstanceReg);
       __ CompareImmediate(kClassIdReg, type_class.id());
       __ b(is_instance_lbl, EQ);
       // List is a very common case.
       if (IsListClass(type_class)) {
         GenerateListTypeCheck(kClassIdReg, is_instance_lbl);
       }
       return GenerateSubtype1TestCacheLookup(
           token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
     }
     // If one type argument only, check if type argument is Object or dynamic.
     if (type_arguments.Length() == 1) {
-      const AbstractType& tp_argument = AbstractType::ZoneHandle(
-          zone(), type_arguments.TypeAt(0));
+      const AbstractType& tp_argument =
+          AbstractType::ZoneHandle(zone(), type_arguments.TypeAt(0));
       ASSERT(!tp_argument.IsMalformed());
       if (tp_argument.IsType()) {
         ASSERT(tp_argument.HasResolvedTypeClass());
         // Check if type argument is dynamic or Object.
         const Type& object_type = Type::Handle(zone(), Type::ObjectType());
         if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
           // Instance class test only necessary.
           return GenerateSubtype1TestCacheLookup(
               token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
         }
       }
     }
   }
   // Regular subtype test cache involving instance's type arguments.
   const Register kTypeArgumentsReg = kNoRegister;
   const Register kTempReg = kNoRegister;
   // R0: instance (must be preserved).
-  return GenerateCallSubtypeTestStub(kTestTypeTwoArgs,
-                                     kInstanceReg,
-                                     kTypeArgumentsReg,
-                                     kTempReg,
-                                     is_instance_lbl,
-                                     is_not_instance_lbl);
+  return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, kInstanceReg,
+                                     kTypeArgumentsReg, kTempReg,
+                                     is_instance_lbl, is_not_instance_lbl);
 }


 void FlowGraphCompiler::CheckClassIds(Register class_id_reg,
                                       const GrowableArray<intptr_t>& class_ids,
                                       Label* is_equal_lbl,
                                       Label* is_not_equal_lbl) {
   for (intptr_t i = 0; i < class_ids.length(); i++) {
     __ CompareImmediate(class_id_reg, class_ids[i]);
     __ b(is_equal_lbl, EQ);
(...skipping 18 matching lines...)
     // Fallthrough.
     return true;
   }
   const Class& type_class = Class::Handle(zone(), type.type_class());
   ASSERT(type_class.NumTypeArguments() == 0);

   const Register kInstanceReg = R0;
   __ tsti(kInstanceReg, Immediate(kSmiTagMask));
   // If instance is Smi, check directly.
   const Class& smi_class = Class::Handle(zone(), Smi::Class());
-  if (smi_class.IsSubtypeOf(TypeArguments::Handle(zone()),
-                            type_class,
-                            TypeArguments::Handle(zone()),
-                            NULL,
-                            NULL,
-                            Heap::kOld)) {
+  if (smi_class.IsSubtypeOf(TypeArguments::Handle(zone()), type_class,
+                            TypeArguments::Handle(zone()), NULL, NULL,
+                            Heap::kOld)) {
     __ b(is_instance_lbl, EQ);
   } else {
     __ b(is_not_instance_lbl, EQ);
   }
   const Register kClassIdReg = R2;
   __ LoadClassId(kClassIdReg, kInstanceReg);
   // See ClassFinalizer::ResolveSuperTypeAndInterfaces for list of restricted
   // interfaces.
   // Bool interface can be implemented only by core class Bool.
   if (type.IsBoolType()) {
     __ CompareImmediate(kClassIdReg, kBoolCid);
     __ b(is_instance_lbl, EQ);
     __ b(is_not_instance_lbl);
     return false;
   }
   // Custom checking for numbers (Smi, Mint, Bigint and Double).
   // Note that instance is not Smi (checked above).
   if (type.IsNumberType() || type.IsIntType() || type.IsDoubleType()) {
-    GenerateNumberTypeCheck(
-        kClassIdReg, type, is_instance_lbl, is_not_instance_lbl);
+    GenerateNumberTypeCheck(kClassIdReg, type, is_instance_lbl,
+                            is_not_instance_lbl);
     return false;
   }
   if (type.IsStringType()) {
     GenerateStringTypeCheck(kClassIdReg, is_instance_lbl, is_not_instance_lbl);
     return false;
   }
   if (type.IsDartFunctionType()) {
     // Check if instance is a closure.
     __ CompareImmediate(kClassIdReg, kClosureCid);
     __ b(is_instance_lbl, EQ);
(...skipping 26 matching lines...)
   __ LoadClass(R1, kInstanceReg);
   // R1: instance class.
   // Check immediate superclass equality.
   __ LoadFieldFromOffset(R2, R1, Class::super_type_offset());
   __ LoadFieldFromOffset(R2, R2, Type::type_class_id_offset());
   __ CompareImmediate(R2, Smi::RawValue(type_class.id()));
   __ b(is_instance_lbl, EQ);

   const Register kTypeArgumentsReg = kNoRegister;
   const Register kTempReg = kNoRegister;
-  return GenerateCallSubtypeTestStub(kTestTypeOneArg,
-                                     kInstanceReg,
-                                     kTypeArgumentsReg,
-                                     kTempReg,
-                                     is_instance_lbl,
-                                     is_not_instance_lbl);
+  return GenerateCallSubtypeTestStub(kTestTypeOneArg, kInstanceReg,
+                                     kTypeArgumentsReg, kTempReg,
+                                     is_instance_lbl, is_not_instance_lbl);
 }
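The `tsti ..., Immediate(kSmiTagMask)` tests in these helpers rely on the VM's Smi tagging: a small integer keeps a 0 in its low bit and stores its value shifted left by one, while heap pointers carry a 1 tag bit, so a single TST instruction classifies a value. A toy restatement of the convention (not the VM's actual helpers):

#include <cassert>
#include <cstdint>

// Dart VM Smi tagging: low bit 0 means Smi, low bit 1 means heap pointer,
// so TST reg, #1 (tsti ... kSmiTagMask) sets the Z flag exactly for Smis.
constexpr intptr_t kSmiTagMask = 1;
constexpr intptr_t kSmiTagShift = 1;

inline bool IsSmi(intptr_t raw) { return (raw & kSmiTagMask) == 0; }
inline intptr_t SmiValue(intptr_t raw) { return raw >> kSmiTagShift; }
inline intptr_t SmiFromInt(intptr_t value) { return value << kSmiTagShift; }

int main() {
  const intptr_t raw = SmiFromInt(7);  // Tagged representation: 14.
  assert(IsSmi(raw));
  assert(SmiValue(raw) == 7);
  return 0;
}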


 // Generates inlined check if 'type' is a type parameter or type itself
 // R0: instance (preserved).
 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
     TokenPosition token_pos,
     const AbstractType& type,
     Label* is_instance_lbl,
     Label* is_not_instance_lbl) {
   __ Comment("UninstantiatedTypeTest");
   ASSERT(!type.IsInstantiated());
   // Skip check if destination is a dynamic type.
   if (type.IsTypeParameter()) {
     const TypeParameter& type_param = TypeParameter::Cast(type);
     // Load instantiator type arguments on stack.
     __ ldr(R1, Address(SP));  // Get instantiator type arguments.
     // R1: instantiator type arguments.
     // Check if type arguments are null, i.e. equivalent to vector of dynamic.
     __ CompareObject(R1, Object::null_object());
     __ b(is_instance_lbl, EQ);
-    __ LoadFieldFromOffset(
-        R2, R1, TypeArguments::type_at_offset(type_param.index()));
+    __ LoadFieldFromOffset(R2, R1,
+                           TypeArguments::type_at_offset(type_param.index()));
     // R2: concrete type of type.
     // Check if type argument is dynamic.
     __ CompareObject(R2, Object::dynamic_type());
     __ b(is_instance_lbl, EQ);
     __ CompareObject(R2, Type::ZoneHandle(zone(), Type::ObjectType()));
     __ b(is_instance_lbl, EQ);

     // For Smi check quickly against int and num interfaces.
     Label not_smi;
     __ tsti(R0, Immediate(kSmiTagMask));  // Value is Smi?
     __ b(&not_smi, NE);
     __ CompareObject(R2, Type::ZoneHandle(zone(), Type::IntType()));
     __ b(is_instance_lbl, EQ);
     __ CompareObject(R2, Type::ZoneHandle(zone(), Type::Number()));
     __ b(is_instance_lbl, EQ);
     // Smi must be handled in runtime.
     Label fall_through;
     __ b(&fall_through);

     __ Bind(&not_smi);
     // R1: instantiator type arguments.
     // R0: instance.
     const Register kInstanceReg = R0;
     const Register kTypeArgumentsReg = R1;
     const Register kTempReg = kNoRegister;
-    const SubtypeTestCache& type_test_cache =
-        SubtypeTestCache::ZoneHandle(zone(),
-                                     GenerateCallSubtypeTestStub(kTestTypeThreeArgs,
-                                                                 kInstanceReg,
-                                                                 kTypeArgumentsReg,
-                                                                 kTempReg,
-                                                                 is_instance_lbl,
-                                                                 is_not_instance_lbl));
+    const SubtypeTestCache& type_test_cache = SubtypeTestCache::ZoneHandle(
+        zone(), GenerateCallSubtypeTestStub(
+                    kTestTypeThreeArgs, kInstanceReg, kTypeArgumentsReg,
+                    kTempReg, is_instance_lbl, is_not_instance_lbl));
     __ Bind(&fall_through);
     return type_test_cache.raw();
   }
   if (type.IsType()) {
     const Register kInstanceReg = R0;
     const Register kTypeArgumentsReg = R1;
     __ tsti(kInstanceReg, Immediate(kSmiTagMask));  // Is instance Smi?
     __ b(is_not_instance_lbl, EQ);
     __ ldr(kTypeArgumentsReg, Address(SP));  // Instantiator type args.
     // Uninstantiated type class is known at compile time, but the type
     // arguments are determined at runtime by the instantiator.
     const Register kTempReg = kNoRegister;
-    return GenerateCallSubtypeTestStub(kTestTypeThreeArgs,
-                                       kInstanceReg,
-                                       kTypeArgumentsReg,
-                                       kTempReg,
-                                       is_instance_lbl,
-                                       is_not_instance_lbl);
+    return GenerateCallSubtypeTestStub(kTestTypeThreeArgs, kInstanceReg,
+                                       kTypeArgumentsReg, kTempReg,
+                                       is_instance_lbl, is_not_instance_lbl);
   }
   return SubtypeTestCache::null();
 }


 // Inputs:
 // - R0: instance being type checked (preserved).
 // - R1: optional instantiator type arguments (preserved).
 // Clobbers R2, R3.
 // Returns:
(...skipping 11 matching lines...)
     // A non-null value is returned from a void function, which will result in a
     // type error. A null value is handled prior to executing this inline code.
     return SubtypeTestCache::null();
   }
   if (type.IsInstantiated()) {
     const Class& type_class = Class::ZoneHandle(zone(), type.type_class());
     // A class equality check is only applicable with a dst type (not a
     // function type) of a non-parameterized class or with a raw dst type of
     // a parameterized class.
     if (type.IsFunctionType() || (type_class.NumTypeArguments() > 0)) {
-      return GenerateInstantiatedTypeWithArgumentsTest(token_pos,
-                                                       type,
-                                                       is_instance_lbl,
-                                                       is_not_instance_lbl);
+      return GenerateInstantiatedTypeWithArgumentsTest(
+          token_pos, type, is_instance_lbl, is_not_instance_lbl);
       // Fall through to runtime call.
     }
-    const bool has_fall_through =
-        GenerateInstantiatedTypeNoArgumentsTest(token_pos,
-                                                type,
-                                                is_instance_lbl,
-                                                is_not_instance_lbl);
+    const bool has_fall_through = GenerateInstantiatedTypeNoArgumentsTest(
+        token_pos, type, is_instance_lbl, is_not_instance_lbl);
     if (has_fall_through) {
       // If test non-conclusive so far, try the inlined type-test cache.
       // 'type' is known at compile time.
       return GenerateSubtype1TestCacheLookup(
           token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
     } else {
       return SubtypeTestCache::null();
     }
   }
-  return GenerateUninstantiatedTypeTest(token_pos,
-                                        type,
-                                        is_instance_lbl,
-                                        is_not_instance_lbl);
+  return GenerateUninstantiatedTypeTest(token_pos, type, is_instance_lbl,
+                                        is_not_instance_lbl);
 }
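Several of the helpers above hand back a RawSubtypeTestCache for the slow path. Conceptually it is a per-call-site memo that the test stub probes before falling back to the runtime. A simplified model (toy names and layout; the VM's cache is a flat backing array scanned by the stub):

#include <cstdint>
#include <cstdio>
#include <vector>

// One cached answer: which receiver class and type-argument vectors were
// tested against this call site's type, and what the runtime decided.
struct Entry {
  intptr_t instance_cid;
  intptr_t instance_type_args;      // Opaque ids standing in for vectors.
  intptr_t instantiator_type_args;
  bool result;
};

struct ToySubtypeTestCache {
  std::vector<Entry> entries;

  // The generated stub scans its backing store linearly, much like this.
  const bool* Lookup(intptr_t cid, intptr_t inst, intptr_t instr) const {
    for (const Entry& e : entries) {
      if (e.instance_cid == cid && e.instance_type_args == inst &&
          e.instantiator_type_args == instr) {
        return &e.result;
      }
    }
    return nullptr;  // Miss: the runtime decides and appends an entry.
  }
};

int main() {
  ToySubtypeTestCache cache;
  cache.entries.push_back({/*cid=*/42, /*inst=*/1, /*instr=*/0, true});
  const bool* hit = cache.Lookup(42, 1, 0);
  std::printf("cached: %s\n", hit ? (*hit ? "true" : "false") : "miss");
  return 0;
}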


 // If instanceof type test cannot be performed successfully at compile time and
 // therefore eliminated, optimize it by adding inlined tests for:
 // - NULL -> return false.
 // - Smi -> compile time subtype check (only if dst class is not parameterized).
 // - Class equality (only if class is not parameterized).
 // Inputs:
(...skipping 21 matching lines...)
     // instantiated).
     // We can only inline this null check if the type is instantiated at compile
     // time, since an uninstantiated type at compile time could be Object or
     // dynamic at run time.
     __ CompareObject(R0, Object::null_object());
     __ b(type.IsNullType() ? &is_instance : &is_not_instance, EQ);
   }

   // Generate inline instanceof test.
   SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
-  test_cache = GenerateInlineInstanceof(token_pos, type,
-                                        &is_instance, &is_not_instance);
+  test_cache =
+      GenerateInlineInstanceof(token_pos, type, &is_instance, &is_not_instance);

   // test_cache is null if there is no fall-through.
   Label done;
   if (!test_cache.IsNull()) {
     // Generate runtime call.
     // Load instantiator (R2) and its type arguments (R1).
     __ ldr(R1, Address(SP, 0 * kWordSize));
     __ PushObject(Object::null_object());  // Make room for the result.
     __ Push(R0);                           // Push the instance.
     __ PushObject(type);                   // Push the type.
     __ Push(R1);                           // Push instantiator type arguments (R1).
     __ LoadUniqueObject(R0, test_cache);
     __ Push(R0);
     GenerateRuntimeCall(token_pos, deopt_id, kInstanceofRuntimeEntry, 4, locs);
     // Pop the parameters supplied to the runtime entry. The result of the
     // instanceof runtime call will be left as the result of the operation.
     __ Drop(4);
     if (negate_result) {
       __ Pop(R1);
       __ LoadObject(R0, Bool::True());
(...skipping 42 matching lines...)
   // Preserve instantiator type arguments (R1).
   __ Push(R1);
   // A null object is always assignable and is returned as result.
   Label is_assignable, runtime_call;
   __ CompareObject(R0, Object::null_object());
   __ b(&is_assignable, EQ);

   // Generate throw new TypeError() if the type is malformed or malbounded.
   if (dst_type.IsMalformedOrMalbounded()) {
     __ PushObject(Object::null_object());  // Make room for the result.
     __ Push(R0);                           // Push the source object.
     __ PushObject(dst_name);               // Push the name of the destination.
     __ PushObject(dst_type);               // Push the type of the destination.
-    GenerateRuntimeCall(token_pos,
-                        deopt_id,
-                        kBadTypeErrorRuntimeEntry,
-                        3,
-                        locs);
+    GenerateRuntimeCall(token_pos, deopt_id, kBadTypeErrorRuntimeEntry, 3,
+                        locs);
     // We should never return here.
     __ brk(0);

     __ Bind(&is_assignable);  // For a null object.
     // Restore instantiator type arguments (R1).
     __ Pop(R1);
     return;
   }

   // Generate inline type check, linking to runtime call if not assignable.
   SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
-  test_cache = GenerateInlineInstanceof(token_pos, dst_type,
-                                        &is_assignable, &runtime_call);
+  test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable,
+                                        &runtime_call);

   __ Bind(&runtime_call);
   __ ldr(R1, Address(SP));               // Load instantiator type arguments (R1).
   __ PushObject(Object::null_object());  // Make room for the result.
   __ Push(R0);                           // Push the source object.
   __ PushObject(dst_type);               // Push the type of the destination.
   __ Push(R1);                           // Push instantiator type arguments (R1).
   __ PushObject(dst_name);               // Push the name of the destination.
   __ LoadUniqueObject(R0, test_cache);
   __ Push(R0);
   GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 5, locs);
   // Pop the parameters supplied to the runtime entry. The result of the
   // type check runtime call is the checked value.
   __ Drop(5);
   __ Pop(R0);

   __ Bind(&is_assignable);
(...skipping 25 matching lines...)
   const int num_params =
       num_fixed_params + num_opt_pos_params + num_opt_named_params;
   ASSERT(function.NumParameters() == num_params);
   ASSERT(parsed_function().first_parameter_index() == kFirstLocalSlotFromFp);

   // Check that min_num_pos_args <= num_pos_args <= max_num_pos_args,
   // where num_pos_args is the number of positional arguments passed in.
   const int min_num_pos_args = num_fixed_params;
   const int max_num_pos_args = num_fixed_params + num_opt_pos_params;

-  __ LoadFieldFromOffset(
-      R8, R4, ArgumentsDescriptor::positional_count_offset());
+  __ LoadFieldFromOffset(R8, R4,
+                         ArgumentsDescriptor::positional_count_offset());
   // Check that min_num_pos_args <= num_pos_args.
   Label wrong_num_arguments;
   __ CompareImmediate(R8, Smi::RawValue(min_num_pos_args));
   __ b(&wrong_num_arguments, LT);
   // Check that num_pos_args <= max_num_pos_args.
   __ CompareImmediate(R8, Smi::RawValue(max_num_pos_args));
   __ b(&wrong_num_arguments, GT);

   // Copy positional arguments.
   // Argument i passed at fp[kParamEndSlotFromFp + num_args - i] is copied
(...skipping 22 matching lines...)
   __ Bind(&loop);
   __ ldr(TMP, argument_addr);
   __ str(TMP, copy_addr);
   __ Bind(&loop_condition);
   __ subs(R8, R8, Operand(1));
   __ b(&loop, PL);

   // Copy or initialize optional named arguments.
   Label all_arguments_processed;
 #ifdef DEBUG
   const bool check_correct_named_args = true;
 #else
   const bool check_correct_named_args = function.IsClosureFunction();
 #endif
   if (num_opt_named_params > 0) {
     // Start by alphabetically sorting the names of the optional parameters.
     LocalVariable** opt_param = new LocalVariable*[num_opt_named_params];
     int* opt_param_position = new int[num_opt_named_params];
     for (int pos = num_fixed_params; pos < num_params; pos++) {
       LocalVariable* parameter = scope->VariableAt(pos);
       const String& opt_param_name = parameter->name();
       int i = pos - num_fixed_params;
       while (--i >= 0) {
         LocalVariable* param_i = opt_param[i];
         const intptr_t result = opt_param_name.CompareTo(param_i->name());
         ASSERT(result != 0);
         if (result > 0) break;
         opt_param[i + 1] = opt_param[i];
         opt_param_position[i + 1] = opt_param_position[i];
       }
       opt_param[i + 1] = parameter;
       opt_param_position[i + 1] = pos;
     }
     // Generate code handling each optional parameter in alphabetical order.
     __ LoadFieldFromOffset(R7, R4, ArgumentsDescriptor::count_offset());
-    __ LoadFieldFromOffset(
-        R8, R4, ArgumentsDescriptor::positional_count_offset());
+    __ LoadFieldFromOffset(R8, R4,
+                           ArgumentsDescriptor::positional_count_offset());
     __ SmiUntag(R8);
     // Let R7 point to the first passed argument, i.e. to
     // fp[kParamEndSlotFromFp + num_args - 0]; num_args (R7) is Smi.
     __ add(R7, FP, Operand(R7, LSL, 2));
     __ AddImmediate(R7, R7, kParamEndSlotFromFp * kWordSize);
     // Let R6 point to the entry of the first named argument.
-    __ add(R6, R4, Operand(
-        ArgumentsDescriptor::first_named_entry_offset() - kHeapObjectTag));
+    __ add(R6, R4, Operand(ArgumentsDescriptor::first_named_entry_offset() -
+                           kHeapObjectTag));
     for (int i = 0; i < num_opt_named_params; i++) {
       Label load_default_value, assign_optional_parameter;
       const int param_pos = opt_param_position[i];
       // Check if this named parameter was passed in.
       // Load R5 with the name of the argument.
       __ LoadFromOffset(R5, R6, ArgumentsDescriptor::name_offset());
       ASSERT(opt_param[i]->name().IsSymbol());
       __ CompareObject(R5, opt_param[i]->name());
       __ b(&load_default_value, NE);
       // Load R5 with passed-in argument at provided arg_pos, i.e. at
(...skipping 24 matching lines...)
     delete[] opt_param_position;
     if (check_correct_named_args) {
       // Check that R6 now points to the null terminator in the arguments
       // descriptor.
       __ ldr(R5, Address(R6));
       __ CompareObject(R5, Object::null_object());
       __ b(&all_arguments_processed, EQ);
     }
   } else {
     ASSERT(num_opt_pos_params > 0);
-    __ LoadFieldFromOffset(
-        R8, R4, ArgumentsDescriptor::positional_count_offset());
+    __ LoadFieldFromOffset(R8, R4,
+                           ArgumentsDescriptor::positional_count_offset());
     __ SmiUntag(R8);
     for (int i = 0; i < num_opt_pos_params; i++) {
       Label next_parameter;
       // Handle this optional positional parameter only if k or fewer positional
       // arguments have been passed, where k is param_pos, the position of this
       // optional parameter in the formal parameter list.
       const int param_pos = num_fixed_params + i;
       __ CompareImmediate(R8, param_pos);
       __ b(&next_parameter, GT);
       // Load R5 with default argument.
(...skipping 69 matching lines...)
   __ LoadFromOffset(R1, SP, 0 * kWordSize);  // Value.
   __ StoreIntoObjectOffset(R0, offset, R1);
   __ LoadObject(R0, Object::null_object());
   __ ret();
 }

 void FlowGraphCompiler::EmitFrameEntry() {
   const Function& function = parsed_function().function();
   Register new_pp = kNoRegister;
-  if (CanOptimizeFunction() &&
-      function.IsOptimizable() &&
-      (!is_optimizing() || may_reoptimize())) {
+  if (CanOptimizeFunction() && function.IsOptimizable() &&
+      (!is_optimizing() || may_reoptimize())) {
     __ Comment("Invocation Count Check");
     const Register function_reg = R6;
     new_pp = R13;
     // The pool pointer is not setup before entering the Dart frame.
     // Temporarily setup pool pointer for this dart function.
     __ LoadPoolPointer(new_pp);

     // Load function object using the callee's pool pointer.
     __ LoadFunctionFromCalleePool(function_reg, function, new_pp);

-    __ LoadFieldFromOffset(
-        R7, function_reg, Function::usage_counter_offset(), kWord);
+    __ LoadFieldFromOffset(R7, function_reg, Function::usage_counter_offset(),
+                           kWord);
     // Reoptimization of an optimized function is triggered by counting in
     // IC stubs, but not at the entry of the function.
     if (!is_optimizing()) {
       __ add(R7, R7, Operand(1));
-      __ StoreFieldToOffset(
-          R7, function_reg, Function::usage_counter_offset(), kWord);
+      __ StoreFieldToOffset(R7, function_reg, Function::usage_counter_offset(),
+                            kWord);
     }
     __ CompareImmediate(R7, GetOptimizationThreshold());
     ASSERT(function_reg == R6);
     Label dont_optimize;
     __ b(&dont_optimize, LT);
     __ Branch(*StubCode::OptimizeFunction_entry(), new_pp);
     __ Bind(&dont_optimize);
   }
   __ Comment("Enter frame");
   if (flow_graph().IsCompiledForOsr()) {
-    intptr_t extra_slots = StackSize()
-        - flow_graph().num_stack_locals()
-        - flow_graph().num_copied_params();
+    intptr_t extra_slots = StackSize() - flow_graph().num_stack_locals() -
+                           flow_graph().num_copied_params();
     ASSERT(extra_slots >= 0);
     __ EnterOsrFrame(extra_slots * kWordSize, new_pp);
   } else {
     ASSERT(StackSize() >= 0);
     __ EnterDartFrame(StackSize() * kWordSize, new_pp);
   }
 }
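The invocation-count check in EmitFrameEntry is classic counter-based tiering: unoptimized code bumps Function::usage_counter on entry and branches to the OptimizeFunction stub once GetOptimizationThreshold() is crossed (reoptimization of already-optimized code counts in IC stubs instead). A toy model of the policy (hypothetical names, not the VM's API):

#include <cstdio>

// Every unoptimized entry increments a usage counter; crossing the
// threshold triggers (re)compilation at a higher optimization level.
struct Function {
  int usage_counter = 0;
  bool optimized = false;
};

constexpr int kOptimizationThreshold = 3;  // Real thresholds are flag-driven.

void Optimize(Function* f) {
  f->optimized = true;
  std::printf("optimizing after %d calls\n", f->usage_counter);
}

void Enter(Function* f) {
  if (!f->optimized && ++f->usage_counter >= kOptimizationThreshold) {
    Optimize(f);
  }
}

int main() {
  Function f;
  for (int i = 0; i < 5; i++) Enter(&f);
  return 0;
}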


 // Input parameters:
(...skipping 33 matching lines...)
     const bool check_arguments =
         function.IsClosureFunction() && !flow_graph().IsCompiledForOsr();
     if (check_arguments) {
       __ Comment("Check argument count");
       // Check that exactly num_fixed arguments are passed in.
       Label correct_num_arguments, wrong_num_arguments;
       __ LoadFieldFromOffset(R0, R4, ArgumentsDescriptor::count_offset());
       __ CompareImmediate(R0, Smi::RawValue(num_fixed_params));
       __ b(&wrong_num_arguments, NE);
       __ LoadFieldFromOffset(R1, R4,
                              ArgumentsDescriptor::positional_count_offset());
       __ CompareRegisters(R0, R1);
       __ b(&correct_num_arguments, EQ);
       __ Bind(&wrong_num_arguments);
       __ LeaveDartFrame(kKeepCalleePP);  // Arguments are still on the stack.
       __ BranchPatchable(*StubCode::CallClosureNoSuchMethod_entry());
       // The noSuchMethod call may return to the caller, but not here.
       __ Bind(&correct_num_arguments);
     }
   } else if (!flow_graph().IsCompiledForOsr()) {
     CopyParameters();
(...skipping 136 matching lines...)
   ASSERT(!edge_counters_array_.IsNull());
   ASSERT(assembler_->constant_pool_allowed());
   __ Comment("Edge counter");
   __ LoadObject(R0, edge_counters_array_);
   __ LoadFieldFromOffset(TMP, R0, Array::element_offset(edge_id));
   __ add(TMP, TMP, Operand(Smi::RawValue(1)));
   __ StoreFieldToOffset(TMP, R0, Array::element_offset(edge_id));
 }
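The edge counter bumps a tagged Smi in place: because a Smi stores value << 1 with a 0 tag bit, adding the raw bits of Smi::RawValue(1) (that is, 2) to the raw word increments the logical count by one with no untag/retag round trip. A quick check of that arithmetic:

#include <cassert>
#include <cstdint>

// Smi raw encoding: value << 1, low tag bit 0 (see the Smi sketch earlier).
constexpr intptr_t SmiRawValue(intptr_t value) { return value << 1; }

int main() {
  intptr_t counter = SmiRawValue(41);  // Tagged 41.
  counter += SmiRawValue(1);           // add TMP, TMP, Operand(Smi::RawValue(1))
  assert(counter == SmiRawValue(42));  // Still a valid Smi, now 42.
  return 0;
}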


-void FlowGraphCompiler::EmitOptimizedInstanceCall(
-    const StubEntry& stub_entry,
-    const ICData& ic_data,
-    intptr_t argument_count,
-    intptr_t deopt_id,
-    TokenPosition token_pos,
-    LocationSummary* locs) {
+void FlowGraphCompiler::EmitOptimizedInstanceCall(const StubEntry& stub_entry,
+                                                  const ICData& ic_data,
+                                                  intptr_t argument_count,
+                                                  intptr_t deopt_id,
+                                                  TokenPosition token_pos,
+                                                  LocationSummary* locs) {
   ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
   // Each ICData propagated from unoptimized to optimized code contains the
   // function that corresponds to the Dart function of that IC call. Due
   // to inlining in optimized code, that function may not correspond to the
   // top-level function (parsed_function().function()) which could be
   // reoptimized and which counter needs to be incremented.
   // Pass the function explicitly, it is used in IC stub.

   __ LoadObject(R6, parsed_function().function());
   __ LoadUniqueObject(R5, ic_data);
-  GenerateDartCall(deopt_id,
-                   token_pos,
-                   stub_entry,
-                   RawPcDescriptors::kIcCall,
-                   locs);
+  GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall,
+                   locs);
   __ Drop(argument_count);
 }


 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry,
                                          const ICData& ic_data,
                                          intptr_t argument_count,
                                          intptr_t deopt_id,
                                          TokenPosition token_pos,
                                          LocationSummary* locs) {
   ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
   __ LoadUniqueObject(R5, ic_data);
-  GenerateDartCall(deopt_id,
-                   token_pos,
-                   stub_entry,
-                   RawPcDescriptors::kIcCall,
-                   locs);
+  GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall,
+                   locs);
   __ Drop(argument_count);
 }


 void FlowGraphCompiler::EmitMegamorphicInstanceCall(
     const ICData& ic_data,
     intptr_t argument_count,
     intptr_t deopt_id,
     TokenPosition token_pos,
     LocationSummary* locs,
     intptr_t try_index,
     intptr_t slow_path_argument_count) {
   const String& name = String::Handle(zone(), ic_data.target_name());
   const Array& arguments_descriptor =
       Array::ZoneHandle(zone(), ic_data.arguments_descriptor());
   ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0));
-  const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(zone(),
-      MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor));
+  const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(
+      zone(),
+      MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor));

   __ Comment("MegamorphicCall");
   // Load receiver into R0.
   __ LoadFromOffset(R0, SP, (argument_count - 1) * kWordSize);
   Label done;
   if (ShouldInlineSmiStringHashCode(ic_data)) {
     Label megamorphic_call;
     __ Comment("Inlined get:hashCode for Smi and OneByteString");
     __ tsti(R0, Immediate(kSmiTagMask));
(...skipping 19 matching lines...)

   __ Bind(&done);
   RecordSafepoint(locs, slow_path_argument_count);
   const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
   if (FLAG_precompiled_mode) {
     // Megamorphic calls may occur in slow path stubs.
     // If valid use try_index argument.
     if (try_index == CatchClauseNode::kInvalidTryIndex) {
       try_index = CurrentTryIndex();
     }
-    pc_descriptors_list()->AddDescriptor(RawPcDescriptors::kOther,
-                                         assembler()->CodeSize(),
-                                         Thread::kNoDeoptId,
-                                         token_pos,
-                                         try_index);
+    pc_descriptors_list()->AddDescriptor(
+        RawPcDescriptors::kOther, assembler()->CodeSize(), Thread::kNoDeoptId,
+        token_pos, try_index);
   } else if (is_optimizing()) {
-    AddCurrentDescriptor(RawPcDescriptors::kOther,
-                         Thread::kNoDeoptId, token_pos);
+    AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
+                         token_pos);
     AddDeoptIndexAtCall(deopt_id_after);
   } else {
-    AddCurrentDescriptor(RawPcDescriptors::kOther,
-                         Thread::kNoDeoptId, token_pos);
+    AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
+                         token_pos);
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
     AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
   }
   __ Drop(argument_count);
 }
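A megamorphic call site has given up on per-site inline caches; MegamorphicCacheTable::Lookup returns a table shared by (selector, arguments descriptor) that maps receiver class id to a call target. A toy model of the lookup (illustrative structure only; the VM's table is an open-addressed hash table):

#include <cstdint>
#include <cstdio>
#include <unordered_map>

// One shared cache per (selector, arguments-descriptor) pair, probed by
// receiver class id at call time.
struct ToyMegamorphicCache {
  std::unordered_map<intptr_t, const char*> targets;  // cid -> entry point

  const char* Lookup(intptr_t receiver_cid) const {
    auto it = targets.find(receiver_cid);
    return it == targets.end() ? nullptr : it->second;  // Miss -> runtime.
  }
};

int main() {
  ToyMegamorphicCache cache;
  cache.targets[57] = "String.get:hashCode";
  const char* target = cache.Lookup(57);
  std::printf("%s\n", target ? target : "miss: ask the runtime, then cache");
  return 0;
}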


-void FlowGraphCompiler::EmitSwitchableInstanceCall(
-    const ICData& ic_data,
-    intptr_t argument_count,
-    intptr_t deopt_id,
-    TokenPosition token_pos,
-    LocationSummary* locs) {
+void FlowGraphCompiler::EmitSwitchableInstanceCall(const ICData& ic_data,
+                                                   intptr_t argument_count,
+                                                   intptr_t deopt_id,
+                                                   TokenPosition token_pos,
+                                                   LocationSummary* locs) {
   ASSERT(ic_data.NumArgsTested() == 1);
-  const Code& initial_stub = Code::ZoneHandle(
-      StubCode::ICCallThroughFunction_entry()->code());
+  const Code& initial_stub =
+      Code::ZoneHandle(StubCode::ICCallThroughFunction_entry()->code());
   __ Comment("SwitchableCall");

   __ LoadFromOffset(R0, SP, (argument_count - 1) * kWordSize);
   __ LoadUniqueObject(CODE_REG, initial_stub);
   __ ldr(TMP, FieldAddress(CODE_REG, Code::checked_entry_point_offset()));
   __ LoadUniqueObject(R5, ic_data);
   __ blr(TMP);

-  AddCurrentDescriptor(RawPcDescriptors::kOther,
-                       Thread::kNoDeoptId, token_pos);
+  AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId, token_pos);
   RecordSafepoint(locs);
   const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
   if (is_optimizing()) {
     AddDeoptIndexAtCall(deopt_id_after);
   } else {
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
     AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
   }
   __ Drop(argument_count);
 }


-void FlowGraphCompiler::EmitUnoptimizedStaticCall(
-    intptr_t argument_count,
-    intptr_t deopt_id,
-    TokenPosition token_pos,
-    LocationSummary* locs,
-    const ICData& ic_data) {
+void FlowGraphCompiler::EmitUnoptimizedStaticCall(intptr_t argument_count,
+                                                  intptr_t deopt_id,
+                                                  TokenPosition token_pos,
+                                                  LocationSummary* locs,
+                                                  const ICData& ic_data) {
   const StubEntry* stub_entry =
       StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
   __ LoadObject(R5, ic_data);
-  GenerateDartCall(deopt_id,
-                   token_pos,
-                   *stub_entry,
-                   RawPcDescriptors::kUnoptStaticCall,
-                   locs);
+  GenerateDartCall(deopt_id, token_pos, *stub_entry,
+                   RawPcDescriptors::kUnoptStaticCall, locs);
   __ Drop(argument_count);
 }
1378 1330
1379 1331
1380 void FlowGraphCompiler::EmitOptimizedStaticCall( 1332 void FlowGraphCompiler::EmitOptimizedStaticCall(
1381 const Function& function, 1333 const Function& function,
1382 const Array& arguments_descriptor, 1334 const Array& arguments_descriptor,
1383 intptr_t argument_count, 1335 intptr_t argument_count,
1384 intptr_t deopt_id, 1336 intptr_t deopt_id,
1385 TokenPosition token_pos, 1337 TokenPosition token_pos,
1386 LocationSummary* locs) { 1338 LocationSummary* locs) {
1387 ASSERT(!function.IsClosureFunction()); 1339 ASSERT(!function.IsClosureFunction());
1388 if (function.HasOptionalParameters()) { 1340 if (function.HasOptionalParameters()) {
1389 __ LoadObject(R4, arguments_descriptor); 1341 __ LoadObject(R4, arguments_descriptor);
1390 } else { 1342 } else {
1391 __ LoadImmediate(R4, 0); // GC safe smi zero because of stub. 1343 __ LoadImmediate(R4, 0); // GC safe smi zero because of stub.
1392 } 1344 }
1393 // Do not use the code from the function, but let the code be patched so that 1345 // Do not use the code from the function, but let the code be patched so that
1394 // we can record the outgoing edges to other code. 1346 // we can record the outgoing edges to other code.
1395 GenerateStaticDartCall(deopt_id, 1347 GenerateStaticDartCall(deopt_id, token_pos,
1396 token_pos,
1397 *StubCode::CallStaticFunction_entry(), 1348 *StubCode::CallStaticFunction_entry(),
1398 RawPcDescriptors::kOther, 1349 RawPcDescriptors::kOther, locs, function);
1399 locs,
1400 function);
1401 __ Drop(argument_count); 1350 __ Drop(argument_count);
1402 } 1351 }
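The LoadImmediate(R4, 0) above is safe across a GC because Dart Smis carry a low 0 tag bit: the raw word 0 decodes as the Smi 0, never as a heap pointer. A minimal check of that encoding (kSmiTagMask matches the constant used later in this file; SmiEncode/IsSmi are illustrative helpers, not VM API):

// Sketch of the Smi tagging assumption behind "GC safe smi zero".
#include <assert.h>
#include <stdint.h>

static const intptr_t kSmiTagMask = 1;

static intptr_t SmiEncode(intptr_t value) { return value << 1; }
static bool IsSmi(intptr_t raw) { return (raw & kSmiTagMask) == 0; }

int main() {
  assert(IsSmi(0));           // raw zero is a valid Smi...
  assert(SmiEncode(0) == 0);  // ...namely the Smi 0, so the GC ignores it
  return 0;
}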
1403 1352
1404 1353
1405 Condition FlowGraphCompiler::EmitEqualityRegConstCompare( 1354 Condition FlowGraphCompiler::EmitEqualityRegConstCompare(
1406 Register reg, 1355 Register reg,
1407 const Object& obj, 1356 const Object& obj,
1408 bool needs_number_check, 1357 bool needs_number_check,
1409 TokenPosition token_pos) { 1358 TokenPosition token_pos) {
1410 if (needs_number_check) { 1359 if (needs_number_check) {
1411 ASSERT(!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint()); 1360 ASSERT(!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint());
1412 __ Push(reg); 1361 __ Push(reg);
1413 __ PushObject(obj); 1362 __ PushObject(obj);
1414 if (is_optimizing()) { 1363 if (is_optimizing()) {
1415 __ BranchLinkPatchable( 1364 __ BranchLinkPatchable(
1416 *StubCode::OptimizedIdenticalWithNumberCheck_entry()); 1365 *StubCode::OptimizedIdenticalWithNumberCheck_entry());
1417 } else { 1366 } else {
1418 __ BranchLinkPatchable( 1367 __ BranchLinkPatchable(
1419 *StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); 1368 *StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
1420 } 1369 }
1421 if (token_pos.IsReal()) { 1370 if (token_pos.IsReal()) {
1422 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, 1371 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, Thread::kNoDeoptId,
1423 Thread::kNoDeoptId,
1424 token_pos); 1372 token_pos);
1425 } 1373 }
1426 // Stub returns result in flags (result of a cmp, we need Z computed). 1374 // Stub returns result in flags (result of a cmp, we need Z computed).
1427 __ Drop(1); // Discard constant. 1375 __ Drop(1); // Discard constant.
1428 __ Pop(reg); // Restore 'reg'. 1376 __ Pop(reg); // Restore 'reg'.
1429 } else { 1377 } else {
1430 __ CompareObject(reg, obj); 1378 __ CompareObject(reg, obj);
1431 } 1379 }
1432 return EQ; 1380 return EQ;
1433 } 1381 }
1434 1382
1435 1383
1436 Condition FlowGraphCompiler::EmitEqualityRegRegCompare( 1384 Condition FlowGraphCompiler::EmitEqualityRegRegCompare(
1437 Register left, 1385 Register left,
1438 Register right, 1386 Register right,
1439 bool needs_number_check, 1387 bool needs_number_check,
1440 TokenPosition token_pos) { 1388 TokenPosition token_pos) {
1441 if (needs_number_check) { 1389 if (needs_number_check) {
1442 __ Push(left); 1390 __ Push(left);
1443 __ Push(right); 1391 __ Push(right);
1444 if (is_optimizing()) { 1392 if (is_optimizing()) {
1445 __ BranchLinkPatchable( 1393 __ BranchLinkPatchable(
1446 *StubCode::OptimizedIdenticalWithNumberCheck_entry()); 1394 *StubCode::OptimizedIdenticalWithNumberCheck_entry());
1447 } else { 1395 } else {
1448 __ BranchLinkPatchable( 1396 __ BranchLinkPatchable(
1449 *StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); 1397 *StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
1450 } 1398 }
1451 if (token_pos.IsReal()) { 1399 if (token_pos.IsReal()) {
1452 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, 1400 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, Thread::kNoDeoptId,
1453 Thread::kNoDeoptId,
1454 token_pos); 1401 token_pos);
1455 } 1402 }
1456 // Stub returns result in flags (result of a cmp, we need Z computed). 1403 // Stub returns result in flags (result of a cmp, we need Z computed).
1457 __ Pop(right); 1404 __ Pop(right);
1458 __ Pop(left); 1405 __ Pop(left);
1459 } else { 1406 } else {
1460 __ CompareRegisters(left, right); 1407 __ CompareRegisters(left, right);
1461 } 1408 }
1462 return EQ; 1409 return EQ;
1463 } 1410 }
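Both equality helpers above fall back to a stub once either operand may be a boxed number, because 'identical' must treat equal boxed numbers as identical even when they are distinct heap objects; plain CompareObject/CompareRegisters no longer suffices. A rough model of the semantics the stub provides (the types below are illustrative, not VM classes):

// Sketch only: pointer equality first, then value (bit-pattern) equality
// for boxed numbers, here reduced to a hypothetical boxed double.
#include <string.h>

struct Boxed { int is_double; double value; };

static bool IdenticalWithNumberCheck(const Boxed* a, const Boxed* b) {
  if (a == b) return true;  // same object: trivially identical
  if (a->is_double && b->is_double) {
    // Doubles with equal bit patterns are identical despite distinct boxes.
    return memcmp(&a->value, &b->value, sizeof(double)) == 0;
  }
  return false;             // distinct non-number objects
}

int main() {
  Boxed x = {1, 2.5}, y = {1, 2.5};
  return IdenticalWithNumberCheck(&x, &y) ? 0 : 1;  // distinct boxes, same value
}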
(...skipping 72 matching lines...)
1536 const Array& argument_names, 1483 const Array& argument_names,
1537 Label* failed, 1484 Label* failed,
1538 Label* match_found, 1485 Label* match_found,
1539 intptr_t deopt_id, 1486 intptr_t deopt_id,
1540 TokenPosition token_index, 1487 TokenPosition token_index,
1541 LocationSummary* locs, 1488 LocationSummary* locs,
1542 bool complete) { 1489 bool complete) {
1543 ASSERT(is_optimizing()); 1490 ASSERT(is_optimizing());
1544 1491
1545 __ Comment("EmitTestAndCall"); 1492 __ Comment("EmitTestAndCall");
1546 const Array& arguments_descriptor = 1493 const Array& arguments_descriptor = Array::ZoneHandle(
1547 Array::ZoneHandle(zone(), ArgumentsDescriptor::New(argument_count, 1494 zone(), ArgumentsDescriptor::New(argument_count, argument_names));
1548 argument_names));
1549 1495
1550 // Load receiver into R0. 1496 // Load receiver into R0.
1551 __ LoadFromOffset(R0, SP, (argument_count - 1) * kWordSize); 1497 __ LoadFromOffset(R0, SP, (argument_count - 1) * kWordSize);
1552 __ LoadObject(R4, arguments_descriptor); 1498 __ LoadObject(R4, arguments_descriptor);
1553 1499
1554 const bool kFirstCheckIsSmi = ic_data.GetReceiverClassIdAt(0) == kSmiCid; 1500 const bool kFirstCheckIsSmi = ic_data.GetReceiverClassIdAt(0) == kSmiCid;
1555 const intptr_t kNumChecks = ic_data.NumberOfChecks(); 1501 const intptr_t kNumChecks = ic_data.NumberOfChecks();
1556 1502
1557 ASSERT(!ic_data.IsNull() && (kNumChecks > 0)); 1503 ASSERT(!ic_data.IsNull() && (kNumChecks > 0));
1558 1504
1559 Label after_smi_test; 1505 Label after_smi_test;
1560 if (kFirstCheckIsSmi) { 1506 if (kFirstCheckIsSmi) {
1561 __ tsti(R0, Immediate(kSmiTagMask)); 1507 __ tsti(R0, Immediate(kSmiTagMask));
1562 // Jump if receiver is not Smi. 1508 // Jump if receiver is not Smi.
1563 if (kNumChecks == 1) { 1509 if (kNumChecks == 1) {
1564 __ b(failed, NE); 1510 __ b(failed, NE);
1565 } else { 1511 } else {
1566 __ b(&after_smi_test, NE); 1512 __ b(&after_smi_test, NE);
1567 } 1513 }
1568 // Do not use the code from the function, but let the code be patched so 1514 // Do not use the code from the function, but let the code be patched so
1569 // that we can record the outgoing edges to other code. 1515 // that we can record the outgoing edges to other code.
1570 const Function& function = Function::ZoneHandle( 1516 const Function& function =
1571 zone(), ic_data.GetTargetAt(0)); 1517 Function::ZoneHandle(zone(), ic_data.GetTargetAt(0));
1572 GenerateStaticDartCall(deopt_id, 1518 GenerateStaticDartCall(deopt_id, token_index,
1573 token_index,
1574 *StubCode::CallStaticFunction_entry(), 1519 *StubCode::CallStaticFunction_entry(),
1575 RawPcDescriptors::kOther, 1520 RawPcDescriptors::kOther, locs, function);
1576 locs,
1577 function);
1578 __ Drop(argument_count); 1521 __ Drop(argument_count);
1579 if (kNumChecks > 1) { 1522 if (kNumChecks > 1) {
1580 __ b(match_found); 1523 __ b(match_found);
1581 } 1524 }
1582 } else { 1525 } else {
1583 // Receiver is Smi, but Smi is not a valid class therefore fail. 1526 // Receiver is Smi, but Smi is not a valid class therefore fail.
1584 // (Smi class must be first in the list). 1527 // (Smi class must be first in the list).
1585 if (!complete) { 1528 if (!complete) {
1586 __ tsti(R0, Immediate(kSmiTagMask)); 1529 __ tsti(R0, Immediate(kSmiTagMask));
1587 __ b(failed, EQ); 1530 __ b(failed, EQ);
(...skipping 25 matching lines...)
1613 } 1556 }
1614 } else { 1557 } else {
1615 if (!kIsLastCheck) { 1558 if (!kIsLastCheck) {
1616 __ CompareImmediate(R2, sorted[i].cid); 1559 __ CompareImmediate(R2, sorted[i].cid);
1617 __ b(&next_test, NE); 1560 __ b(&next_test, NE);
1618 } 1561 }
1619 } 1562 }
1620 // Do not use the code from the function, but let the code be patched so 1563 // Do not use the code from the function, but let the code be patched so
1621 // that we can record the outgoing edges to other code. 1564 // that we can record the outgoing edges to other code.
1622 const Function& function = *sorted[i].target; 1565 const Function& function = *sorted[i].target;
1623 GenerateStaticDartCall(deopt_id, 1566 GenerateStaticDartCall(deopt_id, token_index,
1624 token_index,
1625 *StubCode::CallStaticFunction_entry(), 1567 *StubCode::CallStaticFunction_entry(),
1626 RawPcDescriptors::kOther, 1568 RawPcDescriptors::kOther, locs, function);
1627 locs,
1628 function);
1629 __ Drop(argument_count); 1569 __ Drop(argument_count);
1630 if (!kIsLastCheck) { 1570 if (!kIsLastCheck) {
1631 __ b(match_found); 1571 __ b(match_found);
1632 } 1572 }
1633 __ Bind(&next_test); 1573 __ Bind(&next_test);
1634 } 1574 }
1635 } 1575 }
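EmitTestAndCall thus lowers a polymorphic call into an optional Smi fast path followed by a linear scan of class-id checks, where a 'complete' site may omit the final compare since the receiver is then guaranteed to match one of the listed cids. A self-contained sketch of that dispatch shape (names are illustrative only):

// Sketch of the emitted control flow; comments map to the branches above.
struct Check { int cid; void (*target)(); };

static void Dispatch(int receiver_cid, const Check* checks, int n,
                     bool complete, void (*failed)()) {
  for (int i = 0; i < n; i++) {
    const bool is_last = (i == n - 1);
    if (receiver_cid == checks[i].cid || (complete && is_last)) {
      checks[i].target();  // GenerateStaticDartCall(...) for this target
      return;              // b(match_found)
    }
    // Mismatch: fall through to the next test (b(&next_test, NE)).
  }
  failed();                // b(failed, NE) on the last, incomplete check
}

static void OnSmi() {}
static void OnMiss() {}

int main() {
  Check checks[] = {{/*kSmiCid, assumed*/ 1, OnSmi}};
  Dispatch(1, checks, 1, /*complete=*/false, OnMiss);
}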
1636 1576
1637 1577
1638 #undef __ 1578 #undef __
1639 #define __ compiler_->assembler()-> 1579 #define __ compiler_->assembler()->
1640 1580
1641 1581
1642 void ParallelMoveResolver::EmitMove(int index) { 1582 void ParallelMoveResolver::EmitMove(int index) {
1643 MoveOperands* move = moves_[index]; 1583 MoveOperands* move = moves_[index];
1644 const Location source = move->src(); 1584 const Location source = move->src();
1645 const Location destination = move->dest(); 1585 const Location destination = move->dest();
1646 1586
1647 if (source.IsRegister()) { 1587 if (source.IsRegister()) {
1648 if (destination.IsRegister()) { 1588 if (destination.IsRegister()) {
1649 __ mov(destination.reg(), source.reg()); 1589 __ mov(destination.reg(), source.reg());
1650 } else { 1590 } else {
1651 ASSERT(destination.IsStackSlot()); 1591 ASSERT(destination.IsStackSlot());
1652 const intptr_t dest_offset = destination.ToStackSlotOffset(); 1592 const intptr_t dest_offset = destination.ToStackSlotOffset();
1653 __ StoreToOffset(source.reg(), destination.base_reg(), dest_offset); 1593 __ StoreToOffset(source.reg(), destination.base_reg(), dest_offset);
1654 } 1594 }
1655 } else if (source.IsStackSlot()) { 1595 } else if (source.IsStackSlot()) {
1656 if (destination.IsRegister()) { 1596 if (destination.IsRegister()) {
1657 const intptr_t source_offset = source.ToStackSlotOffset(); 1597 const intptr_t source_offset = source.ToStackSlotOffset();
1658 __ LoadFromOffset( 1598 __ LoadFromOffset(destination.reg(), source.base_reg(), source_offset);
1659 destination.reg(), source.base_reg(), source_offset);
1660 } else { 1599 } else {
1661 ASSERT(destination.IsStackSlot()); 1600 ASSERT(destination.IsStackSlot());
1662 const intptr_t source_offset = source.ToStackSlotOffset(); 1601 const intptr_t source_offset = source.ToStackSlotOffset();
1663 const intptr_t dest_offset = destination.ToStackSlotOffset(); 1602 const intptr_t dest_offset = destination.ToStackSlotOffset();
1664 ScratchRegisterScope tmp(this, kNoRegister); 1603 ScratchRegisterScope tmp(this, kNoRegister);
1665 __ LoadFromOffset(tmp.reg(), source.base_reg(), source_offset); 1604 __ LoadFromOffset(tmp.reg(), source.base_reg(), source_offset);
1666 __ StoreToOffset(tmp.reg(), destination.base_reg(), dest_offset); 1605 __ StoreToOffset(tmp.reg(), destination.base_reg(), dest_offset);
1667 } 1606 }
1668 } else if (source.IsFpuRegister()) { 1607 } else if (source.IsFpuRegister()) {
1669 if (destination.IsFpuRegister()) { 1608 if (destination.IsFpuRegister()) {
1670 __ vmov(destination.fpu_reg(), source.fpu_reg()); 1609 __ vmov(destination.fpu_reg(), source.fpu_reg());
1671 } else { 1610 } else {
1672 if (destination.IsDoubleStackSlot()) { 1611 if (destination.IsDoubleStackSlot()) {
1673 const intptr_t dest_offset = destination.ToStackSlotOffset(); 1612 const intptr_t dest_offset = destination.ToStackSlotOffset();
1674 VRegister src = source.fpu_reg(); 1613 VRegister src = source.fpu_reg();
1675 __ StoreDToOffset(src, destination.base_reg(), dest_offset); 1614 __ StoreDToOffset(src, destination.base_reg(), dest_offset);
1676 } else { 1615 } else {
1677 ASSERT(destination.IsQuadStackSlot()); 1616 ASSERT(destination.IsQuadStackSlot());
1678 const intptr_t dest_offset = destination.ToStackSlotOffset(); 1617 const intptr_t dest_offset = destination.ToStackSlotOffset();
1679 __ StoreQToOffset( 1618 __ StoreQToOffset(source.fpu_reg(), destination.base_reg(),
1680 source.fpu_reg(), destination.base_reg(), dest_offset); 1619 dest_offset);
1681 } 1620 }
1682 } 1621 }
1683 } else if (source.IsDoubleStackSlot()) { 1622 } else if (source.IsDoubleStackSlot()) {
1684 if (destination.IsFpuRegister()) { 1623 if (destination.IsFpuRegister()) {
1685 const intptr_t source_offset = source.ToStackSlotOffset(); 1624 const intptr_t source_offset = source.ToStackSlotOffset();
1686 const VRegister dst = destination.fpu_reg(); 1625 const VRegister dst = destination.fpu_reg();
1687 __ LoadDFromOffset(dst, source.base_reg(), source_offset); 1626 __ LoadDFromOffset(dst, source.base_reg(), source_offset);
1688 } else { 1627 } else {
1689 ASSERT(destination.IsDoubleStackSlot()); 1628 ASSERT(destination.IsDoubleStackSlot());
1690 const intptr_t source_offset = source.ToStackSlotOffset(); 1629 const intptr_t source_offset = source.ToStackSlotOffset();
1691 const intptr_t dest_offset = destination.ToStackSlotOffset(); 1630 const intptr_t dest_offset = destination.ToStackSlotOffset();
1692 __ LoadDFromOffset(VTMP, source.base_reg(), source_offset); 1631 __ LoadDFromOffset(VTMP, source.base_reg(), source_offset);
1693 __ StoreDToOffset(VTMP, destination.base_reg(), dest_offset); 1632 __ StoreDToOffset(VTMP, destination.base_reg(), dest_offset);
1694 } 1633 }
1695 } else if (source.IsQuadStackSlot()) { 1634 } else if (source.IsQuadStackSlot()) {
1696 if (destination.IsFpuRegister()) { 1635 if (destination.IsFpuRegister()) {
1697 const intptr_t source_offset = source.ToStackSlotOffset(); 1636 const intptr_t source_offset = source.ToStackSlotOffset();
1698 __ LoadQFromOffset( 1637 __ LoadQFromOffset(destination.fpu_reg(), source.base_reg(),
1699 destination.fpu_reg(), source.base_reg(), source_offset); 1638 source_offset);
1700 } else { 1639 } else {
1701 ASSERT(destination.IsQuadStackSlot()); 1640 ASSERT(destination.IsQuadStackSlot());
1702 const intptr_t source_offset = source.ToStackSlotOffset(); 1641 const intptr_t source_offset = source.ToStackSlotOffset();
1703 const intptr_t dest_offset = destination.ToStackSlotOffset(); 1642 const intptr_t dest_offset = destination.ToStackSlotOffset();
1704 __ LoadQFromOffset(VTMP, source.base_reg(), source_offset); 1643 __ LoadQFromOffset(VTMP, source.base_reg(), source_offset);
1705 __ StoreQToOffset(VTMP, destination.base_reg(), dest_offset); 1644 __ StoreQToOffset(VTMP, destination.base_reg(), dest_offset);
1706 } 1645 }
1707 } else { 1646 } else {
1708 ASSERT(source.IsConstant()); 1647 ASSERT(source.IsConstant());
1709 const Object& constant = source.constant(); 1648 const Object& constant = source.constant();
(...skipping 48 matching lines...)
1758 const Location source = move->src(); 1697 const Location source = move->src();
1759 const Location destination = move->dest(); 1698 const Location destination = move->dest();
1760 1699
1761 if (source.IsRegister() && destination.IsRegister()) { 1700 if (source.IsRegister() && destination.IsRegister()) {
1762 ASSERT(source.reg() != TMP); 1701 ASSERT(source.reg() != TMP);
1763 ASSERT(destination.reg() != TMP); 1702 ASSERT(destination.reg() != TMP);
1764 __ mov(TMP, source.reg()); 1703 __ mov(TMP, source.reg());
1765 __ mov(source.reg(), destination.reg()); 1704 __ mov(source.reg(), destination.reg());
1766 __ mov(destination.reg(), TMP); 1705 __ mov(destination.reg(), TMP);
1767 } else if (source.IsRegister() && destination.IsStackSlot()) { 1706 } else if (source.IsRegister() && destination.IsStackSlot()) {
1768 Exchange(source.reg(), 1707 Exchange(source.reg(), destination.base_reg(),
1769 destination.base_reg(), destination.ToStackSlotOffset()); 1708 destination.ToStackSlotOffset());
1770 } else if (source.IsStackSlot() && destination.IsRegister()) { 1709 } else if (source.IsStackSlot() && destination.IsRegister()) {
1771 Exchange(destination.reg(), 1710 Exchange(destination.reg(), source.base_reg(), source.ToStackSlotOffset());
1772 source.base_reg(), source.ToStackSlotOffset());
1773 } else if (source.IsStackSlot() && destination.IsStackSlot()) { 1711 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
1774 Exchange(source.base_reg(), source.ToStackSlotOffset(), 1712 Exchange(source.base_reg(), source.ToStackSlotOffset(),
1775 destination.base_reg(), destination.ToStackSlotOffset()); 1713 destination.base_reg(), destination.ToStackSlotOffset());
1776 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) { 1714 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
1777 const VRegister dst = destination.fpu_reg(); 1715 const VRegister dst = destination.fpu_reg();
1778 const VRegister src = source.fpu_reg(); 1716 const VRegister src = source.fpu_reg();
1779 __ vmov(VTMP, src); 1717 __ vmov(VTMP, src);
1780 __ vmov(src, dst); 1718 __ vmov(src, dst);
1781 __ vmov(dst, VTMP); 1719 __ vmov(dst, VTMP);
1782 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) { 1720 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
1783 ASSERT(destination.IsDoubleStackSlot() || 1721 ASSERT(destination.IsDoubleStackSlot() || destination.IsQuadStackSlot() ||
1784 destination.IsQuadStackSlot() || 1722 source.IsDoubleStackSlot() || source.IsQuadStackSlot());
1785 source.IsDoubleStackSlot() || 1723 bool double_width =
1786 source.IsQuadStackSlot()); 1724 destination.IsDoubleStackSlot() || source.IsDoubleStackSlot();
1787 bool double_width = destination.IsDoubleStackSlot() || 1725 VRegister reg =
1788 source.IsDoubleStackSlot(); 1726 source.IsFpuRegister() ? source.fpu_reg() : destination.fpu_reg();
1789 VRegister reg = source.IsFpuRegister() ? source.fpu_reg() 1727 Register base_reg =
1790 : destination.fpu_reg(); 1728 source.IsFpuRegister() ? destination.base_reg() : source.base_reg();
1791 Register base_reg = source.IsFpuRegister()
1792 ? destination.base_reg()
1793 : source.base_reg();
1794 const intptr_t slot_offset = source.IsFpuRegister() 1729 const intptr_t slot_offset = source.IsFpuRegister()
1795 ? destination.ToStackSlotOffset() 1730 ? destination.ToStackSlotOffset()
1796 : source.ToStackSlotOffset(); 1731 : source.ToStackSlotOffset();
1797 1732
1798 if (double_width) { 1733 if (double_width) {
1799 __ LoadDFromOffset(VTMP, base_reg, slot_offset); 1734 __ LoadDFromOffset(VTMP, base_reg, slot_offset);
1800 __ StoreDToOffset(reg, base_reg, slot_offset); 1735 __ StoreDToOffset(reg, base_reg, slot_offset);
1801 __ fmovdd(reg, VTMP); 1736 __ fmovdd(reg, VTMP);
1802 } else { 1737 } else {
1803 __ LoadQFromOffset(VTMP, base_reg, slot_offset); 1738 __ LoadQFromOffset(VTMP, base_reg, slot_offset);
1804 __ StoreQToOffset(reg, base_reg, slot_offset); 1739 __ StoreQToOffset(reg, base_reg, slot_offset);
1805 __ vmov(reg, VTMP); 1740 __ vmov(reg, VTMP);
1806 } 1741 }
(...skipping 105 matching lines...)
1912 void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) { 1847 void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) {
1913 __ PopDouble(reg); 1848 __ PopDouble(reg);
1914 } 1849 }
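The swap and scratch helpers above all follow one pattern: route one operand through a dedicated scratch (TMP for core registers, VTMP for vector registers) rather than allocating stack space. A minimal sketch of that three-move exchange, assuming the scratch is never itself a move operand (cf. the ASSERTs against TMP above):

// Sketch: registers modeled as an array; comments map to the moves above.
#include <assert.h>

static void Exchange(long regs[], int a, int b, int tmp) {
  assert(a != tmp && b != tmp);  // mirrors ASSERT(source.reg() != TMP)
  regs[tmp] = regs[a];           // mov(TMP, source.reg())
  regs[a] = regs[b];             // mov(source.reg(), destination.reg())
  regs[b] = regs[tmp];           // mov(destination.reg(), TMP)
}

int main() {
  long regs[4] = {10, 20, 0, 0};
  Exchange(regs, 0, 1, 3);
  assert(regs[0] == 20 && regs[1] == 10);
}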
1915 1850
1916 1851
1917 #undef __ 1852 #undef __
1918 1853
1919 } // namespace dart 1854 } // namespace dart
1920 1855
1921 #endif // defined TARGET_ARCH_ARM64 1856 #endif // defined TARGET_ARCH_ARM64