Chromium Code Reviews

Side by Side Diff: runtime/vm/flow_graph_compiler_x64.cc

Issue 2481873005: clang-format runtime/vm (Closed)
Patch Set: Merge (created 4 years, 1 month ago)
OLD | NEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64.
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/flow_graph_compiler.h" 8 #include "vm/flow_graph_compiler.h"
9 9
10 #include "vm/ast_printer.h" 10 #include "vm/ast_printer.h"
(...skipping 34 matching lines...)
45 bool FlowGraphCompiler::SupportsUnboxedMints() { 45 bool FlowGraphCompiler::SupportsUnboxedMints() {
46 return FLAG_unbox_mints; 46 return FLAG_unbox_mints;
47 } 47 }
48 48
49 49
50 bool FlowGraphCompiler::SupportsUnboxedSimd128() { 50 bool FlowGraphCompiler::SupportsUnboxedSimd128() {
51 return FLAG_enable_simd_inline; 51 return FLAG_enable_simd_inline;
52 } 52 }
53 53
54 54
55
56 bool FlowGraphCompiler::SupportsSinCos() { 55 bool FlowGraphCompiler::SupportsSinCos() {
57 return true; 56 return true;
58 } 57 }
59 58
60 59
61 bool FlowGraphCompiler::SupportsHardwareDivision() { 60 bool FlowGraphCompiler::SupportsHardwareDivision() {
62 return true; 61 return true;
63 } 62 }
64 63
65 64
(...skipping 43 matching lines...)
109 builder->AddCallerFp(slot_ix++); 108 builder->AddCallerFp(slot_ix++);
110 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++); 109 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++);
111 110
112 // Emit all values that are needed for materialization as a part of the 111 // Emit all values that are needed for materialization as a part of the
113 // expression stack for the bottom-most frame. This guarantees that GC 112 // expression stack for the bottom-most frame. This guarantees that GC
114 // will be able to find them during materialization. 113 // will be able to find them during materialization.
115 slot_ix = builder->EmitMaterializationArguments(slot_ix); 114 slot_ix = builder->EmitMaterializationArguments(slot_ix);
116 115
117 // For the innermost environment, set outgoing arguments and the locals. 116 // For the innermost environment, set outgoing arguments and the locals.
118 for (intptr_t i = current->Length() - 1; 117 for (intptr_t i = current->Length() - 1;
119 i >= current->fixed_parameter_count(); 118 i >= current->fixed_parameter_count(); i--) {
120 i--) {
121 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++); 119 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
122 } 120 }
123 121
124 Environment* previous = current; 122 Environment* previous = current;
125 current = current->outer(); 123 current = current->outer();
126 while (current != NULL) { 124 while (current != NULL) {
127 builder->AddPp(current->function(), slot_ix++); 125 builder->AddPp(current->function(), slot_ix++);
128 builder->AddPcMarker(previous->function(), slot_ix++); 126 builder->AddPcMarker(previous->function(), slot_ix++);
129 builder->AddCallerFp(slot_ix++); 127 builder->AddCallerFp(slot_ix++);
130 128
131 // For any outer environment the deopt id is that of the call instruction 129 // For any outer environment the deopt id is that of the call instruction
132 // which is recorded in the outer environment. 130 // which is recorded in the outer environment.
133 builder->AddReturnAddress( 131 builder->AddReturnAddress(current->function(),
134 current->function(), 132 Thread::ToDeoptAfter(current->deopt_id()),
135 Thread::ToDeoptAfter(current->deopt_id()), 133 slot_ix++);
136 slot_ix++);
137 134
138 // The values of outgoing arguments can be changed from the inlined call so 135 // The values of outgoing arguments can be changed from the inlined call so
139 // we must read them from the previous environment. 136 // we must read them from the previous environment.
140 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { 137 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) {
141 builder->AddCopy(previous->ValueAt(i), 138 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i),
142 previous->LocationAt(i),
143 slot_ix++); 139 slot_ix++);
144 } 140 }
145 141
146 // Set the locals, note that outgoing arguments are not in the environment. 142 // Set the locals, note that outgoing arguments are not in the environment.
147 for (intptr_t i = current->Length() - 1; 143 for (intptr_t i = current->Length() - 1;
148 i >= current->fixed_parameter_count(); 144 i >= current->fixed_parameter_count(); i--) {
149 i--) { 145 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
150 builder->AddCopy(current->ValueAt(i),
151 current->LocationAt(i),
152 slot_ix++);
153 } 146 }
154 147
155 // Iterate on the outer environment. 148 // Iterate on the outer environment.
156 previous = current; 149 previous = current;
157 current = current->outer(); 150 current = current->outer();
158 } 151 }
159 // The previous pointer is now the outermost environment. 152 // The previous pointer is now the outermost environment.
160 ASSERT(previous != NULL); 153 ASSERT(previous != NULL);
161 154
162 // Set slots for the outermost environment. 155 // Set slots for the outermost environment.
(...skipping 54 matching lines...)
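
Note for reviewers: the environment walk above is easier to sanity-check against a standalone model. The sketch below uses invented names (Env, CountDeoptSlots) and counts slots in the same inner-to-outer order as the builder calls; it is illustrative only, not the VM's DeoptInfoBuilder API.

#include <cstdio>

// Hypothetical stand-in for the VM's environment chain; not the real class.
struct Env {
  int length;                 // Number of values in this environment.
  int fixed_parameter_count;  // Incoming fixed parameters.
  const Env* outer;           // Enclosing environment, or nullptr.
};

// Mirrors the slot order above: innermost bookkeeping and locals first, then
// per outer frame its bookkeeping, the previous frame's outgoing arguments,
// and its locals. Materialization arguments are omitted for brevity.
int CountDeoptSlots(const Env* innermost) {
  int slot_ix = 2;  // Caller FP + return address of the innermost frame.
  slot_ix += innermost->length - innermost->fixed_parameter_count;  // Locals.
  const Env* previous = innermost;
  const Env* current = innermost->outer;
  while (current != nullptr) {
    slot_ix += 4;  // PP, PC marker, caller FP, return address.
    slot_ix += previous->fixed_parameter_count;  // Outgoing arguments.
    slot_ix += current->length - current->fixed_parameter_count;  // Locals.
    previous = current;
    current = current->outer;
  }
  return slot_ix;
}

int main() {
  Env outer = {6, 2, nullptr};
  Env inner = {4, 1, &outer};
  std::printf("deopt slots = %d\n", CountDeoptSlots(&inner));
  return 0;
}
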
217 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( 210 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub(
218 TypeTestStubKind test_kind, 211 TypeTestStubKind test_kind,
219 Register instance_reg, 212 Register instance_reg,
220 Register type_arguments_reg, 213 Register type_arguments_reg,
221 Register temp_reg, 214 Register temp_reg,
222 Label* is_instance_lbl, 215 Label* is_instance_lbl,
223 Label* is_not_instance_lbl) { 216 Label* is_not_instance_lbl) {
224 const SubtypeTestCache& type_test_cache = 217 const SubtypeTestCache& type_test_cache =
225 SubtypeTestCache::ZoneHandle(zone(), SubtypeTestCache::New()); 218 SubtypeTestCache::ZoneHandle(zone(), SubtypeTestCache::New());
226 __ LoadUniqueObject(temp_reg, type_test_cache); 219 __ LoadUniqueObject(temp_reg, type_test_cache);
227 __ pushq(temp_reg); // Subtype test cache. 220 __ pushq(temp_reg); // Subtype test cache.
228 __ pushq(instance_reg); // Instance. 221 __ pushq(instance_reg); // Instance.
229 if (test_kind == kTestTypeOneArg) { 222 if (test_kind == kTestTypeOneArg) {
230 ASSERT(type_arguments_reg == kNoRegister); 223 ASSERT(type_arguments_reg == kNoRegister);
231 __ PushObject(Object::null_object()); 224 __ PushObject(Object::null_object());
232 __ Call(*StubCode::Subtype1TestCache_entry()); 225 __ Call(*StubCode::Subtype1TestCache_entry());
233 } else if (test_kind == kTestTypeTwoArgs) { 226 } else if (test_kind == kTestTypeTwoArgs) {
234 ASSERT(type_arguments_reg == kNoRegister); 227 ASSERT(type_arguments_reg == kNoRegister);
235 __ PushObject(Object::null_object()); 228 __ PushObject(Object::null_object());
236 __ Call(*StubCode::Subtype2TestCache_entry()); 229 __ Call(*StubCode::Subtype2TestCache_entry());
237 } else if (test_kind == kTestTypeThreeArgs) { 230 } else if (test_kind == kTestTypeThreeArgs) {
238 __ pushq(type_arguments_reg); 231 __ pushq(type_arguments_reg);
239 __ Call(*StubCode::Subtype3TestCache_entry()); 232 __ Call(*StubCode::Subtype3TestCache_entry());
240 } else { 233 } else {
241 UNREACHABLE(); 234 UNREACHABLE();
242 } 235 }
243 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False. 236 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False.
244 ASSERT(instance_reg != RCX); 237 ASSERT(instance_reg != RCX);
245 ASSERT(temp_reg != RCX); 238 ASSERT(temp_reg != RCX);
246 __ popq(instance_reg); // Discard. 239 __ popq(instance_reg); // Discard.
247 __ popq(instance_reg); // Restore receiver. 240 __ popq(instance_reg); // Restore receiver.
248 __ popq(temp_reg); // Discard. 241 __ popq(temp_reg); // Discard.
249 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl); 242 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl);
250 return type_test_cache.raw(); 243 return type_test_cache.raw();
251 } 244 }
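
Note for reviewers: the comment "Result is in RCX: null -> not found, otherwise Bool::True or Bool::False" pins down a tri-state protocol. A minimal sketch of the GenerateBoolToJump dispatch it feeds, with invented names:

#include <cstdio>

enum class CacheResult { kNotFound, kTrue, kFalse };  // Invented names.

// Returns 1 (jump to is_instance_lbl), 0 (jump to is_not_instance_lbl), or
// -1 (null result: fall through to the runtime call).
int BoolToJump(CacheResult r) {
  switch (r) {
    case CacheResult::kTrue:
      return 1;
    case CacheResult::kFalse:
      return 0;
    case CacheResult::kNotFound:
      return -1;
  }
  return -1;
}

int main() {
  std::printf("%d %d %d\n", BoolToJump(CacheResult::kTrue),
              BoolToJump(CacheResult::kFalse),
              BoolToJump(CacheResult::kNotFound));
  return 0;
}
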
252 245
253 246
254 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if 247 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if
255 // type test is conclusive, otherwise fallthrough if a type test could not 248 // type test is conclusive, otherwise fallthrough if a type test could not
256 // be completed. 249 // be completed.
257 // RAX: instance (must survive). 250 // RAX: instance (must survive).
258 // Clobbers R10. 251 // Clobbers R10.
(...skipping 21 matching lines...)
280 __ j(ZERO, is_not_instance_lbl); 273 __ j(ZERO, is_not_instance_lbl);
281 } 274 }
282 // A function type test requires checking the function signature. 275 // A function type test requires checking the function signature.
283 if (!type.IsFunctionType()) { 276 if (!type.IsFunctionType()) {
284 const intptr_t num_type_args = type_class.NumTypeArguments(); 277 const intptr_t num_type_args = type_class.NumTypeArguments();
285 const intptr_t num_type_params = type_class.NumTypeParameters(); 278 const intptr_t num_type_params = type_class.NumTypeParameters();
286 const intptr_t from_index = num_type_args - num_type_params; 279 const intptr_t from_index = num_type_args - num_type_params;
287 const TypeArguments& type_arguments = 280 const TypeArguments& type_arguments =
288 TypeArguments::ZoneHandle(zone(), type.arguments()); 281 TypeArguments::ZoneHandle(zone(), type.arguments());
289 const bool is_raw_type = type_arguments.IsNull() || 282 const bool is_raw_type = type_arguments.IsNull() ||
290 type_arguments.IsRaw(from_index, num_type_params); 283 type_arguments.IsRaw(from_index, num_type_params);
291 if (is_raw_type) { 284 if (is_raw_type) {
292 const Register kClassIdReg = R10; 285 const Register kClassIdReg = R10;
293 // dynamic type argument, check only classes. 286 // dynamic type argument, check only classes.
294 __ LoadClassId(kClassIdReg, kInstanceReg); 287 __ LoadClassId(kClassIdReg, kInstanceReg);
295 __ cmpl(kClassIdReg, Immediate(type_class.id())); 288 __ cmpl(kClassIdReg, Immediate(type_class.id()));
296 __ j(EQUAL, is_instance_lbl); 289 __ j(EQUAL, is_instance_lbl);
297 // List is a very common case. 290 // List is a very common case.
298 if (IsListClass(type_class)) { 291 if (IsListClass(type_class)) {
299 GenerateListTypeCheck(kClassIdReg, is_instance_lbl); 292 GenerateListTypeCheck(kClassIdReg, is_instance_lbl);
300 } 293 }
301 return GenerateSubtype1TestCacheLookup( 294 return GenerateSubtype1TestCacheLookup(
302 token_pos, type_class, is_instance_lbl, is_not_instance_lbl); 295 token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
303 } 296 }
304 // If one type argument only, check if type argument is Object or dynamic. 297 // If one type argument only, check if type argument is Object or dynamic.
305 if (type_arguments.Length() == 1) { 298 if (type_arguments.Length() == 1) {
306 const AbstractType& tp_argument = AbstractType::ZoneHandle(zone(), 299 const AbstractType& tp_argument =
307 type_arguments.TypeAt(0)); 300 AbstractType::ZoneHandle(zone(), type_arguments.TypeAt(0));
308 ASSERT(!tp_argument.IsMalformed()); 301 ASSERT(!tp_argument.IsMalformed());
309 if (tp_argument.IsType()) { 302 if (tp_argument.IsType()) {
310 ASSERT(tp_argument.HasResolvedTypeClass()); 303 ASSERT(tp_argument.HasResolvedTypeClass());
311 // Check if type argument is dynamic or Object. 304 // Check if type argument is dynamic or Object.
312 const Type& object_type = Type::Handle(zone(), Type::ObjectType()); 305 const Type& object_type = Type::Handle(zone(), Type::ObjectType());
313 if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) { 306 if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
314 // Instance class test only necessary. 307 // Instance class test only necessary.
315 return GenerateSubtype1TestCacheLookup( 308 return GenerateSubtype1TestCacheLookup(
316 token_pos, type_class, is_instance_lbl, is_not_instance_lbl); 309 token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
317 } 310 }
318 } 311 }
319 } 312 }
320 } 313 }
321 // Regular subtype test cache involving instance's type arguments. 314 // Regular subtype test cache involving instance's type arguments.
322 const Register kTypeArgumentsReg = kNoRegister; 315 const Register kTypeArgumentsReg = kNoRegister;
323 const Register kTempReg = R10; 316 const Register kTempReg = R10;
324 return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, 317 return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, kInstanceReg,
325 kInstanceReg, 318 kTypeArgumentsReg, kTempReg,
326 kTypeArgumentsReg, 319 is_instance_lbl, is_not_instance_lbl);
327 kTempReg,
328 is_instance_lbl,
329 is_not_instance_lbl);
330 } 320 }
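
Note for reviewers: the is_raw_type branch above compares class ids only when every type argument the test would inspect is dynamic (a null vector counting as all-dynamic). A self-contained model of that decision, names invented:

#include <cassert>
#include <vector>

enum class TypeArg { kDynamic, kConcrete };  // Hypothetical representation.

// A null vector is equivalent to "all dynamic"; otherwise the range
// [from_index, from_index + count) must be all dynamic.
bool IsRawType(const std::vector<TypeArg>* args, int from_index, int count) {
  if (args == nullptr) return true;
  for (int i = from_index; i < from_index + count; ++i) {
    if ((*args)[i] != TypeArg::kDynamic) return false;
  }
  return true;
}

int main() {
  std::vector<TypeArg> args = {TypeArg::kConcrete, TypeArg::kDynamic};
  assert(IsRawType(nullptr, 0, 0));
  assert(IsRawType(&args, 1, 1));
  assert(!IsRawType(&args, 0, 2));
  return 0;
}
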
331 321
332 322
333 void FlowGraphCompiler::CheckClassIds(Register class_id_reg, 323 void FlowGraphCompiler::CheckClassIds(Register class_id_reg,
334 const GrowableArray<intptr_t>& class_ids, 324 const GrowableArray<intptr_t>& class_ids,
335 Label* is_equal_lbl, 325 Label* is_equal_lbl,
336 Label* is_not_equal_lbl) { 326 Label* is_not_equal_lbl) {
337 for (intptr_t i = 0; i < class_ids.length(); i++) { 327 for (intptr_t i = 0; i < class_ids.length(); i++) {
338 __ cmpl(class_id_reg, Immediate(class_ids[i])); 328 __ cmpl(class_id_reg, Immediate(class_ids[i]));
339 __ j(EQUAL, is_equal_lbl); 329 __ j(EQUAL, is_equal_lbl);
(...skipping 18 matching lines...)
358 // Fallthrough. 348 // Fallthrough.
359 return true; 349 return true;
360 } 350 }
361 const Class& type_class = Class::Handle(zone(), type.type_class()); 351 const Class& type_class = Class::Handle(zone(), type.type_class());
362 ASSERT(type_class.NumTypeArguments() == 0); 352 ASSERT(type_class.NumTypeArguments() == 0);
363 353
364 const Register kInstanceReg = RAX; 354 const Register kInstanceReg = RAX;
365 __ testq(kInstanceReg, Immediate(kSmiTagMask)); 355 __ testq(kInstanceReg, Immediate(kSmiTagMask));
366 // If instance is Smi, check directly. 356 // If instance is Smi, check directly.
367 const Class& smi_class = Class::Handle(zone(), Smi::Class()); 357 const Class& smi_class = Class::Handle(zone(), Smi::Class());
368 if (smi_class.IsSubtypeOf(TypeArguments::Handle(zone()), 358 if (smi_class.IsSubtypeOf(TypeArguments::Handle(zone()), type_class,
369 type_class, 359 TypeArguments::Handle(zone()), NULL, NULL,
370 TypeArguments::Handle(zone()),
371 NULL,
372 NULL,
373 Heap::kOld)) { 360 Heap::kOld)) {
374 __ j(ZERO, is_instance_lbl); 361 __ j(ZERO, is_instance_lbl);
375 } else { 362 } else {
376 __ j(ZERO, is_not_instance_lbl); 363 __ j(ZERO, is_not_instance_lbl);
377 } 364 }
378 const Register kClassIdReg = R10; 365 const Register kClassIdReg = R10;
379 __ LoadClassId(kClassIdReg, kInstanceReg); 366 __ LoadClassId(kClassIdReg, kInstanceReg);
380 // See ClassFinalizer::ResolveSuperTypeAndInterfaces for list of restricted 367 // See ClassFinalizer::ResolveSuperTypeAndInterfaces for list of restricted
381 // interfaces. 368 // interfaces.
382 // Bool interface can be implemented only by core class Bool. 369 // Bool interface can be implemented only by core class Bool.
383 if (type.IsBoolType()) { 370 if (type.IsBoolType()) {
384 __ cmpl(kClassIdReg, Immediate(kBoolCid)); 371 __ cmpl(kClassIdReg, Immediate(kBoolCid));
385 __ j(EQUAL, is_instance_lbl); 372 __ j(EQUAL, is_instance_lbl);
386 __ jmp(is_not_instance_lbl); 373 __ jmp(is_not_instance_lbl);
387 return false; 374 return false;
388 } 375 }
389 // Custom checking for numbers (Smi, Mint, Bigint and Double). 376 // Custom checking for numbers (Smi, Mint, Bigint and Double).
390 // Note that instance is not Smi (checked above). 377 // Note that instance is not Smi (checked above).
391 if (type.IsNumberType() || type.IsIntType() || type.IsDoubleType()) { 378 if (type.IsNumberType() || type.IsIntType() || type.IsDoubleType()) {
392 GenerateNumberTypeCheck( 379 GenerateNumberTypeCheck(kClassIdReg, type, is_instance_lbl,
393 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); 380 is_not_instance_lbl);
394 return false; 381 return false;
395 } 382 }
396 if (type.IsStringType()) { 383 if (type.IsStringType()) {
397 GenerateStringTypeCheck(kClassIdReg, is_instance_lbl, is_not_instance_lbl); 384 GenerateStringTypeCheck(kClassIdReg, is_instance_lbl, is_not_instance_lbl);
398 return false; 385 return false;
399 } 386 }
400 if (type.IsDartFunctionType()) { 387 if (type.IsDartFunctionType()) {
401 // Check if instance is a closure. 388 // Check if instance is a closure.
402 __ cmpq(kClassIdReg, Immediate(kClosureCid)); 389 __ cmpq(kClassIdReg, Immediate(kClosureCid));
403 __ j(EQUAL, is_instance_lbl); 390 __ j(EQUAL, is_instance_lbl);
(...skipping 26 matching lines...)
430 __ LoadClass(R10, kInstanceReg); 417 __ LoadClass(R10, kInstanceReg);
431 // R10: instance class. 418 // R10: instance class.
432 // Check immediate superclass equality. 419 // Check immediate superclass equality.
433 __ movq(R13, FieldAddress(R10, Class::super_type_offset())); 420 __ movq(R13, FieldAddress(R10, Class::super_type_offset()));
434 __ movq(R13, FieldAddress(R13, Type::type_class_id_offset())); 421 __ movq(R13, FieldAddress(R13, Type::type_class_id_offset()));
435 __ CompareImmediate(R13, Immediate(Smi::RawValue(type_class.id()))); 422 __ CompareImmediate(R13, Immediate(Smi::RawValue(type_class.id())));
436 __ j(EQUAL, is_instance_lbl); 423 __ j(EQUAL, is_instance_lbl);
437 424
438 const Register kTypeArgumentsReg = kNoRegister; 425 const Register kTypeArgumentsReg = kNoRegister;
439 const Register kTempReg = R10; 426 const Register kTempReg = R10;
440 return GenerateCallSubtypeTestStub(kTestTypeOneArg, 427 return GenerateCallSubtypeTestStub(kTestTypeOneArg, kInstanceReg,
441 kInstanceReg, 428 kTypeArgumentsReg, kTempReg,
442 kTypeArgumentsReg, 429 is_instance_lbl, is_not_instance_lbl);
443 kTempReg,
444 is_instance_lbl,
445 is_not_instance_lbl);
446 } 430 }
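
Note for reviewers: several checks above open with testq(kInstanceReg, Immediate(kSmiTagMask)). The invariant relied on, assuming the usual x64 Smi encoding (value << 1, tag bit 0), in a standalone model:

#include <cstdint>
#include <cstdio>

constexpr uintptr_t kSmiTagMaskModel = 1;  // Hypothetical name for the mask.

uintptr_t BoxSmi(intptr_t value) { return static_cast<uintptr_t>(value) << 1; }
bool IsSmi(uintptr_t bits) { return (bits & kSmiTagMaskModel) == 0; }

int main() {
  uintptr_t smi = BoxSmi(42);
  // A heap pointer always has the low bit set in this encoding.
  std::printf("smi: %d, heap object: %d\n", IsSmi(smi), IsSmi(smi | 1));
  return 0;
}
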
447 431
448 432
449 // Generates inlined check if 'type' is a type parameter or type itself 433 // Generates inlined check if 'type' is a type parameter or type itself
450 // RAX: instance (preserved). 434 // RAX: instance (preserved).
451 // Clobbers RDI, RDX, R10. 435 // Clobbers RDI, RDX, R10.
452 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( 436 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
453 TokenPosition token_pos, 437 TokenPosition token_pos,
454 const AbstractType& type, 438 const AbstractType& type,
455 Label* is_instance_lbl, 439 Label* is_instance_lbl,
456 Label* is_not_instance_lbl) { 440 Label* is_not_instance_lbl) {
457 __ Comment("UninstantiatedTypeTest"); 441 __ Comment("UninstantiatedTypeTest");
458 ASSERT(!type.IsInstantiated()); 442 ASSERT(!type.IsInstantiated());
459 // Skip check if destination is a dynamic type. 443 // Skip check if destination is a dynamic type.
460 if (type.IsTypeParameter()) { 444 if (type.IsTypeParameter()) {
461 const TypeParameter& type_param = TypeParameter::Cast(type); 445 const TypeParameter& type_param = TypeParameter::Cast(type);
462 // Load instantiator type arguments on stack. 446 // Load instantiator type arguments on stack.
463 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. 447 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments.
464 // RDX: instantiator type arguments. 448 // RDX: instantiator type arguments.
465 // Check if type arguments are null, i.e. equivalent to vector of dynamic. 449 // Check if type arguments are null, i.e. equivalent to vector of dynamic.
466 __ CompareObject(RDX, Object::null_object()); 450 __ CompareObject(RDX, Object::null_object());
467 __ j(EQUAL, is_instance_lbl); 451 __ j(EQUAL, is_instance_lbl);
468 __ movq(RDI, 452 __ movq(RDI, FieldAddress(
469 FieldAddress(RDX, TypeArguments::type_at_offset(type_param.index()))); 453 RDX, TypeArguments::type_at_offset(type_param.index())));
470 // RDI: Concrete type of type. 454 // RDI: Concrete type of type.
471 // Check if type argument is dynamic. 455 // Check if type argument is dynamic.
472 __ CompareObject(RDI, Object::dynamic_type()); 456 __ CompareObject(RDI, Object::dynamic_type());
473 __ j(EQUAL, is_instance_lbl); 457 __ j(EQUAL, is_instance_lbl);
474 const Type& object_type = Type::ZoneHandle(zone(), Type::ObjectType()); 458 const Type& object_type = Type::ZoneHandle(zone(), Type::ObjectType());
475 __ CompareObject(RDI, object_type); 459 __ CompareObject(RDI, object_type);
476 __ j(EQUAL, is_instance_lbl); 460 __ j(EQUAL, is_instance_lbl);
477 461
478 // For Smi check quickly against int and num interfaces. 462 // For Smi check quickly against int and num interfaces.
479 Label not_smi; 463 Label not_smi;
480 __ testq(RAX, Immediate(kSmiTagMask)); // Value is Smi? 464 __ testq(RAX, Immediate(kSmiTagMask)); // Value is Smi?
481 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump); 465 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump);
482 __ CompareObject(RDI, Type::ZoneHandle(zone(), Type::IntType())); 466 __ CompareObject(RDI, Type::ZoneHandle(zone(), Type::IntType()));
483 __ j(EQUAL, is_instance_lbl); 467 __ j(EQUAL, is_instance_lbl);
484 __ CompareObject(RDI, Type::ZoneHandle(zone(), Type::Number())); 468 __ CompareObject(RDI, Type::ZoneHandle(zone(), Type::Number()));
485 __ j(EQUAL, is_instance_lbl); 469 __ j(EQUAL, is_instance_lbl);
486 // Smi must be handled in runtime. 470 // Smi must be handled in runtime.
487 Label fall_through; 471 Label fall_through;
488 __ jmp(&fall_through); 472 __ jmp(&fall_through);
489 473
490 __ Bind(&not_smi); 474 __ Bind(&not_smi);
491 // RDX: instantiator type arguments. 475 // RDX: instantiator type arguments.
492 // RAX: instance. 476 // RAX: instance.
493 const Register kInstanceReg = RAX; 477 const Register kInstanceReg = RAX;
494 const Register kTypeArgumentsReg = RDX; 478 const Register kTypeArgumentsReg = RDX;
495 const Register kTempReg = R10; 479 const Register kTempReg = R10;
496 const SubtypeTestCache& type_test_cache = 480 const SubtypeTestCache& type_test_cache = SubtypeTestCache::ZoneHandle(
497 SubtypeTestCache::ZoneHandle(zone(), 481 zone(), GenerateCallSubtypeTestStub(
498 GenerateCallSubtypeTestStub(kTestTypeThreeArgs, 482 kTestTypeThreeArgs, kInstanceReg, kTypeArgumentsReg,
499 kInstanceReg, 483 kTempReg, is_instance_lbl, is_not_instance_lbl));
500 kTypeArgumentsReg,
501 kTempReg,
502 is_instance_lbl,
503 is_not_instance_lbl));
504 __ Bind(&fall_through); 484 __ Bind(&fall_through);
505 return type_test_cache.raw(); 485 return type_test_cache.raw();
506 } 486 }
507 if (type.IsType()) { 487 if (type.IsType()) {
508 const Register kInstanceReg = RAX; 488 const Register kInstanceReg = RAX;
509 const Register kTypeArgumentsReg = RDX; 489 const Register kTypeArgumentsReg = RDX;
510 __ testq(kInstanceReg, Immediate(kSmiTagMask)); // Is instance Smi? 490 __ testq(kInstanceReg, Immediate(kSmiTagMask)); // Is instance Smi?
511 __ j(ZERO, is_not_instance_lbl); 491 __ j(ZERO, is_not_instance_lbl);
512 __ movq(kTypeArgumentsReg, Address(RSP, 0)); // Instantiator type args. 492 __ movq(kTypeArgumentsReg, Address(RSP, 0)); // Instantiator type args.
513 // Uninstantiated type class is known at compile time, but the type 493 // Uninstantiated type class is known at compile time, but the type
514 // arguments are determined at runtime by the instantiator. 494 // arguments are determined at runtime by the instantiator.
515 const Register kTempReg = R10; 495 const Register kTempReg = R10;
516 return GenerateCallSubtypeTestStub(kTestTypeThreeArgs, 496 return GenerateCallSubtypeTestStub(kTestTypeThreeArgs, kInstanceReg,
517 kInstanceReg, 497 kTypeArgumentsReg, kTempReg,
518 kTypeArgumentsReg, 498 is_instance_lbl, is_not_instance_lbl);
519 kTempReg,
520 is_instance_lbl,
521 is_not_instance_lbl);
522 } 499 }
523 return SubtypeTestCache::null(); 500 return SubtypeTestCache::null();
524 } 501 }
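
Note for reviewers: the type-parameter branch above resolves the parameter against the instantiator vector and takes a few conclusive shortcuts before deferring to the three-argument cache stub. A rough model of just those shortcuts, under invented names; the real control flow jumps to labels rather than returning:

#include <cstdio>
#include <vector>

enum class Ty { kDynamic, kObject, kInt, kNum, kOther };  // Invented.

// Returns true when the fast path proves the instance matches; false means
// the slow path (test cache / runtime) must decide. Mirrors the checks
// above: a null vector or a dynamic/Object argument always succeeds; for
// Smi instances, int and num also succeed.
bool TypeParamFastPath(const std::vector<Ty>* instantiator_args, int index,
                       bool instance_is_smi) {
  if (instantiator_args == nullptr) return true;  // Vector of dynamic.
  Ty concrete = (*instantiator_args)[index];
  if (concrete == Ty::kDynamic || concrete == Ty::kObject) return true;
  if (instance_is_smi && (concrete == Ty::kInt || concrete == Ty::kNum))
    return true;
  return false;  // Fall through to the subtype test cache.
}

int main() {
  std::vector<Ty> args = {Ty::kInt};
  std::printf("%d %d\n", TypeParamFastPath(&args, 0, /*instance_is_smi=*/true),
              TypeParamFastPath(&args, 0, /*instance_is_smi=*/false));
  return 0;
}
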
525 502
526 503
527 // Inputs: 504 // Inputs:
528 // - RAX: instance to test against (preserved). 505 // - RAX: instance to test against (preserved).
529 // - RDX: optional instantiator type arguments (preserved). 506 // - RDX: optional instantiator type arguments (preserved).
530 // Clobbers R10, R13. 507 // Clobbers R10, R13.
531 // Returns: 508 // Returns:
(...skipping 11 matching lines...)
543 // A non-null value is returned from a void function, which will result in a 520 // A non-null value is returned from a void function, which will result in a
544 // type error. A null value is handled prior to executing this inline code. 521 // type error. A null value is handled prior to executing this inline code.
545 return SubtypeTestCache::null(); 522 return SubtypeTestCache::null();
546 } 523 }
547 if (type.IsInstantiated()) { 524 if (type.IsInstantiated()) {
548 const Class& type_class = Class::ZoneHandle(zone(), type.type_class()); 525 const Class& type_class = Class::ZoneHandle(zone(), type.type_class());
549 // A class equality check is only applicable with a dst type (not a 526 // A class equality check is only applicable with a dst type (not a
550 // function type) of a non-parameterized class or with a raw dst type of 527 // function type) of a non-parameterized class or with a raw dst type of
551 // a parameterized class. 528 // a parameterized class.
552 if (type.IsFunctionType() || (type_class.NumTypeArguments() > 0)) { 529 if (type.IsFunctionType() || (type_class.NumTypeArguments() > 0)) {
553 return GenerateInstantiatedTypeWithArgumentsTest(token_pos, 530 return GenerateInstantiatedTypeWithArgumentsTest(
554 type, 531 token_pos, type, is_instance_lbl, is_not_instance_lbl);
555 is_instance_lbl,
556 is_not_instance_lbl);
557 // Fall through to runtime call. 532 // Fall through to runtime call.
558 } 533 }
559 const bool has_fall_through = 534 const bool has_fall_through = GenerateInstantiatedTypeNoArgumentsTest(
560 GenerateInstantiatedTypeNoArgumentsTest(token_pos, 535 token_pos, type, is_instance_lbl, is_not_instance_lbl);
561 type,
562 is_instance_lbl,
563 is_not_instance_lbl);
564 if (has_fall_through) { 536 if (has_fall_through) {
565 // If test non-conclusive so far, try the inlined type-test cache. 537 // If test non-conclusive so far, try the inlined type-test cache.
566 // 'type' is known at compile time. 538 // 'type' is known at compile time.
567 return GenerateSubtype1TestCacheLookup( 539 return GenerateSubtype1TestCacheLookup(
568 token_pos, type_class, is_instance_lbl, is_not_instance_lbl); 540 token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
569 } else { 541 } else {
570 return SubtypeTestCache::null(); 542 return SubtypeTestCache::null();
571 } 543 }
572 } 544 }
573 return GenerateUninstantiatedTypeTest(token_pos, 545 return GenerateUninstantiatedTypeTest(token_pos, type, is_instance_lbl,
574 type,
575 is_instance_lbl,
576 is_not_instance_lbl); 546 is_not_instance_lbl);
577 } 547 }
578 548
579 549
580 // If instanceof type test cannot be performed successfully at compile time and 550 // If instanceof type test cannot be performed successfully at compile time and
581 // therefore eliminated, optimize it by adding inlined tests for: 551 // therefore eliminated, optimize it by adding inlined tests for:
582 // - NULL -> return false. 552 // - NULL -> return false.
583 // - Smi -> compile time subtype check (only if dst class is not parameterized). 553 // - Smi -> compile time subtype check (only if dst class is not parameterized).
584 // - Class equality (only if class is not parameterized). 554 // - Class equality (only if class is not parameterized).
585 // Inputs: 555 // Inputs:
(...skipping 20 matching lines...)
606 // instantiated). 576 // instantiated).
607 // We can only inline this null check if the type is instantiated at compile 577 // We can only inline this null check if the type is instantiated at compile
608 // time, since an uninstantiated type at compile time could be Object or 578 // time, since an uninstantiated type at compile time could be Object or
609 // dynamic at run time. 579 // dynamic at run time.
610 __ CompareObject(RAX, Object::null_object()); 580 __ CompareObject(RAX, Object::null_object());
611 __ j(EQUAL, type.IsNullType() ? &is_instance : &is_not_instance); 581 __ j(EQUAL, type.IsNullType() ? &is_instance : &is_not_instance);
612 } 582 }
613 583
614 // Generate inline instanceof test. 584 // Generate inline instanceof test.
615 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); 585 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
616 test_cache = GenerateInlineInstanceof(token_pos, type, 586 test_cache =
617 &is_instance, &is_not_instance); 587 GenerateInlineInstanceof(token_pos, type, &is_instance, &is_not_instance);
618 588
619 // test_cache is null if there is no fall-through. 589 // test_cache is null if there is no fall-through.
620 Label done; 590 Label done;
621 if (!test_cache.IsNull()) { 591 if (!test_cache.IsNull()) {
622 // Generate runtime call. 592 // Generate runtime call.
623 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. 593 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments.
624 __ PushObject(Object::null_object()); // Make room for the result. 594 __ PushObject(Object::null_object()); // Make room for the result.
625 __ pushq(RAX); // Push the instance. 595 __ pushq(RAX); // Push the instance.
626 __ PushObject(type); // Push the type. 596 __ PushObject(type); // Push the type.
627 __ pushq(RDX); // Instantiator type arguments. 597 __ pushq(RDX); // Instantiator type arguments.
628 __ LoadUniqueObject(RAX, test_cache); 598 __ LoadUniqueObject(RAX, test_cache);
629 __ pushq(RAX); 599 __ pushq(RAX);
630 GenerateRuntimeCall(token_pos, 600 GenerateRuntimeCall(token_pos, deopt_id, kInstanceofRuntimeEntry, 4, locs);
631 deopt_id,
632 kInstanceofRuntimeEntry,
633 4,
634 locs);
635 // Pop the parameters supplied to the runtime entry. The result of the 601 // Pop the parameters supplied to the runtime entry. The result of the
636 // instanceof runtime call will be left as the result of the operation. 602 // instanceof runtime call will be left as the result of the operation.
637 __ Drop(4); 603 __ Drop(4);
638 if (negate_result) { 604 if (negate_result) {
639 __ popq(RDX); 605 __ popq(RDX);
640 __ LoadObject(RAX, Bool::True()); 606 __ LoadObject(RAX, Bool::True());
641 __ cmpq(RDX, RAX); 607 __ cmpq(RDX, RAX);
642 __ j(NOT_EQUAL, &done, Assembler::kNearJump); 608 __ j(NOT_EQUAL, &done, Assembler::kNearJump);
643 __ LoadObject(RAX, Bool::False()); 609 __ LoadObject(RAX, Bool::False());
644 } else { 610 } else {
(...skipping 36 matching lines...)
681 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); 647 (!dst_type.IsDynamicType() && !dst_type.IsObjectType()));
682 __ pushq(RDX); // Store instantiator type arguments. 648 __ pushq(RDX); // Store instantiator type arguments.
683 // A null object is always assignable and is returned as result. 649 // A null object is always assignable and is returned as result.
684 Label is_assignable, runtime_call; 650 Label is_assignable, runtime_call;
685 __ CompareObject(RAX, Object::null_object()); 651 __ CompareObject(RAX, Object::null_object());
686 __ j(EQUAL, &is_assignable); 652 __ j(EQUAL, &is_assignable);
687 653
688 // Generate throw new TypeError() if the type is malformed or malbounded. 654 // Generate throw new TypeError() if the type is malformed or malbounded.
689 if (dst_type.IsMalformedOrMalbounded()) { 655 if (dst_type.IsMalformedOrMalbounded()) {
690 __ PushObject(Object::null_object()); // Make room for the result. 656 __ PushObject(Object::null_object()); // Make room for the result.
691 __ pushq(RAX); // Push the source object. 657 __ pushq(RAX); // Push the source object.
692 __ PushObject(dst_name); // Push the name of the destination. 658 __ PushObject(dst_name); // Push the name of the destination.
693 __ PushObject(dst_type); // Push the type of the destination. 659 __ PushObject(dst_type); // Push the type of the destination.
694 GenerateRuntimeCall(token_pos, 660 GenerateRuntimeCall(token_pos, deopt_id, kBadTypeErrorRuntimeEntry, 3,
695 deopt_id,
696 kBadTypeErrorRuntimeEntry,
697 3,
698 locs); 661 locs);
699 // We should never return here. 662 // We should never return here.
700 __ int3(); 663 __ int3();
701 664
702 __ Bind(&is_assignable); // For a null object. 665 __ Bind(&is_assignable); // For a null object.
703 __ popq(RDX); // Remove pushed instantiator type arguments. 666 __ popq(RDX); // Remove pushed instantiator type arguments.
704 return; 667 return;
705 } 668 }
706 669
707 // Generate inline type check, linking to runtime call if not assignable. 670 // Generate inline type check, linking to runtime call if not assignable.
708 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); 671 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
709 test_cache = GenerateInlineInstanceof(token_pos, dst_type, 672 test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable,
710 &is_assignable, &runtime_call); 673 &runtime_call);
711 674
712 __ Bind(&runtime_call); 675 __ Bind(&runtime_call);
713 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. 676 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments.
714 __ PushObject(Object::null_object()); // Make room for the result. 677 __ PushObject(Object::null_object()); // Make room for the result.
715 __ pushq(RAX); // Push the source object. 678 __ pushq(RAX); // Push the source object.
716 __ PushObject(dst_type); // Push the type of the destination. 679 __ PushObject(dst_type); // Push the type of the destination.
717 __ pushq(RDX); // Instantiator type arguments. 680 __ pushq(RDX); // Instantiator type arguments.
718 __ PushObject(dst_name); // Push the name of the destination. 681 __ PushObject(dst_name); // Push the name of the destination.
719 __ LoadUniqueObject(RAX, test_cache); 682 __ LoadUniqueObject(RAX, test_cache);
720 __ pushq(RAX); 683 __ pushq(RAX);
721 GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 5, locs); 684 GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 5, locs);
722 // Pop the parameters supplied to the runtime entry. The result of the 685 // Pop the parameters supplied to the runtime entry. The result of the
723 // type check runtime call is the checked value. 686 // type check runtime call is the checked value.
724 __ Drop(5); 687 __ Drop(5);
725 __ popq(RAX); 688 __ popq(RAX);
726 689
727 __ Bind(&is_assignable); 690 __ Bind(&is_assignable);
728 __ popq(RDX); // Remove pushed instantiator type arguments. 691 __ popq(RDX); // Remove pushed instantiator type arguments.
(...skipping 48 matching lines...)
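
Note for reviewers: the runtime-call tail of GenerateAssertAssignable follows a fixed stack protocol: reserve a null result slot, push five arguments, call, Drop(5), then pop the checked value. A stack model of that shape with a stand-in runtime entry (not the VM's kTypeCheckRuntimeEntry):

#include <cassert>
#include <string>
#include <vector>

// Stand-in runtime entry: reads its arguments and writes the checked value
// into the reserved result slot (here it just passes the source through).
void TypeCheckRuntimeEntryModel(std::vector<std::string>* stack) {
  const size_t n = stack->size();
  (*stack)[n - 6] = (*stack)[n - 5];  // Result slot <- source object.
}

int main() {
  std::vector<std::string> stack;
  stack.push_back("null");        // Make room for the result.
  stack.push_back("instance");    // Source object.
  stack.push_back("dst_type");    // Type of the destination.
  stack.push_back("type_args");   // Instantiator type arguments.
  stack.push_back("dst_name");    // Name of the destination.
  stack.push_back("test_cache");  // Subtype test cache.
  TypeCheckRuntimeEntryModel(&stack);
  stack.resize(stack.size() - 5);      // __ Drop(5).
  std::string result = stack.back();   // __ popq(RAX).
  stack.pop_back();
  assert(result == "instance" && stack.empty());
  return 0;
}
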
777 740
778 // Copy positional arguments. 741 // Copy positional arguments.
779 // Argument i passed at fp[kParamEndSlotFromFp + num_args - i] is copied 742 // Argument i passed at fp[kParamEndSlotFromFp + num_args - i] is copied
780 // to fp[kFirstLocalSlotFromFp - i]. 743 // to fp[kFirstLocalSlotFromFp - i].
781 744
782 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 745 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
783 // Since RBX and RCX are Smi, use TIMES_4 instead of TIMES_8. 746 // Since RBX and RCX are Smi, use TIMES_4 instead of TIMES_8.
784 // Let RBX point to the last passed positional argument, i.e. to 747 // Let RBX point to the last passed positional argument, i.e. to
785 // fp[kParamEndSlotFromFp + num_args - (num_pos_args - 1)]. 748 // fp[kParamEndSlotFromFp + num_args - (num_pos_args - 1)].
786 __ subq(RBX, RCX); 749 __ subq(RBX, RCX);
787 __ leaq(RBX, Address(RBP, RBX, TIMES_4, 750 __ leaq(RBX,
788 (kParamEndSlotFromFp + 1) * kWordSize)); 751 Address(RBP, RBX, TIMES_4, (kParamEndSlotFromFp + 1) * kWordSize));
789 752
790 // Let RDI point to the last copied positional argument, i.e. to 753 // Let RDI point to the last copied positional argument, i.e. to
791 // fp[kFirstLocalSlotFromFp - (num_pos_args - 1)]. 754 // fp[kFirstLocalSlotFromFp - (num_pos_args - 1)].
792 __ SmiUntag(RCX); 755 __ SmiUntag(RCX);
793 __ movq(RAX, RCX); 756 __ movq(RAX, RCX);
794 __ negq(RAX); 757 __ negq(RAX);
795 // -num_pos_args is in RAX. 758 // -num_pos_args is in RAX.
796 __ leaq(RDI, 759 __ leaq(RDI,
797 Address(RBP, RAX, TIMES_8, (kFirstLocalSlotFromFp + 1) * kWordSize)); 760 Address(RBP, RAX, TIMES_8, (kFirstLocalSlotFromFp + 1) * kWordSize));
798 Label loop, loop_condition; 761 Label loop, loop_condition;
799 __ jmp(&loop_condition, Assembler::kNearJump); 762 __ jmp(&loop_condition, Assembler::kNearJump);
800 // We do not use the final allocation index of the variable here, i.e. 763 // We do not use the final allocation index of the variable here, i.e.
801 // scope->VariableAt(i)->index(), because captured variables still need 764 // scope->VariableAt(i)->index(), because captured variables still need
802 // to be copied to the context that is not yet allocated. 765 // to be copied to the context that is not yet allocated.
803 const Address argument_addr(RBX, RCX, TIMES_8, 0); 766 const Address argument_addr(RBX, RCX, TIMES_8, 0);
804 const Address copy_addr(RDI, RCX, TIMES_8, 0); 767 const Address copy_addr(RDI, RCX, TIMES_8, 0);
805 __ Bind(&loop); 768 __ Bind(&loop);
806 __ movq(RAX, argument_addr); 769 __ movq(RAX, argument_addr);
807 __ movq(copy_addr, RAX); 770 __ movq(copy_addr, RAX);
808 __ Bind(&loop_condition); 771 __ Bind(&loop_condition);
809 __ decq(RCX); 772 __ decq(RCX);
810 __ j(POSITIVE, &loop, Assembler::kNearJump); 773 __ j(POSITIVE, &loop, Assembler::kNearJump);
811 774
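
Note for reviewers: the "Since RBX and RCX are Smi, use TIMES_4 instead of TIMES_8" trick above works because a tagged Smi already carries value * 2, so scaling the tagged bits by 4 yields the value * 8 word offset without untagging. Quick standalone check (boxing modeled by hand):

#include <cassert>
#include <cstdint>

int main() {
  for (intptr_t value = 0; value < 100; ++value) {
    uintptr_t smi_bits = static_cast<uintptr_t>(value) << 1;  // Tagged Smi.
    assert(smi_bits * 4 == static_cast<uintptr_t>(value) * 8);  // TIMES_4.
  }
  return 0;
}
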
812 // Copy or initialize optional named arguments. 775 // Copy or initialize optional named arguments.
813 Label all_arguments_processed; 776 Label all_arguments_processed;
814 #ifdef DEBUG 777 #ifdef DEBUG
815 const bool check_correct_named_args = true; 778 const bool check_correct_named_args = true;
816 #else 779 #else
817 const bool check_correct_named_args = function.IsClosureFunction(); 780 const bool check_correct_named_args = function.IsClosureFunction();
818 #endif 781 #endif
819 if (num_opt_named_params > 0) { 782 if (num_opt_named_params > 0) {
820 // Start by alphabetically sorting the names of the optional parameters. 783 // Start by alphabetically sorting the names of the optional parameters.
821 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; 784 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params];
822 int* opt_param_position = new int[num_opt_named_params]; 785 int* opt_param_position = new int[num_opt_named_params];
823 for (int pos = num_fixed_params; pos < num_params; pos++) { 786 for (int pos = num_fixed_params; pos < num_params; pos++) {
824 LocalVariable* parameter = scope->VariableAt(pos); 787 LocalVariable* parameter = scope->VariableAt(pos);
825 const String& opt_param_name = parameter->name(); 788 const String& opt_param_name = parameter->name();
826 int i = pos - num_fixed_params; 789 int i = pos - num_fixed_params;
827 while (--i >= 0) { 790 while (--i >= 0) {
828 LocalVariable* param_i = opt_param[i]; 791 LocalVariable* param_i = opt_param[i];
829 const intptr_t result = opt_param_name.CompareTo(param_i->name()); 792 const intptr_t result = opt_param_name.CompareTo(param_i->name());
830 ASSERT(result != 0); 793 ASSERT(result != 0);
831 if (result > 0) break; 794 if (result > 0) break;
832 opt_param[i + 1] = opt_param[i]; 795 opt_param[i + 1] = opt_param[i];
833 opt_param_position[i + 1] = opt_param_position[i]; 796 opt_param_position[i + 1] = opt_param_position[i];
834 } 797 }
835 opt_param[i + 1] = parameter; 798 opt_param[i + 1] = parameter;
836 opt_param_position[i + 1] = pos; 799 opt_param_position[i + 1] = pos;
837 } 800 }
838 // Generate code handling each optional parameter in alphabetical order. 801 // Generate code handling each optional parameter in alphabetical order.
839 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 802 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
840 __ movq(RCX, 803 __ movq(RCX,
841 FieldAddress(R10, ArgumentsDescriptor::positional_count_offset())); 804 FieldAddress(R10, ArgumentsDescriptor::positional_count_offset()));
842 __ SmiUntag(RCX); 805 __ SmiUntag(RCX);
843 // Let RBX point to the first passed argument, i.e. to 806 // Let RBX point to the first passed argument, i.e. to
844 // fp[kParamEndSlotFromFp + num_args]; num_args (RBX) is Smi. 807 // fp[kParamEndSlotFromFp + num_args]; num_args (RBX) is Smi.
845 __ leaq(RBX, 808 __ leaq(RBX, Address(RBP, RBX, TIMES_4, kParamEndSlotFromFp * kWordSize));
846 Address(RBP, RBX, TIMES_4, kParamEndSlotFromFp * kWordSize));
847 // Let RDI point to the entry of the first named argument. 809 // Let RDI point to the entry of the first named argument.
848 __ leaq(RDI, 810 __ leaq(RDI,
849 FieldAddress(R10, ArgumentsDescriptor::first_named_entry_offset())); 811 FieldAddress(R10, ArgumentsDescriptor::first_named_entry_offset()));
850 for (int i = 0; i < num_opt_named_params; i++) { 812 for (int i = 0; i < num_opt_named_params; i++) {
851 Label load_default_value, assign_optional_parameter; 813 Label load_default_value, assign_optional_parameter;
852 const int param_pos = opt_param_position[i]; 814 const int param_pos = opt_param_position[i];
853 // Check if this named parameter was passed in. 815 // Check if this named parameter was passed in.
854 // Load RAX with the name of the argument. 816 // Load RAX with the name of the argument.
855 __ movq(RAX, Address(RDI, ArgumentsDescriptor::name_offset())); 817 __ movq(RAX, Address(RDI, ArgumentsDescriptor::name_offset()));
856 ASSERT(opt_param[i]->name().IsSymbol()); 818 ASSERT(opt_param[i]->name().IsSymbol());
857 __ CompareObject(RAX, opt_param[i]->name()); 819 __ CompareObject(RAX, opt_param[i]->name());
858 __ j(NOT_EQUAL, &load_default_value, Assembler::kNearJump); 820 __ j(NOT_EQUAL, &load_default_value, Assembler::kNearJump);
859 // Load RAX with passed-in argument at provided arg_pos, i.e. at 821 // Load RAX with passed-in argument at provided arg_pos, i.e. at
860 // fp[kParamEndSlotFromFp + num_args - arg_pos]. 822 // fp[kParamEndSlotFromFp + num_args - arg_pos].
861 __ movq(RAX, Address(RDI, ArgumentsDescriptor::position_offset())); 823 __ movq(RAX, Address(RDI, ArgumentsDescriptor::position_offset()));
862 // RAX is arg_pos as Smi. 824 // RAX is arg_pos as Smi.
863 // Point to next named entry. 825 // Point to next named entry.
864 __ AddImmediate( 826 __ AddImmediate(RDI, Immediate(ArgumentsDescriptor::named_entry_size()));
865 RDI, Immediate(ArgumentsDescriptor::named_entry_size()));
866 __ negq(RAX); 827 __ negq(RAX);
867 Address argument_addr(RBX, RAX, TIMES_4, 0); // RAX is a negative Smi. 828 Address argument_addr(RBX, RAX, TIMES_4, 0); // RAX is a negative Smi.
868 __ movq(RAX, argument_addr); 829 __ movq(RAX, argument_addr);
869 __ jmp(&assign_optional_parameter, Assembler::kNearJump); 830 __ jmp(&assign_optional_parameter, Assembler::kNearJump);
870 __ Bind(&load_default_value); 831 __ Bind(&load_default_value);
871 // Load RAX with default argument. 832 // Load RAX with default argument.
872 const Instance& value = parsed_function().DefaultParameterValueAt( 833 const Instance& value = parsed_function().DefaultParameterValueAt(
873 param_pos - num_fixed_params); 834 param_pos - num_fixed_params);
874 __ LoadObject(RAX, value); 835 __ LoadObject(RAX, value);
875 __ Bind(&assign_optional_parameter); 836 __ Bind(&assign_optional_parameter);
(...skipping 63 matching lines...)
939 // This step can be skipped in case we decide that formal parameters are 900 // This step can be skipped in case we decide that formal parameters are
940 // implicitly final, since garbage collecting the unmodified value is not 901 // implicitly final, since garbage collecting the unmodified value is not
941 // an issue anymore. 902 // an issue anymore.
942 903
943 // R10 : arguments descriptor array. 904 // R10 : arguments descriptor array.
944 __ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 905 __ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
945 __ SmiUntag(RCX); 906 __ SmiUntag(RCX);
946 __ LoadObject(R12, Object::null_object()); 907 __ LoadObject(R12, Object::null_object());
947 Label null_args_loop, null_args_loop_condition; 908 Label null_args_loop, null_args_loop_condition;
948 __ jmp(&null_args_loop_condition, Assembler::kNearJump); 909 __ jmp(&null_args_loop_condition, Assembler::kNearJump);
949 const Address original_argument_addr( 910 const Address original_argument_addr(RBP, RCX, TIMES_8,
950 RBP, RCX, TIMES_8, (kParamEndSlotFromFp + 1) * kWordSize); 911 (kParamEndSlotFromFp + 1) * kWordSize);
951 __ Bind(&null_args_loop); 912 __ Bind(&null_args_loop);
952 __ movq(original_argument_addr, R12); 913 __ movq(original_argument_addr, R12);
953 __ Bind(&null_args_loop_condition); 914 __ Bind(&null_args_loop_condition);
954 __ decq(RCX); 915 __ decq(RCX);
955 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); 916 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump);
956 } 917 }
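
Note for reviewers: the loop near the top of the named-arguments block is an insertion sort that orders optional named parameters alphabetically so they can be matched against the (also sorted) names in the arguments descriptor in a single pass. Standalone model with hypothetical parameter names:

#include <cstdio>
#include <cstring>

int main() {
  // Hypothetical optional named parameters, in declaration order.
  const char* names[] = {"radius", "color", "alpha"};
  const int n = 3;
  const char* sorted[n];  // Alphabetized names.
  int position[n];        // Original declaration positions.
  for (int pos = 0; pos < n; ++pos) {
    int i = pos - 1;
    while (i >= 0 && std::strcmp(names[pos], sorted[i]) < 0) {
      sorted[i + 1] = sorted[i];  // Shift larger names up, as above.
      position[i + 1] = position[i];
      --i;
    }
    sorted[i + 1] = names[pos];
    position[i + 1] = pos;
  }
  for (int i = 0; i < n; ++i) {
    std::printf("%s (declared at %d)\n", sorted[i], position[i]);
  }
  return 0;
}
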
957 918
958 919
959 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { 920 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) {
960 // TOS: return address. 921 // TOS: return address.
(...skipping 17 matching lines...)
978 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); 939 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX);
979 __ LoadObject(RAX, Object::null_object()); 940 __ LoadObject(RAX, Object::null_object());
980 __ ret(); 941 __ ret();
981 } 942 }
982 943
983 944
984 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc 945 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc
985 // needs to be updated to match. 946 // needs to be updated to match.
986 void FlowGraphCompiler::EmitFrameEntry() { 947 void FlowGraphCompiler::EmitFrameEntry() {
987 if (flow_graph().IsCompiledForOsr()) { 948 if (flow_graph().IsCompiledForOsr()) {
988 intptr_t extra_slots = StackSize() 949 intptr_t extra_slots = StackSize() - flow_graph().num_stack_locals() -
989 - flow_graph().num_stack_locals() 950 flow_graph().num_copied_params();
990 - flow_graph().num_copied_params();
991 ASSERT(extra_slots >= 0); 951 ASSERT(extra_slots >= 0);
992 __ EnterOsrFrame(extra_slots * kWordSize); 952 __ EnterOsrFrame(extra_slots * kWordSize);
993 } else { 953 } else {
994 const Register new_pp = R13; 954 const Register new_pp = R13;
995 __ LoadPoolPointer(new_pp); 955 __ LoadPoolPointer(new_pp);
996 956
997 const Function& function = parsed_function().function(); 957 const Function& function = parsed_function().function();
998 if (CanOptimizeFunction() && 958 if (CanOptimizeFunction() && function.IsOptimizable() &&
999 function.IsOptimizable() &&
1000 (!is_optimizing() || may_reoptimize())) { 959 (!is_optimizing() || may_reoptimize())) {
1001 __ Comment("Invocation Count Check"); 960 __ Comment("Invocation Count Check");
1002 const Register function_reg = RDI; 961 const Register function_reg = RDI;
1003 // Load function object using the callee's pool pointer. 962 // Load function object using the callee's pool pointer.
1004 __ LoadFunctionFromCalleePool(function_reg, function, new_pp); 963 __ LoadFunctionFromCalleePool(function_reg, function, new_pp);
1005 964
1006 // Reoptimization of an optimized function is triggered by counting in 965 // Reoptimization of an optimized function is triggered by counting in
1007 // IC stubs, but not at the entry of the function. 966 // IC stubs, but not at the entry of the function.
1008 if (!is_optimizing()) { 967 if (!is_optimizing()) {
1009 __ incl(FieldAddress(function_reg, Function::usage_counter_offset())); 968 __ incl(FieldAddress(function_reg, Function::usage_counter_offset()));
1010 } 969 }
1011 __ cmpl( 970 __ cmpl(FieldAddress(function_reg, Function::usage_counter_offset()),
1012 FieldAddress(function_reg, Function::usage_counter_offset()), 971 Immediate(GetOptimizationThreshold()));
1013 Immediate(GetOptimizationThreshold()));
1014 ASSERT(function_reg == RDI); 972 ASSERT(function_reg == RDI);
1015 __ J(GREATER_EQUAL, 973 __ J(GREATER_EQUAL, *StubCode::OptimizeFunction_entry(), new_pp);
1016 *StubCode::OptimizeFunction_entry(),
1017 new_pp);
1018 } 974 }
1019 ASSERT(StackSize() >= 0); 975 ASSERT(StackSize() >= 0);
1020 __ Comment("Enter frame"); 976 __ Comment("Enter frame");
1021 __ EnterDartFrame(StackSize() * kWordSize, new_pp); 977 __ EnterDartFrame(StackSize() * kWordSize, new_pp);
1022 } 978 }
1023 } 979 }
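
Note for reviewers: the invocation-count check above increments a usage counter on unoptimized entry and jumps to OptimizeFunction once a threshold is reached. A minimal model; the threshold value here is made up:

#include <cstdio>

struct Function {
  int usage_counter = 0;
};

constexpr int kOptimizationThreshold = 1000;  // Hypothetical value.

// Returns true when entry should divert to the optimizer. Only unoptimized
// code bumps the counter at entry; optimized code counts in IC stubs.
bool EnterFunction(Function* f, bool is_optimizing) {
  if (!is_optimizing) ++f->usage_counter;
  return f->usage_counter >= kOptimizationThreshold;
}

int main() {
  Function f;
  int calls = 0;
  while (!EnterFunction(&f, /*is_optimizing=*/false)) ++calls;
  std::printf("optimized after %d calls\n", calls + 1);
  return 0;
}
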
1024 980
1025 981
1026 void FlowGraphCompiler::CompileGraph() { 982 void FlowGraphCompiler::CompileGraph() {
1027 InitCompiler(); 983 InitCompiler();
(...skipping 24 matching lines...)
1052 if (num_copied_params == 0) { 1008 if (num_copied_params == 0) {
1053 const bool check_arguments = 1009 const bool check_arguments =
1054 function.IsClosureFunction() && !flow_graph().IsCompiledForOsr(); 1010 function.IsClosureFunction() && !flow_graph().IsCompiledForOsr();
1055 if (check_arguments) { 1011 if (check_arguments) {
1056 __ Comment("Check argument count"); 1012 __ Comment("Check argument count");
1057 // Check that exactly num_fixed arguments are passed in. 1013 // Check that exactly num_fixed arguments are passed in.
1058 Label correct_num_arguments, wrong_num_arguments; 1014 Label correct_num_arguments, wrong_num_arguments;
1059 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 1015 __ movq(RAX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
1060 __ CompareImmediate(RAX, Immediate(Smi::RawValue(num_fixed_params))); 1016 __ CompareImmediate(RAX, Immediate(Smi::RawValue(num_fixed_params)));
1061 __ j(NOT_EQUAL, &wrong_num_arguments, Assembler::kNearJump); 1017 __ j(NOT_EQUAL, &wrong_num_arguments, Assembler::kNearJump);
1062 __ cmpq(RAX, 1018 __ cmpq(RAX, FieldAddress(
1063 FieldAddress(R10, 1019 R10, ArgumentsDescriptor::positional_count_offset()));
1064 ArgumentsDescriptor::positional_count_offset()));
1065 __ j(EQUAL, &correct_num_arguments, Assembler::kNearJump); 1020 __ j(EQUAL, &correct_num_arguments, Assembler::kNearJump);
1066 1021
1067 __ Bind(&wrong_num_arguments); 1022 __ Bind(&wrong_num_arguments);
1068 __ LeaveDartFrame(kKeepCalleePP); // Leave arguments on the stack. 1023 __ LeaveDartFrame(kKeepCalleePP); // Leave arguments on the stack.
1069 __ Jmp(*StubCode::CallClosureNoSuchMethod_entry()); 1024 __ Jmp(*StubCode::CallClosureNoSuchMethod_entry());
1070 // The noSuchMethod call may return to the caller, but not here. 1025 // The noSuchMethod call may return to the caller, but not here.
1071 __ Bind(&correct_num_arguments); 1026 __ Bind(&correct_num_arguments);
1072 } 1027 }
1073 } else if (!flow_graph().IsCompiledForOsr()) { 1028 } else if (!flow_graph().IsCompiledForOsr()) {
1074 CopyParameters(); 1029 CopyParameters();
(...skipping 27 matching lines...)
1102 __ LoadObject(RAX, Object::null_object()); 1057 __ LoadObject(RAX, Object::null_object());
1103 } 1058 }
1104 for (intptr_t i = 0; i < num_locals; ++i) { 1059 for (intptr_t i = 0; i < num_locals; ++i) {
1105 // Subtract index i (locals lie at lower addresses than RBP). 1060 // Subtract index i (locals lie at lower addresses than RBP).
1106 if (((slot_base - i) == context_index)) { 1061 if (((slot_base - i) == context_index)) {
1107 if (function.IsClosureFunction()) { 1062 if (function.IsClosureFunction()) {
1108 __ movq(Address(RBP, (slot_base - i) * kWordSize), CTX); 1063 __ movq(Address(RBP, (slot_base - i) * kWordSize), CTX);
1109 } else { 1064 } else {
1110 const Context& empty_context = Context::ZoneHandle( 1065 const Context& empty_context = Context::ZoneHandle(
1111 zone(), isolate()->object_store()->empty_context()); 1066 zone(), isolate()->object_store()->empty_context());
1112 __ StoreObject( 1067 __ StoreObject(Address(RBP, (slot_base - i) * kWordSize),
1113 Address(RBP, (slot_base - i) * kWordSize), empty_context); 1068 empty_context);
1114 } 1069 }
1115 } else { 1070 } else {
1116 ASSERT(num_locals > 1); 1071 ASSERT(num_locals > 1);
1117 __ movq(Address(RBP, (slot_base - i) * kWordSize), RAX); 1072 __ movq(Address(RBP, (slot_base - i) * kWordSize), RAX);
1118 } 1073 }
1119 } 1074 }
1120 } 1075 }
1121 1076
1122 EndCodeSourceRange(TokenPosition::kDartCodePrologue); 1077 EndCodeSourceRange(TokenPosition::kDartCodePrologue);
1123 ASSERT(!block_order().is_empty()); 1078 ASSERT(!block_order().is_empty());
(...skipping 80 matching lines...)
1204 AddDeoptIndexAtCall(deopt_id_after); 1159 AddDeoptIndexAtCall(deopt_id_after);
1205 } else { 1160 } else {
1206 // Add deoptimization continuation point after the call and before the 1161 // Add deoptimization continuation point after the call and before the
1207 // arguments are removed. 1162 // arguments are removed.
1208 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); 1163 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
1209 } 1164 }
1210 } 1165 }
1211 } 1166 }
1212 1167
1213 1168
1214 void FlowGraphCompiler::EmitUnoptimizedStaticCall( 1169 void FlowGraphCompiler::EmitUnoptimizedStaticCall(intptr_t argument_count,
1215 intptr_t argument_count, 1170 intptr_t deopt_id,
1216 intptr_t deopt_id, 1171 TokenPosition token_pos,
1217 TokenPosition token_pos, 1172 LocationSummary* locs,
1218 LocationSummary* locs, 1173 const ICData& ic_data) {
1219 const ICData& ic_data) {
1220 const StubEntry* stub_entry = 1174 const StubEntry* stub_entry =
1221 StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested()); 1175 StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
1222 __ LoadObject(RBX, ic_data); 1176 __ LoadObject(RBX, ic_data);
1223 GenerateDartCall(deopt_id, 1177 GenerateDartCall(deopt_id, token_pos, *stub_entry,
1224 token_pos, 1178 RawPcDescriptors::kUnoptStaticCall, locs);
1225 *stub_entry,
1226 RawPcDescriptors::kUnoptStaticCall,
1227 locs);
1228 __ Drop(argument_count, RCX); 1179 __ Drop(argument_count, RCX);
1229 } 1180 }
1230 1181
1231 1182
1232 void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) { 1183 void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) {
1233 // We do not check for overflow when incrementing the edge counter. The 1184 // We do not check for overflow when incrementing the edge counter. The
1234 // function should normally be optimized long before the counter can 1185 // function should normally be optimized long before the counter can
1235 // overflow; and though we do not reset the counters when we optimize or 1186 // overflow; and though we do not reset the counters when we optimize or
1236 // deoptimize, there is a bound on the number of 1187 // deoptimize, there is a bound on the number of
1237 // optimization/deoptimization cycles we will attempt. 1188 // optimization/deoptimization cycles we will attempt.
1238 ASSERT(!edge_counters_array_.IsNull()); 1189 ASSERT(!edge_counters_array_.IsNull());
1239 ASSERT(assembler_->constant_pool_allowed()); 1190 ASSERT(assembler_->constant_pool_allowed());
1240 __ Comment("Edge counter"); 1191 __ Comment("Edge counter");
1241 __ LoadObject(RAX, edge_counters_array_); 1192 __ LoadObject(RAX, edge_counters_array_);
1242 __ IncrementSmiField(FieldAddress(RAX, Array::element_offset(edge_id)), 1); 1193 __ IncrementSmiField(FieldAddress(RAX, Array::element_offset(edge_id)), 1);
1243 } 1194 }
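
Note for reviewers: IncrementSmiField can bump a Smi-boxed slot with plain addition because the tag bits are zero, so adding the raw encoding of Smi(1) adds one to the boxed value. Sketch, with the boxing modeled by hand rather than taken from VM headers:

#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  // Edge counters live in an array of Smi-boxed values (tag bit 0, value
  // shifted left by one). Smi 0 is the all-zero bit pattern.
  std::vector<uintptr_t> edge_counters(4, 0);
  const int edge_id = 2;
  edge_counters[edge_id] += uintptr_t{1} << 1;  // Add raw Smi(1), i.e. += 2.
  std::printf("edge %d -> %ld\n", edge_id,
              static_cast<long>(edge_counters[edge_id] >> 1));
  return 0;
}
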
1244 1195
1245 1196
1246 void FlowGraphCompiler::EmitOptimizedInstanceCall( 1197 void FlowGraphCompiler::EmitOptimizedInstanceCall(const StubEntry& stub_entry,
1247 const StubEntry& stub_entry, 1198 const ICData& ic_data,
1248 const ICData& ic_data, 1199 intptr_t argument_count,
1249 intptr_t argument_count, 1200 intptr_t deopt_id,
1250 intptr_t deopt_id, 1201 TokenPosition token_pos,
1251 TokenPosition token_pos, 1202 LocationSummary* locs) {
1252 LocationSummary* locs) {
1253 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0); 1203 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
1254 // Each ICData propagated from unoptimized to optimized code contains the 1204 // Each ICData propagated from unoptimized to optimized code contains the
1255 // function that corresponds to the Dart function of that IC call. Due 1205 // function that corresponds to the Dart function of that IC call. Due
1256 // to inlining in optimized code, that function may not correspond to the 1206 // to inlining in optimized code, that function may not correspond to the
1257 // top-level function (parsed_function().function()) which could be 1207 // top-level function (parsed_function().function()) which could be
1258 // reoptimized and whose counter needs to be incremented. 1208 // reoptimized and whose counter needs to be incremented.
1259 // Pass the function explicitly; it is used by the IC stub. 1209 // Pass the function explicitly; it is used by the IC stub.
1260 __ LoadObject(RDI, parsed_function().function()); 1210 __ LoadObject(RDI, parsed_function().function());
1261 __ LoadUniqueObject(RBX, ic_data); 1211 __ LoadUniqueObject(RBX, ic_data);
1262 GenerateDartCall(deopt_id, 1212 GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall,
1263 token_pos,
1264 stub_entry,
1265 RawPcDescriptors::kIcCall,
1266 locs); 1213 locs);
1267 __ Drop(argument_count, RCX); 1214 __ Drop(argument_count, RCX);
1268 } 1215 }
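
The comment above is the interesting part: in optimized code the IC stub cannot infer which function's usage counter should drive reoptimization, because the ICData may belong to an inlined callee, so the caller is passed explicitly in RDI. A hedged model of that attribution, with illustrative types rather than the VM's:

#include <cstdint>
#include <map>
#include <string>

// Hypothetical model: an ICData records per-receiver-class call counts,
// and the stub bumps the usage counter of the function passed in RDI.
struct Function {
  std::string name;
  intptr_t usage_counter = 0;
};

struct ICData {
  std::map<intptr_t, intptr_t> counts_by_cid;  // receiver cid -> hits
};

void IcStub(Function* caller, ICData* ic_data, intptr_t receiver_cid) {
  // The caller is passed explicitly because, after inlining, the ICData's
  // own function may be an inlined callee rather than the function whose
  // counter decides when to reoptimize.
  caller->usage_counter++;
  ic_data->counts_by_cid[receiver_cid]++;
}

int main() {
  Function top_level{"outer"};
  ICData ic;
  IcStub(&top_level, &ic, /*receiver_cid=*/57);
  return top_level.usage_counter == 1 ? 0 : 1;
}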
1269 1216
1270 1217
1271 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry, 1218 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry,
1272 const ICData& ic_data, 1219 const ICData& ic_data,
1273 intptr_t argument_count, 1220 intptr_t argument_count,
1274 intptr_t deopt_id, 1221 intptr_t deopt_id,
1275 TokenPosition token_pos, 1222 TokenPosition token_pos,
1276 LocationSummary* locs) { 1223 LocationSummary* locs) {
1277 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0); 1224 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
1278 __ LoadUniqueObject(RBX, ic_data); 1225 __ LoadUniqueObject(RBX, ic_data);
1279 GenerateDartCall(deopt_id, 1226 GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall,
1280 token_pos,
1281 stub_entry,
1282 RawPcDescriptors::kIcCall,
1283 locs); 1227 locs);
1284 __ Drop(argument_count, RCX); 1228 __ Drop(argument_count, RCX);
1285 } 1229 }
1286 1230
1287 1231
1288 void FlowGraphCompiler::EmitMegamorphicInstanceCall( 1232 void FlowGraphCompiler::EmitMegamorphicInstanceCall(
1289 const ICData& ic_data, 1233 const ICData& ic_data,
1290 intptr_t argument_count, 1234 intptr_t argument_count,
1291 intptr_t deopt_id, 1235 intptr_t deopt_id,
1292 TokenPosition token_pos, 1236 TokenPosition token_pos,
1293 LocationSummary* locs, 1237 LocationSummary* locs,
1294 intptr_t try_index, 1238 intptr_t try_index,
1295 intptr_t slow_path_argument_count) { 1239 intptr_t slow_path_argument_count) {
1296 const String& name = String::Handle(zone(), ic_data.target_name()); 1240 const String& name = String::Handle(zone(), ic_data.target_name());
1297 const Array& arguments_descriptor = 1241 const Array& arguments_descriptor =
1298 Array::ZoneHandle(zone(), ic_data.arguments_descriptor()); 1242 Array::ZoneHandle(zone(), ic_data.arguments_descriptor());
1299 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0)); 1243 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0));
1300 const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(zone(), 1244 const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(
1245 zone(),
1301 MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor)); 1246 MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor));
1302 __ Comment("MegamorphicCall"); 1247 __ Comment("MegamorphicCall");
1303 // Load receiver into RDI. 1248 // Load receiver into RDI.
1304 __ movq(RDI, Address(RSP, (argument_count - 1) * kWordSize)); 1249 __ movq(RDI, Address(RSP, (argument_count - 1) * kWordSize));
1305 Label done; 1250 Label done;
1306 if (ShouldInlineSmiStringHashCode(ic_data)) { 1251 if (ShouldInlineSmiStringHashCode(ic_data)) {
1307 Label megamorphic_call; 1252 Label megamorphic_call;
1308 __ Comment("Inlined get:hashCode for Smi and OneByteString"); 1253 __ Comment("Inlined get:hashCode for Smi and OneByteString");
1309 __ movq(RAX, RDI); // Move Smi hashcode to RAX. 1254 __ movq(RAX, RDI); // Move Smi hashcode to RAX.
1310 __ testq(RDI, Immediate(kSmiTagMask)); 1255 __ testq(RDI, Immediate(kSmiTagMask));
(...skipping 13 matching lines...)
1324 1269
1325 __ Bind(&done); 1270 __ Bind(&done);
1326 RecordSafepoint(locs, slow_path_argument_count); 1271 RecordSafepoint(locs, slow_path_argument_count);
1327 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); 1272 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
1328 if (FLAG_precompiled_mode) { 1273 if (FLAG_precompiled_mode) {
1329 // Megamorphic calls may occur in slow path stubs. 1274 // Megamorphic calls may occur in slow path stubs.
1330 // If valid, use the try_index argument. 1275 // If valid, use the try_index argument.
1331 if (try_index == CatchClauseNode::kInvalidTryIndex) { 1276 if (try_index == CatchClauseNode::kInvalidTryIndex) {
1332 try_index = CurrentTryIndex(); 1277 try_index = CurrentTryIndex();
1333 } 1278 }
1334 pc_descriptors_list()->AddDescriptor(RawPcDescriptors::kOther, 1279 pc_descriptors_list()->AddDescriptor(
1335 assembler()->CodeSize(), 1280 RawPcDescriptors::kOther, assembler()->CodeSize(), Thread::kNoDeoptId,
1336 Thread::kNoDeoptId, 1281 token_pos, try_index);
1337 token_pos,
1338 try_index);
1339 } else if (is_optimizing()) { 1282 } else if (is_optimizing()) {
1340 AddCurrentDescriptor(RawPcDescriptors::kOther, 1283 AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
1341 Thread::kNoDeoptId, token_pos); 1284 token_pos);
1342 AddDeoptIndexAtCall(deopt_id_after); 1285 AddDeoptIndexAtCall(deopt_id_after);
1343 } else { 1286 } else {
1344 AddCurrentDescriptor(RawPcDescriptors::kOther, 1287 AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
1345 Thread::kNoDeoptId, token_pos); 1288 token_pos);
1346 // Add deoptimization continuation point after the call and before the 1289 // Add deoptimization continuation point after the call and before the
1347 // arguments are removed. 1290 // arguments are removed.
1348 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); 1291 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
1349 } 1292 }
1350 __ Drop(argument_count, RCX); 1293 __ Drop(argument_count, RCX);
1351 } 1294 }
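
For readers following the megamorphic path: the cache looked up here maps a receiver class id to a target entry point. The sketch below assumes a power-of-two, linearly probed table, which matches the general shape of such caches; the VM's exact hashing and slot layout are not shown in this diff:

#include <cstdint>
#include <utility>
#include <vector>

// Illustrative megamorphic cache: a power-of-two table of (cid, target)
// pairs probed linearly from cid & mask. kIllegalCid marks empty slots.
constexpr intptr_t kIllegalCid = -1;
using Target = void (*)();

struct Cache {
  std::vector<std::pair<intptr_t, Target>> slots;
  explicit Cache(size_t size) : slots(size, {kIllegalCid, nullptr}) {}

  Target Lookup(intptr_t cid) const {
    const size_t mask = slots.size() - 1;
    for (size_t i = cid & mask;; i = (i + 1) & mask) {
      if (slots[i].first == cid) return slots[i].second;
      if (slots[i].first == kIllegalCid) return nullptr;  // miss: slow path
    }
  }
};

static void TargetA() {}

int main() {
  Cache cache(8);
  cache.slots[57 & 7] = {57, &TargetA};
  return cache.Lookup(57) == &TargetA ? 0 : 1;
}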
1352 1295
1353 1296
1354 void FlowGraphCompiler::EmitSwitchableInstanceCall( 1297 void FlowGraphCompiler::EmitSwitchableInstanceCall(const ICData& ic_data,
1355 const ICData& ic_data, 1298 intptr_t argument_count,
1356 intptr_t argument_count, 1299 intptr_t deopt_id,
1357 intptr_t deopt_id, 1300 TokenPosition token_pos,
1358 TokenPosition token_pos, 1301 LocationSummary* locs) {
1359 LocationSummary* locs) {
1360 ASSERT(ic_data.NumArgsTested() == 1); 1302 ASSERT(ic_data.NumArgsTested() == 1);
1361 const Code& initial_stub = Code::ZoneHandle( 1303 const Code& initial_stub =
1362 StubCode::ICCallThroughFunction_entry()->code()); 1304 Code::ZoneHandle(StubCode::ICCallThroughFunction_entry()->code());
1363 1305
1364 __ Comment("SwitchableCall"); 1306 __ Comment("SwitchableCall");
1365 __ movq(RDI, Address(RSP, (argument_count - 1) * kWordSize)); 1307 __ movq(RDI, Address(RSP, (argument_count - 1) * kWordSize));
1366 __ LoadUniqueObject(CODE_REG, initial_stub); 1308 __ LoadUniqueObject(CODE_REG, initial_stub);
1367 __ movq(RCX, FieldAddress(CODE_REG, Code::checked_entry_point_offset())); 1309 __ movq(RCX, FieldAddress(CODE_REG, Code::checked_entry_point_offset()));
1368 __ LoadUniqueObject(RBX, ic_data); 1310 __ LoadUniqueObject(RBX, ic_data);
1369 __ call(RCX); 1311 __ call(RCX);
1370 1312
1371 AddCurrentDescriptor(RawPcDescriptors::kOther, 1313 AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId, token_pos);
1372 Thread::kNoDeoptId, token_pos);
1373 RecordSafepoint(locs); 1314 RecordSafepoint(locs);
1374 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); 1315 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
1375 if (is_optimizing()) { 1316 if (is_optimizing()) {
1376 AddDeoptIndexAtCall(deopt_id_after); 1317 AddDeoptIndexAtCall(deopt_id_after);
1377 } else { 1318 } else {
1378 // Add deoptimization continuation point after the call and before the 1319 // Add deoptimization continuation point after the call and before the
1379 // arguments are removed. 1320 // arguments are removed.
1380 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); 1321 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
1381 } 1322 }
1382 __ Drop(argument_count, RCX); 1323 __ Drop(argument_count, RCX);
1383 } 1324 }
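
A switchable call site, as emitted above, keeps two patchable inputs: a data object in RBX and a stub to call through in CODE_REG. Because the call sequence itself is fixed, the runtime can repoint both to move the site between states (here it starts out calling through ICCallThroughFunction). A rough function-pointer model, with hypothetical handler names:

#include <cstdio>

// Rough model of a switchable call site: data and handler are patched
// together; the emitted call sequence never changes.
struct CallSite {
  void* data;                   // e.g. an ICData or a lookup cache
  void (*handler)(void* data);  // e.g. the IC-call-through stub
};

static void IcHandler(void*) { std::puts("ic lookup"); }
static void MegamorphicHandler(void*) { std::puts("megamorphic probe"); }

static void Invoke(CallSite* site) { site->handler(site->data); }

int main() {
  CallSite site{nullptr, &IcHandler};
  Invoke(&site);                       // initially calls through the IC stub
  site.handler = &MegamorphicHandler;  // runtime patches the site later
  Invoke(&site);
  return 0;
}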
1384 1325
1385 1326
1386 void FlowGraphCompiler::EmitOptimizedStaticCall( 1327 void FlowGraphCompiler::EmitOptimizedStaticCall(
1387 const Function& function, 1328 const Function& function,
1388 const Array& arguments_descriptor, 1329 const Array& arguments_descriptor,
1389 intptr_t argument_count, 1330 intptr_t argument_count,
1390 intptr_t deopt_id, 1331 intptr_t deopt_id,
1391 TokenPosition token_pos, 1332 TokenPosition token_pos,
1392 LocationSummary* locs) { 1333 LocationSummary* locs) {
1393 ASSERT(!function.IsClosureFunction()); 1334 ASSERT(!function.IsClosureFunction());
1394 if (function.HasOptionalParameters()) { 1335 if (function.HasOptionalParameters()) {
1395 __ LoadObject(R10, arguments_descriptor); 1336 __ LoadObject(R10, arguments_descriptor);
1396 } else { 1337 } else {
1397 __ xorq(R10, R10); // GC safe smi zero because of stub. 1338 __ xorq(R10, R10); // GC safe smi zero because of stub.
1398 } 1339 }
1399 // Do not use the code from the function, but let the code be patched so that 1340 // Do not use the code from the function, but let the code be patched so that
1400 // we can record the outgoing edges to other code. 1341 // we can record the outgoing edges to other code.
1401 GenerateStaticDartCall(deopt_id, 1342 GenerateStaticDartCall(deopt_id, token_pos,
1402 token_pos,
1403 *StubCode::CallStaticFunction_entry(), 1343 *StubCode::CallStaticFunction_entry(),
1404 RawPcDescriptors::kOther, 1344 RawPcDescriptors::kOther, locs, function);
1405 locs,
1406 function);
1407 __ Drop(argument_count, RCX); 1345 __ Drop(argument_count, RCX);
1408 } 1346 }
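
The xorq R10, R10 above deserves a note: since Smis are tagged with a clear low bit, the all-zero word is simply the Smi 0, never a stray pointer, so a zeroed register is safe for the GC to scan. A tiny model of the invariant (tag values assumed to match the VM's kSmiTagMask of 1):

#include <cassert>
#include <cstdint>

// With kSmiTagMask == 1 and a Smi tag of 0, any word with a clear low
// bit (including 0) reads as a Smi, so zeroed registers are GC-safe.
constexpr intptr_t kSmiTagMask = 1;
bool IsSmi(intptr_t word) { return (word & kSmiTagMask) == 0; }

int main() {
  assert(IsSmi(0));        // xorq'd register: the Smi 0
  assert(!IsSmi(0x1001));  // tagged heap pointer (low bit set)
  return 0;
}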
1409 1347
1410 1348
1411 Condition FlowGraphCompiler::EmitEqualityRegConstCompare( 1349 Condition FlowGraphCompiler::EmitEqualityRegConstCompare(
1412 Register reg, 1350 Register reg,
1413 const Object& obj, 1351 const Object& obj,
1414 bool needs_number_check, 1352 bool needs_number_check,
1415 TokenPosition token_pos) { 1353 TokenPosition token_pos) {
1416 ASSERT(!needs_number_check || 1354 ASSERT(!needs_number_check ||
1417 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint())); 1355 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint()));
1418 1356
1419 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { 1357 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) {
1420 ASSERT(!needs_number_check); 1358 ASSERT(!needs_number_check);
1421 __ testq(reg, reg); 1359 __ testq(reg, reg);
1422 return EQUAL; 1360 return EQUAL;
1423 } 1361 }
1424 1362
1425 if (needs_number_check) { 1363 if (needs_number_check) {
1426 __ pushq(reg); 1364 __ pushq(reg);
1427 __ PushObject(obj); 1365 __ PushObject(obj);
1428 if (is_optimizing()) { 1366 if (is_optimizing()) {
1429 __ CallPatchable(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); 1367 __ CallPatchable(*StubCode::OptimizedIdenticalWithNumberCheck_entry());
1430 } else { 1368 } else {
1431 __ CallPatchable(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); 1369 __ CallPatchable(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
1432 } 1370 }
1433 if (token_pos.IsReal()) { 1371 if (token_pos.IsReal()) {
1434 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, 1372 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, Thread::kNoDeoptId,
1435 Thread::kNoDeoptId,
1436 token_pos); 1373 token_pos);
1437 } 1374 }
1438 // Stub returns result in flags (result of a cmpq, we need ZF computed). 1375 // Stub returns result in flags (result of a cmpq, we need ZF computed).
1439 __ popq(reg); // Discard constant. 1376 __ popq(reg); // Discard constant.
1440 __ popq(reg); // Restore 'reg'. 1377 __ popq(reg); // Restore 'reg'.
1441 } else { 1378 } else {
1442 __ CompareObject(reg, obj); 1379 __ CompareObject(reg, obj);
1443 } 1380 }
1444 return EQUAL; 1381 return EQUAL;
1445 } 1382 }
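
EmitEqualityRegConstCompare picks between three strategies: a self-test when the constant is the Smi 0 (testq reg, reg sets ZF exactly when the register is zero, with no immediate needed), a runtime stub when the register may hold a boxed number whose value must be compared, and a plain CompareObject otherwise. A sketch of that case analysis with illustrative names:

#include <cstdint>

// Sketch of the dispatch in EmitEqualityRegConstCompare. The enum and
// struct are illustrative; the real code emits assembly for each case.
enum class Strategy { kSelfTest, kCompareObject, kNumberCheckStub };

struct Obj {
  bool is_smi;
  intptr_t smi_value;
};

Strategy Choose(const Obj& obj, bool needs_number_check) {
  if (obj.is_smi && obj.smi_value == 0) {
    // testq reg, reg: ZF set iff reg == 0.
    return Strategy::kSelfTest;
  }
  if (needs_number_check) {
    // The register may hold a boxed number equal in value to the
    // constant but stored as a distinct heap object, so a stub compares
    // values rather than pointers.
    return Strategy::kNumberCheckStub;
  }
  return Strategy::kCompareObject;
}

int main() {
  return Choose({true, 0}, false) == Strategy::kSelfTest ? 0 : 1;
}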
1446 1383
1447 1384
1448 Condition FlowGraphCompiler::EmitEqualityRegRegCompare( 1385 Condition FlowGraphCompiler::EmitEqualityRegRegCompare(
1449 Register left, 1386 Register left,
1450 Register right, 1387 Register right,
1451 bool needs_number_check, 1388 bool needs_number_check,
1452 TokenPosition token_pos) { 1389 TokenPosition token_pos) {
1453 if (needs_number_check) { 1390 if (needs_number_check) {
1454 __ pushq(left); 1391 __ pushq(left);
1455 __ pushq(right); 1392 __ pushq(right);
1456 if (is_optimizing()) { 1393 if (is_optimizing()) {
1457 __ CallPatchable(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); 1394 __ CallPatchable(*StubCode::OptimizedIdenticalWithNumberCheck_entry());
1458 } else { 1395 } else {
1459 __ CallPatchable(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); 1396 __ CallPatchable(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
1460 } 1397 }
1461 if (token_pos.IsReal()) { 1398 if (token_pos.IsReal()) {
1462 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, 1399 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, Thread::kNoDeoptId,
1463 Thread::kNoDeoptId,
1464 token_pos); 1400 token_pos);
1465 } 1401 }
1466 // Stub returns result in flags (result of a cmpq, we need ZF computed). 1402 // Stub returns result in flags (result of a cmpq, we need ZF computed).
1467 __ popq(right); 1403 __ popq(right);
1468 __ popq(left); 1404 __ popq(left);
1469 } else { 1405 } else {
1470 __ CompareRegisters(left, right); 1406 __ CompareRegisters(left, right);
1471 } 1407 }
1472 return EQUAL; 1408 return EQUAL;
1473 } 1409 }
(...skipping 39 matching lines...)
1513 const Array& argument_names, 1449 const Array& argument_names,
1514 Label* failed, 1450 Label* failed,
1515 Label* match_found, 1451 Label* match_found,
1516 intptr_t deopt_id, 1452 intptr_t deopt_id,
1517 TokenPosition token_index, 1453 TokenPosition token_index,
1518 LocationSummary* locs, 1454 LocationSummary* locs,
1519 bool complete) { 1455 bool complete) {
1520 ASSERT(is_optimizing()); 1456 ASSERT(is_optimizing());
1521 1457
1522 __ Comment("EmitTestAndCall"); 1458 __ Comment("EmitTestAndCall");
1523 const Array& arguments_descriptor = 1459 const Array& arguments_descriptor = Array::ZoneHandle(
1524 Array::ZoneHandle(zone(), ArgumentsDescriptor::New(argument_count, 1460 zone(), ArgumentsDescriptor::New(argument_count, argument_names));
1525 argument_names));
1526 // Load receiver into RAX. 1461 // Load receiver into RAX.
1527 __ movq(RAX, 1462 __ movq(RAX, Address(RSP, (argument_count - 1) * kWordSize));
1528 Address(RSP, (argument_count - 1) * kWordSize));
1529 __ LoadObject(R10, arguments_descriptor); 1463 __ LoadObject(R10, arguments_descriptor);
1530 1464
1531 const bool kFirstCheckIsSmi = ic_data.GetReceiverClassIdAt(0) == kSmiCid; 1465 const bool kFirstCheckIsSmi = ic_data.GetReceiverClassIdAt(0) == kSmiCid;
1532 const intptr_t kNumChecks = ic_data.NumberOfChecks(); 1466 const intptr_t kNumChecks = ic_data.NumberOfChecks();
1533 1467
1534 ASSERT(!ic_data.IsNull() && (kNumChecks > 0)); 1468 ASSERT(!ic_data.IsNull() && (kNumChecks > 0));
1535 1469
1536 Label after_smi_test; 1470 Label after_smi_test;
1537 if (kFirstCheckIsSmi) { 1471 if (kFirstCheckIsSmi) {
1538 __ testq(RAX, Immediate(kSmiTagMask)); 1472 __ testq(RAX, Immediate(kSmiTagMask));
1539 // Jump if receiver is not Smi. 1473 // Jump if receiver is not Smi.
1540 if (kNumChecks == 1) { 1474 if (kNumChecks == 1) {
1541 __ j(NOT_ZERO, failed); 1475 __ j(NOT_ZERO, failed);
1542 } else { 1476 } else {
1543 __ j(NOT_ZERO, &after_smi_test); 1477 __ j(NOT_ZERO, &after_smi_test);
1544 } 1478 }
1545 // Do not use the code from the function, but let the code be patched so 1479 // Do not use the code from the function, but let the code be patched so
1546 // that we can record the outgoing edges to other code. 1480 // that we can record the outgoing edges to other code.
1547 const Function& function = Function::ZoneHandle( 1481 const Function& function =
1548 zone(), ic_data.GetTargetAt(0)); 1482 Function::ZoneHandle(zone(), ic_data.GetTargetAt(0));
1549 GenerateStaticDartCall(deopt_id, 1483 GenerateStaticDartCall(deopt_id, token_index,
1550 token_index,
1551 *StubCode::CallStaticFunction_entry(), 1484 *StubCode::CallStaticFunction_entry(),
1552 RawPcDescriptors::kOther, 1485 RawPcDescriptors::kOther, locs, function);
1553 locs,
1554 function);
1555 __ Drop(argument_count, RCX); 1486 __ Drop(argument_count, RCX);
1556 if (kNumChecks > 1) { 1487 if (kNumChecks > 1) {
1557 __ jmp(match_found); 1488 __ jmp(match_found);
1558 } 1489 }
1559 } else { 1490 } else {
1560 // Receiver is a Smi, but Smi is not a valid class; therefore fail. 1491 // Receiver is a Smi, but Smi is not a valid class; therefore fail.
1561 // (The Smi class must be first in the list.) 1492 // (The Smi class must be first in the list.)
1562 if (!complete) { 1493 if (!complete) {
1563 __ testq(RAX, Immediate(kSmiTagMask)); 1494 __ testq(RAX, Immediate(kSmiTagMask));
1564 __ j(ZERO, failed); 1495 __ j(ZERO, failed);
(...skipping 25 matching lines...)
1590 } 1521 }
1591 } else { 1522 } else {
1592 if (!kIsLastCheck) { 1523 if (!kIsLastCheck) {
1593 __ cmpl(RDI, Immediate(sorted[i].cid)); 1524 __ cmpl(RDI, Immediate(sorted[i].cid));
1594 __ j(NOT_EQUAL, &next_test); 1525 __ j(NOT_EQUAL, &next_test);
1595 } 1526 }
1596 } 1527 }
1597 // Do not use the code from the function, but let the code be patched so 1528 // Do not use the code from the function, but let the code be patched so
1598 // that we can record the outgoing edges to other code. 1529 // that we can record the outgoing edges to other code.
1599 const Function& function = *sorted[i].target; 1530 const Function& function = *sorted[i].target;
1600 GenerateStaticDartCall(deopt_id, 1531 GenerateStaticDartCall(deopt_id, token_index,
1601 token_index,
1602 *StubCode::CallStaticFunction_entry(), 1532 *StubCode::CallStaticFunction_entry(),
1603 RawPcDescriptors::kOther, 1533 RawPcDescriptors::kOther, locs, function);
1604 locs,
1605 function);
1606 __ Drop(argument_count, RCX); 1534 __ Drop(argument_count, RCX);
1607 if (!kIsLastCheck) { 1535 if (!kIsLastCheck) {
1608 __ jmp(match_found); 1536 __ jmp(match_found);
1609 } 1537 }
1610 __ Bind(&next_test); 1538 __ Bind(&next_test);
1611 } 1539 }
1612 } 1540 }
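
EmitTestAndCall lowers a polymorphic call to a linear dispatch: a Smi tag test first (a Smi has no class-id word to load), then comparisons against the sorted class ids, with the final check falling through unconditionally when the set of receivers is known to be complete. An illustrative model, where the cid constants and helpers are placeholders rather than VM values:

#include <cstdint>
#include <cstdio>
#include <vector>

// Illustrative linear class-id dispatch mirroring EmitTestAndCall's
// shape: smi test first, then cid comparisons in sorted order.
using Target = void (*)();

struct Check { intptr_t cid; Target target; };

constexpr intptr_t kSmiCid = 1;  // placeholder value

bool IsSmiWord(intptr_t word) { return (word & 1) == 0; }
intptr_t ClassIdOf(intptr_t /*heap_object*/) { return 42; }  // stand-in

bool Dispatch(intptr_t receiver, const std::vector<Check>& checks,
              bool complete) {
  if (IsSmiWord(receiver)) {
    if (checks[0].cid == kSmiCid) { checks[0].target(); return true; }
    return false;  // Smi receiver, but Smi is not in the list: fail.
  }
  intptr_t cid = ClassIdOf(receiver);
  for (size_t i = (checks[0].cid == kSmiCid) ? 1 : 0; i < checks.size(); i++) {
    const bool last = (i == checks.size() - 1);
    if (cid == checks[i].cid || (last && complete)) {
      checks[i].target();  // GenerateStaticDartCall(...) in the real code
      return true;
    }
  }
  return false;  // jumps to 'failed'
}

static void TargetA() { std::puts("A"); }

int main() {
  // 0x2B has its low bit set, so it models a heap object with cid 42.
  return Dispatch(0x2B, {{42, &TargetA}}, false) ? 0 : 1;
}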
1613 1541
1614 1542
1615 #undef __ 1543 #undef __
(...skipping 49 matching lines...)
1665 __ movups(XMM0, source.ToStackSlotAddress()); 1593 __ movups(XMM0, source.ToStackSlotAddress());
1666 __ movups(destination.ToStackSlotAddress(), XMM0); 1594 __ movups(destination.ToStackSlotAddress(), XMM0);
1667 } 1595 }
1668 } else { 1596 } else {
1669 ASSERT(source.IsConstant()); 1597 ASSERT(source.IsConstant());
1670 const Object& constant = source.constant(); 1598 const Object& constant = source.constant();
1671 if (destination.IsRegister()) { 1599 if (destination.IsRegister()) {
1672 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) { 1600 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) {
1673 __ xorq(destination.reg(), destination.reg()); 1601 __ xorq(destination.reg(), destination.reg());
1674 } else if (constant.IsSmi() && 1602 } else if (constant.IsSmi() &&
1675 (source.constant_instruction()->representation() == kUnboxedInt32)) { 1603 (source.constant_instruction()->representation() ==
1604 kUnboxedInt32)) {
1676 __ movl(destination.reg(), Immediate(Smi::Cast(constant).Value())); 1605 __ movl(destination.reg(), Immediate(Smi::Cast(constant).Value()));
1677 } else { 1606 } else {
1678 __ LoadObject(destination.reg(), constant); 1607 __ LoadObject(destination.reg(), constant);
1679 } 1608 }
1680 } else if (destination.IsFpuRegister()) { 1609 } else if (destination.IsFpuRegister()) {
1681 if (Utils::DoublesBitEqual(Double::Cast(constant).value(), 0.0)) { 1610 if (Utils::DoublesBitEqual(Double::Cast(constant).value(), 0.0)) {
1682 __ xorps(destination.fpu_reg(), destination.fpu_reg()); 1611 __ xorps(destination.fpu_reg(), destination.fpu_reg());
1683 } else { 1612 } else {
1684 __ LoadObject(TMP, constant); 1613 __ LoadObject(TMP, constant);
1685 __ movsd(destination.fpu_reg(), 1614 __ movsd(destination.fpu_reg(),
1686 FieldAddress(TMP, Double::value_offset())); 1615 FieldAddress(TMP, Double::value_offset()));
1687 } 1616 }
1688 } else if (destination.IsDoubleStackSlot()) { 1617 } else if (destination.IsDoubleStackSlot()) {
1689 if (Utils::DoublesBitEqual(Double::Cast(constant).value(), 0.0)) { 1618 if (Utils::DoublesBitEqual(Double::Cast(constant).value(), 0.0)) {
1690 __ xorps(XMM0, XMM0); 1619 __ xorps(XMM0, XMM0);
1691 } else { 1620 } else {
1692 __ LoadObject(TMP, constant); 1621 __ LoadObject(TMP, constant);
1693 __ movsd(XMM0, FieldAddress(TMP, Double::value_offset())); 1622 __ movsd(XMM0, FieldAddress(TMP, Double::value_offset()));
1694 } 1623 }
1695 __ movsd(destination.ToStackSlotAddress(), XMM0); 1624 __ movsd(destination.ToStackSlotAddress(), XMM0);
1696 } else { 1625 } else {
(...skipping 23 matching lines...)
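
The constant-materialization branch above uses two zeroing idioms: xorq for the Smi 0 and xorps for a double whose bits equal +0.0. The bit-equality test matters because -0.0 compares equal to 0.0 by value but must still be loaded from its heap Double. A minimal check one can actually run:

#include <cassert>
#include <cstdint>
#include <cstring>

// DoublesBitEqual-style check: compare bit patterns, not values, so
// -0.0 (sign bit set) is NOT treated as +0.0 and does not get
// materialized with xorps.
bool DoublesBitEqual(double a, double b) {
  uint64_t ba, bb;
  std::memcpy(&ba, &a, sizeof(ba));
  std::memcpy(&bb, &b, sizeof(bb));
  return ba == bb;
}

int main() {
  assert(DoublesBitEqual(0.0, 0.0));
  assert(!DoublesBitEqual(-0.0, 0.0));  // bits differ
  assert(-0.0 == 0.0);                  // value comparison cannot tell
  return 0;
}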
1720 Exchange(source.reg(), destination.ToStackSlotAddress()); 1649 Exchange(source.reg(), destination.ToStackSlotAddress());
1721 } else if (source.IsStackSlot() && destination.IsRegister()) { 1650 } else if (source.IsStackSlot() && destination.IsRegister()) {
1722 Exchange(destination.reg(), source.ToStackSlotAddress()); 1651 Exchange(destination.reg(), source.ToStackSlotAddress());
1723 } else if (source.IsStackSlot() && destination.IsStackSlot()) { 1652 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
1724 Exchange(destination.ToStackSlotAddress(), source.ToStackSlotAddress()); 1653 Exchange(destination.ToStackSlotAddress(), source.ToStackSlotAddress());
1725 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) { 1654 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
1726 __ movaps(XMM0, source.fpu_reg()); 1655 __ movaps(XMM0, source.fpu_reg());
1727 __ movaps(source.fpu_reg(), destination.fpu_reg()); 1656 __ movaps(source.fpu_reg(), destination.fpu_reg());
1728 __ movaps(destination.fpu_reg(), XMM0); 1657 __ movaps(destination.fpu_reg(), XMM0);
1729 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) { 1658 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
1730 ASSERT(destination.IsDoubleStackSlot() || 1659 ASSERT(destination.IsDoubleStackSlot() || destination.IsQuadStackSlot() ||
1731 destination.IsQuadStackSlot() || 1660 source.IsDoubleStackSlot() || source.IsQuadStackSlot());
1732 source.IsDoubleStackSlot() || 1661 bool double_width =
1733 source.IsQuadStackSlot()); 1662 destination.IsDoubleStackSlot() || source.IsDoubleStackSlot();
1734 bool double_width = destination.IsDoubleStackSlot() || 1663 XmmRegister reg =
1735 source.IsDoubleStackSlot(); 1664 source.IsFpuRegister() ? source.fpu_reg() : destination.fpu_reg();
1736 XmmRegister reg = source.IsFpuRegister() ? source.fpu_reg()
1737 : destination.fpu_reg();
1738 Address slot_address = source.IsFpuRegister() 1665 Address slot_address = source.IsFpuRegister()
1739 ? destination.ToStackSlotAddress() 1666 ? destination.ToStackSlotAddress()
1740 : source.ToStackSlotAddress(); 1667 : source.ToStackSlotAddress();
1741 1668
1742 if (double_width) { 1669 if (double_width) {
1743 __ movsd(XMM0, slot_address); 1670 __ movsd(XMM0, slot_address);
1744 __ movsd(slot_address, reg); 1671 __ movsd(slot_address, reg);
1745 } else { 1672 } else {
1746 __ movups(XMM0, slot_address); 1673 __ movups(XMM0, slot_address);
1747 __ movups(slot_address, reg); 1674 __ movups(slot_address, reg);
1748 } 1675 }
1749 __ movaps(reg, XMM0); 1676 __ movaps(reg, XMM0);
1750 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) { 1677 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
(...skipping 92 matching lines...)
1843 __ movups(reg, Address(RSP, 0)); 1770 __ movups(reg, Address(RSP, 0));
1844 __ AddImmediate(RSP, Immediate(kFpuRegisterSize)); 1771 __ AddImmediate(RSP, Immediate(kFpuRegisterSize));
1845 } 1772 }
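
The save/restore pair above spills a full 128-bit XMM register through the stack: grow RSP by kFpuRegisterSize and movups the register out, then reverse the two steps on restore. A byte-buffer model of the same round trip:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Model of the 16-byte FPU save/restore pair: push grows the stack and
// stores the register; pop loads it back and shrinks the stack.
constexpr size_t kFpuRegisterSize = 16;  // one XMM register, 128 bits

struct Stack {
  uint8_t memory[256];
  size_t rsp = sizeof(memory);

  void PushFpu(const uint8_t reg[kFpuRegisterSize]) {
    rsp -= kFpuRegisterSize;                           // AddImmediate(RSP, -16)
    std::memcpy(&memory[rsp], reg, kFpuRegisterSize);  // movups [RSP], reg
  }
  void PopFpu(uint8_t reg[kFpuRegisterSize]) {
    std::memcpy(reg, &memory[rsp], kFpuRegisterSize);  // movups reg, [RSP]
    rsp += kFpuRegisterSize;                           // AddImmediate(RSP, +16)
  }
};

int main() {
  Stack stack;
  uint8_t xmm[kFpuRegisterSize] = {1, 2, 3, 4};
  stack.PushFpu(xmm);
  uint8_t restored[kFpuRegisterSize] = {};
  stack.PopFpu(restored);
  assert(std::memcmp(xmm, restored, kFpuRegisterSize) == 0);
  return 0;
}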
1846 1773
1847 1774
1848 #undef __ 1775 #undef __
1849 1776
1850 } // namespace dart 1777 } // namespace dart
1851 1778
1852 #endif // defined TARGET_ARCH_X64 1779 #endif // defined TARGET_ARCH_X64