Chromium Code Reviews

Side by Side Diff: runtime/vm/flow_graph_compiler_ia32.cc

Issue 2481873005: clang-format runtime/vm (Closed)
Patch Set: Merge (created 4 years, 1 month ago)
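This CL is a mechanical reformat of runtime/vm with clang-format; no behavior changes. The dominant pattern in the hunks below is rewrapping long argument lists from one-argument-per-line to lines packed up to the 80-column limit. A minimal compilable sketch of that pattern follows, modeled on the GenerateCallSubtypeTestStub call in this file; the Label/Register/TypeTestKind declarations and the Demo/main wrappers are simplified stand-ins for illustration, not the real vm/ headers.

// Stand-in declarations for illustration only (not the real vm/ headers).
struct Label {};
enum TypeTestKind { kTestTypeTwoArgs };
typedef int Register;

static int GenerateCallSubtypeTestStub(TypeTestKind kind, Register instance,
                                       Register type_args, Register temp,
                                       Label* is_instance, Label* is_not) {
  return 0;  // The real stub emits a call; irrelevant to the formatting demo.
}

static int Demo(Label* is_instance_lbl, Label* is_not_instance_lbl) {
  const Register kInstanceReg = 0;
  const Register kTypeArgumentsReg = 1;
  const Register kTempReg = 2;
  // The old style wrapped each argument on its own line; clang-format packs
  // arguments up to the column limit, exactly as in the hunks below.
  return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, kInstanceReg,
                                     kTypeArgumentsReg, kTempReg,
                                     is_instance_lbl, is_not_instance_lbl);
}

int main() {
  Label yes, no;
  return Demo(&yes, &no);
}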
OLD | NEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32.
6 #if defined(TARGET_ARCH_IA32) 6 #if defined(TARGET_ARCH_IA32)
7 7
8 #include "vm/flow_graph_compiler.h" 8 #include "vm/flow_graph_compiler.h"
9 9
10 #include "vm/ast_printer.h" 10 #include "vm/ast_printer.h"
(...skipping 99 matching lines...)
110 builder->AddCallerFp(slot_ix++); 110 builder->AddCallerFp(slot_ix++);
111 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++); 111 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++);
112 112
113 // Emit all values that are needed for materialization as a part of the 113 // Emit all values that are needed for materialization as a part of the
114 // expression stack for the bottom-most frame. This guarantees that GC 114 // expression stack for the bottom-most frame. This guarantees that GC
115 // will be able to find them during materialization. 115 // will be able to find them during materialization.
116 slot_ix = builder->EmitMaterializationArguments(slot_ix); 116 slot_ix = builder->EmitMaterializationArguments(slot_ix);
117 117
118 // For the innermost environment, set outgoing arguments and the locals. 118 // For the innermost environment, set outgoing arguments and the locals.
119 for (intptr_t i = current->Length() - 1; 119 for (intptr_t i = current->Length() - 1;
120 i >= current->fixed_parameter_count(); 120 i >= current->fixed_parameter_count(); i--) {
121 i--) {
122 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++); 121 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
123 } 122 }
124 123
125 builder->AddPcMarker(current->function(), slot_ix++); 124 builder->AddPcMarker(current->function(), slot_ix++);
126 builder->AddCallerFp(slot_ix++); 125 builder->AddCallerFp(slot_ix++);
127 126
128 Environment* previous = current; 127 Environment* previous = current;
129 current = current->outer(); 128 current = current->outer();
130 while (current != NULL) { 129 while (current != NULL) {
131 // For any outer environment the deopt id is that of the call instruction 130 // For any outer environment the deopt id is that of the call instruction
132 // which is recorded in the outer environment. 131 // which is recorded in the outer environment.
133 builder->AddReturnAddress( 132 builder->AddReturnAddress(current->function(),
134 current->function(), 133 Thread::ToDeoptAfter(current->deopt_id()),
135 Thread::ToDeoptAfter(current->deopt_id()), 134 slot_ix++);
136 slot_ix++);
137 135
138 // The values of outgoing arguments can be changed from the inlined call so 136 // The values of outgoing arguments can be changed from the inlined call so
139 // we must read them from the previous environment. 137 // we must read them from the previous environment.
140 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { 138 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) {
141 builder->AddCopy(previous->ValueAt(i), 139 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i),
142 previous->LocationAt(i),
143 slot_ix++); 140 slot_ix++);
144 } 141 }
145 142
146 // Set the locals, note that outgoing arguments are not in the environment. 143 // Set the locals, note that outgoing arguments are not in the environment.
147 for (intptr_t i = current->Length() - 1; 144 for (intptr_t i = current->Length() - 1;
148 i >= current->fixed_parameter_count(); 145 i >= current->fixed_parameter_count(); i--) {
149 i--) { 146 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
150 builder->AddCopy(current->ValueAt(i),
151 current->LocationAt(i),
152 slot_ix++);
153 } 147 }
154 148
155 builder->AddPcMarker(current->function(), slot_ix++); 149 builder->AddPcMarker(current->function(), slot_ix++);
156 builder->AddCallerFp(slot_ix++); 150 builder->AddCallerFp(slot_ix++);
157 151
158 // Iterate on the outer environment. 152 // Iterate on the outer environment.
159 previous = current; 153 previous = current;
160 current = current->outer(); 154 current = current->outer();
161 } 155 }
162 // The previous pointer is now the outermost environment. 156 // The previous pointer is now the outermost environment.
(...skipping 57 matching lines...)
220 Register instance_reg, 214 Register instance_reg,
221 Register type_arguments_reg, 215 Register type_arguments_reg,
222 Register temp_reg, 216 Register temp_reg,
223 Label* is_instance_lbl, 217 Label* is_instance_lbl,
224 Label* is_not_instance_lbl) { 218 Label* is_not_instance_lbl) {
225 const SubtypeTestCache& type_test_cache = 219 const SubtypeTestCache& type_test_cache =
226 SubtypeTestCache::ZoneHandle(zone(), SubtypeTestCache::New()); 220 SubtypeTestCache::ZoneHandle(zone(), SubtypeTestCache::New());
227 const Immediate& raw_null = 221 const Immediate& raw_null =
228 Immediate(reinterpret_cast<intptr_t>(Object::null())); 222 Immediate(reinterpret_cast<intptr_t>(Object::null()));
229 __ LoadObject(temp_reg, type_test_cache); 223 __ LoadObject(temp_reg, type_test_cache);
230 __ pushl(temp_reg); // Subtype test cache. 224 __ pushl(temp_reg); // Subtype test cache.
231 __ pushl(instance_reg); // Instance. 225 __ pushl(instance_reg); // Instance.
232 if (test_kind == kTestTypeOneArg) { 226 if (test_kind == kTestTypeOneArg) {
233 ASSERT(type_arguments_reg == kNoRegister); 227 ASSERT(type_arguments_reg == kNoRegister);
234 __ pushl(raw_null); 228 __ pushl(raw_null);
235 __ Call(*StubCode::Subtype1TestCache_entry()); 229 __ Call(*StubCode::Subtype1TestCache_entry());
236 } else if (test_kind == kTestTypeTwoArgs) { 230 } else if (test_kind == kTestTypeTwoArgs) {
237 ASSERT(type_arguments_reg == kNoRegister); 231 ASSERT(type_arguments_reg == kNoRegister);
238 __ pushl(raw_null); 232 __ pushl(raw_null);
239 __ Call(*StubCode::Subtype2TestCache_entry()); 233 __ Call(*StubCode::Subtype2TestCache_entry());
240 } else if (test_kind == kTestTypeThreeArgs) { 234 } else if (test_kind == kTestTypeThreeArgs) {
241 __ pushl(type_arguments_reg); 235 __ pushl(type_arguments_reg);
242 __ Call(*StubCode::Subtype3TestCache_entry()); 236 __ Call(*StubCode::Subtype3TestCache_entry());
243 } else { 237 } else {
244 UNREACHABLE(); 238 UNREACHABLE();
245 } 239 }
246 // Result is in ECX: null -> not found, otherwise Bool::True or Bool::False. 240 // Result is in ECX: null -> not found, otherwise Bool::True or Bool::False.
247 ASSERT(instance_reg != ECX); 241 ASSERT(instance_reg != ECX);
248 ASSERT(temp_reg != ECX); 242 ASSERT(temp_reg != ECX);
249 __ popl(instance_reg); // Discard. 243 __ popl(instance_reg); // Discard.
250 __ popl(instance_reg); // Restore receiver. 244 __ popl(instance_reg); // Restore receiver.
251 __ popl(temp_reg); // Discard. 245 __ popl(temp_reg); // Discard.
252 GenerateBoolToJump(ECX, is_instance_lbl, is_not_instance_lbl); 246 GenerateBoolToJump(ECX, is_instance_lbl, is_not_instance_lbl);
253 return type_test_cache.raw(); 247 return type_test_cache.raw();
254 } 248 }
255 249
256 250
257 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if 251 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if
258 // type test is conclusive, otherwise fallthrough if a type test could not 252 // type test is conclusive, otherwise fallthrough if a type test could not
259 // be completed. 253 // be completed.
260 // EAX: instance (must survive). 254 // EAX: instance (must survive).
261 // Clobbers ECX, EDI. 255 // Clobbers ECX, EDI.
(...skipping 21 matching lines...)
283 __ j(ZERO, is_not_instance_lbl); 277 __ j(ZERO, is_not_instance_lbl);
284 } 278 }
285 // A function type test requires checking the function signature. 279 // A function type test requires checking the function signature.
286 if (!type.IsFunctionType()) { 280 if (!type.IsFunctionType()) {
287 const intptr_t num_type_args = type_class.NumTypeArguments(); 281 const intptr_t num_type_args = type_class.NumTypeArguments();
288 const intptr_t num_type_params = type_class.NumTypeParameters(); 282 const intptr_t num_type_params = type_class.NumTypeParameters();
289 const intptr_t from_index = num_type_args - num_type_params; 283 const intptr_t from_index = num_type_args - num_type_params;
290 const TypeArguments& type_arguments = 284 const TypeArguments& type_arguments =
291 TypeArguments::ZoneHandle(zone(), type.arguments()); 285 TypeArguments::ZoneHandle(zone(), type.arguments());
292 const bool is_raw_type = type_arguments.IsNull() || 286 const bool is_raw_type = type_arguments.IsNull() ||
293 type_arguments.IsRaw(from_index, num_type_params); 287 type_arguments.IsRaw(from_index, num_type_params);
294 if (is_raw_type) { 288 if (is_raw_type) {
295 const Register kClassIdReg = ECX; 289 const Register kClassIdReg = ECX;
296 // dynamic type argument, check only classes. 290 // dynamic type argument, check only classes.
297 __ LoadClassId(kClassIdReg, kInstanceReg); 291 __ LoadClassId(kClassIdReg, kInstanceReg);
298 __ cmpl(kClassIdReg, Immediate(type_class.id())); 292 __ cmpl(kClassIdReg, Immediate(type_class.id()));
299 __ j(EQUAL, is_instance_lbl); 293 __ j(EQUAL, is_instance_lbl);
300 // List is a very common case. 294 // List is a very common case.
301 if (IsListClass(type_class)) { 295 if (IsListClass(type_class)) {
302 GenerateListTypeCheck(kClassIdReg, is_instance_lbl); 296 GenerateListTypeCheck(kClassIdReg, is_instance_lbl);
303 } 297 }
304 return GenerateSubtype1TestCacheLookup( 298 return GenerateSubtype1TestCacheLookup(
305 token_pos, type_class, is_instance_lbl, is_not_instance_lbl); 299 token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
306 } 300 }
307 // If one type argument only, check if type argument is Object or dynamic. 301 // If one type argument only, check if type argument is Object or dynamic.
308 if (type_arguments.Length() == 1) { 302 if (type_arguments.Length() == 1) {
309 const AbstractType& tp_argument = AbstractType::ZoneHandle( 303 const AbstractType& tp_argument =
310 zone(), type_arguments.TypeAt(0)); 304 AbstractType::ZoneHandle(zone(), type_arguments.TypeAt(0));
311 ASSERT(!tp_argument.IsMalformed()); 305 ASSERT(!tp_argument.IsMalformed());
312 if (tp_argument.IsType()) { 306 if (tp_argument.IsType()) {
313 ASSERT(tp_argument.HasResolvedTypeClass()); 307 ASSERT(tp_argument.HasResolvedTypeClass());
314 // Check if type argument is dynamic or Object. 308 // Check if type argument is dynamic or Object.
315 const Type& object_type = Type::Handle(zone(), Type::ObjectType()); 309 const Type& object_type = Type::Handle(zone(), Type::ObjectType());
316 if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) { 310 if (object_type.IsSubtypeOf(tp_argument, NULL, NULL, Heap::kOld)) {
317 // Instance class test only necessary. 311 // Instance class test only necessary.
318 return GenerateSubtype1TestCacheLookup( 312 return GenerateSubtype1TestCacheLookup(
319 token_pos, type_class, is_instance_lbl, is_not_instance_lbl); 313 token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
320 } 314 }
321 } 315 }
322 } 316 }
323 } 317 }
324 // Regular subtype test cache involving instance's type arguments. 318 // Regular subtype test cache involving instance's type arguments.
325 const Register kTypeArgumentsReg = kNoRegister; 319 const Register kTypeArgumentsReg = kNoRegister;
326 const Register kTempReg = EDI; 320 const Register kTempReg = EDI;
327 return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, 321 return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, kInstanceReg,
328 kInstanceReg, 322 kTypeArgumentsReg, kTempReg,
329 kTypeArgumentsReg, 323 is_instance_lbl, is_not_instance_lbl);
330 kTempReg,
331 is_instance_lbl,
332 is_not_instance_lbl);
333 } 324 }
334 325
335 326
336 void FlowGraphCompiler::CheckClassIds(Register class_id_reg, 327 void FlowGraphCompiler::CheckClassIds(Register class_id_reg,
337 const GrowableArray<intptr_t>& class_ids, 328 const GrowableArray<intptr_t>& class_ids,
338 Label* is_equal_lbl, 329 Label* is_equal_lbl,
339 Label* is_not_equal_lbl) { 330 Label* is_not_equal_lbl) {
340 for (intptr_t i = 0; i < class_ids.length(); i++) { 331 for (intptr_t i = 0; i < class_ids.length(); i++) {
341 __ cmpl(class_id_reg, Immediate(class_ids[i])); 332 __ cmpl(class_id_reg, Immediate(class_ids[i]));
342 __ j(EQUAL, is_equal_lbl); 333 __ j(EQUAL, is_equal_lbl);
(...skipping 18 matching lines...)
361 // Fallthrough. 352 // Fallthrough.
362 return true; 353 return true;
363 } 354 }
364 const Class& type_class = Class::Handle(zone(), type.type_class()); 355 const Class& type_class = Class::Handle(zone(), type.type_class());
365 ASSERT(type_class.NumTypeArguments() == 0); 356 ASSERT(type_class.NumTypeArguments() == 0);
366 357
367 const Register kInstanceReg = EAX; 358 const Register kInstanceReg = EAX;
368 __ testl(kInstanceReg, Immediate(kSmiTagMask)); 359 __ testl(kInstanceReg, Immediate(kSmiTagMask));
369 // If instance is Smi, check directly. 360 // If instance is Smi, check directly.
370 const Class& smi_class = Class::Handle(zone(), Smi::Class()); 361 const Class& smi_class = Class::Handle(zone(), Smi::Class());
371 if (smi_class.IsSubtypeOf(TypeArguments::Handle(zone()), 362 if (smi_class.IsSubtypeOf(TypeArguments::Handle(zone()), type_class,
372 type_class, 363 TypeArguments::Handle(zone()), NULL, NULL,
373 TypeArguments::Handle(zone()),
374 NULL,
375 NULL,
376 Heap::kOld)) { 364 Heap::kOld)) {
377 __ j(ZERO, is_instance_lbl); 365 __ j(ZERO, is_instance_lbl);
378 } else { 366 } else {
379 __ j(ZERO, is_not_instance_lbl); 367 __ j(ZERO, is_not_instance_lbl);
380 } 368 }
381 const Register kClassIdReg = ECX; 369 const Register kClassIdReg = ECX;
382 __ LoadClassId(kClassIdReg, kInstanceReg); 370 __ LoadClassId(kClassIdReg, kInstanceReg);
383 // See ClassFinalizer::ResolveSuperTypeAndInterfaces for list of restricted 371 // See ClassFinalizer::ResolveSuperTypeAndInterfaces for list of restricted
384 // interfaces. 372 // interfaces.
385 // Bool interface can be implemented only by core class Bool. 373 // Bool interface can be implemented only by core class Bool.
386 if (type.IsBoolType()) { 374 if (type.IsBoolType()) {
387 __ cmpl(kClassIdReg, Immediate(kBoolCid)); 375 __ cmpl(kClassIdReg, Immediate(kBoolCid));
388 __ j(EQUAL, is_instance_lbl); 376 __ j(EQUAL, is_instance_lbl);
389 __ jmp(is_not_instance_lbl); 377 __ jmp(is_not_instance_lbl);
390 return false; 378 return false;
391 } 379 }
392 // Custom checking for numbers (Smi, Mint, Bigint and Double). 380 // Custom checking for numbers (Smi, Mint, Bigint and Double).
393 // Note that instance is not Smi (checked above). 381 // Note that instance is not Smi (checked above).
394 if (type.IsNumberType() || type.IsIntType() || type.IsDoubleType()) { 382 if (type.IsNumberType() || type.IsIntType() || type.IsDoubleType()) {
395 GenerateNumberTypeCheck( 383 GenerateNumberTypeCheck(kClassIdReg, type, is_instance_lbl,
396 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); 384 is_not_instance_lbl);
397 return false; 385 return false;
398 } 386 }
399 if (type.IsStringType()) { 387 if (type.IsStringType()) {
400 GenerateStringTypeCheck(kClassIdReg, is_instance_lbl, is_not_instance_lbl); 388 GenerateStringTypeCheck(kClassIdReg, is_instance_lbl, is_not_instance_lbl);
401 return false; 389 return false;
402 } 390 }
403 if (type.IsDartFunctionType()) { 391 if (type.IsDartFunctionType()) {
404 // Check if instance is a closure. 392 // Check if instance is a closure.
405 __ cmpl(kClassIdReg, Immediate(kClosureCid)); 393 __ cmpl(kClassIdReg, Immediate(kClosureCid));
406 __ j(EQUAL, is_instance_lbl); 394 __ j(EQUAL, is_instance_lbl);
(...skipping 26 matching lines...)
433 __ LoadClass(ECX, kInstanceReg, EDI); 421 __ LoadClass(ECX, kInstanceReg, EDI);
434 // ECX: instance class. 422 // ECX: instance class.
435 // Check immediate superclass equality. 423 // Check immediate superclass equality.
436 __ movl(EDI, FieldAddress(ECX, Class::super_type_offset())); 424 __ movl(EDI, FieldAddress(ECX, Class::super_type_offset()));
437 __ movl(EDI, FieldAddress(EDI, Type::type_class_id_offset())); 425 __ movl(EDI, FieldAddress(EDI, Type::type_class_id_offset()));
438 __ cmpl(EDI, Immediate(Smi::RawValue(type_class.id()))); 426 __ cmpl(EDI, Immediate(Smi::RawValue(type_class.id())));
439 __ j(EQUAL, is_instance_lbl); 427 __ j(EQUAL, is_instance_lbl);
440 428
441 const Register kTypeArgumentsReg = kNoRegister; 429 const Register kTypeArgumentsReg = kNoRegister;
442 const Register kTempReg = EDI; 430 const Register kTempReg = EDI;
443 return GenerateCallSubtypeTestStub(kTestTypeOneArg, 431 return GenerateCallSubtypeTestStub(kTestTypeOneArg, kInstanceReg,
444 kInstanceReg, 432 kTypeArgumentsReg, kTempReg,
445 kTypeArgumentsReg, 433 is_instance_lbl, is_not_instance_lbl);
446 kTempReg,
447 is_instance_lbl,
448 is_not_instance_lbl);
449 } 434 }
450 435
451 436
452 // Generates inlined check if 'type' is a type parameter or type itself 437 // Generates inlined check if 'type' is a type parameter or type itself
453 // EAX: instance (preserved). 438 // EAX: instance (preserved).
454 // Clobbers EDX, EDI, ECX. 439 // Clobbers EDX, EDI, ECX.
455 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( 440 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
456 TokenPosition token_pos, 441 TokenPosition token_pos,
457 const AbstractType& type, 442 const AbstractType& type,
458 Label* is_instance_lbl, 443 Label* is_instance_lbl,
459 Label* is_not_instance_lbl) { 444 Label* is_not_instance_lbl) {
460 __ Comment("UninstantiatedTypeTest"); 445 __ Comment("UninstantiatedTypeTest");
461 ASSERT(!type.IsInstantiated()); 446 ASSERT(!type.IsInstantiated());
462 // Skip check if destination is a dynamic type. 447 // Skip check if destination is a dynamic type.
463 const Immediate& raw_null = 448 const Immediate& raw_null =
464 Immediate(reinterpret_cast<intptr_t>(Object::null())); 449 Immediate(reinterpret_cast<intptr_t>(Object::null()));
465 if (type.IsTypeParameter()) { 450 if (type.IsTypeParameter()) {
466 const TypeParameter& type_param = TypeParameter::Cast(type); 451 const TypeParameter& type_param = TypeParameter::Cast(type);
467 // Load instantiator type arguments on stack. 452 // Load instantiator type arguments on stack.
468 __ movl(EDX, Address(ESP, 0)); // Get instantiator type arguments. 453 __ movl(EDX, Address(ESP, 0)); // Get instantiator type arguments.
469 // EDX: instantiator type arguments. 454 // EDX: instantiator type arguments.
470 // Check if type arguments are null, i.e. equivalent to vector of dynamic. 455 // Check if type arguments are null, i.e. equivalent to vector of dynamic.
471 __ cmpl(EDX, raw_null); 456 __ cmpl(EDX, raw_null);
472 __ j(EQUAL, is_instance_lbl); 457 __ j(EQUAL, is_instance_lbl);
473 __ movl(EDI, 458 __ movl(EDI, FieldAddress(
474 FieldAddress(EDX, TypeArguments::type_at_offset(type_param.index()))); 459 EDX, TypeArguments::type_at_offset(type_param.index())));
475 // EDI: concrete type of type. 460 // EDI: concrete type of type.
476 // Check if type argument is dynamic. 461 // Check if type argument is dynamic.
477 __ CompareObject(EDI, Object::dynamic_type()); 462 __ CompareObject(EDI, Object::dynamic_type());
478 __ j(EQUAL, is_instance_lbl); 463 __ j(EQUAL, is_instance_lbl);
479 __ CompareObject(EDI, Type::ZoneHandle(zone(), Type::ObjectType())); 464 __ CompareObject(EDI, Type::ZoneHandle(zone(), Type::ObjectType()));
480 __ j(EQUAL, is_instance_lbl); 465 __ j(EQUAL, is_instance_lbl);
481 466
482 // For Smi check quickly against int and num interfaces. 467 // For Smi check quickly against int and num interfaces.
483 Label not_smi; 468 Label not_smi;
484 __ testl(EAX, Immediate(kSmiTagMask)); // Value is Smi? 469 __ testl(EAX, Immediate(kSmiTagMask)); // Value is Smi?
485 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump); 470 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump);
486 __ CompareObject(EDI, Type::ZoneHandle(zone(), Type::IntType())); 471 __ CompareObject(EDI, Type::ZoneHandle(zone(), Type::IntType()));
487 __ j(EQUAL, is_instance_lbl); 472 __ j(EQUAL, is_instance_lbl);
488 __ CompareObject(EDI, Type::ZoneHandle(zone(), Type::Number())); 473 __ CompareObject(EDI, Type::ZoneHandle(zone(), Type::Number()));
489 __ j(EQUAL, is_instance_lbl); 474 __ j(EQUAL, is_instance_lbl);
490 // Smi must be handled in runtime. 475 // Smi must be handled in runtime.
491 Label fall_through; 476 Label fall_through;
492 __ jmp(&fall_through); 477 __ jmp(&fall_through);
493 478
494 __ Bind(&not_smi); 479 __ Bind(&not_smi);
495 // EDX: instantiator type arguments. 480 // EDX: instantiator type arguments.
496 // EAX: instance. 481 // EAX: instance.
497 const Register kInstanceReg = EAX; 482 const Register kInstanceReg = EAX;
498 const Register kTypeArgumentsReg = EDX; 483 const Register kTypeArgumentsReg = EDX;
499 const Register kTempReg = EDI; 484 const Register kTempReg = EDI;
500 const SubtypeTestCache& type_test_cache = 485 const SubtypeTestCache& type_test_cache = SubtypeTestCache::ZoneHandle(
501 SubtypeTestCache::ZoneHandle(zone(), 486 zone(), GenerateCallSubtypeTestStub(
502 GenerateCallSubtypeTestStub(kTestTypeThreeArgs, 487 kTestTypeThreeArgs, kInstanceReg, kTypeArgumentsReg,
503 kInstanceReg, 488 kTempReg, is_instance_lbl, is_not_instance_lbl));
504 kTypeArgumentsReg,
505 kTempReg,
506 is_instance_lbl,
507 is_not_instance_lbl));
508 __ Bind(&fall_through); 489 __ Bind(&fall_through);
509 return type_test_cache.raw(); 490 return type_test_cache.raw();
510 } 491 }
511 if (type.IsType()) { 492 if (type.IsType()) {
512 const Register kInstanceReg = EAX; 493 const Register kInstanceReg = EAX;
513 const Register kTypeArgumentsReg = EDX; 494 const Register kTypeArgumentsReg = EDX;
514 __ testl(kInstanceReg, Immediate(kSmiTagMask)); // Is instance Smi? 495 __ testl(kInstanceReg, Immediate(kSmiTagMask)); // Is instance Smi?
515 __ j(ZERO, is_not_instance_lbl); 496 __ j(ZERO, is_not_instance_lbl);
516 __ movl(kTypeArgumentsReg, Address(ESP, 0)); // Instantiator type args. 497 __ movl(kTypeArgumentsReg, Address(ESP, 0)); // Instantiator type args.
517 // Uninstantiated type class is known at compile time, but the type 498 // Uninstantiated type class is known at compile time, but the type
518 // arguments are determined at runtime by the instantiator. 499 // arguments are determined at runtime by the instantiator.
519 const Register kTempReg = EDI; 500 const Register kTempReg = EDI;
520 return GenerateCallSubtypeTestStub(kTestTypeThreeArgs, 501 return GenerateCallSubtypeTestStub(kTestTypeThreeArgs, kInstanceReg,
521 kInstanceReg, 502 kTypeArgumentsReg, kTempReg,
522 kTypeArgumentsReg, 503 is_instance_lbl, is_not_instance_lbl);
523 kTempReg,
524 is_instance_lbl,
525 is_not_instance_lbl);
526 } 504 }
527 return SubtypeTestCache::null(); 505 return SubtypeTestCache::null();
528 } 506 }
529 507
530 508
531 // Inputs: 509 // Inputs:
532 // - EAX: instance to test against (preserved). 510 // - EAX: instance to test against (preserved).
533 // - EDX: optional instantiator type arguments (preserved). 511 // - EDX: optional instantiator type arguments (preserved).
534 // Clobbers ECX, EDI. 512 // Clobbers ECX, EDI.
535 // Returns: 513 // Returns:
(...skipping 11 matching lines...)
547 // A non-null value is returned from a void function, which will result in a 525 // A non-null value is returned from a void function, which will result in a
548 // type error. A null value is handled prior to executing this inline code. 526 // type error. A null value is handled prior to executing this inline code.
549 return SubtypeTestCache::null(); 527 return SubtypeTestCache::null();
550 } 528 }
551 if (type.IsInstantiated()) { 529 if (type.IsInstantiated()) {
552 const Class& type_class = Class::ZoneHandle(zone(), type.type_class()); 530 const Class& type_class = Class::ZoneHandle(zone(), type.type_class());
553 // A class equality check is only applicable with a dst type (not a 531 // A class equality check is only applicable with a dst type (not a
554 // function type) of a non-parameterized class or with a raw dst type of 532 // function type) of a non-parameterized class or with a raw dst type of
555 // a parameterized class. 533 // a parameterized class.
556 if (type.IsFunctionType() || (type_class.NumTypeArguments() > 0)) { 534 if (type.IsFunctionType() || (type_class.NumTypeArguments() > 0)) {
557 return GenerateInstantiatedTypeWithArgumentsTest(token_pos, 535 return GenerateInstantiatedTypeWithArgumentsTest(
558 type, 536 token_pos, type, is_instance_lbl, is_not_instance_lbl);
559 is_instance_lbl,
560 is_not_instance_lbl);
561 // Fall through to runtime call. 537 // Fall through to runtime call.
562 } 538 }
563 const bool has_fall_through = 539 const bool has_fall_through = GenerateInstantiatedTypeNoArgumentsTest(
564 GenerateInstantiatedTypeNoArgumentsTest(token_pos, 540 token_pos, type, is_instance_lbl, is_not_instance_lbl);
565 type,
566 is_instance_lbl,
567 is_not_instance_lbl);
568 if (has_fall_through) { 541 if (has_fall_through) {
569 // If test non-conclusive so far, try the inlined type-test cache. 542 // If test non-conclusive so far, try the inlined type-test cache.
570 // 'type' is known at compile time. 543 // 'type' is known at compile time.
571 return GenerateSubtype1TestCacheLookup( 544 return GenerateSubtype1TestCacheLookup(
572 token_pos, type_class, is_instance_lbl, is_not_instance_lbl); 545 token_pos, type_class, is_instance_lbl, is_not_instance_lbl);
573 } else { 546 } else {
574 return SubtypeTestCache::null(); 547 return SubtypeTestCache::null();
575 } 548 }
576 } 549 }
577 return GenerateUninstantiatedTypeTest(token_pos, 550 return GenerateUninstantiatedTypeTest(token_pos, type, is_instance_lbl,
578 type,
579 is_instance_lbl,
580 is_not_instance_lbl); 551 is_not_instance_lbl);
581 } 552 }
582 553
583 554
584 // If instanceof type test cannot be performed successfully at compile time and 555 // If instanceof type test cannot be performed successfully at compile time and
585 // therefore eliminated, optimize it by adding inlined tests for: 556 // therefore eliminated, optimize it by adding inlined tests for:
586 // - NULL -> return false. 557 // - NULL -> return false.
587 // - Smi -> compile time subtype check (only if dst class is not parameterized). 558 // - Smi -> compile time subtype check (only if dst class is not parameterized).
588 // - Class equality (only if class is not parameterized). 559 // - Class equality (only if class is not parameterized).
589 // Inputs: 560 // Inputs:
(...skipping 22 matching lines...)
612 // instantiated). 583 // instantiated).
613 // We can only inline this null check if the type is instantiated at compile 584 // We can only inline this null check if the type is instantiated at compile
614 // time, since an uninstantiated type at compile time could be Object or 585 // time, since an uninstantiated type at compile time could be Object or
615 // dynamic at run time. 586 // dynamic at run time.
616 __ cmpl(EAX, raw_null); 587 __ cmpl(EAX, raw_null);
617 __ j(EQUAL, type.IsNullType() ? &is_instance : &is_not_instance); 588 __ j(EQUAL, type.IsNullType() ? &is_instance : &is_not_instance);
618 } 589 }
619 590
620 // Generate inline instanceof test. 591 // Generate inline instanceof test.
621 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); 592 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
622 test_cache = GenerateInlineInstanceof(token_pos, type, 593 test_cache =
623 &is_instance, &is_not_instance); 594 GenerateInlineInstanceof(token_pos, type, &is_instance, &is_not_instance);
624 595
625 // test_cache is null if there is no fall-through. 596 // test_cache is null if there is no fall-through.
626 Label done; 597 Label done;
627 if (!test_cache.IsNull()) { 598 if (!test_cache.IsNull()) {
628 // Generate runtime call. 599 // Generate runtime call.
629 __ movl(EDX, Address(ESP, 0)); // Get instantiator type arguments. 600 __ movl(EDX, Address(ESP, 0)); // Get instantiator type arguments.
630 __ PushObject(Object::null_object()); // Make room for the result. 601 __ PushObject(Object::null_object()); // Make room for the result.
631 __ pushl(EAX); // Push the instance. 602 __ pushl(EAX); // Push the instance.
632 __ PushObject(type); // Push the type. 603 __ PushObject(type); // Push the type.
633 __ pushl(EDX); // Instantiator type arguments. 604 __ pushl(EDX); // Instantiator type arguments.
634 __ LoadObject(EAX, test_cache); 605 __ LoadObject(EAX, test_cache);
635 __ pushl(EAX); 606 __ pushl(EAX);
636 GenerateRuntimeCall(token_pos, 607 GenerateRuntimeCall(token_pos, deopt_id, kInstanceofRuntimeEntry, 4, locs);
637 deopt_id,
638 kInstanceofRuntimeEntry,
639 4,
640 locs);
641 // Pop the parameters supplied to the runtime entry. The result of the 608 // Pop the parameters supplied to the runtime entry. The result of the
642 // instanceof runtime call will be left as the result of the operation. 609 // instanceof runtime call will be left as the result of the operation.
643 __ Drop(4); 610 __ Drop(4);
644 if (negate_result) { 611 if (negate_result) {
645 __ popl(EDX); 612 __ popl(EDX);
646 __ LoadObject(EAX, Bool::True()); 613 __ LoadObject(EAX, Bool::True());
647 __ cmpl(EDX, EAX); 614 __ cmpl(EDX, EAX);
648 __ j(NOT_EQUAL, &done, Assembler::kNearJump); 615 __ j(NOT_EQUAL, &done, Assembler::kNearJump);
649 __ LoadObject(EAX, Bool::False()); 616 __ LoadObject(EAX, Bool::False());
650 } else { 617 } else {
(...skipping 38 matching lines...)
689 // A null object is always assignable and is returned as result. 656 // A null object is always assignable and is returned as result.
690 const Immediate& raw_null = 657 const Immediate& raw_null =
691 Immediate(reinterpret_cast<intptr_t>(Object::null())); 658 Immediate(reinterpret_cast<intptr_t>(Object::null()));
692 Label is_assignable, runtime_call; 659 Label is_assignable, runtime_call;
693 __ cmpl(EAX, raw_null); 660 __ cmpl(EAX, raw_null);
694 __ j(EQUAL, &is_assignable); 661 __ j(EQUAL, &is_assignable);
695 662
696 // Generate throw new TypeError() if the type is malformed or malbounded. 663 // Generate throw new TypeError() if the type is malformed or malbounded.
697 if (dst_type.IsMalformedOrMalbounded()) { 664 if (dst_type.IsMalformedOrMalbounded()) {
698 __ PushObject(Object::null_object()); // Make room for the result. 665 __ PushObject(Object::null_object()); // Make room for the result.
699 __ pushl(EAX); // Push the source object. 666 __ pushl(EAX); // Push the source object.
700 __ PushObject(dst_name); // Push the name of the destination. 667 __ PushObject(dst_name); // Push the name of the destination.
701 __ PushObject(dst_type); // Push the type of the destination. 668 __ PushObject(dst_type); // Push the type of the destination.
702 GenerateRuntimeCall(token_pos, 669 GenerateRuntimeCall(token_pos, deopt_id, kBadTypeErrorRuntimeEntry, 3,
703 deopt_id,
704 kBadTypeErrorRuntimeEntry,
705 3,
706 locs); 670 locs);
707 // We should never return here. 671 // We should never return here.
708 __ int3(); 672 __ int3();
709 673
710 __ Bind(&is_assignable); // For a null object. 674 __ Bind(&is_assignable); // For a null object.
711 __ popl(EDX); // Remove pushed instantiator type arguments. 675 __ popl(EDX); // Remove pushed instantiator type arguments.
712 return; 676 return;
713 } 677 }
714 678
715 // Generate inline type check, linking to runtime call if not assignable. 679 // Generate inline type check, linking to runtime call if not assignable.
716 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); 680 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
717 test_cache = GenerateInlineInstanceof(token_pos, dst_type, 681 test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable,
718 &is_assignable, &runtime_call); 682 &runtime_call);
719 683
720 __ Bind(&runtime_call); 684 __ Bind(&runtime_call);
721 __ movl(EDX, Address(ESP, 0)); // Get instantiator type arguments. 685 __ movl(EDX, Address(ESP, 0)); // Get instantiator type arguments.
722 __ PushObject(Object::null_object()); // Make room for the result. 686 __ PushObject(Object::null_object()); // Make room for the result.
723 __ pushl(EAX); // Push the source object. 687 __ pushl(EAX); // Push the source object.
724 __ PushObject(dst_type); // Push the type of the destination. 688 __ PushObject(dst_type); // Push the type of the destination.
725 __ pushl(EDX); // Instantiator type arguments. 689 __ pushl(EDX); // Instantiator type arguments.
726 __ PushObject(dst_name); // Push the name of the destination. 690 __ PushObject(dst_name); // Push the name of the destination.
727 __ LoadObject(EAX, test_cache); 691 __ LoadObject(EAX, test_cache);
728 __ pushl(EAX); 692 __ pushl(EAX);
729 GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 5, locs); 693 GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 5, locs);
730 // Pop the parameters supplied to the runtime entry. The result of the 694 // Pop the parameters supplied to the runtime entry. The result of the
731 // type check runtime call is the checked value. 695 // type check runtime call is the checked value.
732 __ Drop(5); 696 __ Drop(5);
733 __ popl(EAX); 697 __ popl(EAX);
734 698
735 __ Bind(&is_assignable); 699 __ Bind(&is_assignable);
736 __ popl(EDX); // Remove pushed instantiator type arguments. 700 __ popl(EDX); // Remove pushed instantiator type arguments.
(...skipping 48 matching lines...)
785 749
786 // Copy positional arguments. 750 // Copy positional arguments.
787 // Argument i passed at fp[kParamEndSlotFromFp + num_args - i] is copied 751 // Argument i passed at fp[kParamEndSlotFromFp + num_args - i] is copied
788 // to fp[kFirstLocalSlotFromFp - i]. 752 // to fp[kFirstLocalSlotFromFp - i].
789 753
790 __ movl(EBX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); 754 __ movl(EBX, FieldAddress(EDX, ArgumentsDescriptor::count_offset()));
791 // Since EBX and ECX are Smi, use TIMES_2 instead of TIMES_4. 755 // Since EBX and ECX are Smi, use TIMES_2 instead of TIMES_4.
792 // Let EBX point to the last passed positional argument, i.e. to 756 // Let EBX point to the last passed positional argument, i.e. to
793 // fp[kParamEndSlotFromFp + num_args - (num_pos_args - 1)]. 757 // fp[kParamEndSlotFromFp + num_args - (num_pos_args - 1)].
794 __ subl(EBX, ECX); 758 __ subl(EBX, ECX);
795 __ leal(EBX, Address(EBP, EBX, TIMES_2, 759 __ leal(EBX,
796 (kParamEndSlotFromFp + 1) * kWordSize)); 760 Address(EBP, EBX, TIMES_2, (kParamEndSlotFromFp + 1) * kWordSize));
797 761
798 // Let EDI point to the last copied positional argument, i.e. to 762 // Let EDI point to the last copied positional argument, i.e. to
799 // fp[kFirstLocalSlotFromFp - (num_pos_args - 1)]. 763 // fp[kFirstLocalSlotFromFp - (num_pos_args - 1)].
800 __ leal(EDI, Address(EBP, (kFirstLocalSlotFromFp + 1) * kWordSize)); 764 __ leal(EDI, Address(EBP, (kFirstLocalSlotFromFp + 1) * kWordSize));
801 __ subl(EDI, ECX); // ECX is a Smi, subtract twice for TIMES_4 scaling. 765 __ subl(EDI, ECX); // ECX is a Smi, subtract twice for TIMES_4 scaling.
802 __ subl(EDI, ECX); 766 __ subl(EDI, ECX);
803 __ SmiUntag(ECX); 767 __ SmiUntag(ECX);
804 Label loop, loop_condition; 768 Label loop, loop_condition;
805 __ jmp(&loop_condition, Assembler::kNearJump); 769 __ jmp(&loop_condition, Assembler::kNearJump);
806 // We do not use the final allocation index of the variable here, i.e. 770 // We do not use the final allocation index of the variable here, i.e.
807 // scope->VariableAt(i)->index(), because captured variables still need 771 // scope->VariableAt(i)->index(), because captured variables still need
808 // to be copied to the context that is not yet allocated. 772 // to be copied to the context that is not yet allocated.
809 const Address argument_addr(EBX, ECX, TIMES_4, 0); 773 const Address argument_addr(EBX, ECX, TIMES_4, 0);
810 const Address copy_addr(EDI, ECX, TIMES_4, 0); 774 const Address copy_addr(EDI, ECX, TIMES_4, 0);
811 __ Bind(&loop); 775 __ Bind(&loop);
812 __ movl(EAX, argument_addr); 776 __ movl(EAX, argument_addr);
813 __ movl(copy_addr, EAX); 777 __ movl(copy_addr, EAX);
814 __ Bind(&loop_condition); 778 __ Bind(&loop_condition);
815 __ decl(ECX); 779 __ decl(ECX);
816 __ j(POSITIVE, &loop, Assembler::kNearJump); 780 __ j(POSITIVE, &loop, Assembler::kNearJump);
817 781
818 // Copy or initialize optional named arguments. 782 // Copy or initialize optional named arguments.
819 const Immediate& raw_null = 783 const Immediate& raw_null =
820 Immediate(reinterpret_cast<intptr_t>(Object::null())); 784 Immediate(reinterpret_cast<intptr_t>(Object::null()));
821 Label all_arguments_processed; 785 Label all_arguments_processed;
822 #ifdef DEBUG 786 #ifdef DEBUG
823 const bool check_correct_named_args = true; 787 const bool check_correct_named_args = true;
824 #else 788 #else
825 const bool check_correct_named_args = function.IsClosureFunction(); 789 const bool check_correct_named_args = function.IsClosureFunction();
826 #endif 790 #endif
827 if (num_opt_named_params > 0) { 791 if (num_opt_named_params > 0) {
828 // Start by alphabetically sorting the names of the optional parameters. 792 // Start by alphabetically sorting the names of the optional parameters.
829 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; 793 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params];
830 int* opt_param_position = new int[num_opt_named_params]; 794 int* opt_param_position = new int[num_opt_named_params];
831 for (int pos = num_fixed_params; pos < num_params; pos++) { 795 for (int pos = num_fixed_params; pos < num_params; pos++) {
832 LocalVariable* parameter = scope->VariableAt(pos); 796 LocalVariable* parameter = scope->VariableAt(pos);
833 const String& opt_param_name = parameter->name(); 797 const String& opt_param_name = parameter->name();
834 int i = pos - num_fixed_params; 798 int i = pos - num_fixed_params;
835 while (--i >= 0) { 799 while (--i >= 0) {
836 LocalVariable* param_i = opt_param[i]; 800 LocalVariable* param_i = opt_param[i];
837 const intptr_t result = opt_param_name.CompareTo(param_i->name()); 801 const intptr_t result = opt_param_name.CompareTo(param_i->name());
838 ASSERT(result != 0); 802 ASSERT(result != 0);
839 if (result > 0) break; 803 if (result > 0) break;
840 opt_param[i + 1] = opt_param[i]; 804 opt_param[i + 1] = opt_param[i];
841 opt_param_position[i + 1] = opt_param_position[i]; 805 opt_param_position[i + 1] = opt_param_position[i];
842 } 806 }
843 opt_param[i + 1] = parameter; 807 opt_param[i + 1] = parameter;
844 opt_param_position[i + 1] = pos; 808 opt_param_position[i + 1] = pos;
845 } 809 }
846 // Generate code handling each optional parameter in alphabetical order. 810 // Generate code handling each optional parameter in alphabetical order.
847 __ movl(EBX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); 811 __ movl(EBX, FieldAddress(EDX, ArgumentsDescriptor::count_offset()));
848 __ movl(ECX, 812 __ movl(ECX,
849 FieldAddress(EDX, ArgumentsDescriptor::positional_count_offset())); 813 FieldAddress(EDX, ArgumentsDescriptor::positional_count_offset()));
850 __ SmiUntag(ECX); 814 __ SmiUntag(ECX);
851 // Let EBX point to the first passed argument, i.e. to 815 // Let EBX point to the first passed argument, i.e. to
852 // fp[kParamEndSlotFromFp + num_args - 0]; num_args (EBX) is Smi. 816 // fp[kParamEndSlotFromFp + num_args - 0]; num_args (EBX) is Smi.
853 __ leal(EBX, 817 __ leal(EBX, Address(EBP, EBX, TIMES_2, kParamEndSlotFromFp * kWordSize));
854 Address(EBP, EBX, TIMES_2, kParamEndSlotFromFp * kWordSize));
855 // Let EDI point to the entry of the first named argument. 818 // Let EDI point to the entry of the first named argument.
856 __ leal(EDI, 819 __ leal(EDI,
857 FieldAddress(EDX, ArgumentsDescriptor::first_named_entry_offset())); 820 FieldAddress(EDX, ArgumentsDescriptor::first_named_entry_offset()));
858 for (int i = 0; i < num_opt_named_params; i++) { 821 for (int i = 0; i < num_opt_named_params; i++) {
859 Label load_default_value, assign_optional_parameter; 822 Label load_default_value, assign_optional_parameter;
860 const int param_pos = opt_param_position[i]; 823 const int param_pos = opt_param_position[i];
861 // Check if this named parameter was passed in. 824 // Check if this named parameter was passed in.
862 // Load EAX with the name of the argument. 825 // Load EAX with the name of the argument.
863 __ movl(EAX, Address(EDI, ArgumentsDescriptor::name_offset())); 826 __ movl(EAX, Address(EDI, ArgumentsDescriptor::name_offset()));
864 ASSERT(opt_param[i]->name().IsSymbol()); 827 ASSERT(opt_param[i]->name().IsSymbol());
(...skipping 79 matching lines...)
944 // checked, otherwise noSuchMethod would not see their original values. 907 // checked, otherwise noSuchMethod would not see their original values.
945 // This step can be skipped in case we decide that formal parameters are 908 // This step can be skipped in case we decide that formal parameters are
946 // implicitly final, since garbage collecting the unmodified value is not 909 // implicitly final, since garbage collecting the unmodified value is not
947 // an issue anymore. 910 // an issue anymore.
948 911
949 // EDX : arguments descriptor array. 912 // EDX : arguments descriptor array.
950 __ movl(ECX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); 913 __ movl(ECX, FieldAddress(EDX, ArgumentsDescriptor::count_offset()));
951 __ SmiUntag(ECX); 914 __ SmiUntag(ECX);
952 Label null_args_loop, null_args_loop_condition; 915 Label null_args_loop, null_args_loop_condition;
953 __ jmp(&null_args_loop_condition, Assembler::kNearJump); 916 __ jmp(&null_args_loop_condition, Assembler::kNearJump);
954 const Address original_argument_addr( 917 const Address original_argument_addr(EBP, ECX, TIMES_4,
955 EBP, ECX, TIMES_4, (kParamEndSlotFromFp + 1) * kWordSize); 918 (kParamEndSlotFromFp + 1) * kWordSize);
956 __ Bind(&null_args_loop); 919 __ Bind(&null_args_loop);
957 __ movl(original_argument_addr, raw_null); 920 __ movl(original_argument_addr, raw_null);
958 __ Bind(&null_args_loop_condition); 921 __ Bind(&null_args_loop_condition);
959 __ decl(ECX); 922 __ decl(ECX);
960 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); 923 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump);
961 } 924 }
962 925
963 926
964 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { 927 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) {
965 // TOS: return address. 928 // TOS: return address.
(...skipping 19 matching lines...)
985 Immediate(reinterpret_cast<intptr_t>(Object::null())); 948 Immediate(reinterpret_cast<intptr_t>(Object::null()));
986 __ movl(EAX, raw_null); 949 __ movl(EAX, raw_null);
987 __ ret(); 950 __ ret();
988 } 951 }
989 952
990 953
991 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc 954 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc
992 // needs to be updated to match. 955 // needs to be updated to match.
993 void FlowGraphCompiler::EmitFrameEntry() { 956 void FlowGraphCompiler::EmitFrameEntry() {
994 const Function& function = parsed_function().function(); 957 const Function& function = parsed_function().function();
995 if (CanOptimizeFunction() && 958 if (CanOptimizeFunction() && function.IsOptimizable() &&
996 function.IsOptimizable() &&
997 (!is_optimizing() || may_reoptimize())) { 959 (!is_optimizing() || may_reoptimize())) {
998 __ Comment("Invocation Count Check"); 960 __ Comment("Invocation Count Check");
999 const Register function_reg = EBX; 961 const Register function_reg = EBX;
1000 __ LoadObject(function_reg, function); 962 __ LoadObject(function_reg, function);
1001 963
1002 // Reoptimization of an optimized function is triggered by counting in 964 // Reoptimization of an optimized function is triggered by counting in
1003 // IC stubs, but not at the entry of the function. 965 // IC stubs, but not at the entry of the function.
1004 if (!is_optimizing()) { 966 if (!is_optimizing()) {
1005 __ incl(FieldAddress(function_reg, Function::usage_counter_offset())); 967 __ incl(FieldAddress(function_reg, Function::usage_counter_offset()));
1006 } 968 }
1007 __ cmpl(FieldAddress(function_reg, Function::usage_counter_offset()), 969 __ cmpl(FieldAddress(function_reg, Function::usage_counter_offset()),
1008 Immediate(GetOptimizationThreshold())); 970 Immediate(GetOptimizationThreshold()));
1009 ASSERT(function_reg == EBX); 971 ASSERT(function_reg == EBX);
1010 __ J(GREATER_EQUAL, *StubCode::OptimizeFunction_entry()); 972 __ J(GREATER_EQUAL, *StubCode::OptimizeFunction_entry());
1011 } 973 }
1012 __ Comment("Enter frame"); 974 __ Comment("Enter frame");
1013 if (flow_graph().IsCompiledForOsr()) { 975 if (flow_graph().IsCompiledForOsr()) {
1014 intptr_t extra_slots = StackSize() 976 intptr_t extra_slots = StackSize() - flow_graph().num_stack_locals() -
1015 - flow_graph().num_stack_locals() 977 flow_graph().num_copied_params();
1016 - flow_graph().num_copied_params();
1017 ASSERT(extra_slots >= 0); 978 ASSERT(extra_slots >= 0);
1018 __ EnterOsrFrame(extra_slots * kWordSize); 979 __ EnterOsrFrame(extra_slots * kWordSize);
1019 } else { 980 } else {
1020 ASSERT(StackSize() >= 0); 981 ASSERT(StackSize() >= 0);
1021 __ EnterDartFrame(StackSize() * kWordSize); 982 __ EnterDartFrame(StackSize() * kWordSize);
1022 } 983 }
1023 } 984 }
1024 985
1025 986
1026 void FlowGraphCompiler::CompileGraph() { 987 void FlowGraphCompiler::CompileGraph() {
(...skipping 19 matching lines...)
1046 if (num_copied_params == 0) { 1007 if (num_copied_params == 0) {
1047 const bool check_arguments = 1008 const bool check_arguments =
1048 function.IsClosureFunction() && !flow_graph().IsCompiledForOsr(); 1009 function.IsClosureFunction() && !flow_graph().IsCompiledForOsr();
1049 if (check_arguments) { 1010 if (check_arguments) {
1050 __ Comment("Check argument count"); 1011 __ Comment("Check argument count");
1051 // Check that exactly num_fixed arguments are passed in. 1012 // Check that exactly num_fixed arguments are passed in.
1052 Label correct_num_arguments, wrong_num_arguments; 1013 Label correct_num_arguments, wrong_num_arguments;
1053 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset())); 1014 __ movl(EAX, FieldAddress(EDX, ArgumentsDescriptor::count_offset()));
1054 __ cmpl(EAX, Immediate(Smi::RawValue(num_fixed_params))); 1015 __ cmpl(EAX, Immediate(Smi::RawValue(num_fixed_params)));
1055 __ j(NOT_EQUAL, &wrong_num_arguments, Assembler::kNearJump); 1016 __ j(NOT_EQUAL, &wrong_num_arguments, Assembler::kNearJump);
1056 __ cmpl(EAX, 1017 __ cmpl(EAX, FieldAddress(
1057 FieldAddress(EDX, 1018 EDX, ArgumentsDescriptor::positional_count_offset()));
1058 ArgumentsDescriptor::positional_count_offset()));
1059 __ j(EQUAL, &correct_num_arguments, Assembler::kNearJump); 1019 __ j(EQUAL, &correct_num_arguments, Assembler::kNearJump);
1060 1020
1061 __ Bind(&wrong_num_arguments); 1021 __ Bind(&wrong_num_arguments);
1062 __ LeaveFrame(); // The arguments are still on the stack. 1022 __ LeaveFrame(); // The arguments are still on the stack.
1063 __ Jmp(*StubCode::CallClosureNoSuchMethod_entry()); 1023 __ Jmp(*StubCode::CallClosureNoSuchMethod_entry());
1064 // The noSuchMethod call may return to the caller, but not here. 1024 // The noSuchMethod call may return to the caller, but not here.
1065 __ Bind(&correct_num_arguments); 1025 __ Bind(&correct_num_arguments);
1066 } 1026 }
1067 } else if (!flow_graph().IsCompiledForOsr()) { 1027 } else if (!flow_graph().IsCompiledForOsr()) {
1068 CopyParameters(); 1028 CopyParameters();
(...skipping 104 matching lines...)
1173 AddDeoptIndexAtCall(deopt_id_after); 1133 AddDeoptIndexAtCall(deopt_id_after);
1174 } else { 1134 } else {
1175 // Add deoptimization continuation point after the call and before the 1135 // Add deoptimization continuation point after the call and before the
1176 // arguments are removed. 1136 // arguments are removed.
1177 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); 1137 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
1178 } 1138 }
1179 } 1139 }
1180 } 1140 }
1181 1141
1182 1142
1183 void FlowGraphCompiler::EmitUnoptimizedStaticCall( 1143 void FlowGraphCompiler::EmitUnoptimizedStaticCall(intptr_t argument_count,
1184 intptr_t argument_count, 1144 intptr_t deopt_id,
1185 intptr_t deopt_id, 1145 TokenPosition token_pos,
1186 TokenPosition token_pos, 1146 LocationSummary* locs,
1187 LocationSummary* locs, 1147 const ICData& ic_data) {
1188 const ICData& ic_data) {
1189 const StubEntry& stub_entry = 1148 const StubEntry& stub_entry =
1190 *StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested()); 1149 *StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
1191 __ LoadObject(ECX, ic_data); 1150 __ LoadObject(ECX, ic_data);
1192 GenerateDartCall(deopt_id, 1151 GenerateDartCall(deopt_id, token_pos, stub_entry,
1193 token_pos, 1152 RawPcDescriptors::kUnoptStaticCall, locs);
1194 stub_entry,
1195 RawPcDescriptors::kUnoptStaticCall,
1196 locs);
1197 __ Drop(argument_count); 1153 __ Drop(argument_count);
1198 } 1154 }
1199 1155
1200 1156
1201 void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) { 1157 void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) {
1202 // We do not check for overflow when incrementing the edge counter. The 1158 // We do not check for overflow when incrementing the edge counter. The
1203 // function should normally be optimized long before the counter can 1159 // function should normally be optimized long before the counter can
1204 // overflow; and though we do not reset the counters when we optimize or 1160 // overflow; and though we do not reset the counters when we optimize or
1205 // deoptimize, there is a bound on the number of 1161 // deoptimize, there is a bound on the number of
1206 // optimization/deoptimization cycles we will attempt. 1162 // optimization/deoptimization cycles we will attempt.
1207 ASSERT(!edge_counters_array_.IsNull()); 1163 ASSERT(!edge_counters_array_.IsNull());
1208 __ Comment("Edge counter"); 1164 __ Comment("Edge counter");
1209 __ LoadObject(EAX, edge_counters_array_); 1165 __ LoadObject(EAX, edge_counters_array_);
1210 __ IncrementSmiField(FieldAddress(EAX, Array::element_offset(edge_id)), 1); 1166 __ IncrementSmiField(FieldAddress(EAX, Array::element_offset(edge_id)), 1);
1211 } 1167 }
1212 1168
1213 1169
1214 void FlowGraphCompiler::EmitOptimizedInstanceCall( 1170 void FlowGraphCompiler::EmitOptimizedInstanceCall(const StubEntry& stub_entry,
1215 const StubEntry& stub_entry, 1171 const ICData& ic_data,
1216 const ICData& ic_data, 1172 intptr_t argument_count,
1217 intptr_t argument_count, 1173 intptr_t deopt_id,
1218 intptr_t deopt_id, 1174 TokenPosition token_pos,
1219 TokenPosition token_pos, 1175 LocationSummary* locs) {
1220 LocationSummary* locs) {
1221 ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0); 1176 ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0);
1222 // Each ICData propagated from unoptimized to optimized code contains the 1177 // Each ICData propagated from unoptimized to optimized code contains the
1223 // function that corresponds to the Dart function of that IC call. Due 1178 // function that corresponds to the Dart function of that IC call. Due
1224 // to inlining in optimized code, that function may not correspond to the 1179 // to inlining in optimized code, that function may not correspond to the
1225 // top-level function (parsed_function().function()) which could be 1180 // top-level function (parsed_function().function()) which could be
1226 // reoptimized and which counter needs to be incremented. 1181 // reoptimized and which counter needs to be incremented.
1227 // Pass the function explicitly, it is used in IC stub. 1182 // Pass the function explicitly, it is used in IC stub.
1228 __ LoadObject(EBX, parsed_function().function()); 1183 __ LoadObject(EBX, parsed_function().function());
1229 __ LoadObject(ECX, ic_data); 1184 __ LoadObject(ECX, ic_data);
1230 GenerateDartCall(deopt_id, 1185 GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall,
1231 token_pos,
1232 stub_entry,
1233 RawPcDescriptors::kIcCall,
1234 locs); 1186 locs);
1235 __ Drop(argument_count); 1187 __ Drop(argument_count);
1236 } 1188 }
1237 1189
1238 1190
1239 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry, 1191 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry,
1240 const ICData& ic_data, 1192 const ICData& ic_data,
1241 intptr_t argument_count, 1193 intptr_t argument_count,
1242 intptr_t deopt_id, 1194 intptr_t deopt_id,
1243 TokenPosition token_pos, 1195 TokenPosition token_pos,
1244 LocationSummary* locs) { 1196 LocationSummary* locs) {
1245 ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0); 1197 ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0);
1246 __ LoadObject(ECX, ic_data); 1198 __ LoadObject(ECX, ic_data);
1247 GenerateDartCall(deopt_id, 1199 GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall,
1248 token_pos,
1249 stub_entry,
1250 RawPcDescriptors::kIcCall,
1251 locs); 1200 locs);
1252 __ Drop(argument_count); 1201 __ Drop(argument_count);
1253 } 1202 }
1254 1203
1255 1204
1256 void FlowGraphCompiler::EmitMegamorphicInstanceCall( 1205 void FlowGraphCompiler::EmitMegamorphicInstanceCall(
1257 const ICData& ic_data, 1206 const ICData& ic_data,
1258 intptr_t argument_count, 1207 intptr_t argument_count,
1259 intptr_t deopt_id, 1208 intptr_t deopt_id,
1260 TokenPosition token_pos, 1209 TokenPosition token_pos,
1261 LocationSummary* locs, 1210 LocationSummary* locs,
1262 intptr_t try_index, 1211 intptr_t try_index,
1263 intptr_t slow_path_argument_count) { 1212 intptr_t slow_path_argument_count) {
1264 const String& name = String::Handle(zone(), ic_data.target_name()); 1213 const String& name = String::Handle(zone(), ic_data.target_name());
1265 const Array& arguments_descriptor = 1214 const Array& arguments_descriptor =
1266 Array::ZoneHandle(zone(), ic_data.arguments_descriptor()); 1215 Array::ZoneHandle(zone(), ic_data.arguments_descriptor());
1267 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0)); 1216 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0));
1268 const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(zone(), 1217 const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(
1218 zone(),
1269 MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor)); 1219 MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor));
1270 1220
1271 __ Comment("MegamorphicCall"); 1221 __ Comment("MegamorphicCall");
1272 // Load receiver into EBX. 1222 // Load receiver into EBX.
1273 __ movl(EBX, Address(ESP, (argument_count - 1) * kWordSize)); 1223 __ movl(EBX, Address(ESP, (argument_count - 1) * kWordSize));
1274 Label done; 1224 Label done;
1275 if (ShouldInlineSmiStringHashCode(ic_data)) { 1225 if (ShouldInlineSmiStringHashCode(ic_data)) {
1276 Label megamorphic_call; 1226 Label megamorphic_call;
1277 __ Comment("Inlined get:hashCode for Smi and OneByteString"); 1227 __ Comment("Inlined get:hashCode for Smi and OneByteString");
1278 __ movl(EAX, EBX); // Move Smi hashcode to EAX. 1228 __ movl(EAX, EBX); // Move Smi hashcode to EAX.
1279 __ testl(EBX, Immediate(kSmiTagMask)); 1229 __ testl(EBX, Immediate(kSmiTagMask));
1280 __ j(ZERO, &done, Assembler::kNearJump); // It is a Smi; we are done. 1230 __ j(ZERO, &done, Assembler::kNearJump); // It is a Smi; we are done.
1281 1231
1282 __ CompareClassId(EBX, kOneByteStringCid, EAX); 1232 __ CompareClassId(EBX, kOneByteStringCid, EAX);
1283 __ j(NOT_EQUAL, &megamorphic_call, Assembler::kNearJump); 1233 __ j(NOT_EQUAL, &megamorphic_call, Assembler::kNearJump);
1284 __ movl(EAX, FieldAddress(EBX, String::hash_offset())); 1234 __ movl(EAX, FieldAddress(EBX, String::hash_offset()));
1285 __ cmpl(EAX, Immediate(0)); 1235 __ cmpl(EAX, Immediate(0));
1286 __ j(NOT_EQUAL, &done, Assembler::kNearJump); 1236 __ j(NOT_EQUAL, &done, Assembler::kNearJump);
1287 1237
1288 __ Bind(&megamorphic_call); 1238 __ Bind(&megamorphic_call);
1289 __ Comment("Slow case: megamorphic call"); 1239 __ Comment("Slow case: megamorphic call");
1290 } 1240 }
1291 __ LoadObject(ECX, cache); 1241 __ LoadObject(ECX, cache);
1292 __ call(Address(THR, Thread::megamorphic_call_checked_entry_offset())); 1242 __ call(Address(THR, Thread::megamorphic_call_checked_entry_offset()));
1293 __ call(EBX); 1243 __ call(EBX);
1294 1244
1295 __ Bind(&done); 1245 __ Bind(&done);
1296 AddCurrentDescriptor(RawPcDescriptors::kOther, 1246 AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId, token_pos);
1297 Thread::kNoDeoptId, token_pos);
1298 RecordSafepoint(locs, slow_path_argument_count); 1247 RecordSafepoint(locs, slow_path_argument_count);
1299 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); 1248 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
1300 // Precompilation not implemented on ia32 platform. 1249 // Precompilation not implemented on ia32 platform.
1301 ASSERT(!FLAG_precompiled_mode); 1250 ASSERT(!FLAG_precompiled_mode);
1302 if (is_optimizing()) { 1251 if (is_optimizing()) {
1303 AddDeoptIndexAtCall(deopt_id_after); 1252 AddDeoptIndexAtCall(deopt_id_after);
1304 } else { 1253 } else {
1305 // Add deoptimization continuation point after the call and before the 1254 // Add deoptimization continuation point after the call and before the
1306 // arguments are removed. 1255 // arguments are removed.
1307 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); 1256 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
1308 } 1257 }
1309 __ Drop(argument_count); 1258 __ Drop(argument_count);
1310 } 1259 }
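
The megamorphic path above loads the cache into ECX and jumps through a shared entry point that probes a table keyed by the receiver's class id. A rough C++ sketch of such a probe, assuming open addressing over a power-of-two table (the layout and names are illustrative, not the VM's MegamorphicCache):

#include <cstdint>

using Target = void (*)();

struct MegamorphicEntry {
  intptr_t cid;   // receiver class id; 0 marks an empty slot here
  Target target;  // entry point cached for that class
};

// Illustrative open-addressing probe keyed on the receiver's class id;
// 'mask' is table_size - 1 for a power-of-two table.
Target MegamorphicLookup(const MegamorphicEntry* table, intptr_t mask,
                         intptr_t cid) {
  intptr_t i = cid & mask;
  while (true) {
    if (table[i].cid == cid) return table[i].target;  // hit
    if (table[i].cid == 0) return nullptr;            // miss -> runtime
    i = (i + 1) & mask;                               // linear probe
  }
}
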
1311 1260
1312 1261
1313 void FlowGraphCompiler::EmitSwitchableInstanceCall( 1262 void FlowGraphCompiler::EmitSwitchableInstanceCall(const ICData& ic_data,
1314 const ICData& ic_data, 1263 intptr_t argument_count,
1315 intptr_t argument_count, 1264 intptr_t deopt_id,
1316 intptr_t deopt_id, 1265 TokenPosition token_pos,
1317 TokenPosition token_pos, 1266 LocationSummary* locs) {
1318 LocationSummary* locs) {
1319 // Only generated with precompilation. 1267 // Only generated with precompilation.
1320 UNREACHABLE(); 1268 UNREACHABLE();
1321 } 1269 }
1322 1270
1323 1271
1324 void FlowGraphCompiler::EmitOptimizedStaticCall( 1272 void FlowGraphCompiler::EmitOptimizedStaticCall(
1325 const Function& function, 1273 const Function& function,
1326 const Array& arguments_descriptor, 1274 const Array& arguments_descriptor,
1327 intptr_t argument_count, 1275 intptr_t argument_count,
1328 intptr_t deopt_id, 1276 intptr_t deopt_id,
1329 TokenPosition token_pos, 1277 TokenPosition token_pos,
1330 LocationSummary* locs) { 1278 LocationSummary* locs) {
1331 if (function.HasOptionalParameters()) { 1279 if (function.HasOptionalParameters()) {
1332 __ LoadObject(EDX, arguments_descriptor); 1280 __ LoadObject(EDX, arguments_descriptor);
1333 } else { 1281 } else {
1334 __ xorl(EDX, EDX); // GC-safe Smi zero because of the stub. 1282 __ xorl(EDX, EDX); // GC-safe Smi zero because of the stub.
1335 } 1283 }
1336 // Do not use the code from the function, but let the code be patched so that 1284 // Do not use the code from the function, but let the code be patched so that
1337 // we can record the outgoing edges to other code. 1285 // we can record the outgoing edges to other code.
1338 GenerateDartCall(deopt_id, 1286 GenerateDartCall(deopt_id, token_pos, *StubCode::CallStaticFunction_entry(),
1339 token_pos, 1287 RawPcDescriptors::kOther, locs);
1340 *StubCode::CallStaticFunction_entry(),
1341 RawPcDescriptors::kOther,
1342 locs);
1343 AddStaticCallTarget(function); 1288 AddStaticCallTarget(function);
1344 __ Drop(argument_count); 1289 __ Drop(argument_count);
1345 } 1290 }
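
The xorl EDX, EDX above is GC-safe because of Smi tagging: a Smi is stored shifted left with a zero tag bit, so the all-zero word is exactly the Smi 0 and never looks like a heap pointer. A small self-contained example of that encoding (a sketch, assuming the one-bit tag used on ia32):

#include <cassert>
#include <cstdint>

// Smi tagging as on ia32: the payload is shifted left by one and the
// low tag bit is 0, so the all-zero word is exactly the Smi 0.
constexpr intptr_t kSmiTagSize = 1;
constexpr intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;

intptr_t SmiEncode(intptr_t value) { return value << kSmiTagSize; }
bool IsSmi(intptr_t raw) { return (raw & kSmiTagMask) == 0; }

int main() {
  // xorl EDX, EDX produces the bit pattern 0, which decodes as Smi 0;
  // the GC can never mistake it for a heap pointer.
  assert(SmiEncode(0) == 0);
  assert(IsSmi(SmiEncode(0)));
  return 0;
}
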
1346 1291
1347 1292
1348 Condition FlowGraphCompiler::EmitEqualityRegConstCompare( 1293 Condition FlowGraphCompiler::EmitEqualityRegConstCompare(
1349 Register reg, 1294 Register reg,
1350 const Object& obj, 1295 const Object& obj,
1351 bool needs_number_check, 1296 bool needs_number_check,
1352 TokenPosition token_pos) { 1297 TokenPosition token_pos) {
1353 ASSERT(!needs_number_check || 1298 ASSERT(!needs_number_check ||
1354 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint())); 1299 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint()));
1355 1300
1356 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { 1301 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) {
1357 ASSERT(!needs_number_check); 1302 ASSERT(!needs_number_check);
1358 __ testl(reg, reg); 1303 __ testl(reg, reg);
1359 return EQUAL; 1304 return EQUAL;
1360 } 1305 }
1361 1306
1362 if (needs_number_check) { 1307 if (needs_number_check) {
1363 __ pushl(reg); 1308 __ pushl(reg);
1364 __ PushObject(obj); 1309 __ PushObject(obj);
1365 if (is_optimizing()) { 1310 if (is_optimizing()) {
1366 __ Call(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); 1311 __ Call(*StubCode::OptimizedIdenticalWithNumberCheck_entry());
1367 } else { 1312 } else {
1368 __ Call(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); 1313 __ Call(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
1369 } 1314 }
1370 if (token_pos.IsReal()) { 1315 if (token_pos.IsReal()) {
1371 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, 1316 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, Thread::kNoDeoptId,
1372 Thread::kNoDeoptId,
1373 token_pos); 1317 token_pos);
1374 } 1318 }
1375 // Stub returns result in flags (result of a cmpl, we need ZF computed). 1319 // Stub returns result in flags (result of a cmpl, we need ZF computed).
1376 __ popl(reg); // Discard constant. 1320 __ popl(reg); // Discard constant.
1377 __ popl(reg); // Restore 'reg'. 1321 __ popl(reg); // Restore 'reg'.
1378 } else { 1322 } else {
1379 __ CompareObject(reg, obj); 1323 __ CompareObject(reg, obj);
1380 } 1324 }
1381 return EQUAL; 1325 return EQUAL;
1382 } 1326 }
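
The number-check stubs above implement identity comparison with Dart's number semantics: plain pointer equality, except that boxed numbers (Mint, Double, Bigint) compare by value, which is why the assert at the top excludes those types when no number check was requested. A hedged C++ sketch of that rule (the Obj model below is a stand-in, not the VM's object layout):

#include <cstdint>
#include <cstring>

// Stand-in object model, purely for illustration (not VM classes).
enum class Kind { kOther, kDouble, kMint };
struct Obj {
  Kind kind;
  double double_value;  // valid when kind == kDouble
  int64_t mint_value;   // valid when kind == kMint
};

// identical(): pointer equality, except boxed numbers compare by value.
// Doubles compare by bit pattern, so the same NaN is identical to itself
// and -0.0 is not identical to 0.0.
bool IdenticalWithNumberCheck(const Obj* a, const Obj* b) {
  if (a == b) return true;
  if (a->kind != b->kind) return false;
  if (a->kind == Kind::kDouble) {
    uint64_t ba, bb;
    std::memcpy(&ba, &a->double_value, sizeof(ba));
    std::memcpy(&bb, &b->double_value, sizeof(bb));
    return ba == bb;
  }
  if (a->kind == Kind::kMint) return a->mint_value == b->mint_value;
  return false;  // everything else is identity-only
}
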
1383 1327
1384 1328
1385 Condition FlowGraphCompiler::EmitEqualityRegRegCompare( 1329 Condition FlowGraphCompiler::EmitEqualityRegRegCompare(
1386 Register left, 1330 Register left,
1387 Register right, 1331 Register right,
1388 bool needs_number_check, 1332 bool needs_number_check,
1389 TokenPosition token_pos) { 1333 TokenPosition token_pos) {
1390 if (needs_number_check) { 1334 if (needs_number_check) {
1391 __ pushl(left); 1335 __ pushl(left);
1392 __ pushl(right); 1336 __ pushl(right);
1393 if (is_optimizing()) { 1337 if (is_optimizing()) {
1394 __ Call(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); 1338 __ Call(*StubCode::OptimizedIdenticalWithNumberCheck_entry());
1395 } else { 1339 } else {
1396 __ Call(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); 1340 __ Call(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
1397 } 1341 }
1398 if (token_pos.IsReal()) { 1342 if (token_pos.IsReal()) {
1399 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, 1343 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, Thread::kNoDeoptId,
1400 Thread::kNoDeoptId,
1401 token_pos); 1344 token_pos);
1402 } 1345 }
1403 // Stub returns result in flags (result of a cmpl, we need ZF computed). 1346 // Stub returns result in flags (result of a cmpl, we need ZF computed).
1404 __ popl(right); 1347 __ popl(right);
1405 __ popl(left); 1348 __ popl(left);
1406 } else { 1349 } else {
1407 __ cmpl(left, right); 1350 __ cmpl(left, right);
1408 } 1351 }
1409 return EQUAL; 1352 return EQUAL;
1410 } 1353 }
(...skipping 80 matching lines...)
1491 const Array& argument_names, 1434 const Array& argument_names,
1492 Label* failed, 1435 Label* failed,
1493 Label* match_found, 1436 Label* match_found,
1494 intptr_t deopt_id, 1437 intptr_t deopt_id,
1495 TokenPosition token_index, 1438 TokenPosition token_index,
1496 LocationSummary* locs, 1439 LocationSummary* locs,
1497 bool complete) { 1440 bool complete) {
1498 ASSERT(is_optimizing()); 1441 ASSERT(is_optimizing());
1499 ASSERT(!complete); 1442 ASSERT(!complete);
1500 __ Comment("EmitTestAndCall"); 1443 __ Comment("EmitTestAndCall");
1501 const Array& arguments_descriptor = 1444 const Array& arguments_descriptor = Array::ZoneHandle(
1502 Array::ZoneHandle(zone(), ArgumentsDescriptor::New(argument_count, 1445 zone(), ArgumentsDescriptor::New(argument_count, argument_names));
1503 argument_names));
1504 // Load receiver into EAX. 1446 // Load receiver into EAX.
1505 __ movl(EAX, Address(ESP, (argument_count - 1) * kWordSize)); 1447 __ movl(EAX, Address(ESP, (argument_count - 1) * kWordSize));
1506 __ LoadObject(EDX, arguments_descriptor); 1448 __ LoadObject(EDX, arguments_descriptor);
1507 1449
1508 const bool kFirstCheckIsSmi = ic_data.GetReceiverClassIdAt(0) == kSmiCid; 1450 const bool kFirstCheckIsSmi = ic_data.GetReceiverClassIdAt(0) == kSmiCid;
1509 const intptr_t kNumChecks = ic_data.NumberOfChecks(); 1451 const intptr_t kNumChecks = ic_data.NumberOfChecks();
1510 1452
1511 ASSERT(!ic_data.IsNull() && (kNumChecks > 0)); 1453 ASSERT(!ic_data.IsNull() && (kNumChecks > 0));
1512 1454
1513 Label after_smi_test; 1455 Label after_smi_test;
1514 __ testl(EAX, Immediate(kSmiTagMask)); 1456 __ testl(EAX, Immediate(kSmiTagMask));
1515 if (kFirstCheckIsSmi) { 1457 if (kFirstCheckIsSmi) {
1516 // Jump if receiver is not Smi. 1458 // Jump if receiver is not Smi.
1517 if (kNumChecks == 1) { 1459 if (kNumChecks == 1) {
1518 __ j(NOT_ZERO, failed); 1460 __ j(NOT_ZERO, failed);
1519 } else { 1461 } else {
1520 __ j(NOT_ZERO, &after_smi_test); 1462 __ j(NOT_ZERO, &after_smi_test);
1521 } 1463 }
1522 // Do not use the code from the function, but let the code be patched so 1464 // Do not use the code from the function, but let the code be patched so
1523 // that we can record the outgoing edges to other code. 1465 // that we can record the outgoing edges to other code.
1524 GenerateDartCall(deopt_id, 1466 GenerateDartCall(deopt_id, token_index,
1525 token_index,
1526 *StubCode::CallStaticFunction_entry(), 1467 *StubCode::CallStaticFunction_entry(),
1527 RawPcDescriptors::kOther, 1468 RawPcDescriptors::kOther, locs);
1528 locs); 1469 const Function& function =
1529 const Function& function = Function::ZoneHandle( 1470 Function::ZoneHandle(zone(), ic_data.GetTargetAt(0));
1530 zone(), ic_data.GetTargetAt(0));
1531 AddStaticCallTarget(function); 1471 AddStaticCallTarget(function);
1532 __ Drop(argument_count); 1472 __ Drop(argument_count);
1533 if (kNumChecks > 1) { 1473 if (kNumChecks > 1) {
1534 __ jmp(match_found); 1474 __ jmp(match_found);
1535 } 1475 }
1536 } else { 1476 } else {
1537 // Receiver is a Smi, but Smi is not a valid class; therefore fail. 1477 // Receiver is a Smi, but Smi is not a valid class; therefore fail.
1538 // (The Smi class must be first in the list.) 1478 // (The Smi class must be first in the list.)
1539 __ j(ZERO, failed); 1479 __ j(ZERO, failed);
1540 } 1480 }
(...skipping 16 matching lines...)
1557 ASSERT(sorted[i].cid != kSmiCid); 1497 ASSERT(sorted[i].cid != kSmiCid);
1558 Label next_test; 1498 Label next_test;
1559 __ cmpl(EDI, Immediate(sorted[i].cid)); 1499 __ cmpl(EDI, Immediate(sorted[i].cid));
1560 if (kIsLastCheck) { 1500 if (kIsLastCheck) {
1561 __ j(NOT_EQUAL, failed); 1501 __ j(NOT_EQUAL, failed);
1562 } else { 1502 } else {
1563 __ j(NOT_EQUAL, &next_test); 1503 __ j(NOT_EQUAL, &next_test);
1564 } 1504 }
1565 // Do not use the code from the function, but let the code be patched so 1505 // Do not use the code from the function, but let the code be patched so
1566 // that we can record the outgoing edges to other code. 1506 // that we can record the outgoing edges to other code.
1567 GenerateDartCall(deopt_id, 1507 GenerateDartCall(deopt_id, token_index,
1568 token_index,
1569 *StubCode::CallStaticFunction_entry(), 1508 *StubCode::CallStaticFunction_entry(),
1570 RawPcDescriptors::kOther, 1509 RawPcDescriptors::kOther, locs);
1571 locs);
1572 const Function& function = *sorted[i].target; 1510 const Function& function = *sorted[i].target;
1573 AddStaticCallTarget(function); 1511 AddStaticCallTarget(function);
1574 __ Drop(argument_count); 1512 __ Drop(argument_count);
1575 if (!kIsLastCheck) { 1513 if (!kIsLastCheck) {
1576 __ jmp(match_found); 1514 __ jmp(match_found);
1577 } 1515 }
1578 __ Bind(&next_test); 1516 __ Bind(&next_test);
1579 } 1517 }
1580 } 1518 }
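
Conceptually, the emitted sequence is a linear class-id dispatch: test the Smi tag first, then compare the receiver's cid against each sorted entry, calling the matching static target or jumping to 'failed'. A compact C++ model of that dispatch (CidTarget, the value of kSmiCid, and the nullptr-for-failure convention are illustrative):

#include <cstdint>
#include <vector>

using Target = void (*)();

constexpr intptr_t kSmiCid = 1;  // illustrative; not the real cid value

struct CidTarget {
  intptr_t cid;
  Target target;
};

// Mirrors the emitted sequence: Smi tag test first, then cid-by-cid
// comparisons; returning nullptr models the jump to the 'failed' label.
Target TestAndCall(bool receiver_is_smi, intptr_t receiver_cid,
                   const std::vector<CidTarget>& sorted) {
  if (receiver_is_smi) {
    // Only valid if the Smi case is first in the list, as asserted above.
    return (sorted[0].cid == kSmiCid) ? sorted[0].target : nullptr;
  }
  for (const CidTarget& entry : sorted) {
    if (entry.cid == receiver_cid) return entry.target;  // match found
  }
  return nullptr;  // all checks failed
}
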
1581 1519
(...skipping 51 matching lines...)
1633 __ movups(XMM0, source.ToStackSlotAddress()); 1571 __ movups(XMM0, source.ToStackSlotAddress());
1634 __ movups(destination.ToStackSlotAddress(), XMM0); 1572 __ movups(destination.ToStackSlotAddress(), XMM0);
1635 } 1573 }
1636 } else { 1574 } else {
1637 ASSERT(source.IsConstant()); 1575 ASSERT(source.IsConstant());
1638 if (destination.IsRegister()) { 1576 if (destination.IsRegister()) {
1639 const Object& constant = source.constant(); 1577 const Object& constant = source.constant();
1640 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) { 1578 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) {
1641 __ xorl(destination.reg(), destination.reg()); 1579 __ xorl(destination.reg(), destination.reg());
1642 } else if (constant.IsSmi() && 1580 } else if (constant.IsSmi() &&
1643 (source.constant_instruction()->representation() == kUnboxedInt32)) { 1581 (source.constant_instruction()->representation() ==
1582 kUnboxedInt32)) {
1644 __ movl(destination.reg(), Immediate(Smi::Cast(constant).Value())); 1583 __ movl(destination.reg(), Immediate(Smi::Cast(constant).Value()));
1645 } else { 1584 } else {
1646 __ LoadObjectSafely(destination.reg(), constant); 1585 __ LoadObjectSafely(destination.reg(), constant);
1647 } 1586 }
1648 } else if (destination.IsFpuRegister()) { 1587 } else if (destination.IsFpuRegister()) {
1649 const Double& constant = Double::Cast(source.constant()); 1588 const Double& constant = Double::Cast(source.constant());
1650 uword addr = FlowGraphBuilder::FindDoubleConstant(constant.value()); 1589 uword addr = FlowGraphBuilder::FindDoubleConstant(constant.value());
1651 if (addr == 0) { 1590 if (addr == 0) {
1652 __ pushl(EAX); 1591 __ pushl(EAX);
1653 __ LoadObject(EAX, constant); 1592 __ LoadObject(EAX, constant);
1654 __ movsd(destination.fpu_reg(), 1593 __ movsd(destination.fpu_reg(),
1655 FieldAddress(EAX, Double::value_offset())); 1594 FieldAddress(EAX, Double::value_offset()));
1656 __ popl(EAX); 1595 __ popl(EAX);
1657 } else if (Utils::DoublesBitEqual(constant.value(), 0.0)) { 1596 } else if (Utils::DoublesBitEqual(constant.value(), 0.0)) {
1658 __ xorps(destination.fpu_reg(), destination.fpu_reg()); 1597 __ xorps(destination.fpu_reg(), destination.fpu_reg());
1659 } else { 1598 } else {
1660 __ movsd(destination.fpu_reg(), Address::Absolute(addr)); 1599 __ movsd(destination.fpu_reg(), Address::Absolute(addr));
1661 } 1600 }
1662 } else if (destination.IsDoubleStackSlot()) { 1601 } else if (destination.IsDoubleStackSlot()) {
1663 const Double& constant = Double::Cast(source.constant()); 1602 const Double& constant = Double::Cast(source.constant());
1664 uword addr = FlowGraphBuilder::FindDoubleConstant(constant.value()); 1603 uword addr = FlowGraphBuilder::FindDoubleConstant(constant.value());
1665 if (addr == 0) { 1604 if (addr == 0) {
(...skipping 35 matching lines...)
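
The constant cases above pick the cheapest materialization available: xorl for Smi zero, a raw immediate for a Smi feeding an unboxed int32, xorps for a bit-pattern +0.0 double, and a pool or object load otherwise. The same decision tree as plain C++ (the enumerators merely name the instruction chosen; the helper functions are illustrative):

#include <cstdint>
#include <cstring>

// Illustrative decision tree for materializing constants, mirroring the
// cases above; not the VM's emitters.
enum class IntMove { kXorZero, kImmediate, kPoolLoad };
enum class DoubleMove { kLoadViaObject, kXorpsZero, kLoadAbsolute };

IntMove PickIntConstantMove(bool is_smi, intptr_t value, bool unboxed_int32) {
  if (is_smi && value == 0) return IntMove::kXorZero;       // xorl reg, reg
  if (is_smi && unboxed_int32) return IntMove::kImmediate;  // movl $imm, reg
  return IntMove::kPoolLoad;                                // LoadObjectSafely
}

// Utils::DoublesBitEqual(v, 0.0) is a bit comparison, so only +0.0 matches.
bool BitEqualToPositiveZero(double v) {
  uint64_t bits;
  std::memcpy(&bits, &v, sizeof(bits));
  return bits == 0;
}

DoubleMove PickDoubleConstantMove(double value, bool has_pool_address) {
  if (!has_pool_address) return DoubleMove::kLoadViaObject;  // push/LoadObject
  if (BitEqualToPositiveZero(value)) return DoubleMove::kXorpsZero;  // xorps
  return DoubleMove::kLoadAbsolute;  // movsd from the constant's address
}
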
1701 Exchange(source.reg(), destination.ToStackSlotAddress()); 1640 Exchange(source.reg(), destination.ToStackSlotAddress());
1702 } else if (source.IsStackSlot() && destination.IsRegister()) { 1641 } else if (source.IsStackSlot() && destination.IsRegister()) {
1703 Exchange(destination.reg(), source.ToStackSlotAddress()); 1642 Exchange(destination.reg(), source.ToStackSlotAddress());
1704 } else if (source.IsStackSlot() && destination.IsStackSlot()) { 1643 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
1705 Exchange(destination.ToStackSlotAddress(), source.ToStackSlotAddress()); 1644 Exchange(destination.ToStackSlotAddress(), source.ToStackSlotAddress());
1706 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) { 1645 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
1707 __ movaps(XMM0, source.fpu_reg()); 1646 __ movaps(XMM0, source.fpu_reg());
1708 __ movaps(source.fpu_reg(), destination.fpu_reg()); 1647 __ movaps(source.fpu_reg(), destination.fpu_reg());
1709 __ movaps(destination.fpu_reg(), XMM0); 1648 __ movaps(destination.fpu_reg(), XMM0);
1710 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) { 1649 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
1711 ASSERT(destination.IsDoubleStackSlot() || 1650 ASSERT(destination.IsDoubleStackSlot() || destination.IsQuadStackSlot() ||
1712 destination.IsQuadStackSlot() || 1651 source.IsDoubleStackSlot() || source.IsQuadStackSlot());
1713 source.IsDoubleStackSlot() || 1652 bool double_width =
1714 source.IsQuadStackSlot()); 1653 destination.IsDoubleStackSlot() || source.IsDoubleStackSlot();
1715 bool double_width = destination.IsDoubleStackSlot() || 1654 XmmRegister reg =
1716 source.IsDoubleStackSlot(); 1655 source.IsFpuRegister() ? source.fpu_reg() : destination.fpu_reg();
1717 XmmRegister reg = source.IsFpuRegister() ? source.fpu_reg()
1718 : destination.fpu_reg();
1719 const Address& slot_address = source.IsFpuRegister() 1656 const Address& slot_address = source.IsFpuRegister()
1720 ? destination.ToStackSlotAddress() 1657 ? destination.ToStackSlotAddress()
1721 : source.ToStackSlotAddress(); 1658 : source.ToStackSlotAddress();
1722 1659
1723 if (double_width) { 1660 if (double_width) {
1724 __ movsd(XMM0, slot_address); 1661 __ movsd(XMM0, slot_address);
1725 __ movsd(slot_address, reg); 1662 __ movsd(slot_address, reg);
1726 } else { 1663 } else {
1727 __ movups(XMM0, slot_address); 1664 __ movups(XMM0, slot_address);
1728 __ movups(slot_address, reg); 1665 __ movups(slot_address, reg);
1729 } 1666 }
1730 __ movaps(reg, XMM0); 1667 __ movaps(reg, XMM0);
1731 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) { 1668 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
(...skipping 108 matching lines...)
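
The FPU exchange above is the classic three-move swap through a scratch register: XMM0 takes the memory operand, the memory slot takes the register, and the register takes XMM0, with movsd versus movups selecting 64-bit or 128-bit width. A portable C++ sketch of the same idea (SwapThroughScratch is illustrative, not VM code):

#include <cstring>

// Three-move swap through a scratch buffer, as the emitted code does
// with XMM0: temp = slot; slot = reg; reg = temp.
// width == 8 models movsd (double); width == 16 models movups (quad).
void SwapThroughScratch(void* reg, void* slot, std::size_t width) {
  unsigned char scratch[16];          // stand-in for XMM0
  std::memcpy(scratch, slot, width);  // XMM0 <- stack slot
  std::memcpy(slot, reg, width);      // stack slot <- FPU register
  std::memcpy(reg, scratch, width);   // FPU register <- XMM0
}
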
1840 __ movups(reg, Address(ESP, 0)); 1777 __ movups(reg, Address(ESP, 0));
1841 __ addl(ESP, Immediate(kFpuRegisterSize)); 1778 __ addl(ESP, Immediate(kFpuRegisterSize));
1842 } 1779 }
1843 1780
1844 1781
1845 #undef __ 1782 #undef __
1846 1783
1847 } // namespace dart 1784 } // namespace dart
1848 1785
1849 #endif // defined TARGET_ARCH_IA32 1786 #endif // defined TARGET_ARCH_IA32