| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
| 6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
| 7 | 7 |
| 8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
| 9 | 9 |
| 10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
| 11 #include "vm/compiler.h" | 11 #include "vm/compiler.h" |
| 12 #include "vm/dart_entry.h" | 12 #include "vm/dart_entry.h" |
| 13 #include "vm/deopt_instructions.h" | 13 #include "vm/deopt_instructions.h" |
| 14 #include "vm/il_printer.h" | 14 #include "vm/il_printer.h" |
| 15 #include "vm/instructions.h" | 15 #include "vm/instructions.h" |
| 16 #include "vm/locations.h" | 16 #include "vm/locations.h" |
| 17 #include "vm/object_store.h" | 17 #include "vm/object_store.h" |
| 18 #include "vm/parser.h" | 18 #include "vm/parser.h" |
| 19 #include "vm/stack_frame.h" | 19 #include "vm/stack_frame.h" |
| 20 #include "vm/stub_code.h" | 20 #include "vm/stub_code.h" |
| 21 #include "vm/symbols.h" | 21 #include "vm/symbols.h" |
| 22 | 22 |
| 23 namespace dart { | 23 namespace dart { |
| 24 | 24 |
| 25 DEFINE_FLAG(bool, trap_on_deoptimization, false, "Trap on deoptimization."); | 25 DEFINE_FLAG(bool, trap_on_deoptimization, false, "Trap on deoptimization."); |
| 26 DEFINE_FLAG(bool, unbox_mints, true, "Optimize 64-bit integer arithmetic."); | 26 DEFINE_FLAG(bool, unbox_mints, true, "Optimize 64-bit integer arithmetic."); |
| 27 DECLARE_FLAG(bool, enable_simd_inline); | 27 DECLARE_FLAG(bool, enable_simd_inline); |
| 28 | 28 |
| 29 | |
| 30 FlowGraphCompiler::~FlowGraphCompiler() { | 29 FlowGraphCompiler::~FlowGraphCompiler() { |
| 31 // BlockInfos are zone-allocated, so their destructors are not called. | 30 // BlockInfos are zone-allocated, so their destructors are not called. |
| 32 // Verify the labels explicitly here. | 31 // Verify the labels explicitly here. |
| 33 for (int i = 0; i < block_info_.length(); ++i) { | 32 for (int i = 0; i < block_info_.length(); ++i) { |
| 34 ASSERT(!block_info_[i]->jump_label()->IsLinked()); | 33 ASSERT(!block_info_[i]->jump_label()->IsLinked()); |
| 35 ASSERT(!block_info_[i]->jump_label()->HasNear()); | 34 ASSERT(!block_info_[i]->jump_label()->HasNear()); |
| 36 } | 35 } |
| 37 } | 36 } |
| 38 | 37 |
| 39 | |
| 40 bool FlowGraphCompiler::SupportsUnboxedDoubles() { | 38 bool FlowGraphCompiler::SupportsUnboxedDoubles() { |
| 41 return true; | 39 return true; |
| 42 } | 40 } |
| 43 | 41 |
| 44 | |
| 45 bool FlowGraphCompiler::SupportsUnboxedMints() { | 42 bool FlowGraphCompiler::SupportsUnboxedMints() { |
| 46 return FLAG_unbox_mints; | 43 return FLAG_unbox_mints; |
| 47 } | 44 } |
| 48 | 45 |
| 49 | |
| 50 bool FlowGraphCompiler::SupportsUnboxedSimd128() { | 46 bool FlowGraphCompiler::SupportsUnboxedSimd128() { |
| 51 return FLAG_enable_simd_inline; | 47 return FLAG_enable_simd_inline; |
| 52 } | 48 } |
| 53 | 49 |
| 54 | |
| 55 bool FlowGraphCompiler::SupportsHardwareDivision() { | 50 bool FlowGraphCompiler::SupportsHardwareDivision() { |
| 56 return true; | 51 return true; |
| 57 } | 52 } |
| 58 | 53 |
| 59 | |
| 60 bool FlowGraphCompiler::CanConvertUnboxedMintToDouble() { | 54 bool FlowGraphCompiler::CanConvertUnboxedMintToDouble() { |
| 61 return false; | 55 return false; |
| 62 } | 56 } |
| 63 | 57 |
| 64 | |
| 65 void FlowGraphCompiler::EnterIntrinsicMode() { | 58 void FlowGraphCompiler::EnterIntrinsicMode() { |
| 66 ASSERT(!intrinsic_mode()); | 59 ASSERT(!intrinsic_mode()); |
| 67 intrinsic_mode_ = true; | 60 intrinsic_mode_ = true; |
| 68 ASSERT(!assembler()->constant_pool_allowed()); | 61 ASSERT(!assembler()->constant_pool_allowed()); |
| 69 } | 62 } |
| 70 | 63 |
| 71 | |
| 72 void FlowGraphCompiler::ExitIntrinsicMode() { | 64 void FlowGraphCompiler::ExitIntrinsicMode() { |
| 73 ASSERT(intrinsic_mode()); | 65 ASSERT(intrinsic_mode()); |
| 74 intrinsic_mode_ = false; | 66 intrinsic_mode_ = false; |
| 75 } | 67 } |
| 76 | 68 |
| 77 | |
| 78 RawTypedData* CompilerDeoptInfo::CreateDeoptInfo(FlowGraphCompiler* compiler, | 69 RawTypedData* CompilerDeoptInfo::CreateDeoptInfo(FlowGraphCompiler* compiler, |
| 79 DeoptInfoBuilder* builder, | 70 DeoptInfoBuilder* builder, |
| 80 const Array& deopt_table) { | 71 const Array& deopt_table) { |
| 81 if (deopt_env_ == NULL) { | 72 if (deopt_env_ == NULL) { |
| 82 ++builder->current_info_number_; | 73 ++builder->current_info_number_; |
| 83 return TypedData::null(); | 74 return TypedData::null(); |
| 84 } | 75 } |
| 85 | 76 |
| 86 intptr_t stack_height = compiler->StackSize(); | 77 intptr_t stack_height = compiler->StackSize(); |
| 87 AllocateIncomingParametersRecursive(deopt_env_, &stack_height); | 78 AllocateIncomingParametersRecursive(deopt_env_, &stack_height); |
| (...skipping 66 matching lines...) |
| 154 builder->AddCallerPc(slot_ix++); | 145 builder->AddCallerPc(slot_ix++); |
| 155 | 146 |
| 156 // For the outermost environment, set the incoming arguments. | 147 // For the outermost environment, set the incoming arguments. |
| 157 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { | 148 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { |
| 158 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); | 149 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); |
| 159 } | 150 } |
| 160 | 151 |
| 161 return builder->CreateDeoptInfo(deopt_table); | 152 return builder->CreateDeoptInfo(deopt_table); |
| 162 } | 153 } |
| 163 | 154 |
| 164 | |
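
The visible tail of `CreateDeoptInfo` emits the outermost environment: a slot for the caller PC, then the incoming fixed parameters walked from the highest index down so they land in ascending slot order. A minimal sketch of that slot walk, with illustrative names and an assumed parameter count rather than the VM's real `DeoptInfoBuilder` API:

```cpp
#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> slots;
  intptr_t slot_ix = 0;
  slots.push_back("caller-pc");  // builder->AddCallerPc(slot_ix++);
  slot_ix++;
  const intptr_t fixed_parameter_count = 3;  // assumed for the example
  // Walk from the last parameter down so param[0] ends up in the last slot,
  // mirroring the reverse loop in the code above.
  for (intptr_t i = fixed_parameter_count - 1; i >= 0; i--) {
    slots.push_back("param[" + std::to_string(i) + "]");  // AddCopy(...)
    slot_ix++;
  }
  for (size_t j = 0; j < slots.size(); j++) {
    std::printf("slot %d: %s\n", static_cast<int>(j), slots[j].c_str());
  }
  std::printf("%d slots total\n", static_cast<int>(slot_ix));
  return 0;
}
```
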
| 165 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler, | 155 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler, |
| 166 intptr_t stub_ix) { | 156 intptr_t stub_ix) { |
| 167 // Calls do not need stubs; they share a deoptimization trampoline. | 157 // Calls do not need stubs; they share a deoptimization trampoline. |
| 168 ASSERT(reason() != ICData::kDeoptAtCall); | 158 ASSERT(reason() != ICData::kDeoptAtCall); |
| 169 Assembler* assembler = compiler->assembler(); | 159 Assembler* assembler = compiler->assembler(); |
| 170 #define __ assembler-> | 160 #define __ assembler-> |
| 171 __ Comment("%s", Name()); | 161 __ Comment("%s", Name()); |
| 172 __ Bind(entry_label()); | 162 __ Bind(entry_label()); |
| 173 if (FLAG_trap_on_deoptimization) { | 163 if (FLAG_trap_on_deoptimization) { |
| 174 __ int3(); | 164 __ int3(); |
| 175 } | 165 } |
| 176 | 166 |
| 177 ASSERT(deopt_env() != NULL); | 167 ASSERT(deopt_env() != NULL); |
| 178 | 168 |
| 179 __ pushq(CODE_REG); | 169 __ pushq(CODE_REG); |
| 180 __ Call(*StubCode::Deoptimize_entry()); | 170 __ Call(*StubCode::Deoptimize_entry()); |
| 181 set_pc_offset(assembler->CodeSize()); | 171 set_pc_offset(assembler->CodeSize()); |
| 182 __ int3(); | 172 __ int3(); |
| 183 #undef __ | 173 #undef __ |
| 184 } | 174 } |
| 185 | 175 |
| 186 | |
| 187 #define __ assembler()-> | 176 #define __ assembler()-> |
| 188 | 177 |
| 189 | |
| 190 // Fall through if bool_register contains null. | 178 // Fall through if bool_register contains null. |
| 191 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, | 179 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, |
| 192 Label* is_true, | 180 Label* is_true, |
| 193 Label* is_false) { | 181 Label* is_false) { |
| 194 Label fall_through; | 182 Label fall_through; |
| 195 __ CompareObject(bool_register, Object::null_object()); | 183 __ CompareObject(bool_register, Object::null_object()); |
| 196 __ j(EQUAL, &fall_through, Assembler::kNearJump); | 184 __ j(EQUAL, &fall_through, Assembler::kNearJump); |
| 197 __ CompareObject(bool_register, Bool::True()); | 185 __ CompareObject(bool_register, Bool::True()); |
| 198 __ j(EQUAL, is_true); | 186 __ j(EQUAL, is_true); |
| 199 __ jmp(is_false); | 187 __ jmp(is_false); |
| 200 __ Bind(&fall_through); | 188 __ Bind(&fall_through); |
| 201 } | 189 } |
| 202 | 190 |
| 203 | |
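
`GenerateBoolToJump` turns the tri-state stub result into control flow: null falls through (the caller treats it as a cache miss), `Bool::True()` branches to `is_true`, and anything else branches to `is_false`. The same dispatch modeled in plain C++, with the tri-state register value as a `std::optional<bool>`:

```cpp
#include <cassert>
#include <optional>

enum class Outcome { kFallThrough, kIsTrue, kIsFalse };

// Models the branch structure emitted above: null -> fall through,
// Bool::True() -> is_true, otherwise -> is_false.
Outcome BoolToJump(std::optional<bool> bool_register) {
  if (!bool_register.has_value()) return Outcome::kFallThrough;  // cache miss
  return *bool_register ? Outcome::kIsTrue : Outcome::kIsFalse;
}

int main() {
  assert(BoolToJump(std::nullopt) == Outcome::kFallThrough);
  assert(BoolToJump(true) == Outcome::kIsTrue);
  assert(BoolToJump(false) == Outcome::kIsFalse);
  return 0;
}
```
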
| 204 // Clobbers RCX. | 191 // Clobbers RCX. |
| 205 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( | 192 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( |
| 206 TypeTestStubKind test_kind, | 193 TypeTestStubKind test_kind, |
| 207 Register instance_reg, | 194 Register instance_reg, |
| 208 Register instantiator_type_arguments_reg, | 195 Register instantiator_type_arguments_reg, |
| 209 Register function_type_arguments_reg, | 196 Register function_type_arguments_reg, |
| 210 Register temp_reg, | 197 Register temp_reg, |
| 211 Label* is_instance_lbl, | 198 Label* is_instance_lbl, |
| 212 Label* is_not_instance_lbl) { | 199 Label* is_not_instance_lbl) { |
| 213 const SubtypeTestCache& type_test_cache = | 200 const SubtypeTestCache& type_test_cache = |
| (...skipping 23 matching lines...) |
| 237 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False. | 224 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False. |
| 238 ASSERT(instance_reg != RCX); | 225 ASSERT(instance_reg != RCX); |
| 239 ASSERT(temp_reg != RCX); | 226 ASSERT(temp_reg != RCX); |
| 240 __ Drop(2); | 227 __ Drop(2); |
| 241 __ popq(instance_reg); // Restore receiver. | 228 __ popq(instance_reg); // Restore receiver. |
| 242 __ popq(temp_reg); // Discard. | 229 __ popq(temp_reg); // Discard. |
| 243 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl); | 230 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl); |
| 244 return type_test_cache.raw(); | 231 return type_test_cache.raw(); |
| 245 } | 232 } |
| 246 | 233 |
| 247 | |
| 248 // Jumps to the labels 'is_instance' or 'is_not_instance', respectively, if | 234 // Jumps to the labels 'is_instance' or 'is_not_instance', respectively, if |
| 249 // the type test is conclusive; otherwise falls through when the type test | 235 // the type test is conclusive; otherwise falls through when the type test |
| 250 // could not be completed. | 236 // could not be completed. |
| 251 // RAX: instance (must survive). | 237 // RAX: instance (must survive). |
| 252 // Clobbers R10. | 238 // Clobbers R10. |
| 253 RawSubtypeTestCache* | 239 RawSubtypeTestCache* |
| 254 FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest( | 240 FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest( |
| 255 TokenPosition token_pos, | 241 TokenPosition token_pos, |
| 256 const AbstractType& type, | 242 const AbstractType& type, |
| 257 Label* is_instance_lbl, | 243 Label* is_instance_lbl, |
| (...skipping 57 matching lines...) |
| 315 // Regular subtype test cache involving instance's type arguments. | 301 // Regular subtype test cache involving instance's type arguments. |
| 316 const Register kInstantiatorTypeArgumentsReg = kNoRegister; | 302 const Register kInstantiatorTypeArgumentsReg = kNoRegister; |
| 317 const Register kFunctionTypeArgumentsReg = kNoRegister; | 303 const Register kFunctionTypeArgumentsReg = kNoRegister; |
| 318 const Register kTempReg = R10; | 304 const Register kTempReg = R10; |
| 319 return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, kInstanceReg, | 305 return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, kInstanceReg, |
| 320 kInstantiatorTypeArgumentsReg, | 306 kInstantiatorTypeArgumentsReg, |
| 321 kFunctionTypeArgumentsReg, kTempReg, | 307 kFunctionTypeArgumentsReg, kTempReg, |
| 322 is_instance_lbl, is_not_instance_lbl); | 308 is_instance_lbl, is_not_instance_lbl); |
| 323 } | 309 } |
| 324 | 310 |
| 325 | |
| 326 void FlowGraphCompiler::CheckClassIds(Register class_id_reg, | 311 void FlowGraphCompiler::CheckClassIds(Register class_id_reg, |
| 327 const GrowableArray<intptr_t>& class_ids, | 312 const GrowableArray<intptr_t>& class_ids, |
| 328 Label* is_equal_lbl, | 313 Label* is_equal_lbl, |
| 329 Label* is_not_equal_lbl) { | 314 Label* is_not_equal_lbl) { |
| 330 for (intptr_t i = 0; i < class_ids.length(); i++) { | 315 for (intptr_t i = 0; i < class_ids.length(); i++) { |
| 331 __ cmpl(class_id_reg, Immediate(class_ids[i])); | 316 __ cmpl(class_id_reg, Immediate(class_ids[i])); |
| 332 __ j(EQUAL, is_equal_lbl); | 317 __ j(EQUAL, is_equal_lbl); |
| 333 } | 318 } |
| 334 __ jmp(is_not_equal_lbl); | 319 __ jmp(is_not_equal_lbl); |
| 335 } | 320 } |
| 336 | 321 |
| 337 | |
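
`CheckClassIds` emits one `cmpl`/`j(EQUAL)` pair per candidate and a final unconditional jump, i.e. a linear scan that only pays off for small candidate sets. An equivalent sketch:

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Linear scan over candidate class ids, mirroring the cmpl/j(EQUAL) pairs
// emitted above; the trailing jmp corresponds to returning false.
bool MatchesAnyClassId(intptr_t class_id,
                       const std::vector<intptr_t>& class_ids) {
  for (intptr_t cid : class_ids) {
    if (class_id == cid) return true;  // j(EQUAL, is_equal_lbl)
  }
  return false;  // jmp(is_not_equal_lbl)
}

int main() {
  const std::vector<intptr_t> cids = {5, 7, 42};
  assert(MatchesAnyClassId(7, cids));
  assert(!MatchesAnyClassId(9, cids));
  return 0;
}
```
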
| 338 // Testing against an instantiated type with no arguments, without | 322 // Testing against an instantiated type with no arguments, without |
| 339 // SubtypeTestCache. | 323 // SubtypeTestCache. |
| 340 // RAX: instance to test against (preserved). | 324 // RAX: instance to test against (preserved). |
| 341 // Clobbers R10, R13. | 325 // Clobbers R10, R13. |
| 342 // Returns true if there is a fallthrough. | 326 // Returns true if there is a fallthrough. |
| 343 bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( | 327 bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( |
| 344 TokenPosition token_pos, | 328 TokenPosition token_pos, |
| 345 const AbstractType& type, | 329 const AbstractType& type, |
| 346 Label* is_instance_lbl, | 330 Label* is_instance_lbl, |
| 347 Label* is_not_instance_lbl) { | 331 Label* is_not_instance_lbl) { |
| (...skipping 47 matching lines...) |
| 395 } | 379 } |
| 396 // Compare if the classes are equal. | 380 // Compare if the classes are equal. |
| 397 if (!type_class.is_abstract()) { | 381 if (!type_class.is_abstract()) { |
| 398 __ cmpl(kClassIdReg, Immediate(type_class.id())); | 382 __ cmpl(kClassIdReg, Immediate(type_class.id())); |
| 399 __ j(EQUAL, is_instance_lbl); | 383 __ j(EQUAL, is_instance_lbl); |
| 400 } | 384 } |
| 401 // Otherwise fallthrough. | 385 // Otherwise fallthrough. |
| 402 return true; | 386 return true; |
| 403 } | 387 } |
| 404 | 388 |
| 405 | |
| 406 // Uses SubtypeTestCache to store instance class and result. | 389 // Uses SubtypeTestCache to store instance class and result. |
| 407 // RAX: instance to test. | 390 // RAX: instance to test. |
| 408 // Clobbers R10, R13. | 391 // Clobbers R10, R13. |
| 409 // Immediate class test already done. | 392 // Immediate class test already done. |
| 410 // TODO(srdjan): Implement a quicker subtype check, as type test | 393 // TODO(srdjan): Implement a quicker subtype check, as type test |
| 411 // arrays can grow too large, but they may be useful when optimizing | 394 // arrays can grow too large, but they may be useful when optimizing |
| 412 // code (type-feedback). | 395 // code (type-feedback). |
| 413 RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup( | 396 RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup( |
| 414 TokenPosition token_pos, | 397 TokenPosition token_pos, |
| 415 const Class& type_class, | 398 const Class& type_class, |
| (...skipping 11 matching lines...) |
| 427 | 410 |
| 428 const Register kInstantiatorTypeArgumentsReg = kNoRegister; | 411 const Register kInstantiatorTypeArgumentsReg = kNoRegister; |
| 429 const Register kFunctionTypeArgumentsReg = kNoRegister; | 412 const Register kFunctionTypeArgumentsReg = kNoRegister; |
| 430 const Register kTempReg = R10; | 413 const Register kTempReg = R10; |
| 431 return GenerateCallSubtypeTestStub(kTestTypeOneArg, kInstanceReg, | 414 return GenerateCallSubtypeTestStub(kTestTypeOneArg, kInstanceReg, |
| 432 kInstantiatorTypeArgumentsReg, | 415 kInstantiatorTypeArgumentsReg, |
| 433 kFunctionTypeArgumentsReg, kTempReg, | 416 kFunctionTypeArgumentsReg, kTempReg, |
| 434 is_instance_lbl, is_not_instance_lbl); | 417 is_instance_lbl, is_not_instance_lbl); |
| 435 } | 418 } |
| 436 | 419 |
| 437 | |
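
The one-argument subtype test cache consulted here memoizes a per-class-id verdict; a miss comes back as null and falls through to slower checks. A conceptual model of that lookup contract (the VM's actual `SubtypeTestCache` is a flat array probed by stub code, so the container below is purely illustrative):

```cpp
#include <cassert>
#include <cstdint>
#include <optional>
#include <utility>
#include <vector>

// Conceptual model of a one-argument subtype test cache: class id -> result.
class Subtype1TestCache {
 public:
  // A miss returns nullopt, matching the stub's "null in RCX" outcome that
  // GenerateBoolToJump turns into a fall-through.
  std::optional<bool> Lookup(intptr_t cid) const {
    for (const auto& entry : entries_) {
      if (entry.first == cid) return entry.second;
    }
    return std::nullopt;
  }
  void Insert(intptr_t cid, bool result) { entries_.emplace_back(cid, result); }

 private:
  std::vector<std::pair<intptr_t, bool>> entries_;
};

int main() {
  Subtype1TestCache cache;
  assert(!cache.Lookup(10).has_value());  // miss: fall through to the runtime
  cache.Insert(10, true);                 // the runtime records the verdict
  assert(*cache.Lookup(10) == true);      // subsequent tests hit the cache
  return 0;
}
```
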
| 438 // Generates an inlined check if 'type' is a type parameter or the type itself. | 420 // Generates an inlined check if 'type' is a type parameter or the type itself. |
| 439 // RAX: instance (preserved). | 421 // RAX: instance (preserved). |
| 440 // Clobbers RDI, RDX, R10. | 422 // Clobbers RDI, RDX, R10. |
| 441 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( | 423 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( |
| 442 TokenPosition token_pos, | 424 TokenPosition token_pos, |
| 443 const AbstractType& type, | 425 const AbstractType& type, |
| 444 Label* is_instance_lbl, | 426 Label* is_instance_lbl, |
| 445 Label* is_not_instance_lbl) { | 427 Label* is_not_instance_lbl) { |
| 446 __ Comment("UninstantiatedTypeTest"); | 428 __ Comment("UninstantiatedTypeTest"); |
| 447 ASSERT(!type.IsInstantiated()); | 429 ASSERT(!type.IsInstantiated()); |
| (...skipping 60 matching lines...) |
| 508 // arguments are determined at runtime by the instantiator(s). | 490 // arguments are determined at runtime by the instantiator(s). |
| 509 const Register kTempReg = R10; | 491 const Register kTempReg = R10; |
| 510 return GenerateCallSubtypeTestStub(kTestTypeFourArgs, kInstanceReg, | 492 return GenerateCallSubtypeTestStub(kTestTypeFourArgs, kInstanceReg, |
| 511 kInstantiatorTypeArgumentsReg, | 493 kInstantiatorTypeArgumentsReg, |
| 512 kFunctionTypeArgumentsReg, kTempReg, | 494 kFunctionTypeArgumentsReg, kTempReg, |
| 513 is_instance_lbl, is_not_instance_lbl); | 495 is_instance_lbl, is_not_instance_lbl); |
| 514 } | 496 } |
| 515 return SubtypeTestCache::null(); | 497 return SubtypeTestCache::null(); |
| 516 } | 498 } |
| 517 | 499 |
| 518 | |
| 519 // Inputs: | 500 // Inputs: |
| 520 // - RAX: instance to test against (preserved). | 501 // - RAX: instance to test against (preserved). |
| 521 // - RDX: optional instantiator type arguments (preserved). | 502 // - RDX: optional instantiator type arguments (preserved). |
| 522 // - RCX: optional function type arguments (preserved). | 503 // - RCX: optional function type arguments (preserved). |
| 523 // Clobbers R10, R13. | 504 // Clobbers R10, R13. |
| 524 // Returns: | 505 // Returns: |
| 525 // - preserved instance in RAX, optional instantiator type arguments in RDX, and | 506 // - preserved instance in RAX, optional instantiator type arguments in RDX, and |
| 526 // optional function type arguments in RCX. | 507 // optional function type arguments in RCX. |
| 527 // Note that this inlined code must be followed by the runtime_call code, as it | 508 // Note that this inlined code must be followed by the runtime_call code, as it |
| 528 // may fall through to it. Otherwise, this inline code will jump to the label | 509 // may fall through to it. Otherwise, this inline code will jump to the label |
| (...skipping 22 matching lines...) |
| 551 return GenerateSubtype1TestCacheLookup( | 532 return GenerateSubtype1TestCacheLookup( |
| 552 token_pos, type_class, is_instance_lbl, is_not_instance_lbl); | 533 token_pos, type_class, is_instance_lbl, is_not_instance_lbl); |
| 553 } else { | 534 } else { |
| 554 return SubtypeTestCache::null(); | 535 return SubtypeTestCache::null(); |
| 555 } | 536 } |
| 556 } | 537 } |
| 557 return GenerateUninstantiatedTypeTest(token_pos, type, is_instance_lbl, | 538 return GenerateUninstantiatedTypeTest(token_pos, type, is_instance_lbl, |
| 558 is_not_instance_lbl); | 539 is_not_instance_lbl); |
| 559 } | 540 } |
| 560 | 541 |
| 561 | |
| 562 // If the instanceof type test cannot be performed successfully at compile time and | 542 // If the instanceof type test cannot be performed successfully at compile time and |
| 563 // therefore eliminated, optimize it by adding inlined tests for: | 543 // therefore eliminated, optimize it by adding inlined tests for: |
| 564 // - NULL -> return type == Null (type is not Object or dynamic). | 544 // - NULL -> return type == Null (type is not Object or dynamic). |
| 565 // - Smi -> compile time subtype check (only if dst class is not parameterized). | 545 // - Smi -> compile time subtype check (only if dst class is not parameterized). |
| 566 // - Class equality (only if class is not parameterized). | 546 // - Class equality (only if class is not parameterized). |
| 567 // Inputs: | 547 // Inputs: |
| 568 // - RAX: object. | 548 // - RAX: object. |
| 569 // - RDX: instantiator type arguments or raw_null. | 549 // - RDX: instantiator type arguments or raw_null. |
| 570 // - RCX: function type arguments or raw_null. | 550 // - RCX: function type arguments or raw_null. |
| 571 // Returns: | 551 // Returns: |
| (...skipping 27 matching lines...) |
| 599 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); | 579 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); |
| 600 test_cache = | 580 test_cache = |
| 601 GenerateInlineInstanceof(token_pos, type, &is_instance, &is_not_instance); | 581 GenerateInlineInstanceof(token_pos, type, &is_instance, &is_not_instance); |
| 602 | 582 |
| 603 // test_cache is null if there is no fall-through. | 583 // test_cache is null if there is no fall-through. |
| 604 Label done; | 584 Label done; |
| 605 if (!test_cache.IsNull()) { | 585 if (!test_cache.IsNull()) { |
| 606 // Generate runtime call. | 586 // Generate runtime call. |
| 607 __ movq(RDX, Address(RSP, 1 * kWordSize)); // Get instantiator type args. | 587 __ movq(RDX, Address(RSP, 1 * kWordSize)); // Get instantiator type args. |
| 608 __ movq(RCX, Address(RSP, 0 * kWordSize)); // Get function type args. | 588 __ movq(RCX, Address(RSP, 0 * kWordSize)); // Get function type args. |
| 609 __ PushObject(Object::null_object()); // Make room for the result. | 589 __ PushObject(Object::null_object()); // Make room for the result. |
| 610 __ pushq(RAX); // Push the instance. | 590 __ pushq(RAX); // Push the instance. |
| 611 __ PushObject(type); // Push the type. | 591 __ PushObject(type); // Push the type. |
| 612 __ pushq(RDX); // Instantiator type arguments. | 592 __ pushq(RDX); // Instantiator type arguments. |
| 613 __ pushq(RCX); // Function type arguments. | 593 __ pushq(RCX); // Function type arguments. |
| 614 __ LoadUniqueObject(RAX, test_cache); | 594 __ LoadUniqueObject(RAX, test_cache); |
| 615 __ pushq(RAX); | 595 __ pushq(RAX); |
| 616 GenerateRuntimeCall(token_pos, deopt_id, kInstanceofRuntimeEntry, 5, locs); | 596 GenerateRuntimeCall(token_pos, deopt_id, kInstanceofRuntimeEntry, 5, locs); |
| 617 // Pop the parameters supplied to the runtime entry. The result of the | 597 // Pop the parameters supplied to the runtime entry. The result of the |
| 618 // instanceof runtime call will be left as the result of the operation. | 598 // instanceof runtime call will be left as the result of the operation. |
| 619 __ Drop(5); | 599 __ Drop(5); |
| 620 __ popq(RAX); | 600 __ popq(RAX); |
| 621 __ jmp(&done, Assembler::kNearJump); | 601 __ jmp(&done, Assembler::kNearJump); |
| 622 } | 602 } |
| 623 __ Bind(&is_not_instance); | 603 __ Bind(&is_not_instance); |
| 624 __ LoadObject(RAX, Bool::Get(false)); | 604 __ LoadObject(RAX, Bool::Get(false)); |
| 625 __ jmp(&done, Assembler::kNearJump); | 605 __ jmp(&done, Assembler::kNearJump); |
| 626 | 606 |
| 627 __ Bind(&is_instance); | 607 __ Bind(&is_instance); |
| 628 __ LoadObject(RAX, Bool::Get(true)); | 608 __ LoadObject(RAX, Bool::Get(true)); |
| 629 __ Bind(&done); | 609 __ Bind(&done); |
| 630 __ popq(RCX); // Remove pushed function type arguments. | 610 __ popq(RCX); // Remove pushed function type arguments. |
| 631 __ popq(RDX); // Remove pushed instantiator type arguments. | 611 __ popq(RDX); // Remove pushed instantiator type arguments. |
| 632 } | 612 } |
| 633 | 613 |
| 634 | |
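
The slow path above follows a fixed stack discipline: push a null result slot, push the five runtime-entry arguments, let the runtime entry overwrite the result slot, drop the arguments, and pop the result. A sketch of that choreography (`GenerateAssertAssignable` below uses the same pattern with six arguments):

```cpp
#include <cassert>
#include <string>
#include <vector>

// Models the instanceof slow path's stack discipline, one string per slot.
int main() {
  std::vector<std::string> stack;
  stack.push_back("result-slot");             // PushObject(null_object)
  stack.push_back("instance");                // pushq(RAX)
  stack.push_back("type");                    // PushObject(type)
  stack.push_back("instantiator-type-args");  // pushq(RDX)
  stack.push_back("function-type-args");      // pushq(RCX)
  stack.push_back("subtype-test-cache");      // pushq(RAX)
  // The runtime entry writes its answer into the result slot in place.
  stack[stack.size() - 6] = "Bool::True()";
  for (int i = 0; i < 5; i++) stack.pop_back();  // Drop(5)
  std::string result = stack.back();             // popq(RAX)
  stack.pop_back();
  assert(result == "Bool::True()");
  assert(stack.empty());
  return 0;
}
```
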
| 635 // Optimize assignable type check by adding inlined tests for: | 614 // Optimize assignable type check by adding inlined tests for: |
| 636 // - NULL -> return NULL. | 615 // - NULL -> return NULL. |
| 637 // - Smi -> compile time subtype check (only if dst class is not parameterized). | 616 // - Smi -> compile time subtype check (only if dst class is not parameterized). |
| 638 // - Class equality (only if class is not parameterized). | 617 // - Class equality (only if class is not parameterized). |
| 639 // Inputs: | 618 // Inputs: |
| 640 // - RAX: object. | 619 // - RAX: object. |
| 641 // - RDX: instantiator type arguments or raw_null. | 620 // - RDX: instantiator type arguments or raw_null. |
| 642 // - RCX: function type arguments or raw_null. | 621 // - RCX: function type arguments or raw_null. |
| 643 // Returns: | 622 // Returns: |
| 644 // - object in RAX for successful assignable check (or throws TypeError). | 623 // - object in RAX for successful assignable check (or throws TypeError). |
| (...skipping 36 matching lines...) |
| 681 } | 660 } |
| 682 | 661 |
| 683 // Generate inline type check, linking to runtime call if not assignable. | 662 // Generate inline type check, linking to runtime call if not assignable. |
| 684 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); | 663 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); |
| 685 test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable, | 664 test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable, |
| 686 &runtime_call); | 665 &runtime_call); |
| 687 | 666 |
| 688 __ Bind(&runtime_call); | 667 __ Bind(&runtime_call); |
| 689 __ movq(RDX, Address(RSP, 1 * kWordSize)); // Get instantiator type args. | 668 __ movq(RDX, Address(RSP, 1 * kWordSize)); // Get instantiator type args. |
| 690 __ movq(RCX, Address(RSP, 0 * kWordSize)); // Get function type args. | 669 __ movq(RCX, Address(RSP, 0 * kWordSize)); // Get function type args. |
| 691 __ PushObject(Object::null_object()); // Make room for the result. | 670 __ PushObject(Object::null_object()); // Make room for the result. |
| 692 __ pushq(RAX); // Push the source object. | 671 __ pushq(RAX); // Push the source object. |
| 693 __ PushObject(dst_type); // Push the type of the destination. | 672 __ PushObject(dst_type); // Push the type of the destination. |
| 694 __ pushq(RDX); // Instantiator type arguments. | 673 __ pushq(RDX); // Instantiator type arguments. |
| 695 __ pushq(RCX); // Function type arguments. | 674 __ pushq(RCX); // Function type arguments. |
| 696 __ PushObject(dst_name); // Push the name of the destination. | 675 __ PushObject(dst_name); // Push the name of the destination. |
| 697 __ LoadUniqueObject(RAX, test_cache); | 676 __ LoadUniqueObject(RAX, test_cache); |
| 698 __ pushq(RAX); | 677 __ pushq(RAX); |
| 699 GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); | 678 GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); |
| 700 // Pop the parameters supplied to the runtime entry. The result of the | 679 // Pop the parameters supplied to the runtime entry. The result of the |
| 701 // type check runtime call is the checked value. | 680 // type check runtime call is the checked value. |
| 702 __ Drop(6); | 681 __ Drop(6); |
| 703 __ popq(RAX); | 682 __ popq(RAX); |
| 704 | 683 |
| 705 __ Bind(&is_assignable); | 684 __ Bind(&is_assignable); |
| 706 __ popq(RCX); // Remove pushed function type arguments. | 685 __ popq(RCX); // Remove pushed function type arguments. |
| 707 __ popq(RDX); // Remove pushed instantiator type arguments. | 686 __ popq(RDX); // Remove pushed instantiator type arguments. |
| 708 } | 687 } |
| 709 | 688 |
| 710 | |
| 711 void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) { | 689 void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) { |
| 712 if (is_optimizing()) { | 690 if (is_optimizing()) { |
| 713 return; | 691 return; |
| 714 } | 692 } |
| 715 Definition* defn = instr->AsDefinition(); | 693 Definition* defn = instr->AsDefinition(); |
| 716 if ((defn != NULL) && defn->HasTemp()) { | 694 if ((defn != NULL) && defn->HasTemp()) { |
| 717 Location value = defn->locs()->out(0); | 695 Location value = defn->locs()->out(0); |
| 718 if (value.IsRegister()) { | 696 if (value.IsRegister()) { |
| 719 __ pushq(value.reg()); | 697 __ pushq(value.reg()); |
| 720 } else if (value.IsConstant()) { | 698 } else if (value.IsConstant()) { |
| 721 __ PushObject(value.constant()); | 699 __ PushObject(value.constant()); |
| 722 } else { | 700 } else { |
| 723 ASSERT(value.IsStackSlot()); | 701 ASSERT(value.IsStackSlot()); |
| 724 __ pushq(value.ToStackSlotAddress()); | 702 __ pushq(value.ToStackSlotAddress()); |
| 725 } | 703 } |
| 726 } | 704 } |
| 727 } | 705 } |
| 728 | 706 |
| 729 | |
| 730 void FlowGraphCompiler::CopyParameters() { | 707 void FlowGraphCompiler::CopyParameters() { |
| 731 __ Comment("Copy parameters"); | 708 __ Comment("Copy parameters"); |
| 732 const Function& function = parsed_function().function(); | 709 const Function& function = parsed_function().function(); |
| 733 LocalScope* scope = parsed_function().node_sequence()->scope(); | 710 LocalScope* scope = parsed_function().node_sequence()->scope(); |
| 734 const int num_fixed_params = function.num_fixed_parameters(); | 711 const int num_fixed_params = function.num_fixed_parameters(); |
| 735 const int num_opt_pos_params = function.NumOptionalPositionalParameters(); | 712 const int num_opt_pos_params = function.NumOptionalPositionalParameters(); |
| 736 const int num_opt_named_params = function.NumOptionalNamedParameters(); | 713 const int num_opt_named_params = function.NumOptionalNamedParameters(); |
| 737 const int num_params = | 714 const int num_params = |
| 738 num_fixed_params + num_opt_pos_params + num_opt_named_params; | 715 num_fixed_params + num_opt_pos_params + num_opt_named_params; |
| 739 ASSERT(function.NumParameters() == num_params); | 716 ASSERT(function.NumParameters() == num_params); |
| (...skipping 182 matching lines...) |
| 922 __ jmp(&null_args_loop_condition, Assembler::kNearJump); | 899 __ jmp(&null_args_loop_condition, Assembler::kNearJump); |
| 923 const Address original_argument_addr(RBP, RCX, TIMES_8, | 900 const Address original_argument_addr(RBP, RCX, TIMES_8, |
| 924 (kParamEndSlotFromFp + 1) * kWordSize); | 901 (kParamEndSlotFromFp + 1) * kWordSize); |
| 925 __ Bind(&null_args_loop); | 902 __ Bind(&null_args_loop); |
| 926 __ movq(original_argument_addr, R12); | 903 __ movq(original_argument_addr, R12); |
| 927 __ Bind(&null_args_loop_condition); | 904 __ Bind(&null_args_loop_condition); |
| 928 __ decq(RCX); | 905 __ decq(RCX); |
| 929 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); | 906 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); |
| 930 } | 907 } |
| 931 | 908 |
| 932 | |
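
The closing loop of `CopyParameters` nulls out the original argument slots (R12 holds null here) so the GC never sees stale values once the parameters have been copied into their frame slots; note the `decq`-then-`j(POSITIVE)` shape, which decrements before testing the sign. A sketch with an assumed slot count, since RCX's initial value is set in the elided lines:

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

int main() {
  // Stale argument words modeled with a sentinel; null modeled as 0.
  std::vector<intptr_t> slots(4, 0xDEAD);
  intptr_t rcx = static_cast<intptr_t>(slots.size());  // assumed initial value
  // Mirrors the loop above: enter at the condition, then
  // decq(RCX); j(POSITIVE, &null_args_loop) -- decrement, then test the sign.
  while (--rcx >= 0) {
    slots[rcx] = 0;  // movq(original_argument_addr, R12): store null
  }
  for (intptr_t slot : slots) assert(slot == 0);
  return 0;
}
```
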
| 933 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { | 909 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { |
| 934 // TOS: return address. | 910 // TOS: return address. |
| 935 // +1 : receiver. | 911 // +1 : receiver. |
| 936 // The sequence node has one return node whose input is a load-field node. | 912 // The sequence node has one return node whose input is a load-field node. |
| 937 __ Comment("Inlined Getter"); | 913 __ Comment("Inlined Getter"); |
| 938 __ movq(RAX, Address(RSP, 1 * kWordSize)); | 914 __ movq(RAX, Address(RSP, 1 * kWordSize)); |
| 939 __ movq(RAX, FieldAddress(RAX, offset)); | 915 __ movq(RAX, FieldAddress(RAX, offset)); |
| 940 __ ret(); | 916 __ ret(); |
| 941 } | 917 } |
| 942 | 918 |
| 943 | |
| 944 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { | 919 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { |
| 945 // TOS: return address. | 920 // TOS: return address. |
| 946 // +1 : value | 921 // +1 : value |
| 947 // +2 : receiver. | 922 // +2 : receiver. |
| 948 // The sequence node has one store node and one return-null node. | 923 // The sequence node has one store node and one return-null node. |
| 949 __ Comment("Inlined Setter"); | 924 __ Comment("Inlined Setter"); |
| 950 __ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver. | 925 __ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver. |
| 951 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value. | 926 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value. |
| 952 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); | 927 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); |
| 953 __ LoadObject(RAX, Object::null_object()); | 928 __ LoadObject(RAX, Object::null_object()); |
| 954 __ ret(); | 929 __ ret(); |
| 955 } | 930 } |
| 956 | 931 |
| 957 | |
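
Both intrinsic bodies are straight-line accesses to caller stack slots: the getter returns the field at `offset`, and the setter routes its store through `StoreIntoObject` (the GC write barrier) before returning null. A field access modeled in C++, with a fixed illustrative layout standing in for `FieldAddress` and the barrier reduced to a comment:

```cpp
#include <cassert>
#include <cstdint>

struct HeapObject {
  intptr_t fields[4];  // illustrative layout; real object layouts vary
};

// Getter: movq(RAX, FieldAddress(receiver, offset)); ret.
intptr_t InlinedGetter(const HeapObject* receiver, int field_index) {
  return receiver->fields[field_index];
}

// Setter: StoreIntoObject(...) then return null (modeled as 0). The real
// code also runs the GC write barrier, elided here.
intptr_t InlinedSetter(HeapObject* receiver, int field_index, intptr_t value) {
  receiver->fields[field_index] = value;
  return 0;  // LoadObject(RAX, Object::null_object()); ret
}

int main() {
  HeapObject obj = {{0, 0, 0, 0}};
  assert(InlinedSetter(&obj, 2, 99) == 0);
  assert(InlinedGetter(&obj, 2) == 99);
  return 0;
}
```
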
| 958 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc | 932 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc |
| 959 // needs to be updated to match. | 933 // needs to be updated to match. |
| 960 void FlowGraphCompiler::EmitFrameEntry() { | 934 void FlowGraphCompiler::EmitFrameEntry() { |
| 961 if (flow_graph().IsCompiledForOsr()) { | 935 if (flow_graph().IsCompiledForOsr()) { |
| 962 intptr_t extra_slots = StackSize() - flow_graph().num_stack_locals() - | 936 intptr_t extra_slots = StackSize() - flow_graph().num_stack_locals() - |
| 963 flow_graph().num_copied_params(); | 937 flow_graph().num_copied_params(); |
| 964 ASSERT(extra_slots >= 0); | 938 ASSERT(extra_slots >= 0); |
| 965 __ EnterOsrFrame(extra_slots * kWordSize); | 939 __ EnterOsrFrame(extra_slots * kWordSize); |
| 966 } else { | 940 } else { |
| 967 const Register new_pp = R13; | 941 const Register new_pp = R13; |
| (...skipping 16 matching lines...) |
| 984 Immediate(GetOptimizationThreshold())); | 958 Immediate(GetOptimizationThreshold())); |
| 985 ASSERT(function_reg == RDI); | 959 ASSERT(function_reg == RDI); |
| 986 __ J(GREATER_EQUAL, *StubCode::OptimizeFunction_entry(), new_pp); | 960 __ J(GREATER_EQUAL, *StubCode::OptimizeFunction_entry(), new_pp); |
| 987 } | 961 } |
| 988 ASSERT(StackSize() >= 0); | 962 ASSERT(StackSize() >= 0); |
| 989 __ Comment("Enter frame"); | 963 __ Comment("Enter frame"); |
| 990 __ EnterDartFrame(StackSize() * kWordSize, new_pp); | 964 __ EnterDartFrame(StackSize() * kWordSize, new_pp); |
| 991 } | 965 } |
| 992 } | 966 } |
| 993 | 967 |
| 994 | |
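
For non-OSR entries the prologue compares the function's usage counter against the optimization threshold and jumps to the `OptimizeFunction` stub when it is reached; otherwise it falls through into `EnterDartFrame`. A toy model of that hotness check, with the threshold value and the counter increment (which lives in the elided lines) assumed:

```cpp
#include <cstdio>

constexpr int kOptimizationThreshold = 3;  // assumed; set by VM flags

struct Function {
  int usage_counter = 0;
};

void OptimizeFunction(Function* f) { std::printf("optimizing\n"); }

void EnterFunction(Function* f) {
  f->usage_counter++;  // assumed to happen in the elided prologue lines
  if (f->usage_counter >= kOptimizationThreshold) {  // J(GREATER_EQUAL, ...)
    OptimizeFunction(f);
    return;
  }
  std::printf("unoptimized body runs\n");  // EnterDartFrame(StackSize(), ...)
}

int main() {
  Function f;
  for (int i = 0; i < 4; i++) EnterFunction(&f);
  return 0;
}
```
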
| 995 void FlowGraphCompiler::CompileGraph() { | 968 void FlowGraphCompiler::CompileGraph() { |
| 996 InitCompiler(); | 969 InitCompiler(); |
| 997 const Function& function = parsed_function().function(); | 970 const Function& function = parsed_function().function(); |
| 998 | 971 |
| 999 #ifdef DART_PRECOMPILER | 972 #ifdef DART_PRECOMPILER |
| 1000 if (function.IsDynamicFunction()) { | 973 if (function.IsDynamicFunction()) { |
| 1001 __ MonomorphicCheckedEntry(); | 974 __ MonomorphicCheckedEntry(); |
| 1002 } | 975 } |
| 1003 #endif // DART_PRECOMPILER | 976 #endif // DART_PRECOMPILER |
| 1004 | 977 |
| (...skipping 115 matching lines...) |
| 1120 | 1093 |
| 1121 EndCodeSourceRange(TokenPosition::kDartCodePrologue); | 1094 EndCodeSourceRange(TokenPosition::kDartCodePrologue); |
| 1122 ASSERT(!block_order().is_empty()); | 1095 ASSERT(!block_order().is_empty()); |
| 1123 VisitBlocks(); | 1096 VisitBlocks(); |
| 1124 | 1097 |
| 1125 __ int3(); | 1098 __ int3(); |
| 1126 ASSERT(assembler()->constant_pool_allowed()); | 1099 ASSERT(assembler()->constant_pool_allowed()); |
| 1127 GenerateDeferredCode(); | 1100 GenerateDeferredCode(); |
| 1128 } | 1101 } |
| 1129 | 1102 |
| 1130 | |
| 1131 void FlowGraphCompiler::GenerateCall(TokenPosition token_pos, | 1103 void FlowGraphCompiler::GenerateCall(TokenPosition token_pos, |
| 1132 const StubEntry& stub_entry, | 1104 const StubEntry& stub_entry, |
| 1133 RawPcDescriptors::Kind kind, | 1105 RawPcDescriptors::Kind kind, |
| 1134 LocationSummary* locs) { | 1106 LocationSummary* locs) { |
| 1135 __ Call(stub_entry); | 1107 __ Call(stub_entry); |
| 1136 EmitCallsiteMetaData(token_pos, Thread::kNoDeoptId, kind, locs); | 1108 EmitCallsiteMetaData(token_pos, Thread::kNoDeoptId, kind, locs); |
| 1137 } | 1109 } |
| 1138 | 1110 |
| 1139 | |
| 1140 void FlowGraphCompiler::GeneratePatchableCall(TokenPosition token_pos, | 1111 void FlowGraphCompiler::GeneratePatchableCall(TokenPosition token_pos, |
| 1141 const StubEntry& stub_entry, | 1112 const StubEntry& stub_entry, |
| 1142 RawPcDescriptors::Kind kind, | 1113 RawPcDescriptors::Kind kind, |
| 1143 LocationSummary* locs) { | 1114 LocationSummary* locs) { |
| 1144 __ CallPatchable(stub_entry); | 1115 __ CallPatchable(stub_entry); |
| 1145 EmitCallsiteMetaData(token_pos, Thread::kNoDeoptId, kind, locs); | 1116 EmitCallsiteMetaData(token_pos, Thread::kNoDeoptId, kind, locs); |
| 1146 } | 1117 } |
| 1147 | 1118 |
| 1148 | |
| 1149 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, | 1119 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, |
| 1150 TokenPosition token_pos, | 1120 TokenPosition token_pos, |
| 1151 const StubEntry& stub_entry, | 1121 const StubEntry& stub_entry, |
| 1152 RawPcDescriptors::Kind kind, | 1122 RawPcDescriptors::Kind kind, |
| 1153 LocationSummary* locs) { | 1123 LocationSummary* locs) { |
| 1154 __ CallPatchable(stub_entry); | 1124 __ CallPatchable(stub_entry); |
| 1155 EmitCallsiteMetaData(token_pos, deopt_id, kind, locs); | 1125 EmitCallsiteMetaData(token_pos, deopt_id, kind, locs); |
| 1156 // Marks either the continuation point in unoptimized code or the | 1126 // Marks either the continuation point in unoptimized code or the |
| 1157 // deoptimization point in optimized code, after call. | 1127 // deoptimization point in optimized code, after call. |
| 1158 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); | 1128 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); |
| 1159 if (is_optimizing()) { | 1129 if (is_optimizing()) { |
| 1160 AddDeoptIndexAtCall(deopt_id_after); | 1130 AddDeoptIndexAtCall(deopt_id_after); |
| 1161 } else { | 1131 } else { |
| 1162 // Add deoptimization continuation point after the call and before the | 1132 // Add deoptimization continuation point after the call and before the |
| 1163 // arguments are removed. | 1133 // arguments are removed. |
| 1164 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); | 1134 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); |
| 1165 } | 1135 } |
| 1166 } | 1136 } |
| 1167 | 1137 |
| 1168 | |
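
`GenerateDartCall` records two descriptors per call site: the call's own deopt id and a continuation id obtained via `Thread::ToDeoptAfter`, marking the point after the call but before the arguments are dropped. A sketch of the pairing this implies; the step-of-two allocation and `id + 1` encoding are assumptions of the sketch, not quoted from the VM. The same before/after bookkeeping recurs in `GenerateStaticDartCall` and `GenerateRuntimeCall` below.

```cpp
#include <cassert>
#include <cstdint>

// Assumed encoding: deopt ids allocated in steps of two, with the "after"
// id being the successor of the "before" id.
constexpr intptr_t kDeoptIdStep = 2;

constexpr intptr_t ToDeoptAfter(intptr_t deopt_id) { return deopt_id + 1; }

int main() {
  intptr_t next_deopt_id = 0;
  const intptr_t call_deopt_id = next_deopt_id;  // marks the call itself
  next_deopt_id += kDeoptIdStep;
  // The continuation descriptor lands between the call and the argument drop.
  assert(ToDeoptAfter(call_deopt_id) == call_deopt_id + 1);
  assert(ToDeoptAfter(call_deopt_id) < next_deopt_id);
  return 0;
}
```
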
| 1169 void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id, | 1138 void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id, |
| 1170 TokenPosition token_pos, | 1139 TokenPosition token_pos, |
| 1171 const StubEntry& stub_entry, | 1140 const StubEntry& stub_entry, |
| 1172 RawPcDescriptors::Kind kind, | 1141 RawPcDescriptors::Kind kind, |
| 1173 LocationSummary* locs, | 1142 LocationSummary* locs, |
| 1174 const Function& target) { | 1143 const Function& target) { |
| 1175 // Call sites to the same target can share object pool entries. These | 1144 // Call sites to the same target can share object pool entries. These |
| 1176 // call sites are never patched for breakpoints: the function is deoptimized | 1145 // call sites are never patched for breakpoints: the function is deoptimized |
| 1177 // and the unoptimized code (which uses IC calls for static calls) is patched instead. | 1146 // and the unoptimized code (which uses IC calls for static calls) is patched instead. |
| 1178 ASSERT(is_optimizing()); | 1147 ASSERT(is_optimizing()); |
| 1179 __ CallWithEquivalence(stub_entry, target); | 1148 __ CallWithEquivalence(stub_entry, target); |
| 1180 | 1149 |
| 1181 EmitCallsiteMetaData(token_pos, deopt_id, kind, locs); | 1150 EmitCallsiteMetaData(token_pos, deopt_id, kind, locs); |
| 1182 // Marks either the continuation point in unoptimized code or the | 1151 // Marks either the continuation point in unoptimized code or the |
| 1183 // deoptimization point in optimized code, after call. | 1152 // deoptimization point in optimized code, after call. |
| 1184 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); | 1153 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); |
| 1185 if (is_optimizing()) { | 1154 if (is_optimizing()) { |
| 1186 AddDeoptIndexAtCall(deopt_id_after); | 1155 AddDeoptIndexAtCall(deopt_id_after); |
| 1187 } else { | 1156 } else { |
| 1188 // Add deoptimization continuation point after the call and before the | 1157 // Add deoptimization continuation point after the call and before the |
| 1189 // arguments are removed. | 1158 // arguments are removed. |
| 1190 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); | 1159 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); |
| 1191 } | 1160 } |
| 1192 AddStaticCallTarget(target); | 1161 AddStaticCallTarget(target); |
| 1193 } | 1162 } |
| 1194 | 1163 |
| 1195 | |
| 1196 void FlowGraphCompiler::GenerateRuntimeCall(TokenPosition token_pos, | 1164 void FlowGraphCompiler::GenerateRuntimeCall(TokenPosition token_pos, |
| 1197 intptr_t deopt_id, | 1165 intptr_t deopt_id, |
| 1198 const RuntimeEntry& entry, | 1166 const RuntimeEntry& entry, |
| 1199 intptr_t argument_count, | 1167 intptr_t argument_count, |
| 1200 LocationSummary* locs) { | 1168 LocationSummary* locs) { |
| 1201 __ CallRuntime(entry, argument_count); | 1169 __ CallRuntime(entry, argument_count); |
| 1202 EmitCallsiteMetaData(token_pos, deopt_id, RawPcDescriptors::kOther, locs); | 1170 EmitCallsiteMetaData(token_pos, deopt_id, RawPcDescriptors::kOther, locs); |
| 1203 if (deopt_id != Thread::kNoDeoptId) { | 1171 if (deopt_id != Thread::kNoDeoptId) { |
| 1204 // Marks either the continuation point in unoptimized code or the | 1172 // Marks either the continuation point in unoptimized code or the |
| 1205 // deoptimization point in optimized code, after call. | 1173 // deoptimization point in optimized code, after call. |
| 1206 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); | 1174 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); |
| 1207 if (is_optimizing()) { | 1175 if (is_optimizing()) { |
| 1208 AddDeoptIndexAtCall(deopt_id_after); | 1176 AddDeoptIndexAtCall(deopt_id_after); |
| 1209 } else { | 1177 } else { |
| 1210 // Add deoptimization continuation point after the call and before the | 1178 // Add deoptimization continuation point after the call and before the |
| 1211 // arguments are removed. | 1179 // arguments are removed. |
| 1212 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); | 1180 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); |
| 1213 } | 1181 } |
| 1214 } | 1182 } |
| 1215 } | 1183 } |
| 1216 | 1184 |
| 1217 | |
| 1218 void FlowGraphCompiler::EmitUnoptimizedStaticCall(intptr_t argument_count, | 1185 void FlowGraphCompiler::EmitUnoptimizedStaticCall(intptr_t argument_count, |
| 1219 intptr_t deopt_id, | 1186 intptr_t deopt_id, |
| 1220 TokenPosition token_pos, | 1187 TokenPosition token_pos, |
| 1221 LocationSummary* locs, | 1188 LocationSummary* locs, |
| 1222 const ICData& ic_data) { | 1189 const ICData& ic_data) { |
| 1223 const StubEntry* stub_entry = | 1190 const StubEntry* stub_entry = |
| 1224 StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested()); | 1191 StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested()); |
| 1225 __ LoadObject(RBX, ic_data); | 1192 __ LoadObject(RBX, ic_data); |
| 1226 GenerateDartCall(deopt_id, token_pos, *stub_entry, | 1193 GenerateDartCall(deopt_id, token_pos, *stub_entry, |
| 1227 RawPcDescriptors::kUnoptStaticCall, locs); | 1194 RawPcDescriptors::kUnoptStaticCall, locs); |
| 1228 __ Drop(argument_count, RCX); | 1195 __ Drop(argument_count, RCX); |
| 1229 } | 1196 } |
| 1230 | 1197 |
| 1231 | |
| 1232 void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) { | 1198 void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) { |
| 1233 // We do not check for overflow when incrementing the edge counter. The | 1199 // We do not check for overflow when incrementing the edge counter. The |
| 1234 // function should normally be optimized long before the counter can | 1200 // function should normally be optimized long before the counter can |
| 1235 // overflow; and though we do not reset the counters when we optimize or | 1201 // overflow; and though we do not reset the counters when we optimize or |
| 1236 // deoptimize, there is a bound on the number of | 1202 // deoptimize, there is a bound on the number of |
| 1237 // optimization/deoptimization cycles we will attempt. | 1203 // optimization/deoptimization cycles we will attempt. |
| 1238 ASSERT(!edge_counters_array_.IsNull()); | 1204 ASSERT(!edge_counters_array_.IsNull()); |
| 1239 ASSERT(assembler_->constant_pool_allowed()); | 1205 ASSERT(assembler_->constant_pool_allowed()); |
| 1240 __ Comment("Edge counter"); | 1206 __ Comment("Edge counter"); |
| 1241 __ LoadObject(RAX, edge_counters_array_); | 1207 __ LoadObject(RAX, edge_counters_array_); |
| 1242 __ IncrementSmiField(FieldAddress(RAX, Array::element_offset(edge_id)), 1); | 1208 __ IncrementSmiField(FieldAddress(RAX, Array::element_offset(edge_id)), 1); |
| 1243 } | 1209 } |
| 1244 | 1210 |
| 1245 | |
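
`IncrementSmiField` bumps a Smi-tagged counter in place, with no overflow check for the reasons the comment gives. Assuming the standard one-bit Smi tag shift, incrementing the counter by 1 amounts to adding the raw value 2 to the word:

```cpp
#include <cassert>
#include <cstdint>

// Assumed Smi scheme: payload shifted left by one tag bit, tag bit clear.
constexpr intptr_t SmiTag(intptr_t value) { return value << 1; }
constexpr intptr_t SmiValue(intptr_t raw) { return raw >> 1; }

int main() {
  intptr_t edge_counter = SmiTag(41);
  edge_counter += SmiTag(1);  // IncrementSmiField(FieldAddress(...), 1)
  assert(SmiValue(edge_counter) == 42);
  return 0;
}
```
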
| 1246 void FlowGraphCompiler::EmitOptimizedInstanceCall(const StubEntry& stub_entry, | 1211 void FlowGraphCompiler::EmitOptimizedInstanceCall(const StubEntry& stub_entry, |
| 1247 const ICData& ic_data, | 1212 const ICData& ic_data, |
| 1248 intptr_t argument_count, | 1213 intptr_t argument_count, |
| 1249 intptr_t deopt_id, | 1214 intptr_t deopt_id, |
| 1250 TokenPosition token_pos, | 1215 TokenPosition token_pos, |
| 1251 LocationSummary* locs) { | 1216 LocationSummary* locs) { |
| 1252 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0); | 1217 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0); |
| 1253 // Each ICData propagated from unoptimized to optimized code contains the | 1218 // Each ICData propagated from unoptimized to optimized code contains the |
| 1254 // function that corresponds to the Dart function of that IC call. Due | 1219 // function that corresponds to the Dart function of that IC call. Due |
| 1255 // to inlining in optimized code, that function may not correspond to the | 1220 // to inlining in optimized code, that function may not correspond to the |
| 1256 // top-level function (parsed_function().function()), which could be | 1221 // top-level function (parsed_function().function()), which could be |
| 1257 // reoptimized and whose counter needs to be incremented. | 1222 // reoptimized and whose counter needs to be incremented. |
| 1258 // Pass the function explicitly; it is used by the IC stub. | 1223 // Pass the function explicitly; it is used by the IC stub. |
| 1259 __ LoadObject(RDI, parsed_function().function()); | 1224 __ LoadObject(RDI, parsed_function().function()); |
| 1260 __ LoadUniqueObject(RBX, ic_data); | 1225 __ LoadUniqueObject(RBX, ic_data); |
| 1261 GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall, | 1226 GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall, |
| 1262 locs); | 1227 locs); |
| 1263 __ Drop(argument_count, RCX); | 1228 __ Drop(argument_count, RCX); |
| 1264 } | 1229 } |
| 1265 | 1230 |
| 1266 | |
| 1267 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry, | 1231 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry, |
| 1268 const ICData& ic_data, | 1232 const ICData& ic_data, |
| 1269 intptr_t argument_count, | 1233 intptr_t argument_count, |
| 1270 intptr_t deopt_id, | 1234 intptr_t deopt_id, |
| 1271 TokenPosition token_pos, | 1235 TokenPosition token_pos, |
| 1272 LocationSummary* locs) { | 1236 LocationSummary* locs) { |
| 1273 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0); | 1237 ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0); |
| 1274 __ LoadUniqueObject(RBX, ic_data); | 1238 __ LoadUniqueObject(RBX, ic_data); |
| 1275 GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall, | 1239 GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall, |
| 1276 locs); | 1240 locs); |
| 1277 __ Drop(argument_count, RCX); | 1241 __ Drop(argument_count, RCX); |
| 1278 } | 1242 } |
| 1279 | 1243 |
| 1280 | |
| 1281 void FlowGraphCompiler::EmitMegamorphicInstanceCall( | 1244 void FlowGraphCompiler::EmitMegamorphicInstanceCall( |
| 1282 const String& name, | 1245 const String& name, |
| 1283 const Array& arguments_descriptor, | 1246 const Array& arguments_descriptor, |
| 1284 intptr_t argument_count, | 1247 intptr_t argument_count, |
| 1285 intptr_t deopt_id, | 1248 intptr_t deopt_id, |
| 1286 TokenPosition token_pos, | 1249 TokenPosition token_pos, |
| 1287 LocationSummary* locs, | 1250 LocationSummary* locs, |
| 1288 intptr_t try_index, | 1251 intptr_t try_index, |
| 1289 intptr_t slow_path_argument_count) { | 1252 intptr_t slow_path_argument_count) { |
| 1290 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0)); | 1253 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0)); |
| (...skipping 24 matching lines...) Expand all Loading... |
| 1315 AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId, | 1278 AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId, |
| 1316 token_pos); | 1279 token_pos); |
| 1317 // Add deoptimization continuation point after the call and before the | 1280 // Add deoptimization continuation point after the call and before the |
| 1318 // arguments are removed. | 1281 // arguments are removed. |
| 1319 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); | 1282 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); |
| 1320 } | 1283 } |
| 1321 EmitCatchEntryState(pending_deoptimization_env_, try_index); | 1284 EmitCatchEntryState(pending_deoptimization_env_, try_index); |
| 1322 __ Drop(argument_count, RCX); | 1285 __ Drop(argument_count, RCX); |
| 1323 } | 1286 } |
| 1324 | 1287 |
| 1325 | |
| 1326 void FlowGraphCompiler::EmitSwitchableInstanceCall(const ICData& ic_data, | 1288 void FlowGraphCompiler::EmitSwitchableInstanceCall(const ICData& ic_data, |
| 1327 intptr_t argument_count, | 1289 intptr_t argument_count, |
| 1328 intptr_t deopt_id, | 1290 intptr_t deopt_id, |
| 1329 TokenPosition token_pos, | 1291 TokenPosition token_pos, |
| 1330 LocationSummary* locs) { | 1292 LocationSummary* locs) { |
| 1331 ASSERT(ic_data.NumArgsTested() == 1); | 1293 ASSERT(ic_data.NumArgsTested() == 1); |
| 1332 const Code& initial_stub = | 1294 const Code& initial_stub = |
| 1333 Code::ZoneHandle(StubCode::ICCallThroughFunction_entry()->code()); | 1295 Code::ZoneHandle(StubCode::ICCallThroughFunction_entry()->code()); |
| 1334 | 1296 |
| 1335 __ Comment("SwitchableCall"); | 1297 __ Comment("SwitchableCall"); |
| 1336 __ movq(RDI, Address(RSP, (argument_count - 1) * kWordSize)); | 1298 __ movq(RDI, Address(RSP, (argument_count - 1) * kWordSize)); |
| 1337 __ LoadUniqueObject(CODE_REG, initial_stub); | 1299 __ LoadUniqueObject(CODE_REG, initial_stub); |
| 1338 __ movq(RCX, FieldAddress(CODE_REG, Code::checked_entry_point_offset())); | 1300 __ movq(RCX, FieldAddress(CODE_REG, Code::checked_entry_point_offset())); |
| 1339 __ LoadUniqueObject(RBX, ic_data); | 1301 __ LoadUniqueObject(RBX, ic_data); |
| 1340 __ call(RCX); | 1302 __ call(RCX); |
| 1341 | 1303 |
| 1342 | |
| 1343 EmitCallsiteMetaData(token_pos, deopt_id, RawPcDescriptors::kOther, locs); | 1304 EmitCallsiteMetaData(token_pos, deopt_id, RawPcDescriptors::kOther, locs); |
| 1344 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); | 1305 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); |
| 1345 if (is_optimizing()) { | 1306 if (is_optimizing()) { |
| 1346 AddDeoptIndexAtCall(deopt_id_after); | 1307 AddDeoptIndexAtCall(deopt_id_after); |
| 1347 } else { | 1308 } else { |
| 1348 // Add deoptimization continuation point after the call and before the | 1309 // Add deoptimization continuation point after the call and before the |
| 1349 // arguments are removed. | 1310 // arguments are removed. |
| 1350 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); | 1311 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); |
| 1351 } | 1312 } |
| 1352 __ Drop(argument_count, RCX); | 1313 __ Drop(argument_count, RCX); |
| 1353 } | 1314 } |
| 1354 | 1315 |
| 1355 | |
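
A switchable call site loads a code object and calls through its checked entry point, so the runtime can retarget the site (monomorphic, IC, megamorphic) by swapping what the site references instead of patching the call instruction itself. A sketch of that indirection with illustrative types:

```cpp
#include <cstdio>

// Illustrative stand-in for a Code object with a checked entry point.
struct Code {
  void (*checked_entry_point)(void* receiver, void* lookup_data);
};

void ICLookup(void* receiver, void* lookup_data) {
  std::printf("IC stub: look up target for receiver\n");
}

void MegamorphicLookup(void* receiver, void* lookup_data) {
  std::printf("megamorphic stub: hash-table dispatch\n");
}

int main() {
  Code ic_stub = {&ICLookup};
  Code megamorphic_stub = {&MegamorphicLookup};

  Code* site_target = &ic_stub;  // LoadUniqueObject(CODE_REG, initial_stub)
  site_target->checked_entry_point(nullptr, nullptr);  // call(RCX)

  // Later, the runtime retargets the site without touching the call itself.
  site_target = &megamorphic_stub;
  site_target->checked_entry_point(nullptr, nullptr);
  return 0;
}
```
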
| 1356 void FlowGraphCompiler::EmitOptimizedStaticCall( | 1316 void FlowGraphCompiler::EmitOptimizedStaticCall( |
| 1357 const Function& function, | 1317 const Function& function, |
| 1358 const Array& arguments_descriptor, | 1318 const Array& arguments_descriptor, |
| 1359 intptr_t argument_count, | 1319 intptr_t argument_count, |
| 1360 intptr_t deopt_id, | 1320 intptr_t deopt_id, |
| 1361 TokenPosition token_pos, | 1321 TokenPosition token_pos, |
| 1362 LocationSummary* locs) { | 1322 LocationSummary* locs) { |
| 1363 ASSERT(!function.IsClosureFunction()); | 1323 ASSERT(!function.IsClosureFunction()); |
| 1364 if (function.HasOptionalParameters() || | 1324 if (function.HasOptionalParameters() || |
| 1365 (FLAG_reify_generic_functions && function.IsGeneric())) { | 1325 (FLAG_reify_generic_functions && function.IsGeneric())) { |
| 1366 __ LoadObject(R10, arguments_descriptor); | 1326 __ LoadObject(R10, arguments_descriptor); |
| 1367 } else { | 1327 } else { |
| 1368 __ xorq(R10, R10); // GC safe smi zero because of stub. | 1328 __ xorq(R10, R10); // GC safe smi zero because of stub. |
| 1369 } | 1329 } |
| 1370 // Do not use the code from the function, but let the code be patched so that | 1330 // Do not use the code from the function, but let the code be patched so that |
| 1371 // we can record the outgoing edges to other code. | 1331 // we can record the outgoing edges to other code. |
| 1372 GenerateStaticDartCall(deopt_id, token_pos, | 1332 GenerateStaticDartCall(deopt_id, token_pos, |
| 1373 *StubCode::CallStaticFunction_entry(), | 1333 *StubCode::CallStaticFunction_entry(), |
| 1374 RawPcDescriptors::kOther, locs, function); | 1334 RawPcDescriptors::kOther, locs, function); |
| 1375 __ Drop(argument_count, RCX); | 1335 __ Drop(argument_count, RCX); |
| 1376 } | 1336 } |
| 1377 | 1337 |
| 1378 | |
| 1379 Condition FlowGraphCompiler::EmitEqualityRegConstCompare( | 1338 Condition FlowGraphCompiler::EmitEqualityRegConstCompare( |
| 1380 Register reg, | 1339 Register reg, |
| 1381 const Object& obj, | 1340 const Object& obj, |
| 1382 bool needs_number_check, | 1341 bool needs_number_check, |
| 1383 TokenPosition token_pos, | 1342 TokenPosition token_pos, |
| 1384 intptr_t deopt_id) { | 1343 intptr_t deopt_id) { |
| 1385 ASSERT(!needs_number_check || | 1344 ASSERT(!needs_number_check || |
| 1386 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint())); | 1345 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint())); |
| 1387 | 1346 |
| 1388 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { | 1347 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { |
| (...skipping 13 matching lines...) |
| 1402 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, deopt_id, token_pos); | 1361 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, deopt_id, token_pos); |
| 1403 // Stub returns result in flags (result of a cmpq; we need ZF computed). | 1362 // Stub returns result in flags (result of a cmpq; we need ZF computed). |
| 1404 __ popq(reg); // Discard constant. | 1363 __ popq(reg); // Discard constant. |
| 1405 __ popq(reg); // Restore 'reg'. | 1364 __ popq(reg); // Restore 'reg'. |
| 1406 } else { | 1365 } else { |
| 1407 __ CompareObject(reg, obj); | 1366 __ CompareObject(reg, obj); |
| 1408 } | 1367 } |
| 1409 return EQUAL; | 1368 return EQUAL; |
| 1410 } | 1369 } |
| 1411 | 1370 |
| 1412 | |
| 1413 Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left, | 1371 Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left, |
| 1414 Register right, | 1372 Register right, |
| 1415 bool needs_number_check, | 1373 bool needs_number_check, |
| 1416 TokenPosition token_pos, | 1374 TokenPosition token_pos, |
| 1417 intptr_t deopt_id) { | 1375 intptr_t deopt_id) { |
| 1418 if (needs_number_check) { | 1376 if (needs_number_check) { |
| 1419 __ pushq(left); | 1377 __ pushq(left); |
| 1420 __ pushq(right); | 1378 __ pushq(right); |
| 1421 if (is_optimizing()) { | 1379 if (is_optimizing()) { |
| 1422 __ CallPatchable(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); | 1380 __ CallPatchable(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); |
| 1423 } else { | 1381 } else { |
| 1424 __ CallPatchable(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); | 1382 __ CallPatchable(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); |
| 1425 } | 1383 } |
| 1426 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, deopt_id, token_pos); | 1384 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, deopt_id, token_pos); |
| 1427 // Stub returns result in flags (result of a cmpq, we need ZF computed). | 1385 // Stub returns result in flags (result of a cmpq, we need ZF computed). |
| 1428 __ popq(right); | 1386 __ popq(right); |
| 1429 __ popq(left); | 1387 __ popq(left); |
| 1430 } else { | 1388 } else { |
| 1431 __ CompareRegisters(left, right); | 1389 __ CompareRegisters(left, right); |
| 1432 } | 1390 } |
| 1433 return EQUAL; | 1391 return EQUAL; |
| 1434 } | 1392 } |
| 1435 | 1393 |
| 1436 | |
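A note on the two equality emitters above: the IdenticalWithNumberCheck stubs exist because identity on boxed numbers must fall back to a value comparison; once the compiler boxes and unboxes numbers, two equal doubles can occupy distinct heap objects, so a raw pointer compare (the plain cmpq path) is not enough. The following is a minimal standalone sketch of that semantics, using a hypothetical Box type rather than the VM's object model:

    // Minimal sketch (hypothetical Box type, not the VM's classes) of the
    // semantics behind the IdenticalWithNumberCheck stubs called above:
    // identity on boxed numbers falls back to comparing the boxed values,
    // because equal doubles can live in distinct heap objects.
    #include <cstdio>

    struct Box { double value; };  // stand-in for a heap-allocated Double

    static bool IdenticalWithNumberCheck(const Box* a, const Box* b) {
      if (a == b) return true;      // fast path: same object (the cmpq case)
      return a->value == b->value;  // slow path: compare the boxed values
    }

    int main() {
      Box x{3.14}, y{3.14};
      printf("%d\n", IdenticalWithNumberCheck(&x, &x));  // 1: same box
      printf("%d\n", IdenticalWithNumberCheck(&x, &y));  // 1: equal values, distinct boxes
      return 0;
    }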
| 1437 // This function must be in sync with FlowGraphCompiler::RecordSafepoint and | 1394 // This function must be in sync with FlowGraphCompiler::RecordSafepoint and |
| 1438 // FlowGraphCompiler::SlowPathEnvironmentFor. | 1395 // FlowGraphCompiler::SlowPathEnvironmentFor. |
| 1439 void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) { | 1396 void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) { |
| 1440 #if defined(DEBUG) | 1397 #if defined(DEBUG) |
| 1441 locs->CheckWritableInputs(); | 1398 locs->CheckWritableInputs(); |
| 1442 ClobberDeadTempRegisters(locs); | 1399 ClobberDeadTempRegisters(locs); |
| 1443 #endif | 1400 #endif |
| 1444 | 1401 |
| 1445 // TODO(vegorov): avoid saving non-volatile registers. | 1402 // TODO(vegorov): avoid saving non-volatile registers. |
| 1446 __ PushRegisters(locs->live_registers()->cpu_registers(), | 1403 __ PushRegisters(locs->live_registers()->cpu_registers(), |
| 1447 locs->live_registers()->fpu_registers()); | 1404 locs->live_registers()->fpu_registers()); |
| 1448 } | 1405 } |
| 1449 | 1406 |
| 1450 | |
| 1451 void FlowGraphCompiler::RestoreLiveRegisters(LocationSummary* locs) { | 1407 void FlowGraphCompiler::RestoreLiveRegisters(LocationSummary* locs) { |
| 1452 __ PopRegisters(locs->live_registers()->cpu_registers(), | 1408 __ PopRegisters(locs->live_registers()->cpu_registers(), |
| 1453 locs->live_registers()->fpu_registers()); | 1409 locs->live_registers()->fpu_registers()); |
| 1454 } | 1410 } |
| 1455 | 1411 |
| 1456 | |
| 1457 #if defined(DEBUG) | 1412 #if defined(DEBUG) |
| 1458 void FlowGraphCompiler::ClobberDeadTempRegisters(LocationSummary* locs) { | 1413 void FlowGraphCompiler::ClobberDeadTempRegisters(LocationSummary* locs) { |
| 1459 // Clobber temporaries that have not been manually preserved. | 1414 // Clobber temporaries that have not been manually preserved. |
| 1460 for (intptr_t i = 0; i < locs->temp_count(); ++i) { | 1415 for (intptr_t i = 0; i < locs->temp_count(); ++i) { |
| 1461 Location tmp = locs->temp(i); | 1416 Location tmp = locs->temp(i); |
| 1462 // TODO(zerny): clobber non-live temporary FPU registers. | 1417 // TODO(zerny): clobber non-live temporary FPU registers. |
| 1463 if (tmp.IsRegister() && | 1418 if (tmp.IsRegister() && |
| 1464 !locs->live_registers()->ContainsRegister(tmp.reg())) { | 1419 !locs->live_registers()->ContainsRegister(tmp.reg())) { |
| 1465 __ movq(tmp.reg(), Immediate(0xf7)); | 1420 __ movq(tmp.reg(), Immediate(0xf7)); |
| 1466 } | 1421 } |
| 1467 } | 1422 } |
| 1468 } | 1423 } |
| 1469 #endif | 1424 #endif |
| 1470 | 1425 |
| 1471 | |
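ClobberDeadTempRegisters is a debug-only poisoning pattern: temporaries that were not explicitly preserved are overwritten with a recognizable marker (0xf7), so code that wrongly reads a dead temp misbehaves deterministically instead of working by accident. A rough standalone analogue, with a simplified register file and hypothetical names rather than the VM's LocationSummary:

    // Rough analogue (simplified data model, hypothetical names) of the
    // debug-only clobbering above: poison every temp register that is not
    // in the live set, so accidental reads surface as a marker value.
    #include <cstdint>
    #include <cstdio>
    #include <set>
    #include <vector>

    constexpr uint64_t kPoison = 0xf7;

    static void ClobberDeadTemps(std::vector<uint64_t>& regs,
                                 const std::vector<int>& temps,
                                 const std::set<int>& live) {
      for (int r : temps) {
        if (live.count(r) == 0) regs[r] = kPoison;  // dead temp: poison it
      }
    }

    int main() {
      std::vector<uint64_t> regs = {1, 2, 3, 4};
      ClobberDeadTemps(regs, /*temps=*/{1, 3}, /*live=*/{3});
      printf("%llu %llu\n",
             (unsigned long long)regs[1],   // 247: poisoned
             (unsigned long long)regs[3]);  // 4: preserved, it was live
      return 0;
    }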
| 1472 void FlowGraphCompiler::EmitTestAndCallLoadReceiver( | 1426 void FlowGraphCompiler::EmitTestAndCallLoadReceiver( |
| 1473 intptr_t argument_count, | 1427 intptr_t argument_count, |
| 1474 const Array& arguments_descriptor) { | 1428 const Array& arguments_descriptor) { |
| 1475 __ Comment("EmitTestAndCall"); | 1429 __ Comment("EmitTestAndCall"); |
| 1476 // Load receiver into RAX. | 1430 // Load receiver into RAX. |
| 1477 __ movq(RAX, Address(RSP, (argument_count - 1) * kWordSize)); | 1431 __ movq(RAX, Address(RSP, (argument_count - 1) * kWordSize)); |
| 1478 __ LoadObject(R10, arguments_descriptor); | 1432 __ LoadObject(R10, arguments_descriptor); |
| 1479 } | 1433 } |
| 1480 | 1434 |
| 1481 | |
| 1482 void FlowGraphCompiler::EmitTestAndCallSmiBranch(Label* label, bool if_smi) { | 1435 void FlowGraphCompiler::EmitTestAndCallSmiBranch(Label* label, bool if_smi) { |
| 1483 __ testq(RAX, Immediate(kSmiTagMask)); | 1436 __ testq(RAX, Immediate(kSmiTagMask)); |
| 1484 // Jump if receiver is (not) Smi. | 1437 // Jump if receiver is (not) Smi. |
| 1485 __ j(if_smi ? ZERO : NOT_ZERO, label); | 1438 __ j(if_smi ? ZERO : NOT_ZERO, label); |
| 1486 } | 1439 } |
| 1487 | 1440 |
| 1488 | |
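The smi branch above relies on the VM's pointer tagging: a Smi is stored as its value shifted left by one, leaving a low tag bit of 0, while heap-object pointers carry a low bit of 1, so a single testq against kSmiTagMask classifies the receiver. A standalone sketch of the encoding (plain C++, with constants mirroring but not taken from the VM headers):

    // Standalone illustration (hypothetical constants mirroring the VM's
    // kSmiTag* values) of the tag test behind `testq RAX, kSmiTagMask`:
    // a Smi is (value << 1) with a low bit of 0; heap pointers have a low
    // bit of 1, so masking the low bit separates the two.
    #include <cstdint>
    #include <cstdio>

    constexpr uintptr_t kSmiTagMask = 1;

    static uintptr_t BoxSmi(intptr_t value) {
      return static_cast<uintptr_t>(value) << 1;  // tag bit 0 stays clear
    }
    static bool IsSmi(uintptr_t word) { return (word & kSmiTagMask) == 0; }

    int main() {
      printf("%d\n", IsSmi(BoxSmi(42)));      // 1: tagged small integer
      printf("%d\n", IsSmi(BoxSmi(42) | 1));  // 0: low bit set, a heap pointer
      return 0;
    }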
| 1489 void FlowGraphCompiler::EmitTestAndCallLoadCid() { | 1441 void FlowGraphCompiler::EmitTestAndCallLoadCid() { |
| 1490 __ LoadClassId(RDI, RAX); | 1442 __ LoadClassId(RDI, RAX); |
| 1491 } | 1443 } |
| 1492 | 1444 |
| 1493 | |
| 1494 int FlowGraphCompiler::EmitTestAndCallCheckCid(Label* next_label, | 1445 int FlowGraphCompiler::EmitTestAndCallCheckCid(Label* next_label, |
| 1495 const CidRange& range, | 1446 const CidRange& range, |
| 1496 int bias) { | 1447 int bias) { |
| 1497 intptr_t cid_start = range.cid_start; | 1448 intptr_t cid_start = range.cid_start; |
| 1498 if (range.IsSingleCid()) { | 1449 if (range.IsSingleCid()) { |
| 1499 __ cmpl(RDI, Immediate(cid_start - bias)); | 1450 __ cmpl(RDI, Immediate(cid_start - bias)); |
| 1500 __ j(NOT_EQUAL, next_label); | 1451 __ j(NOT_EQUAL, next_label); |
| 1501 } else { | 1452 } else { |
| 1502 __ addl(RDI, Immediate(bias - cid_start)); | 1453 __ addl(RDI, Immediate(bias - cid_start)); |
| 1503 bias = cid_start; | 1454 bias = cid_start; |
| 1504 __ cmpl(RDI, Immediate(range.Extent())); | 1455 __ cmpl(RDI, Immediate(range.Extent())); |
| 1505 __ j(ABOVE, next_label); // Unsigned higher. | 1456 __ j(ABOVE, next_label); // Unsigned higher. |
| 1506 } | 1457 } |
| 1507 return bias; | 1458 return bias; |
| 1508 } | 1459 } |
| 1509 | 1460 |
| 1510 | |
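EmitTestAndCallCheckCid uses the classic unsigned range-check trick: instead of two compares for cid_start <= cid <= cid_start + extent, it biases the class id down by cid_start and does one unsigned compare, since values below the range wrap around to large unsigned numbers. The returned bias lets the next range reuse the already-shifted value in RDI rather than reloading it. A standalone sketch under that reading (hypothetical helper, not VM code):

    // Sketch of the unsigned range-check trick above (hypothetical helper,
    // illustrative values): after subtracting cid_start, any cid below the
    // range wraps to a huge unsigned value, so one ABOVE-style compare
    // rejects both ends of the range at once.
    #include <cstdint>
    #include <cstdio>

    static bool InCidRange(int32_t cid, int32_t cid_start, int32_t extent) {
      // Mirrors the addl-then-unsigned-compare sequence in the emitter.
      return static_cast<uint32_t>(cid - cid_start) <=
             static_cast<uint32_t>(extent);
    }

    int main() {
      printf("%d\n", InCidRange(5, 3, 4));  // 1: 5 lies inside [3, 7]
      printf("%d\n", InCidRange(2, 3, 4));  // 0: wraps to a huge unsigned value
      printf("%d\n", InCidRange(8, 3, 4));  // 0: above the extent
      return 0;
    }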
| 1511 #undef __ | 1461 #undef __ |
| 1512 #define __ compiler_->assembler()-> | 1462 #define __ compiler_->assembler()-> |
| 1513 | 1463 |
| 1514 | |
| 1515 void ParallelMoveResolver::EmitMove(int index) { | 1464 void ParallelMoveResolver::EmitMove(int index) { |
| 1516 MoveOperands* move = moves_[index]; | 1465 MoveOperands* move = moves_[index]; |
| 1517 const Location source = move->src(); | 1466 const Location source = move->src(); |
| 1518 const Location destination = move->dest(); | 1467 const Location destination = move->dest(); |
| 1519 | 1468 |
| 1520 if (source.IsRegister()) { | 1469 if (source.IsRegister()) { |
| 1521 if (destination.IsRegister()) { | 1470 if (destination.IsRegister()) { |
| 1522 __ movq(destination.reg(), source.reg()); | 1471 __ movq(destination.reg(), source.reg()); |
| 1523 } else { | 1472 } else { |
| 1524 ASSERT(destination.IsStackSlot()); | 1473 ASSERT(destination.IsStackSlot()); |
| (...skipping 73 matching lines...) |
| 1598 Immediate(Smi::Cast(constant).Value())); | 1547 Immediate(Smi::Cast(constant).Value())); |
| 1599 } else { | 1548 } else { |
| 1600 StoreObject(destination.ToStackSlotAddress(), constant); | 1549 StoreObject(destination.ToStackSlotAddress(), constant); |
| 1601 } | 1550 } |
| 1602 } | 1551 } |
| 1603 } | 1552 } |
| 1604 | 1553 |
| 1605 move->Eliminate(); | 1554 move->Eliminate(); |
| 1606 } | 1555 } |
| 1607 | 1556 |
| 1608 | |
| 1609 void ParallelMoveResolver::EmitSwap(int index) { | 1557 void ParallelMoveResolver::EmitSwap(int index) { |
| 1610 MoveOperands* move = moves_[index]; | 1558 MoveOperands* move = moves_[index]; |
| 1611 const Location source = move->src(); | 1559 const Location source = move->src(); |
| 1612 const Location destination = move->dest(); | 1560 const Location destination = move->dest(); |
| 1613 | 1561 |
| 1614 if (source.IsRegister() && destination.IsRegister()) { | 1562 if (source.IsRegister() && destination.IsRegister()) { |
| 1615 __ xchgq(destination.reg(), source.reg()); | 1563 __ xchgq(destination.reg(), source.reg()); |
| 1616 } else if (source.IsRegister() && destination.IsStackSlot()) { | 1564 } else if (source.IsRegister() && destination.IsStackSlot()) { |
| 1617 Exchange(source.reg(), destination.ToStackSlotAddress()); | 1565 Exchange(source.reg(), destination.ToStackSlotAddress()); |
| 1618 } else if (source.IsStackSlot() && destination.IsRegister()) { | 1566 } else if (source.IsStackSlot() && destination.IsRegister()) { |
| (...skipping 55 matching lines...) |
| 1674 for (int i = 0; i < moves_.length(); ++i) { | 1622 for (int i = 0; i < moves_.length(); ++i) { |
| 1675 const MoveOperands& other_move = *moves_[i]; | 1623 const MoveOperands& other_move = *moves_[i]; |
| 1676 if (other_move.Blocks(source)) { | 1624 if (other_move.Blocks(source)) { |
| 1677 moves_[i]->set_src(destination); | 1625 moves_[i]->set_src(destination); |
| 1678 } else if (other_move.Blocks(destination)) { | 1626 } else if (other_move.Blocks(destination)) { |
| 1679 moves_[i]->set_src(source); | 1627 moves_[i]->set_src(source); |
| 1680 } | 1628 } |
| 1681 } | 1629 } |
| 1682 } | 1630 } |
| 1683 | 1631 |
| 1684 | |
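The loop at the end of EmitSwap is the heart of parallel-move resolution after a swap: once the two locations have been exchanged, the value that lived in source is now in destination and vice versa, so every pending move that reads one of them must be redirected to the other. A condensed sketch with plain integer locations standing in for the VM's MoveOperands:

    // Condensed sketch (integer locations, hypothetical Move struct) of the
    // post-swap fix-up above: pending moves that read either exchanged
    // location are redirected, since the contents have traded places.
    #include <cstdio>
    #include <vector>

    struct Move { int src; int dst; };

    static void FixUpAfterSwap(std::vector<Move>& moves,
                               int source, int destination) {
      for (Move& m : moves) {
        if (m.src == source) {
          m.src = destination;  // old contents of source now live here
        } else if (m.src == destination) {
          m.src = source;       // and vice versa
        }
      }
    }

    int main() {
      // Pending move r2 -> r3 wants the value that sat in r2 before r1 and
      // r2 were swapped; after fix-up it must read r1 instead.
      std::vector<Move> moves = {{2, 3}};
      FixUpAfterSwap(moves, /*source=*/1, /*destination=*/2);
      printf("%d -> %d\n", moves[0].src, moves[0].dst);  // prints "1 -> 3"
      return 0;
    }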
| 1685 void ParallelMoveResolver::MoveMemoryToMemory(const Address& dst, | 1632 void ParallelMoveResolver::MoveMemoryToMemory(const Address& dst, |
| 1686 const Address& src) { | 1633 const Address& src) { |
| 1687 __ MoveMemoryToMemory(dst, src); | 1634 __ MoveMemoryToMemory(dst, src); |
| 1688 } | 1635 } |
| 1689 | 1636 |
| 1690 | |
| 1691 void ParallelMoveResolver::StoreObject(const Address& dst, const Object& obj) { | 1637 void ParallelMoveResolver::StoreObject(const Address& dst, const Object& obj) { |
| 1692 __ StoreObject(dst, obj); | 1638 __ StoreObject(dst, obj); |
| 1693 } | 1639 } |
| 1694 | 1640 |
| 1695 | |
| 1696 void ParallelMoveResolver::Exchange(Register reg, const Address& mem) { | 1641 void ParallelMoveResolver::Exchange(Register reg, const Address& mem) { |
| 1697 __ Exchange(reg, mem); | 1642 __ Exchange(reg, mem); |
| 1698 } | 1643 } |
| 1699 | 1644 |
| 1700 | |
| 1701 void ParallelMoveResolver::Exchange(const Address& mem1, const Address& mem2) { | 1645 void ParallelMoveResolver::Exchange(const Address& mem1, const Address& mem2) { |
| 1702 __ Exchange(mem1, mem2); | 1646 __ Exchange(mem1, mem2); |
| 1703 } | 1647 } |
| 1704 | 1648 |
| 1705 | |
| 1706 void ParallelMoveResolver::Exchange(Register reg, | 1649 void ParallelMoveResolver::Exchange(Register reg, |
| 1707 Register base_reg, | 1650 Register base_reg, |
| 1708 intptr_t stack_offset) { | 1651 intptr_t stack_offset) { |
| 1709 UNREACHABLE(); | 1652 UNREACHABLE(); |
| 1710 } | 1653 } |
| 1711 | 1654 |
| 1712 | |
| 1713 void ParallelMoveResolver::Exchange(Register base_reg1, | 1655 void ParallelMoveResolver::Exchange(Register base_reg1, |
| 1714 intptr_t stack_offset1, | 1656 intptr_t stack_offset1, |
| 1715 Register base_reg2, | 1657 Register base_reg2, |
| 1716 intptr_t stack_offset2) { | 1658 intptr_t stack_offset2) { |
| 1717 UNREACHABLE(); | 1659 UNREACHABLE(); |
| 1718 } | 1660 } |
| 1719 | 1661 |
| 1720 | |
| 1721 void ParallelMoveResolver::SpillScratch(Register reg) { | 1662 void ParallelMoveResolver::SpillScratch(Register reg) { |
| 1722 __ pushq(reg); | 1663 __ pushq(reg); |
| 1723 } | 1664 } |
| 1724 | 1665 |
| 1725 | |
| 1726 void ParallelMoveResolver::RestoreScratch(Register reg) { | 1666 void ParallelMoveResolver::RestoreScratch(Register reg) { |
| 1727 __ popq(reg); | 1667 __ popq(reg); |
| 1728 } | 1668 } |
| 1729 | 1669 |
| 1730 | |
| 1731 void ParallelMoveResolver::SpillFpuScratch(FpuRegister reg) { | 1670 void ParallelMoveResolver::SpillFpuScratch(FpuRegister reg) { |
| 1732 __ AddImmediate(RSP, Immediate(-kFpuRegisterSize)); | 1671 __ AddImmediate(RSP, Immediate(-kFpuRegisterSize)); |
| 1733 __ movups(Address(RSP, 0), reg); | 1672 __ movups(Address(RSP, 0), reg); |
| 1734 } | 1673 } |
| 1735 | 1674 |
| 1736 | |
| 1737 void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) { | 1675 void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) { |
| 1738 __ movups(reg, Address(RSP, 0)); | 1676 __ movups(reg, Address(RSP, 0)); |
| 1739 __ AddImmediate(RSP, Immediate(kFpuRegisterSize)); | 1677 __ AddImmediate(RSP, Immediate(kFpuRegisterSize)); |
| 1740 } | 1678 } |
| 1741 | 1679 |
| 1742 | |
| 1743 #undef __ | 1680 #undef __ |
| 1744 | 1681 |
| 1745 } // namespace dart | 1682 } // namespace dart |
| 1746 | 1683 |
| 1747 #endif // defined TARGET_ARCH_X64 | 1684 #endif // defined TARGET_ARCH_X64 |