| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
| 6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
| 7 | 7 |
| 8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
| 9 | 9 |
| 10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
| (...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 54 intptr_t slot_ix = 0; | 54 intptr_t slot_ix = 0; |
| 55 Environment* current = deopt_env_; | 55 Environment* current = deopt_env_; |
| 56 | 56 |
| 57 // Emit all kMaterializeObject instructions describing objects to be | 57 // Emit all kMaterializeObject instructions describing objects to be |
| 58 // materialized on the deoptimization as a prefix to the deoptimization info. | 58 // materialized on the deoptimization as a prefix to the deoptimization info. |
| 59 EmitMaterializations(deopt_env_, builder); | 59 EmitMaterializations(deopt_env_, builder); |
| 60 | 60 |
| 61 // The real frame starts here. | 61 // The real frame starts here. |
| 62 builder->MarkFrameStart(); | 62 builder->MarkFrameStart(); |
| 63 | 63 |
| 64 // Callee's PC marker is not used anymore. Pass Function::null() to set to 0. | 64 // Current PP, FP, and PC. |
| 65 builder->AddPp(current->function(), slot_ix++); |
| 65 builder->AddPcMarker(Function::Handle(), slot_ix++); | 66 builder->AddPcMarker(Function::Handle(), slot_ix++); |
| 66 | |
| 67 // Current FP and PC. | |
| 68 builder->AddCallerFp(slot_ix++); | 67 builder->AddCallerFp(slot_ix++); |
| 69 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++); | 68 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++); |
| 70 | 69 |
| 71 // Emit all values that are needed for materialization as a part of the | 70 // Emit all values that are needed for materialization as a part of the |
| 72 // expression stack for the bottom-most frame. This guarantees that GC | 71 // expression stack for the bottom-most frame. This guarantees that GC |
| 73 // will be able to find them during materialization. | 72 // will be able to find them during materialization. |
| 74 slot_ix = builder->EmitMaterializationArguments(slot_ix); | 73 slot_ix = builder->EmitMaterializationArguments(slot_ix); |
| 75 | 74 |
| 76 // For the innermost environment, set outgoing arguments and the locals. | 75 // For the innermost environment, set outgoing arguments and the locals. |
| 77 for (intptr_t i = current->Length() - 1; | 76 for (intptr_t i = current->Length() - 1; |
| 78 i >= current->fixed_parameter_count(); | 77 i >= current->fixed_parameter_count(); |
| 79 i--) { | 78 i--) { |
| 80 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++); | 79 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++); |
| 81 } | 80 } |
| 82 | 81 |
| 83 // Current PC marker and caller FP. | |
| 84 builder->AddPcMarker(current->function(), slot_ix++); | |
| 85 builder->AddCallerFp(slot_ix++); | |
| 86 | |
| 87 Environment* previous = current; | 82 Environment* previous = current; |
| 88 current = current->outer(); | 83 current = current->outer(); |
| 89 while (current != NULL) { | 84 while (current != NULL) { |
| 85 // PP, FP, and PC. |
| 86 builder->AddPp(current->function(), slot_ix++); |
| 87 builder->AddPcMarker(previous->function(), slot_ix++); |
| 88 builder->AddCallerFp(slot_ix++); |
| 89 |
| 90 // For any outer environment the deopt id is that of the call instruction | 90 // For any outer environment the deopt id is that of the call instruction |
| 91 // which is recorded in the outer environment. | 91 // which is recorded in the outer environment. |
| 92 builder->AddReturnAddress(current->function(), | 92 builder->AddReturnAddress(current->function(), |
| 93 Isolate::ToDeoptAfter(current->deopt_id()), | 93 Isolate::ToDeoptAfter(current->deopt_id()), |
| 94 slot_ix++); | 94 slot_ix++); |
| 95 | 95 |
| 96 // The values of outgoing arguments can be changed from the inlined call so | 96 // The values of outgoing arguments can be changed from the inlined call so |
| 97 // we must read them from the previous environment. | 97 // we must read them from the previous environment. |
| 98 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { | 98 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { |
| 99 builder->AddCopy(previous->ValueAt(i), | 99 builder->AddCopy(previous->ValueAt(i), |
| 100 previous->LocationAt(i), | 100 previous->LocationAt(i), |
| 101 slot_ix++); | 101 slot_ix++); |
| 102 } | 102 } |
| 103 | 103 |
| 104 // Set the locals, note that outgoing arguments are not in the environment. | 104 // Set the locals, note that outgoing arguments are not in the environment. |
| 105 for (intptr_t i = current->Length() - 1; | 105 for (intptr_t i = current->Length() - 1; |
| 106 i >= current->fixed_parameter_count(); | 106 i >= current->fixed_parameter_count(); |
| 107 i--) { | 107 i--) { |
| 108 builder->AddCopy(current->ValueAt(i), | 108 builder->AddCopy(current->ValueAt(i), |
| 109 current->LocationAt(i), | 109 current->LocationAt(i), |
| 110 slot_ix++); | 110 slot_ix++); |
| 111 } | 111 } |
| 112 | 112 |
| 113 // PC marker and caller FP. | |
| 114 builder->AddPcMarker(current->function(), slot_ix++); | |
| 115 builder->AddCallerFp(slot_ix++); | |
| 116 | |
| 117 // Iterate on the outer environment. | 113 // Iterate on the outer environment. |
| 118 previous = current; | 114 previous = current; |
| 119 current = current->outer(); | 115 current = current->outer(); |
| 120 } | 116 } |
| 121 // The previous pointer is now the outermost environment. | 117 // The previous pointer is now the outermost environment. |
| 122 ASSERT(previous != NULL); | 118 ASSERT(previous != NULL); |
| 123 | 119 |
| 124 // For the outermost environment, set caller PC. | 120 // For the outermost environment, set caller PC, caller PP, and caller FP. |
| 121 builder->AddCallerPp(slot_ix++); |
| 122 // PC marker. |
| 123 builder->AddPcMarker(previous->function(), slot_ix++); |
| 124 builder->AddCallerFp(slot_ix++); |
| 125 builder->AddCallerPc(slot_ix++); | 125 builder->AddCallerPc(slot_ix++); |
| 126 | 126 |
| 127 // For the outermost environment, set the incoming arguments. | 127 // For the outermost environment, set the incoming arguments. |
| 128 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { | 128 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { |
| 129 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); | 129 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); |
| 130 } | 130 } |
| 131 | 131 |
| 132 const DeoptInfo& deopt_info = DeoptInfo::Handle(builder->CreateDeoptInfo()); | 132 const DeoptInfo& deopt_info = DeoptInfo::Handle(builder->CreateDeoptInfo()); |
| 133 return deopt_info.raw(); | 133 return deopt_info.raw(); |
| 134 } | 134 } |
| 135 | 135 |
| 136 | 136 |
| 137 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler, | 137 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler, |
| 138 intptr_t stub_ix) { | 138 intptr_t stub_ix) { |
| 139 // Calls do not need stubs, they share a deoptimization trampoline. | 139 // Calls do not need stubs, they share a deoptimization trampoline. |
| 140 ASSERT(reason() != kDeoptAtCall); | 140 ASSERT(reason() != kDeoptAtCall); |
| 141 Assembler* assem = compiler->assembler(); | 141 Assembler* assem = compiler->assembler(); |
| 142 #define __ assem-> | 142 #define __ assem-> |
| 143 __ Comment("Deopt stub for id %" Pd "", deopt_id()); | 143 __ Comment("Deopt stub for id %" Pd "", deopt_id()); |
| 144 __ Bind(entry_label()); | 144 __ Bind(entry_label()); |
| 145 if (FLAG_trap_on_deoptimization) __ int3(); | 145 if (FLAG_trap_on_deoptimization) __ int3(); |
| 146 | 146 |
| 147 ASSERT(deopt_env() != NULL); | 147 ASSERT(deopt_env() != NULL); |
| 148 | 148 |
| 149 __ call(&StubCode::DeoptimizeLabel()); | 149 __ Call(&StubCode::DeoptimizeLabel(), PP); |
| 150 set_pc_offset(assem->CodeSize()); | 150 set_pc_offset(assem->CodeSize()); |
| 151 __ int3(); | 151 __ int3(); |
| 152 #undef __ | 152 #undef __ |
| 153 } | 153 } |
| 154 | 154 |
| 155 | 155 |
| 156 #define __ assembler()-> | 156 #define __ assembler()-> |
| 157 | 157 |
| 158 | 158 |
| 159 // Fall through if bool_register contains null. | 159 // Fall through if bool_register contains null. |
| 160 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, | 160 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, |
| 161 Label* is_true, | 161 Label* is_true, |
| 162 Label* is_false) { | 162 Label* is_false) { |
| 163 const Immediate& raw_null = | |
| 164 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
| 165 Label fall_through; | 163 Label fall_through; |
| 166 __ cmpq(bool_register, raw_null); | 164 __ CompareObject(bool_register, Object::Handle()); |
| 167 __ j(EQUAL, &fall_through, Assembler::kNearJump); | 165 __ j(EQUAL, &fall_through, Assembler::kNearJump); |
| 168 __ CompareObject(bool_register, Bool::True()); | 166 __ CompareObject(bool_register, Bool::True()); |
| 169 __ j(EQUAL, is_true); | 167 __ j(EQUAL, is_true); |
| 170 __ jmp(is_false); | 168 __ jmp(is_false); |
| 171 __ Bind(&fall_through); | 169 __ Bind(&fall_through); |
| 172 } | 170 } |
| 173 | 171 |
| 174 | 172 |
| 175 // Clobbers RCX. | 173 // Clobbers RCX. |
| 176 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( | 174 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( |
| 177 TypeTestStubKind test_kind, | 175 TypeTestStubKind test_kind, |
| 178 Register instance_reg, | 176 Register instance_reg, |
| 179 Register type_arguments_reg, | 177 Register type_arguments_reg, |
| 180 Register temp_reg, | 178 Register temp_reg, |
| 181 Label* is_instance_lbl, | 179 Label* is_instance_lbl, |
| 182 Label* is_not_instance_lbl) { | 180 Label* is_not_instance_lbl) { |
| 183 const SubtypeTestCache& type_test_cache = | 181 const SubtypeTestCache& type_test_cache = |
| 184 SubtypeTestCache::ZoneHandle(SubtypeTestCache::New()); | 182 SubtypeTestCache::ZoneHandle(SubtypeTestCache::New()); |
| 185 const Immediate& raw_null = | 183 __ LoadObject(temp_reg, type_test_cache, PP); |
| 186 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
| 187 __ LoadObject(temp_reg, type_test_cache); | |
| 188 __ pushq(temp_reg); // Subtype test cache. | 184 __ pushq(temp_reg); // Subtype test cache. |
| 189 __ pushq(instance_reg); // Instance. | 185 __ pushq(instance_reg); // Instance. |
| 190 if (test_kind == kTestTypeOneArg) { | 186 if (test_kind == kTestTypeOneArg) { |
| 191 ASSERT(type_arguments_reg == kNoRegister); | 187 ASSERT(type_arguments_reg == kNoRegister); |
| 192 __ pushq(raw_null); | 188 __ PushObject(Object::Handle()); |
| 193 __ call(&StubCode::Subtype1TestCacheLabel()); | 189 __ Call(&StubCode::Subtype1TestCacheLabel(), PP); |
| 194 } else if (test_kind == kTestTypeTwoArgs) { | 190 } else if (test_kind == kTestTypeTwoArgs) { |
| 195 ASSERT(type_arguments_reg == kNoRegister); | 191 ASSERT(type_arguments_reg == kNoRegister); |
| 196 __ pushq(raw_null); | 192 __ PushObject(Object::Handle()); |
| 197 __ call(&StubCode::Subtype2TestCacheLabel()); | 193 __ Call(&StubCode::Subtype2TestCacheLabel(), PP); |
| 198 } else if (test_kind == kTestTypeThreeArgs) { | 194 } else if (test_kind == kTestTypeThreeArgs) { |
| 199 __ pushq(type_arguments_reg); | 195 __ pushq(type_arguments_reg); |
| 200 __ call(&StubCode::Subtype3TestCacheLabel()); | 196 __ Call(&StubCode::Subtype3TestCacheLabel(), PP); |
| 201 } else { | 197 } else { |
| 202 UNREACHABLE(); | 198 UNREACHABLE(); |
| 203 } | 199 } |
| 204 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False. | 200 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False. |
| 205 ASSERT(instance_reg != RCX); | 201 ASSERT(instance_reg != RCX); |
| 206 ASSERT(temp_reg != RCX); | 202 ASSERT(temp_reg != RCX); |
| 207 __ popq(instance_reg); // Discard. | 203 __ popq(instance_reg); // Discard. |
| 208 __ popq(instance_reg); // Restore receiver. | 204 __ popq(instance_reg); // Restore receiver. |
| 209 __ popq(temp_reg); // Discard. | 205 __ popq(temp_reg); // Discard. |
| 210 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl); | 206 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl); |
| (...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 335 // interfaces. | 331 // interfaces. |
| 336 // Bool interface can be implemented only by core class Bool. | 332 // Bool interface can be implemented only by core class Bool. |
| 337 if (type.IsBoolType()) { | 333 if (type.IsBoolType()) { |
| 338 __ cmpl(kClassIdReg, Immediate(kBoolCid)); | 334 __ cmpl(kClassIdReg, Immediate(kBoolCid)); |
| 339 __ j(EQUAL, is_instance_lbl); | 335 __ j(EQUAL, is_instance_lbl); |
| 340 __ jmp(is_not_instance_lbl); | 336 __ jmp(is_not_instance_lbl); |
| 341 return false; | 337 return false; |
| 342 } | 338 } |
| 343 if (type.IsFunctionType()) { | 339 if (type.IsFunctionType()) { |
| 344 // Check if instance is a closure. | 340 // Check if instance is a closure. |
| 345 const Immediate& raw_null = | |
| 346 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
| 347 __ LoadClassById(R13, kClassIdReg); | 341 __ LoadClassById(R13, kClassIdReg); |
| 348 __ movq(R13, FieldAddress(R13, Class::signature_function_offset())); | 342 __ movq(R13, FieldAddress(R13, Class::signature_function_offset())); |
| 349 __ cmpq(R13, raw_null); | 343 __ CompareObject(R13, Object::Handle()); |
| 350 __ j(NOT_EQUAL, is_instance_lbl); | 344 __ j(NOT_EQUAL, is_instance_lbl); |
| 351 } | 345 } |
| 352 // Custom checking for numbers (Smi, Mint, Bigint and Double). | 346 // Custom checking for numbers (Smi, Mint, Bigint and Double). |
| 353 // Note that instance is not Smi (checked above). | 347 // Note that instance is not Smi (checked above). |
| 354 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) { | 348 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) { |
| 355 GenerateNumberTypeCheck( | 349 GenerateNumberTypeCheck( |
| 356 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); | 350 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); |
| 357 return false; | 351 return false; |
| 358 } | 352 } |
| 359 if (type.IsStringType()) { | 353 if (type.IsStringType()) { |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 402 // RAX: instance (preserved). | 396 // RAX: instance (preserved). |
| 403 // Clobbers RDI, RDX, R10. | 397 // Clobbers RDI, RDX, R10. |
| 404 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( | 398 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( |
| 405 intptr_t token_pos, | 399 intptr_t token_pos, |
| 406 const AbstractType& type, | 400 const AbstractType& type, |
| 407 Label* is_instance_lbl, | 401 Label* is_instance_lbl, |
| 408 Label* is_not_instance_lbl) { | 402 Label* is_not_instance_lbl) { |
| 409 __ Comment("UninstantiatedTypeTest"); | 403 __ Comment("UninstantiatedTypeTest"); |
| 410 ASSERT(!type.IsInstantiated()); | 404 ASSERT(!type.IsInstantiated()); |
| 411 // Skip check if destination is a dynamic type. | 405 // Skip check if destination is a dynamic type. |
| 412 const Immediate& raw_null = | |
| 413 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
| 414 if (type.IsTypeParameter()) { | 406 if (type.IsTypeParameter()) { |
| 415 const TypeParameter& type_param = TypeParameter::Cast(type); | 407 const TypeParameter& type_param = TypeParameter::Cast(type); |
| 416 // Load instantiator (or null) and instantiator type arguments on stack. | 408 // Load instantiator (or null) and instantiator type arguments on stack. |
| 417 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. | 409 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. |
| 418 // RDX: instantiator type arguments. | 410 // RDX: instantiator type arguments. |
| 419 // Check if type argument is dynamic. | 411 // Check if type argument is dynamic. |
| 420 __ cmpq(RDX, raw_null); | 412 __ CompareObject(RDX, Object::Handle()); |
| 421 __ j(EQUAL, is_instance_lbl); | 413 __ j(EQUAL, is_instance_lbl); |
| 422 // Can handle only type arguments that are instances of TypeArguments. | 414 // Can handle only type arguments that are instances of TypeArguments. |
| 423 // (runtime checks canonicalize type arguments). | 415 // (runtime checks canonicalize type arguments). |
| 424 Label fall_through; | 416 Label fall_through; |
| 425 __ CompareClassId(RDX, kTypeArgumentsCid); | 417 __ CompareClassId(RDX, kTypeArgumentsCid); |
| 426 __ j(NOT_EQUAL, &fall_through); | 418 __ j(NOT_EQUAL, &fall_through); |
| 427 __ movq(RDI, | 419 __ movq(RDI, |
| 428 FieldAddress(RDX, TypeArguments::type_at_offset(type_param.index()))); | 420 FieldAddress(RDX, TypeArguments::type_at_offset(type_param.index()))); |
| 429 // RDI: Concrete type of type. | 421 // RDI: Concrete type of type. |
| 430 // Check if type argument is dynamic. | 422 // Check if type argument is dynamic. |
| 431 __ CompareObject(RDI, Type::ZoneHandle(Type::DynamicType())); | 423 __ CompareObject(RDI, Type::ZoneHandle(Type::DynamicType())); |
| 432 __ j(EQUAL, is_instance_lbl); | 424 __ j(EQUAL, is_instance_lbl); |
| 433 __ cmpq(RDI, raw_null); | 425 __ CompareObject(RDI, Object::Handle()); |
| 434 __ j(EQUAL, is_instance_lbl); | 426 __ j(EQUAL, is_instance_lbl); |
| 435 const Type& object_type = Type::ZoneHandle(Type::ObjectType()); | 427 const Type& object_type = Type::ZoneHandle(Type::ObjectType()); |
| 436 __ CompareObject(RDI, object_type); | 428 __ CompareObject(RDI, object_type); |
| 437 __ j(EQUAL, is_instance_lbl); | 429 __ j(EQUAL, is_instance_lbl); |
| 438 | 430 |
| 439 // For Smi check quickly against int and num interfaces. | 431 // For Smi check quickly against int and num interfaces. |
| 440 Label not_smi; | 432 Label not_smi; |
| 441 __ testq(RAX, Immediate(kSmiTagMask)); // Value is Smi? | 433 __ testq(RAX, Immediate(kSmiTagMask)); // Value is Smi? |
| 442 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump); | 434 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump); |
| 443 __ CompareObject(RDI, Type::ZoneHandle(Type::IntType())); | 435 __ CompareObject(RDI, Type::ZoneHandle(Type::IntType())); |
| (...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 563 // Clobbers RCX and RDX. | 555 // Clobbers RCX and RDX. |
| 564 // Returns: | 556 // Returns: |
| 565 // - true or false in RAX. | 557 // - true or false in RAX. |
| 566 void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos, | 558 void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos, |
| 567 intptr_t deopt_id, | 559 intptr_t deopt_id, |
| 568 const AbstractType& type, | 560 const AbstractType& type, |
| 569 bool negate_result, | 561 bool negate_result, |
| 570 LocationSummary* locs) { | 562 LocationSummary* locs) { |
| 571 ASSERT(type.IsFinalized() && !type.IsMalformed() && !type.IsMalbounded()); | 563 ASSERT(type.IsFinalized() && !type.IsMalformed() && !type.IsMalbounded()); |
| 572 | 564 |
| 573 const Immediate& raw_null = | |
| 574 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
| 575 Label is_instance, is_not_instance; | 565 Label is_instance, is_not_instance; |
| 576 __ pushq(RCX); // Store instantiator on stack. | 566 __ pushq(RCX); // Store instantiator on stack. |
| 577 __ pushq(RDX); // Store instantiator type arguments. | 567 __ pushq(RDX); // Store instantiator type arguments. |
| 578 // If type is instantiated and non-parameterized, we can inline code | 568 // If type is instantiated and non-parameterized, we can inline code |
| 579 // checking whether the tested instance is a Smi. | 569 // checking whether the tested instance is a Smi. |
| 580 if (type.IsInstantiated()) { | 570 if (type.IsInstantiated()) { |
| 581 // A null object is only an instance of Object and dynamic, which has | 571 // A null object is only an instance of Object and dynamic, which has |
| 582 // already been checked above (if the type is instantiated). So we can | 572 // already been checked above (if the type is instantiated). So we can |
| 583 // return false here if the instance is null (and if the type is | 573 // return false here if the instance is null (and if the type is |
| 584 // instantiated). | 574 // instantiated). |
| 585 // We can only inline this null check if the type is instantiated at compile | 575 // We can only inline this null check if the type is instantiated at compile |
| 586 // time, since an uninstantiated type at compile time could be Object or | 576 // time, since an uninstantiated type at compile time could be Object or |
| 587 // dynamic at run time. | 577 // dynamic at run time. |
| 588 __ cmpq(RAX, raw_null); | 578 __ CompareObject(RAX, Object::Handle()); |
| 589 __ j(EQUAL, &is_not_instance); | 579 __ j(EQUAL, &is_not_instance); |
| 590 } | 580 } |
| 591 | 581 |
| 592 // Generate inline instanceof test. | 582 // Generate inline instanceof test. |
| 593 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(); | 583 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(); |
| 594 test_cache = GenerateInlineInstanceof(token_pos, type, | 584 test_cache = GenerateInlineInstanceof(token_pos, type, |
| 595 &is_instance, &is_not_instance); | 585 &is_instance, &is_not_instance); |
| 596 | 586 |
| 597 // test_cache is null if there is no fall-through. | 587 // test_cache is null if there is no fall-through. |
| 598 Label done; | 588 Label done; |
| 599 if (!test_cache.IsNull()) { | 589 if (!test_cache.IsNull()) { |
| 600 // Generate runtime call. | 590 // Generate runtime call. |
| 601 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. | 591 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. |
| 602 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. | 592 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. |
| 603 __ PushObject(Object::ZoneHandle()); // Make room for the result. | 593 __ PushObject(Object::ZoneHandle()); // Make room for the result. |
| 604 __ pushq(RAX); // Push the instance. | 594 __ pushq(RAX); // Push the instance. |
| 605 __ PushObject(type); // Push the type. | 595 __ PushObject(type); // Push the type. |
| 606 __ pushq(RCX); // TODO(srdjan): Pass instantiator instead of null. | 596 __ pushq(RCX); // TODO(srdjan): Pass instantiator instead of null. |
| 607 __ pushq(RDX); // Instantiator type arguments. | 597 __ pushq(RDX); // Instantiator type arguments. |
| 608 __ LoadObject(RAX, test_cache); | 598 __ LoadObject(RAX, test_cache, PP); |
| 609 __ pushq(RAX); | 599 __ pushq(RAX); |
| 610 GenerateCallRuntime(token_pos, | 600 GenerateCallRuntime(token_pos, |
| 611 deopt_id, | 601 deopt_id, |
| 612 kInstanceofRuntimeEntry, | 602 kInstanceofRuntimeEntry, |
| 613 5, | 603 5, |
| 614 locs); | 604 locs); |
| 615 // Pop the parameters supplied to the runtime entry. The result of the | 605 // Pop the parameters supplied to the runtime entry. The result of the |
| 616 // instanceof runtime call will be left as the result of the operation. | 606 // instanceof runtime call will be left as the result of the operation. |
| 617 __ Drop(5); | 607 __ Drop(5); |
| 618 if (negate_result) { | 608 if (negate_result) { |
| 619 __ popq(RDX); | 609 __ popq(RDX); |
| 620 __ LoadObject(RAX, Bool::True()); | 610 __ LoadObject(RAX, Bool::True(), PP); |
| 621 __ cmpq(RDX, RAX); | 611 __ cmpq(RDX, RAX); |
| 622 __ j(NOT_EQUAL, &done, Assembler::kNearJump); | 612 __ j(NOT_EQUAL, &done, Assembler::kNearJump); |
| 623 __ LoadObject(RAX, Bool::False()); | 613 __ LoadObject(RAX, Bool::False(), PP); |
| 624 } else { | 614 } else { |
| 625 __ popq(RAX); | 615 __ popq(RAX); |
| 626 } | 616 } |
| 627 __ jmp(&done, Assembler::kNearJump); | 617 __ jmp(&done, Assembler::kNearJump); |
| 628 } | 618 } |
| 629 __ Bind(&is_not_instance); | 619 __ Bind(&is_not_instance); |
| 630 __ LoadObject(RAX, Bool::Get(negate_result)); | 620 __ LoadObject(RAX, Bool::Get(negate_result), PP); |
| 631 __ jmp(&done, Assembler::kNearJump); | 621 __ jmp(&done, Assembler::kNearJump); |
| 632 | 622 |
| 633 __ Bind(&is_instance); | 623 __ Bind(&is_instance); |
| 634 __ LoadObject(RAX, Bool::Get(!negate_result)); | 624 __ LoadObject(RAX, Bool::Get(!negate_result), PP); |
| 635 __ Bind(&done); | 625 __ Bind(&done); |
| 636 __ popq(RDX); // Remove pushed instantiator type arguments. | 626 __ popq(RDX); // Remove pushed instantiator type arguments. |
| 637 __ popq(RCX); // Remove pushed instantiator. | 627 __ popq(RCX); // Remove pushed instantiator. |
| 638 } | 628 } |
| 639 | 629 |
| 640 | 630 |
| 641 // Optimize assignable type check by adding inlined tests for: | 631 // Optimize assignable type check by adding inlined tests for: |
| 642 // - NULL -> return NULL. | 632 // - NULL -> return NULL. |
| 643 // - Smi -> compile time subtype check (only if dst class is not parameterized). | 633 // - Smi -> compile time subtype check (only if dst class is not parameterized). |
| 644 // - Class equality (only if class is not parameterized). | 634 // - Class equality (only if class is not parameterized). |
| (...skipping 12 matching lines...) Expand all Loading... |
| 657 LocationSummary* locs) { | 647 LocationSummary* locs) { |
| 658 ASSERT(token_pos >= 0); | 648 ASSERT(token_pos >= 0); |
| 659 ASSERT(!dst_type.IsNull()); | 649 ASSERT(!dst_type.IsNull()); |
| 660 ASSERT(dst_type.IsFinalized()); | 650 ASSERT(dst_type.IsFinalized()); |
| 661 // Assignable check is skipped in FlowGraphBuilder, not here. | 651 // Assignable check is skipped in FlowGraphBuilder, not here. |
| 662 ASSERT(dst_type.IsMalformed() || dst_type.IsMalbounded() || | 652 ASSERT(dst_type.IsMalformed() || dst_type.IsMalbounded() || |
| 663 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); | 653 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); |
| 664 __ pushq(RCX); // Store instantiator. | 654 __ pushq(RCX); // Store instantiator. |
| 665 __ pushq(RDX); // Store instantiator type arguments. | 655 __ pushq(RDX); // Store instantiator type arguments. |
| 666 // A null object is always assignable and is returned as result. | 656 // A null object is always assignable and is returned as result. |
| 667 const Immediate& raw_null = | |
| 668 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
| 669 Label is_assignable, runtime_call; | 657 Label is_assignable, runtime_call; |
| 670 __ cmpq(RAX, raw_null); | 658 __ CompareObject(RAX, Object::Handle()); |
| 671 __ j(EQUAL, &is_assignable); | 659 __ j(EQUAL, &is_assignable); |
| 672 | 660 |
| 673 if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) { | 661 if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) { |
| 674 // If type checks are not eliminated during the graph building then | 662 // If type checks are not eliminated during the graph building then |
| 675 // a transition sentinel can be seen here. | 663 // a transition sentinel can be seen here. |
| 676 __ CompareObject(RAX, Object::transition_sentinel()); | 664 __ CompareObject(RAX, Object::transition_sentinel()); |
| 677 __ j(EQUAL, &is_assignable); | 665 __ j(EQUAL, &is_assignable); |
| 678 } | 666 } |
| 679 | 667 |
| 680 // Generate throw new TypeError() if the type is malformed or malbounded. | 668 // Generate throw new TypeError() if the type is malformed or malbounded. |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 713 | 701 |
| 714 __ Bind(&runtime_call); | 702 __ Bind(&runtime_call); |
| 715 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. | 703 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. |
| 716 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. | 704 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. |
| 717 __ PushObject(Object::ZoneHandle()); // Make room for the result. | 705 __ PushObject(Object::ZoneHandle()); // Make room for the result. |
| 718 __ pushq(RAX); // Push the source object. | 706 __ pushq(RAX); // Push the source object. |
| 719 __ PushObject(dst_type); // Push the type of the destination. | 707 __ PushObject(dst_type); // Push the type of the destination. |
| 720 __ pushq(RCX); // Instantiator. | 708 __ pushq(RCX); // Instantiator. |
| 721 __ pushq(RDX); // Instantiator type arguments. | 709 __ pushq(RDX); // Instantiator type arguments. |
| 722 __ PushObject(dst_name); // Push the name of the destination. | 710 __ PushObject(dst_name); // Push the name of the destination. |
| 723 __ LoadObject(RAX, test_cache); | 711 __ LoadObject(RAX, test_cache, PP); |
| 724 __ pushq(RAX); | 712 __ pushq(RAX); |
| 725 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); | 713 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); |
| 726 // Pop the parameters supplied to the runtime entry. The result of the | 714 // Pop the parameters supplied to the runtime entry. The result of the |
| 727 // type check runtime call is the checked value. | 715 // type check runtime call is the checked value. |
| 728 __ Drop(6); | 716 __ Drop(6); |
| 729 __ popq(RAX); | 717 __ popq(RAX); |
| 730 | 718 |
| 731 __ Bind(&is_assignable); | 719 __ Bind(&is_assignable); |
| 732 __ popq(RDX); // Remove pushed instantiator type arguments. | 720 __ popq(RDX); // Remove pushed instantiator type arguments. |
| 733 __ popq(RCX); // Remove pushed instantiator. | 721 __ popq(RCX); // Remove pushed instantiator. |
| (...skipping 24 matching lines...) Expand all Loading... |
| 758 | 746 |
| 759 void FlowGraphCompiler::EmitTrySyncMove(intptr_t dest_offset, | 747 void FlowGraphCompiler::EmitTrySyncMove(intptr_t dest_offset, |
| 760 Location loc, | 748 Location loc, |
| 761 bool* push_emitted) { | 749 bool* push_emitted) { |
| 762 const Address dest(RBP, dest_offset); | 750 const Address dest(RBP, dest_offset); |
| 763 if (loc.IsConstant()) { | 751 if (loc.IsConstant()) { |
| 764 if (!*push_emitted) { | 752 if (!*push_emitted) { |
| 765 __ pushq(RAX); | 753 __ pushq(RAX); |
| 766 *push_emitted = true; | 754 *push_emitted = true; |
| 767 } | 755 } |
| 768 __ LoadObject(RAX, loc.constant()); | 756 __ LoadObject(RAX, loc.constant(), PP); |
| 769 __ movq(dest, RAX); | 757 __ movq(dest, RAX); |
| 770 } else if (loc.IsRegister()) { | 758 } else if (loc.IsRegister()) { |
| 771 if (*push_emitted && loc.reg() == RAX) { | 759 if (*push_emitted && loc.reg() == RAX) { |
| 772 __ movq(RAX, Address(RSP, 0)); | 760 __ movq(RAX, Address(RSP, 0)); |
| 773 __ movq(dest, RAX); | 761 __ movq(dest, RAX); |
| 774 } else { | 762 } else { |
| 775 __ movq(dest, loc.reg()); | 763 __ movq(dest, loc.reg()); |
| 776 } | 764 } |
| 777 } else { | 765 } else { |
| 778 Address src = loc.ToStackSlotAddress(); | 766 Address src = loc.ToStackSlotAddress(); |
| (...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 888 const Address argument_addr(RBX, RCX, TIMES_8, 0); | 876 const Address argument_addr(RBX, RCX, TIMES_8, 0); |
| 889 const Address copy_addr(RDI, RCX, TIMES_8, 0); | 877 const Address copy_addr(RDI, RCX, TIMES_8, 0); |
| 890 __ Bind(&loop); | 878 __ Bind(&loop); |
| 891 __ movq(RAX, argument_addr); | 879 __ movq(RAX, argument_addr); |
| 892 __ movq(copy_addr, RAX); | 880 __ movq(copy_addr, RAX); |
| 893 __ Bind(&loop_condition); | 881 __ Bind(&loop_condition); |
| 894 __ decq(RCX); | 882 __ decq(RCX); |
| 895 __ j(POSITIVE, &loop, Assembler::kNearJump); | 883 __ j(POSITIVE, &loop, Assembler::kNearJump); |
| 896 | 884 |
| 897 // Copy or initialize optional named arguments. | 885 // Copy or initialize optional named arguments. |
| 898 const Immediate& raw_null = | |
| 899 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
| 900 Label all_arguments_processed; | 886 Label all_arguments_processed; |
| 901 #ifdef DEBUG | 887 #ifdef DEBUG |
| 902 const bool check_correct_named_args = true; | 888 const bool check_correct_named_args = true; |
| 903 #else | 889 #else |
| 904 const bool check_correct_named_args = function.IsClosureFunction(); | 890 const bool check_correct_named_args = function.IsClosureFunction(); |
| 905 #endif | 891 #endif |
| 906 if (num_opt_named_params > 0) { | 892 if (num_opt_named_params > 0) { |
| 907 // Start by alphabetically sorting the names of the optional parameters. | 893 // Start by alphabetically sorting the names of the optional parameters. |
| 908 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; | 894 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; |
| 909 int* opt_param_position = new int[num_opt_named_params]; | 895 int* opt_param_position = new int[num_opt_named_params]; |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 951 __ addq(RDI, Immediate(ArgumentsDescriptor::named_entry_size())); | 937 __ addq(RDI, Immediate(ArgumentsDescriptor::named_entry_size())); |
| 952 __ negq(RAX); | 938 __ negq(RAX); |
| 953 Address argument_addr(RBX, RAX, TIMES_4, 0); // RAX is a negative Smi. | 939 Address argument_addr(RBX, RAX, TIMES_4, 0); // RAX is a negative Smi. |
| 954 __ movq(RAX, argument_addr); | 940 __ movq(RAX, argument_addr); |
| 955 __ jmp(&assign_optional_parameter, Assembler::kNearJump); | 941 __ jmp(&assign_optional_parameter, Assembler::kNearJump); |
| 956 __ Bind(&load_default_value); | 942 __ Bind(&load_default_value); |
| 957 // Load RAX with default argument. | 943 // Load RAX with default argument. |
| 958 const Object& value = Object::ZoneHandle( | 944 const Object& value = Object::ZoneHandle( |
| 959 parsed_function().default_parameter_values().At( | 945 parsed_function().default_parameter_values().At( |
| 960 param_pos - num_fixed_params)); | 946 param_pos - num_fixed_params)); |
| 961 __ LoadObject(RAX, value); | 947 __ LoadObject(RAX, value, PP); |
| 962 __ Bind(&assign_optional_parameter); | 948 __ Bind(&assign_optional_parameter); |
| 963 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. | 949 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. |
| 964 // We do not use the final allocation index of the variable here, i.e. | 950 // We do not use the final allocation index of the variable here, i.e. |
| 965 // scope->VariableAt(i)->index(), because captured variables still need | 951 // scope->VariableAt(i)->index(), because captured variables still need |
| 966 // to be copied to the context that is not yet allocated. | 952 // to be copied to the context that is not yet allocated. |
| 967 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; | 953 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; |
| 968 const Address param_addr(RBP, computed_param_pos * kWordSize); | 954 const Address param_addr(RBP, computed_param_pos * kWordSize); |
| 969 __ movq(param_addr, RAX); | 955 __ movq(param_addr, RAX); |
| 970 } | 956 } |
| 971 delete[] opt_param; | 957 delete[] opt_param; |
| 972 delete[] opt_param_position; | 958 delete[] opt_param_position; |
| 973 if (check_correct_named_args) { | 959 if (check_correct_named_args) { |
| 974 // Check that RDI now points to the null terminator in the arguments | 960 // Check that RDI now points to the null terminator in the arguments |
| 975 // descriptor. | 961 // descriptor. |
| 976 __ cmpq(Address(RDI, 0), raw_null); | 962 __ LoadObject(TMP, Object::Handle(), PP); |
| 963 __ cmpq(Address(RDI, 0), TMP); |
| 977 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); | 964 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); |
| 978 } | 965 } |
| 979 } else { | 966 } else { |
| 980 ASSERT(num_opt_pos_params > 0); | 967 ASSERT(num_opt_pos_params > 0); |
| 981 __ movq(RCX, | 968 __ movq(RCX, |
| 982 FieldAddress(R10, ArgumentsDescriptor::positional_count_offset())); | 969 FieldAddress(R10, ArgumentsDescriptor::positional_count_offset())); |
| 983 __ SmiUntag(RCX); | 970 __ SmiUntag(RCX); |
| 984 for (int i = 0; i < num_opt_pos_params; i++) { | 971 for (int i = 0; i < num_opt_pos_params; i++) { |
| 985 Label next_parameter; | 972 Label next_parameter; |
| 986 // Handle this optional positional parameter only if k or fewer positional | 973 // Handle this optional positional parameter only if k or fewer positional |
| 987 // arguments have been passed, where k is param_pos, the position of this | 974 // arguments have been passed, where k is param_pos, the position of this |
| 988 // optional parameter in the formal parameter list. | 975 // optional parameter in the formal parameter list. |
| 989 const int param_pos = num_fixed_params + i; | 976 const int param_pos = num_fixed_params + i; |
| 990 __ cmpq(RCX, Immediate(param_pos)); | 977 __ cmpq(RCX, Immediate(param_pos)); |
| 991 __ j(GREATER, &next_parameter, Assembler::kNearJump); | 978 __ j(GREATER, &next_parameter, Assembler::kNearJump); |
| 992 // Load RAX with default argument. | 979 // Load RAX with default argument. |
| 993 const Object& value = Object::ZoneHandle( | 980 const Object& value = Object::ZoneHandle( |
| 994 parsed_function().default_parameter_values().At(i)); | 981 parsed_function().default_parameter_values().At(i)); |
| 995 __ LoadObject(RAX, value); | 982 __ LoadObject(RAX, value, PP); |
| 996 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. | 983 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. |
| 997 // We do not use the final allocation index of the variable here, i.e. | 984 // We do not use the final allocation index of the variable here, i.e. |
| 998 // scope->VariableAt(i)->index(), because captured variables still need | 985 // scope->VariableAt(i)->index(), because captured variables still need |
| 999 // to be copied to the context that is not yet allocated. | 986 // to be copied to the context that is not yet allocated. |
| 1000 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; | 987 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; |
| 1001 const Address param_addr(RBP, computed_param_pos * kWordSize); | 988 const Address param_addr(RBP, computed_param_pos * kWordSize); |
| 1002 __ movq(param_addr, RAX); | 989 __ movq(param_addr, RAX); |
| 1003 __ Bind(&next_parameter); | 990 __ Bind(&next_parameter); |
| 1004 } | 991 } |
| 1005 if (check_correct_named_args) { | 992 if (check_correct_named_args) { |
| 1006 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 993 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
| 1007 __ SmiUntag(RBX); | 994 __ SmiUntag(RBX); |
| 1008 // Check that RCX equals RBX, i.e. no named arguments passed. | 995 // Check that RCX equals RBX, i.e. no named arguments passed. |
| 1009 __ cmpq(RCX, RBX); | 996 __ cmpq(RCX, RBX); |
| 1010 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); | 997 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); |
| 1011 } | 998 } |
| 1012 } | 999 } |
| 1013 | 1000 |
| 1014 __ Bind(&wrong_num_arguments); | 1001 __ Bind(&wrong_num_arguments); |
| 1015 if (function.IsClosureFunction()) { | 1002 if (function.IsClosureFunction()) { |
| 1016 // Invoke noSuchMethod function passing "call" as the original name. | 1003 // Invoke noSuchMethod function passing "call" as the original name. |
| 1017 const int kNumArgsChecked = 1; | 1004 const int kNumArgsChecked = 1; |
| 1018 const ICData& ic_data = ICData::ZoneHandle( | 1005 const ICData& ic_data = ICData::ZoneHandle( |
| 1019 ICData::New(function, Symbols::Call(), Object::empty_array(), | 1006 ICData::New(function, Symbols::Call(), Object::empty_array(), |
| 1020 Isolate::kNoDeoptId, kNumArgsChecked)); | 1007 Isolate::kNoDeoptId, kNumArgsChecked)); |
| 1021 __ LoadObject(RBX, ic_data); | 1008 __ LoadObject(RBX, ic_data, PP); |
| 1022 __ LeaveFrame(); // The arguments are still on the stack. | 1009 __ LeaveFrameWithPP(); // The arguments are still on the stack. |
| 1023 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); | 1010 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); |
| 1024 // The noSuchMethod call may return to the caller, but not here. | 1011 // The noSuchMethod call may return to the caller, but not here. |
| 1025 __ int3(); | 1012 __ int3(); |
| 1026 } else if (check_correct_named_args) { | 1013 } else if (check_correct_named_args) { |
| 1027 __ Stop("Wrong arguments"); | 1014 __ Stop("Wrong arguments"); |
| 1028 } | 1015 } |
| 1029 | 1016 |
| 1030 __ Bind(&all_arguments_processed); | 1017 __ Bind(&all_arguments_processed); |
| 1031 // Nullify originally passed arguments only after they have been copied and | 1018 // Nullify originally passed arguments only after they have been copied and |
| 1032 // checked, otherwise noSuchMethod would not see their original values. | 1019 // checked, otherwise noSuchMethod would not see their original values. |
| 1033 // This step can be skipped in case we decide that formal parameters are | 1020 // This step can be skipped in case we decide that formal parameters are |
| 1034 // implicitly final, since garbage collecting the unmodified value is not | 1021 // implicitly final, since garbage collecting the unmodified value is not |
| 1035 // an issue anymore. | 1022 // an issue anymore. |
| 1036 | 1023 |
| 1037 // R10 : arguments descriptor array. | 1024 // R10 : arguments descriptor array. |
| 1038 __ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1025 __ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
| 1039 __ SmiUntag(RCX); | 1026 __ SmiUntag(RCX); |
| 1027 __ LoadObject(R12, Object::Handle(), PP); |
| 1040 Label null_args_loop, null_args_loop_condition; | 1028 Label null_args_loop, null_args_loop_condition; |
| 1041 __ jmp(&null_args_loop_condition, Assembler::kNearJump); | 1029 __ jmp(&null_args_loop_condition, Assembler::kNearJump); |
| 1042 const Address original_argument_addr( | 1030 const Address original_argument_addr( |
| 1043 RBP, RCX, TIMES_8, (kParamEndSlotFromFp + 1) * kWordSize); | 1031 RBP, RCX, TIMES_8, (kParamEndSlotFromFp + 1) * kWordSize); |
| 1044 __ Bind(&null_args_loop); | 1032 __ Bind(&null_args_loop); |
| 1045 __ movq(original_argument_addr, raw_null); | 1033 __ movq(original_argument_addr, R12); |
| 1046 __ Bind(&null_args_loop_condition); | 1034 __ Bind(&null_args_loop_condition); |
| 1047 __ decq(RCX); | 1035 __ decq(RCX); |
| 1048 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); | 1036 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); |
| 1049 } | 1037 } |
| 1050 | 1038 |
| 1051 | 1039 |
| 1052 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { | 1040 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { |
| 1053 // TOS: return address. | 1041 // TOS: return address. |
| 1054 // +1 : receiver. | 1042 // +1 : receiver. |
| 1055 // Sequence node has one return node, its input is load field node. | 1043 // Sequence node has one return node, its input is load field node. |
| 1056 __ movq(RAX, Address(RSP, 1 * kWordSize)); | 1044 __ movq(RAX, Address(RSP, 1 * kWordSize)); |
| 1057 __ movq(RAX, FieldAddress(RAX, offset)); | 1045 __ movq(RAX, FieldAddress(RAX, offset)); |
| 1058 __ ret(); | 1046 __ ret(); |
| 1059 } | 1047 } |
| 1060 | 1048 |
| 1061 | 1049 |
| 1062 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { | 1050 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { |
| 1063 // TOS: return address. | 1051 // TOS: return address. |
| 1064 // +1 : value | 1052 // +1 : value |
| 1065 // +2 : receiver. | 1053 // +2 : receiver. |
| 1066 // Sequence node has one store node and one return NULL node. | 1054 // Sequence node has one store node and one return NULL node. |
| 1067 __ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver. | 1055 __ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver. |
| 1068 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value. | 1056 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value. |
| 1069 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); | 1057 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); |
| 1070 const Immediate& raw_null = | 1058 __ LoadObject(RAX, Object::Handle(), PP); |
| 1071 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
| 1072 __ movq(RAX, raw_null); | |
| 1073 __ ret(); | 1059 __ ret(); |
| 1074 } | 1060 } |
| 1075 | 1061 |
| 1076 | 1062 |
| 1077 void FlowGraphCompiler::EmitFrameEntry() { | 1063 void FlowGraphCompiler::EmitFrameEntry() { |
| 1078 const Function& function = parsed_function().function(); | 1064 const Function& function = parsed_function().function(); |
| 1065 Register new_pp = kNoRegister; |
| 1066 Register new_pc = kNoRegister; |
| 1079 if (CanOptimizeFunction() && | 1067 if (CanOptimizeFunction() && |
| 1080 function.is_optimizable() && | 1068 function.is_optimizable() && |
| 1081 (!is_optimizing() || may_reoptimize())) { | 1069 (!is_optimizing() || may_reoptimize())) { |
| 1082 const Register function_reg = RDI; | 1070 const Register function_reg = RDI; |
| 1083 __ LoadObject(function_reg, function); | 1071 new_pp = R13; |
| 1072 new_pc = R12; |
| 1073 |
| 1074 Label next; |
| 1075 __ nop(4); // Need a fixed size sequence on frame entry. |
| 1076 __ call(&next); |
| 1077 __ Bind(&next); |
| 1078 |
| 1079 const intptr_t object_pool_pc_dist = |
| 1080 Instructions::HeaderSize() - Instructions::object_pool_offset() + |
| 1081 __ CodeSize(); |
| 1082 const intptr_t offset = |
| 1083 Assembler::kEntryPointToPcMarkerOffset - __ CodeSize(); |
| 1084 __ popq(new_pc); |
| 1085 if (offset != 0) { |
| 1086 __ addq(new_pc, Immediate(offset)); |
| 1087 } |
| 1088 |
| 1089 // Load callee's pool pointer. |
| 1090 __ movq(new_pp, Address(new_pc, -object_pool_pc_dist - offset)); |
| 1091 |
| 1092 // Load function object using the callee's pool pointer. |
| 1093 __ LoadObject(function_reg, function, new_pp); |
| 1094 |
| 1084 // Patch point is after the eventually inlined function object. | 1095 // Patch point is after the eventually inlined function object. |
| 1085 AddCurrentDescriptor(PcDescriptors::kEntryPatch, | 1096 AddCurrentDescriptor(PcDescriptors::kEntryPatch, |
| 1086 Isolate::kNoDeoptId, | 1097 Isolate::kNoDeoptId, |
| 1087 0); // No token position. | 1098 0); // No token position. |
| 1088 if (is_optimizing()) { | 1099 if (is_optimizing()) { |
| 1089 // Reoptimization of an optimized function is triggered by counting in | 1100 // Reoptimization of an optimized function is triggered by counting in |
| 1090 // IC stubs, but not at the entry of the function. | 1101 // IC stubs, but not at the entry of the function. |
| 1091 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()), | 1102 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()), |
| 1092 Immediate(FLAG_reoptimization_counter_threshold)); | 1103 Immediate(FLAG_reoptimization_counter_threshold)); |
| 1093 } else { | 1104 } else { |
| 1094 __ incq(FieldAddress(function_reg, Function::usage_counter_offset())); | 1105 __ incq(FieldAddress(function_reg, Function::usage_counter_offset())); |
| 1095 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()), | 1106 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()), |
| 1096 Immediate(FLAG_optimization_counter_threshold)); | 1107 Immediate(FLAG_optimization_counter_threshold)); |
| 1097 } | 1108 } |
| 1098 ASSERT(function_reg == RDI); | 1109 ASSERT(function_reg == RDI); |
| 1099 __ j(GREATER_EQUAL, &StubCode::OptimizeFunctionLabel()); | 1110 __ J(GREATER_EQUAL, &StubCode::OptimizeFunctionLabel(), R13); |
| 1100 } else if (!flow_graph().IsCompiledForOsr()) { | 1111 } else if (!flow_graph().IsCompiledForOsr()) { |
| 1112 // We have to load the PP here too because a load of an external label |
| 1113 // may be patched at the AddCurrentDescriptor below. |
| 1114 new_pp = R13; |
| 1115 new_pc = R12; |
| 1116 |
| 1117 Label next; |
| 1118 __ nop(4); // Need a fixed size sequence on frame entry. |
| 1119 __ call(&next); |
| 1120 __ Bind(&next); |
| 1121 |
| 1122 const intptr_t object_pool_pc_dist = |
| 1123 Instructions::HeaderSize() - Instructions::object_pool_offset() + |
| 1124 __ CodeSize(); |
| 1125 const intptr_t offset = |
| 1126 Assembler::kEntryPointToPcMarkerOffset - __ CodeSize(); |
| 1127 __ popq(new_pc); |
| 1128 if (offset != 0) { |
| 1129 __ addq(new_pc, Immediate(offset)); |
| 1130 } |
| 1131 |
| 1132 // Load callee's pool pointer. |
| 1133 __ movq(new_pp, Address(new_pc, -object_pool_pc_dist - offset)); |
| 1101 AddCurrentDescriptor(PcDescriptors::kEntryPatch, | 1134 AddCurrentDescriptor(PcDescriptors::kEntryPatch, |
| 1102 Isolate::kNoDeoptId, | 1135 Isolate::kNoDeoptId, |
| 1103 0); // No token position. | 1136 0); // No token position. |
| 1104 } | 1137 } |
| 1105 __ Comment("Enter frame"); | 1138 __ Comment("Enter frame"); |
| 1106 if (flow_graph().IsCompiledForOsr()) { | 1139 if (flow_graph().IsCompiledForOsr()) { |
| 1107 intptr_t extra_slots = StackSize() | 1140 intptr_t extra_slots = StackSize() |
| 1108 - flow_graph().num_stack_locals() | 1141 - flow_graph().num_stack_locals() |
| 1109 - flow_graph().num_copied_params(); | 1142 - flow_graph().num_copied_params(); |
| 1110 ASSERT(extra_slots >= 0); | 1143 ASSERT(extra_slots >= 0); |
| 1111 __ EnterOsrFrame(extra_slots * kWordSize); | 1144 __ EnterOsrFrame(extra_slots * kWordSize, new_pp, new_pc); |
| 1112 } else { | 1145 } else { |
| 1113 ASSERT(StackSize() >= 0); | 1146 ASSERT(StackSize() >= 0); |
| 1114 __ EnterDartFrame(StackSize() * kWordSize); | 1147 __ EnterDartFrameWithInfo(StackSize() * kWordSize, new_pp, new_pc); |
| 1115 } | 1148 } |
| 1116 } | 1149 } |
| 1117 | 1150 |
| 1118 | 1151 |
| 1119 void FlowGraphCompiler::CompileGraph() { | 1152 void FlowGraphCompiler::CompileGraph() { |
| 1120 InitCompiler(); | 1153 InitCompiler(); |
| 1121 | 1154 |
| 1122 TryIntrinsify(); | 1155 TryIntrinsify(); |
| 1123 | 1156 |
| 1124 EmitFrameEntry(); | 1157 EmitFrameEntry(); |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1158 // Invoke noSuchMethod function passing the original function name. | 1191 // Invoke noSuchMethod function passing the original function name. |
| 1159 // For closure functions, use "call" as the original name. | 1192 // For closure functions, use "call" as the original name. |
| 1160 const String& name = | 1193 const String& name = |
| 1161 String::Handle(function.IsClosureFunction() | 1194 String::Handle(function.IsClosureFunction() |
| 1162 ? Symbols::Call().raw() | 1195 ? Symbols::Call().raw() |
| 1163 : function.name()); | 1196 : function.name()); |
| 1164 const int kNumArgsChecked = 1; | 1197 const int kNumArgsChecked = 1; |
| 1165 const ICData& ic_data = ICData::ZoneHandle( | 1198 const ICData& ic_data = ICData::ZoneHandle( |
| 1166 ICData::New(function, name, Object::empty_array(), | 1199 ICData::New(function, name, Object::empty_array(), |
| 1167 Isolate::kNoDeoptId, kNumArgsChecked)); | 1200 Isolate::kNoDeoptId, kNumArgsChecked)); |
| 1168 __ LoadObject(RBX, ic_data); | 1201 __ LoadObject(RBX, ic_data, PP); |
| 1169 __ LeaveFrame(); // The arguments are still on the stack. | 1202 __ LeaveFrameWithPP(); // The arguments are still on the stack. |
| 1170 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); | 1203 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); |
| 1171 // The noSuchMethod call may return to the caller, but not here. | 1204 // The noSuchMethod call may return to the caller, but not here. |
| 1172 __ int3(); | 1205 __ int3(); |
| 1173 } else { | 1206 } else { |
| 1174 __ Stop("Wrong number of arguments"); | 1207 __ Stop("Wrong number of arguments"); |
| 1175 } | 1208 } |
| 1176 __ Bind(&correct_num_arguments); | 1209 __ Bind(&correct_num_arguments); |
| 1177 } | 1210 } |
| 1178 } else if (!flow_graph().IsCompiledForOsr()) { | 1211 } else if (!flow_graph().IsCompiledForOsr()) { |
| 1179 CopyParameters(); | 1212 CopyParameters(); |
| 1180 } | 1213 } |
| 1181 | 1214 |
| 1182 // In unoptimized code, initialize (non-argument) stack allocated slots to | 1215 // In unoptimized code, initialize (non-argument) stack allocated slots to |
| 1183 // null. | 1216 // null. |
| 1184 if (!is_optimizing() && (num_locals > 0)) { | 1217 if (!is_optimizing() && (num_locals > 0)) { |
| 1185 __ Comment("Initialize spill slots"); | 1218 __ Comment("Initialize spill slots"); |
| 1186 const intptr_t slot_base = parsed_function().first_stack_local_index(); | 1219 const intptr_t slot_base = parsed_function().first_stack_local_index(); |
| 1187 const Immediate& raw_null = | 1220 __ LoadObject(RAX, Object::Handle(), PP); |
| 1188 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
| 1189 __ movq(RAX, raw_null); | |
| 1190 for (intptr_t i = 0; i < num_locals; ++i) { | 1221 for (intptr_t i = 0; i < num_locals; ++i) { |
| 1191 // Subtract index i (locals lie at lower addresses than RBP). | 1222 // Subtract index i (locals lie at lower addresses than RBP). |
| 1192 __ movq(Address(RBP, (slot_base - i) * kWordSize), RAX); | 1223 __ movq(Address(RBP, (slot_base - i) * kWordSize), RAX); |
| 1193 } | 1224 } |
| 1194 } | 1225 } |
| 1195 | 1226 |
| 1196 if (FLAG_print_scopes) { | 1227 if (FLAG_print_scopes) { |
| 1197 // Print the function scope (again) after generating the prologue in order | 1228 // Print the function scope (again) after generating the prologue in order |
| 1198 // to see annotations such as allocation indices of locals. | 1229 // to see annotations such as allocation indices of locals. |
| 1199 if (FLAG_print_ast) { | 1230 if (FLAG_print_ast) { |
| 1200 // Second printing. | 1231 // Second printing. |
| 1201 OS::Print("Annotated "); | 1232 OS::Print("Annotated "); |
| 1202 } | 1233 } |
| 1203 AstPrinter::PrintFunctionScope(parsed_function()); | 1234 AstPrinter::PrintFunctionScope(parsed_function()); |
| 1204 } | 1235 } |
| 1205 | 1236 |
| 1206 ASSERT(!block_order().is_empty()); | 1237 ASSERT(!block_order().is_empty()); |
| 1207 VisitBlocks(); | 1238 VisitBlocks(); |
| 1208 | 1239 |
| 1209 __ int3(); | 1240 __ int3(); |
| 1210 GenerateDeferredCode(); | 1241 GenerateDeferredCode(); |
| 1211 // Emit function patching code. This will be swapped with the first 13 bytes | 1242 // Emit function patching code. This will be swapped with the first 13 bytes |
| 1212 // at entry point. | 1243 // at entry point. |
| 1213 AddCurrentDescriptor(PcDescriptors::kPatchCode, | 1244 AddCurrentDescriptor(PcDescriptors::kPatchCode, |
| 1214 Isolate::kNoDeoptId, | 1245 Isolate::kNoDeoptId, |
| 1215 0); // No token position. | 1246 0); // No token position. |
| 1216 __ jmp(&StubCode::FixCallersTargetLabel()); | 1247 // This is patched up to a point in FrameEntry where the PP for the |
| 1248 // current function is in R13 instead of PP. |
| 1249 __ JmpPatchable(&StubCode::FixCallersTargetLabel(), R13); |
| 1250 |
| 1251 // TODO(zra): Is this descriptor used? |
| 1217 AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump, | 1252 AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump, |
| 1218 Isolate::kNoDeoptId, | 1253 Isolate::kNoDeoptId, |
| 1219 0); // No token position. | 1254 0); // No token position. |
| 1220 __ jmp(&StubCode::DeoptimizeLazyLabel()); | 1255 __ Jmp(&StubCode::DeoptimizeLazyLabel(), PP); |
| 1221 } | 1256 } |
| 1222 | 1257 |
| 1223 | 1258 |
| 1224 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, | 1259 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, |
| 1225 const ExternalLabel* label, | 1260 const ExternalLabel* label, |
| 1226 PcDescriptors::Kind kind, | 1261 PcDescriptors::Kind kind, |
| 1227 LocationSummary* locs) { | 1262 LocationSummary* locs) { |
| 1228 __ call(label); | 1263 __ Call(label, PP); |
| 1229 AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos); | 1264 AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos); |
| 1230 RecordSafepoint(locs); | 1265 RecordSafepoint(locs); |
| 1231 } | 1266 } |
| 1232 | 1267 |
| 1233 | 1268 |
| 1234 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, | 1269 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, |
| 1235 intptr_t token_pos, | 1270 intptr_t token_pos, |
| 1236 const ExternalLabel* label, | 1271 const ExternalLabel* label, |
| 1237 PcDescriptors::Kind kind, | 1272 PcDescriptors::Kind kind, |
| 1238 LocationSummary* locs) { | 1273 LocationSummary* locs) { |
| 1239 __ call(label); | 1274 __ CallPatchable(label); |
| 1240 AddCurrentDescriptor(kind, deopt_id, token_pos); | 1275 AddCurrentDescriptor(kind, deopt_id, token_pos); |
| 1241 RecordSafepoint(locs); | 1276 RecordSafepoint(locs); |
| 1242 // Marks either the continuation point in unoptimized code or the | 1277 // Marks either the continuation point in unoptimized code or the |
| 1243 // deoptimization point in optimized code, after call. | 1278 // deoptimization point in optimized code, after call. |
| 1244 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id); | 1279 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id); |
| 1245 if (is_optimizing()) { | 1280 if (is_optimizing()) { |
| 1246 AddDeoptIndexAtCall(deopt_id_after, token_pos); | 1281 AddDeoptIndexAtCall(deopt_id_after, token_pos); |
| 1247 } else { | 1282 } else { |
| 1248 // Add deoptimization continuation point after the call and before the | 1283 // Add deoptimization continuation point after the call and before the |
| 1249 // arguments are removed. | 1284 // arguments are removed. |
| (...skipping 18 matching lines...) Expand all Loading... |
| 1268 AddDeoptIndexAtCall(deopt_id_after, token_pos); | 1303 AddDeoptIndexAtCall(deopt_id_after, token_pos); |
| 1269 } else { | 1304 } else { |
| 1270 // Add deoptimization continuation point after the call and before the | 1305 // Add deoptimization continuation point after the call and before the |
| 1271 // arguments are removed. | 1306 // arguments are removed. |
| 1272 AddCurrentDescriptor(PcDescriptors::kDeopt, deopt_id_after, token_pos); | 1307 AddCurrentDescriptor(PcDescriptors::kDeopt, deopt_id_after, token_pos); |
| 1273 } | 1308 } |
| 1274 } | 1309 } |
| 1275 } | 1310 } |
| 1276 | 1311 |
| 1277 | 1312 |
| 1313 void FlowGraphCompiler::EmitUnoptimizedStaticCall( |
| 1314 const Function& target_function, |
| 1315 const Array& arguments_descriptor, |
| 1316 intptr_t argument_count, |
| 1317 intptr_t deopt_id, |
| 1318 intptr_t token_pos, |
| 1319 LocationSummary* locs) { |
| 1320 // TODO(srdjan): Improve performance of function recognition. |
| 1321 MethodRecognizer::Kind recognized_kind = |
| 1322 MethodRecognizer::RecognizeKind(target_function); |
| 1323 int num_args_checked = 0; |
| 1324 if ((recognized_kind == MethodRecognizer::kMathMin) || |
| 1325 (recognized_kind == MethodRecognizer::kMathMax)) { |
| 1326 num_args_checked = 2; |
| 1327 } |
| 1328 const ICData& ic_data = ICData::ZoneHandle( |
| 1329 ICData::New(parsed_function().function(), // Caller function. |
| 1330 String::Handle(target_function.name()), |
| 1331 arguments_descriptor, |
| 1332 deopt_id, |
| 1333 num_args_checked)); // Number of arguments checked. |
| 1334 ic_data.AddTarget(target_function); |
| 1335 uword label_address = 0; |
| 1336 if (ic_data.num_args_tested() == 0) { |
| 1337 label_address = StubCode::ZeroArgsUnoptimizedStaticCallEntryPoint(); |
| 1338 } else if (ic_data.num_args_tested() == 2) { |
| 1339 label_address = StubCode::TwoArgsUnoptimizedStaticCallEntryPoint(); |
| 1340 } else { |
| 1341 UNIMPLEMENTED(); |
| 1342 } |
| 1343 ExternalLabel target_label("StaticCallICStub", label_address); |
| 1344 __ LoadObject(RBX, ic_data, PP); |
| 1345 GenerateDartCall(deopt_id, |
| 1346 token_pos, |
| 1347 &target_label, |
| 1348 PcDescriptors::kUnoptStaticCall, |
| 1349 locs); |
| 1350 __ Drop(argument_count); |
| 1351 } |
| 1352 |
| 1353 |
| 1278 void FlowGraphCompiler::EmitOptimizedInstanceCall( | 1354 void FlowGraphCompiler::EmitOptimizedInstanceCall( |
| 1279 ExternalLabel* target_label, | 1355 ExternalLabel* target_label, |
| 1280 const ICData& ic_data, | 1356 const ICData& ic_data, |
| 1281 intptr_t argument_count, | 1357 intptr_t argument_count, |
| 1282 intptr_t deopt_id, | 1358 intptr_t deopt_id, |
| 1283 intptr_t token_pos, | 1359 intptr_t token_pos, |
| 1284 LocationSummary* locs) { | 1360 LocationSummary* locs) { |
| 1285 // Each ICData propagated from unoptimized to optimized code contains the | 1361 // Each ICData propagated from unoptimized to optimized code contains the |
| 1286 // function that corresponds to the Dart function of that IC call. Due | 1362 // function that corresponds to the Dart function of that IC call. Due |
| 1287 // to inlining in optimized code, that function may not correspond to the | 1363 // to inlining in optimized code, that function may not correspond to the |
| 1288 // top-level function (parsed_function().function()) which could be | 1364 // top-level function (parsed_function().function()) which could be |
| 1289 // reoptimized and which counter needs to be incremented. | 1365 // reoptimized and which counter needs to be incremented. |
| 1290 // Pass the function explicitly, it is used in IC stub. | 1366 // Pass the function explicitly, it is used in IC stub. |
| 1291 __ LoadObject(RDI, parsed_function().function()); | 1367 __ LoadObject(RDI, parsed_function().function(), PP); |
| 1292 __ LoadObject(RBX, ic_data); | 1368 __ LoadObject(RBX, ic_data, PP); |
| 1293 GenerateDartCall(deopt_id, | 1369 GenerateDartCall(deopt_id, |
| 1294 token_pos, | 1370 token_pos, |
| 1295 target_label, | 1371 target_label, |
| 1296 PcDescriptors::kIcCall, | 1372 PcDescriptors::kIcCall, |
| 1297 locs); | 1373 locs); |
| 1298 __ Drop(argument_count); | 1374 __ Drop(argument_count); |
| 1299 } | 1375 } |
| 1300 | 1376 |
| 1301 | 1377 |
| 1302 void FlowGraphCompiler::EmitInstanceCall(ExternalLabel* target_label, | 1378 void FlowGraphCompiler::EmitInstanceCall(ExternalLabel* target_label, |
| 1303 const ICData& ic_data, | 1379 const ICData& ic_data, |
| 1304 intptr_t argument_count, | 1380 intptr_t argument_count, |
| 1305 intptr_t deopt_id, | 1381 intptr_t deopt_id, |
| 1306 intptr_t token_pos, | 1382 intptr_t token_pos, |
| 1307 LocationSummary* locs) { | 1383 LocationSummary* locs) { |
| 1308 __ LoadObject(RBX, ic_data); | 1384 __ LoadObject(RBX, ic_data, PP); |
| 1309 GenerateDartCall(deopt_id, | 1385 GenerateDartCall(deopt_id, |
| 1310 token_pos, | 1386 token_pos, |
| 1311 target_label, | 1387 target_label, |
| 1312 PcDescriptors::kIcCall, | 1388 PcDescriptors::kIcCall, |
| 1313 locs); | 1389 locs); |
| 1314 __ Drop(argument_count); | 1390 __ Drop(argument_count); |
| 1315 } | 1391 } |
| 1316 | 1392 |
| 1317 | 1393 |
| 1318 void FlowGraphCompiler::EmitMegamorphicInstanceCall( | 1394 void FlowGraphCompiler::EmitMegamorphicInstanceCall( |
| (...skipping 15 matching lines...) Expand all Loading... |
| 1334 __ j(NOT_ZERO, ¬_smi, Assembler::kNearJump); | 1410 __ j(NOT_ZERO, ¬_smi, Assembler::kNearJump); |
| 1335 __ movq(RAX, Immediate(Smi::RawValue(kSmiCid))); | 1411 __ movq(RAX, Immediate(Smi::RawValue(kSmiCid))); |
| 1336 __ jmp(&load_cache); | 1412 __ jmp(&load_cache); |
| 1337 | 1413 |
| 1338 __ Bind(¬_smi); | 1414 __ Bind(¬_smi); |
| 1339 __ LoadClassId(RAX, RAX); | 1415 __ LoadClassId(RAX, RAX); |
| 1340 __ SmiTag(RAX); | 1416 __ SmiTag(RAX); |
| 1341 | 1417 |
| 1342 // RAX: class ID of the receiver (smi). | 1418 // RAX: class ID of the receiver (smi). |
| 1343 __ Bind(&load_cache); | 1419 __ Bind(&load_cache); |
| 1344 __ LoadObject(RBX, cache); | 1420 __ LoadObject(RBX, cache, PP); |
| 1345 __ movq(RDI, FieldAddress(RBX, MegamorphicCache::buckets_offset())); | 1421 __ movq(RDI, FieldAddress(RBX, MegamorphicCache::buckets_offset())); |
| 1346 __ movq(RBX, FieldAddress(RBX, MegamorphicCache::mask_offset())); | 1422 __ movq(RBX, FieldAddress(RBX, MegamorphicCache::mask_offset())); |
| 1347 // RDI: cache buckets array. | 1423 // RDI: cache buckets array. |
| 1348 // RBX: mask. | 1424 // RBX: mask. |
| 1349 __ movq(RCX, RAX); | 1425 __ movq(RCX, RAX); |
| 1350 | 1426 |
| 1351 Label loop, update, call_target_function; | 1427 Label loop, update, call_target_function; |
| 1352 __ jmp(&loop); | 1428 __ jmp(&loop); |
| 1353 | 1429 |
| 1354 __ Bind(&update); | 1430 __ Bind(&update); |
| (...skipping 11 matching lines...) Expand all Loading... |
| 1366 __ j(NOT_EQUAL, &update, Assembler::kNearJump); | 1442 __ j(NOT_EQUAL, &update, Assembler::kNearJump); |
| 1367 | 1443 |
| 1368 __ Bind(&call_target_function); | 1444 __ Bind(&call_target_function); |
| 1369 // Call the target found in the cache. For a class id match, this is a | 1445 // Call the target found in the cache. For a class id match, this is a |
| 1370 // proper target for the given name and arguments descriptor. If the | 1446 // proper target for the given name and arguments descriptor. If the |
| 1371 // illegal class id was found, the target is a cache miss handler that can | 1447 // illegal class id was found, the target is a cache miss handler that can |
| 1372 // be invoked as a normal Dart function. | 1448 // be invoked as a normal Dart function. |
| 1373 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize)); | 1449 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize)); |
| 1374 __ movq(RAX, FieldAddress(RAX, Function::code_offset())); | 1450 __ movq(RAX, FieldAddress(RAX, Function::code_offset())); |
| 1375 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); | 1451 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); |
| 1376 __ LoadObject(RBX, ic_data); | 1452 __ LoadObject(RBX, ic_data, PP); |
| 1377 __ LoadObject(R10, arguments_descriptor); | 1453 __ LoadObject(R10, arguments_descriptor, PP); |
| 1378 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 1454 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
| 1379 __ call(RAX); | 1455 __ call(RAX); |
| 1380 AddCurrentDescriptor(PcDescriptors::kOther, Isolate::kNoDeoptId, token_pos); | 1456 AddCurrentDescriptor(PcDescriptors::kOther, Isolate::kNoDeoptId, token_pos); |
| 1381 RecordSafepoint(locs); | 1457 RecordSafepoint(locs); |
| 1382 AddDeoptIndexAtCall(Isolate::ToDeoptAfter(deopt_id), token_pos); | 1458 AddDeoptIndexAtCall(Isolate::ToDeoptAfter(deopt_id), token_pos); |
| 1383 __ Drop(argument_count); | 1459 __ Drop(argument_count); |
| 1384 } | 1460 } |
| 1385 | 1461 |
| 1386 | 1462 |
| 1387 void FlowGraphCompiler::EmitOptimizedStaticCall( | 1463 void FlowGraphCompiler::EmitOptimizedStaticCall( |
| 1388 const Function& function, | 1464 const Function& function, |
| 1389 const Array& arguments_descriptor, | 1465 const Array& arguments_descriptor, |
| 1390 intptr_t argument_count, | 1466 intptr_t argument_count, |
| 1391 intptr_t deopt_id, | 1467 intptr_t deopt_id, |
| 1392 intptr_t token_pos, | 1468 intptr_t token_pos, |
| 1393 LocationSummary* locs) { | 1469 LocationSummary* locs) { |
| 1394 __ LoadObject(R10, arguments_descriptor); | 1470 __ LoadObject(R10, arguments_descriptor, PP); |
| 1395 // Do not use the code from the function, but let the code be patched so that | 1471 // Do not use the code from the function, but let the code be patched so that |
| 1396 // we can record the outgoing edges to other code. | 1472 // we can record the outgoing edges to other code. |
| 1397 GenerateDartCall(deopt_id, | 1473 GenerateDartCall(deopt_id, |
| 1398 token_pos, | 1474 token_pos, |
| 1399 &StubCode::CallStaticFunctionLabel(), | 1475 &StubCode::CallStaticFunctionLabel(), |
| 1400 PcDescriptors::kOptStaticCall, | 1476 PcDescriptors::kOptStaticCall, |
| 1401 locs); | 1477 locs); |
| 1402 AddStaticCallTarget(function); | 1478 AddStaticCallTarget(function); |
| 1403 __ Drop(argument_count); | 1479 __ Drop(argument_count); |
| 1404 } | 1480 } |
| (...skipping 12 matching lines...) Expand all Loading... |
| 1417 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { | 1493 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { |
| 1418 ASSERT(!needs_number_check); | 1494 ASSERT(!needs_number_check); |
| 1419 __ testq(reg, reg); | 1495 __ testq(reg, reg); |
| 1420 return; | 1496 return; |
| 1421 } | 1497 } |
| 1422 | 1498 |
| 1423 if (needs_number_check) { | 1499 if (needs_number_check) { |
| 1424 __ pushq(reg); | 1500 __ pushq(reg); |
| 1425 __ PushObject(obj); | 1501 __ PushObject(obj); |
| 1426 if (is_optimizing()) { | 1502 if (is_optimizing()) { |
| 1427 __ call(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); | 1503 __ CallPatchable(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); |
| 1428 } else { | 1504 } else { |
| 1429 __ call(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); | 1505 __ CallPatchable(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); |
| 1430 } | 1506 } |
| 1431 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, | 1507 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, |
| 1432 Isolate::kNoDeoptId, | 1508 Isolate::kNoDeoptId, |
| 1433 token_pos); | 1509 token_pos); |
| 1434 __ popq(reg); // Discard constant. | 1510 __ popq(reg); // Discard constant. |
| 1435 __ popq(reg); // Restore 'reg'. | 1511 __ popq(reg); // Restore 'reg'. |
| 1436 return; | 1512 return; |
| 1437 } | 1513 } |
| 1438 | 1514 |
| 1439 __ CompareObject(reg, obj); | 1515 __ CompareObject(reg, obj); |
| 1440 } | 1516 } |
| 1441 | 1517 |
| 1442 | 1518 |
| 1443 void FlowGraphCompiler::EmitEqualityRegRegCompare(Register left, | 1519 void FlowGraphCompiler::EmitEqualityRegRegCompare(Register left, |
| 1444 Register right, | 1520 Register right, |
| 1445 bool needs_number_check, | 1521 bool needs_number_check, |
| 1446 intptr_t token_pos) { | 1522 intptr_t token_pos) { |
| 1447 if (needs_number_check) { | 1523 if (needs_number_check) { |
| 1448 __ pushq(left); | 1524 __ pushq(left); |
| 1449 __ pushq(right); | 1525 __ pushq(right); |
| 1450 if (is_optimizing()) { | 1526 if (is_optimizing()) { |
| 1451 __ call(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); | 1527 __ CallPatchable(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); |
| 1452 } else { | 1528 } else { |
| 1453 __ call(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); | 1529 __ CallPatchable(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); |
| 1454 } | 1530 } |
| 1455 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, | 1531 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, |
| 1456 Isolate::kNoDeoptId, | 1532 Isolate::kNoDeoptId, |
| 1457 token_pos); | 1533 token_pos); |
| 1458 // Stub returns result in flags (result of a cmpl, we need ZF computed). | 1534 // Stub returns result in flags (result of a cmpl, we need ZF computed). |
| 1459 __ popq(right); | 1535 __ popq(right); |
| 1460 __ popq(left); | 1536 __ popq(left); |
| 1461 } else { | 1537 } else { |
| 1462 __ cmpl(left, right); | 1538 __ cmpl(left, right); |
| 1463 } | 1539 } |
| (...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1533 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); | 1609 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); |
| 1534 Label match_found; | 1610 Label match_found; |
| 1535 const intptr_t len = ic_data.NumberOfChecks(); | 1611 const intptr_t len = ic_data.NumberOfChecks(); |
| 1536 GrowableArray<CidTarget> sorted(len); | 1612 GrowableArray<CidTarget> sorted(len); |
| 1537 SortICDataByCount(ic_data, &sorted); | 1613 SortICDataByCount(ic_data, &sorted); |
| 1538 ASSERT(class_id_reg != R10); | 1614 ASSERT(class_id_reg != R10); |
| 1539 ASSERT(len > 0); // Why bother otherwise. | 1615 ASSERT(len > 0); // Why bother otherwise. |
| 1540 const Array& arguments_descriptor = | 1616 const Array& arguments_descriptor = |
| 1541 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, | 1617 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, |
| 1542 argument_names)); | 1618 argument_names)); |
| 1543 __ LoadObject(R10, arguments_descriptor); | 1619 __ LoadObject(R10, arguments_descriptor, PP); |
| 1544 for (intptr_t i = 0; i < len; i++) { | 1620 for (intptr_t i = 0; i < len; i++) { |
| 1545 const bool is_last_check = (i == (len - 1)); | 1621 const bool is_last_check = (i == (len - 1)); |
| 1546 Label next_test; | 1622 Label next_test; |
| 1547 assembler()->cmpl(class_id_reg, Immediate(sorted[i].cid)); | 1623 assembler()->cmpl(class_id_reg, Immediate(sorted[i].cid)); |
| 1548 if (is_last_check) { | 1624 if (is_last_check) { |
| 1549 assembler()->j(NOT_EQUAL, deopt); | 1625 assembler()->j(NOT_EQUAL, deopt); |
| 1550 } else { | 1626 } else { |
| 1551 assembler()->j(NOT_EQUAL, &next_test); | 1627 assembler()->j(NOT_EQUAL, &next_test); |
| 1552 } | 1628 } |
| 1553 // Do not use the code from the function, but let the code be patched so | 1629 // Do not use the code from the function, but let the code be patched so |
| (...skipping 21 matching lines...) Expand all Loading... |
| 1575 BranchInstr* branch) { | 1651 BranchInstr* branch) { |
| 1576 ASSERT(branch != NULL); | 1652 ASSERT(branch != NULL); |
| 1577 assembler()->comisd(left, right); | 1653 assembler()->comisd(left, right); |
| 1578 BlockEntryInstr* nan_result = (true_condition == NOT_EQUAL) ? | 1654 BlockEntryInstr* nan_result = (true_condition == NOT_EQUAL) ? |
| 1579 branch->true_successor() : branch->false_successor(); | 1655 branch->true_successor() : branch->false_successor(); |
| 1580 assembler()->j(PARITY_EVEN, GetJumpLabel(nan_result)); | 1656 assembler()->j(PARITY_EVEN, GetJumpLabel(nan_result)); |
| 1581 branch->EmitBranchOnCondition(this, true_condition); | 1657 branch->EmitBranchOnCondition(this, true_condition); |
| 1582 } | 1658 } |
| 1583 | 1659 |
| 1584 | 1660 |
| 1585 | |
| 1586 void FlowGraphCompiler::EmitDoubleCompareBool(Condition true_condition, | 1661 void FlowGraphCompiler::EmitDoubleCompareBool(Condition true_condition, |
| 1587 FpuRegister left, | 1662 FpuRegister left, |
| 1588 FpuRegister right, | 1663 FpuRegister right, |
| 1589 Register result) { | 1664 Register result) { |
| 1590 assembler()->comisd(left, right); | 1665 assembler()->comisd(left, right); |
| 1591 Label is_false, is_true, done; | 1666 Label is_false, is_true, done; |
| 1592 assembler()->j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN false; | 1667 assembler()->j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN false; |
| 1593 assembler()->j(true_condition, &is_true, Assembler::kNearJump); | 1668 assembler()->j(true_condition, &is_true, Assembler::kNearJump); |
| 1594 assembler()->Bind(&is_false); | 1669 assembler()->Bind(&is_false); |
| 1595 assembler()->LoadObject(result, Bool::False()); | 1670 assembler()->LoadObject(result, Bool::False(), PP); |
| 1596 assembler()->jmp(&done); | 1671 assembler()->jmp(&done); |
| 1597 assembler()->Bind(&is_true); | 1672 assembler()->Bind(&is_true); |
| 1598 assembler()->LoadObject(result, Bool::True()); | 1673 assembler()->LoadObject(result, Bool::True(), PP); |
| 1599 assembler()->Bind(&done); | 1674 assembler()->Bind(&done); |
| 1600 } | 1675 } |
| 1601 | 1676 |
| 1602 | 1677 |
| 1603 FieldAddress FlowGraphCompiler::ElementAddressForIntIndex(intptr_t cid, | 1678 FieldAddress FlowGraphCompiler::ElementAddressForIntIndex(intptr_t cid, |
| 1604 intptr_t index_scale, | 1679 intptr_t index_scale, |
| 1605 Register array, | 1680 Register array, |
| 1606 intptr_t index) { | 1681 intptr_t index) { |
| 1607 const int64_t disp = | 1682 const int64_t disp = |
| 1608 static_cast<int64_t>(index) * index_scale + DataOffsetFor(cid); | 1683 static_cast<int64_t>(index) * index_scale + DataOffsetFor(cid); |
| (...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1709 __ movups(XMM0, source.ToStackSlotAddress()); | 1784 __ movups(XMM0, source.ToStackSlotAddress()); |
| 1710 __ movups(destination.ToStackSlotAddress(), XMM0); | 1785 __ movups(destination.ToStackSlotAddress(), XMM0); |
| 1711 } | 1786 } |
| 1712 } else { | 1787 } else { |
| 1713 ASSERT(source.IsConstant()); | 1788 ASSERT(source.IsConstant()); |
| 1714 if (destination.IsRegister()) { | 1789 if (destination.IsRegister()) { |
| 1715 const Object& constant = source.constant(); | 1790 const Object& constant = source.constant(); |
| 1716 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) { | 1791 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) { |
| 1717 __ xorq(destination.reg(), destination.reg()); | 1792 __ xorq(destination.reg(), destination.reg()); |
| 1718 } else { | 1793 } else { |
| 1719 __ LoadObject(destination.reg(), constant); | 1794 __ LoadObject(destination.reg(), constant, PP); |
| 1720 } | 1795 } |
| 1721 } else { | 1796 } else { |
| 1722 ASSERT(destination.IsStackSlot()); | 1797 ASSERT(destination.IsStackSlot()); |
| 1723 StoreObject(destination.ToStackSlotAddress(), source.constant()); | 1798 StoreObject(destination.ToStackSlotAddress(), source.constant()); |
| 1724 } | 1799 } |
| 1725 } | 1800 } |
| 1726 | 1801 |
| 1727 move->Eliminate(); | 1802 move->Eliminate(); |
| 1728 } | 1803 } |
| 1729 | 1804 |
| (...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1858 __ movups(reg, Address(RSP, 0)); | 1933 __ movups(reg, Address(RSP, 0)); |
| 1859 __ addq(RSP, Immediate(kFpuRegisterSize)); | 1934 __ addq(RSP, Immediate(kFpuRegisterSize)); |
| 1860 } | 1935 } |
| 1861 | 1936 |
| 1862 | 1937 |
| 1863 #undef __ | 1938 #undef __ |
| 1864 | 1939 |
| 1865 } // namespace dart | 1940 } // namespace dart |
| 1866 | 1941 |
| 1867 #endif // defined TARGET_ARCH_X64 | 1942 #endif // defined TARGET_ARCH_X64 |
| OLD | NEW |