OLD | NEW |
---|---|
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
9 | 9 |
10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
59 intptr_t slot_ix = 0; | 59 intptr_t slot_ix = 0; |
60 Environment* current = deopt_env_; | 60 Environment* current = deopt_env_; |
61 | 61 |
62 // Emit all kMaterializeObject instructions describing objects to be | 62 // Emit all kMaterializeObject instructions describing objects to be |
63 // materialized on the deoptimization as a prefix to the deoptimization info. | 63 // materialized on the deoptimization as a prefix to the deoptimization info. |
64 EmitMaterializations(deopt_env_, builder); | 64 EmitMaterializations(deopt_env_, builder); |
65 | 65 |
66 // The real frame starts here. | 66 // The real frame starts here. |
67 builder->MarkFrameStart(); | 67 builder->MarkFrameStart(); |
68 | 68 |
69 // Callee's PC marker is not used anymore. Pass Function::null() to set to 0. | 69 // Current PP, FP, and PC. |
70 builder->AddPp(current->function(), slot_ix++); | |
70 builder->AddPcMarker(Function::Handle(), slot_ix++); | 71 builder->AddPcMarker(Function::Handle(), slot_ix++); |
71 | |
72 // Current FP and PC. | |
73 builder->AddCallerFp(slot_ix++); | 72 builder->AddCallerFp(slot_ix++); |
74 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++); | 73 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++); |
75 | 74 |
76 // Emit all values that are needed for materialization as a part of the | 75 // Emit all values that are needed for materialization as a part of the |
77 // expression stack for the bottom-most frame. This guarantees that GC | 76 // expression stack for the bottom-most frame. This guarantees that GC |
78 // will be able to find them during materialization. | 77 // will be able to find them during materialization. |
79 slot_ix = builder->EmitMaterializationArguments(slot_ix); | 78 slot_ix = builder->EmitMaterializationArguments(slot_ix); |
80 | 79 |
81 // For the innermost environment, set outgoing arguments and the locals. | 80 // For the innermost environment, set outgoing arguments and the locals. |
82 for (intptr_t i = current->Length() - 1; | 81 for (intptr_t i = current->Length() - 1; |
83 i >= current->fixed_parameter_count(); | 82 i >= current->fixed_parameter_count(); |
84 i--) { | 83 i--) { |
85 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++); | 84 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++); |
86 } | 85 } |
87 | 86 |
88 // Current PC marker and caller FP. | |
89 builder->AddPcMarker(current->function(), slot_ix++); | |
90 builder->AddCallerFp(slot_ix++); | |
91 | |
92 Environment* previous = current; | 87 Environment* previous = current; |
93 current = current->outer(); | 88 current = current->outer(); |
94 while (current != NULL) { | 89 while (current != NULL) { |
90 // PP, FP, and PC. | |
91 builder->AddPp(current->function(), slot_ix++); | |
92 builder->AddPcMarker(previous->function(), slot_ix++); | |
93 builder->AddCallerFp(slot_ix++); | |
94 | |
95 // For any outer environment the deopt id is that of the call instruction | 95 // For any outer environment the deopt id is that of the call instruction |
96 // which is recorded in the outer environment. | 96 // which is recorded in the outer environment. |
97 builder->AddReturnAddress(current->function(), | 97 builder->AddReturnAddress(current->function(), |
98 Isolate::ToDeoptAfter(current->deopt_id()), | 98 Isolate::ToDeoptAfter(current->deopt_id()), |
99 slot_ix++); | 99 slot_ix++); |
100 | 100 |
101 // The values of outgoing arguments can be changed from the inlined call so | 101 // The values of outgoing arguments can be changed from the inlined call so |
102 // we must read them from the previous environment. | 102 // we must read them from the previous environment. |
103 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { | 103 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { |
104 builder->AddCopy(previous->ValueAt(i), | 104 builder->AddCopy(previous->ValueAt(i), |
105 previous->LocationAt(i), | 105 previous->LocationAt(i), |
106 slot_ix++); | 106 slot_ix++); |
107 } | 107 } |
108 | 108 |
109 // Set the locals, note that outgoing arguments are not in the environment. | 109 // Set the locals, note that outgoing arguments are not in the environment. |
110 for (intptr_t i = current->Length() - 1; | 110 for (intptr_t i = current->Length() - 1; |
111 i >= current->fixed_parameter_count(); | 111 i >= current->fixed_parameter_count(); |
112 i--) { | 112 i--) { |
113 builder->AddCopy(current->ValueAt(i), | 113 builder->AddCopy(current->ValueAt(i), |
114 current->LocationAt(i), | 114 current->LocationAt(i), |
115 slot_ix++); | 115 slot_ix++); |
116 } | 116 } |
117 | 117 |
118 // PC marker and caller FP. | |
119 builder->AddPcMarker(current->function(), slot_ix++); | |
120 builder->AddCallerFp(slot_ix++); | |
121 | |
122 // Iterate on the outer environment. | 118 // Iterate on the outer environment. |
123 previous = current; | 119 previous = current; |
124 current = current->outer(); | 120 current = current->outer(); |
125 } | 121 } |
126 // The previous pointer is now the outermost environment. | 122 // The previous pointer is now the outermost environment. |
127 ASSERT(previous != NULL); | 123 ASSERT(previous != NULL); |
128 | 124 |
129 // For the outermost environment, set caller PC. | 125 // For the outermost environment, set caller PC, caller PP, and caller FP. |
126 builder->AddCallerPp(slot_ix++); | |
127 // PC marker. | |
128 builder->AddPcMarker(previous->function(), slot_ix++); | |
129 builder->AddCallerFp(slot_ix++); | |
130 builder->AddCallerPc(slot_ix++); | 130 builder->AddCallerPc(slot_ix++); |
131 | 131 |
132 // For the outermost environment, set the incoming arguments. | 132 // For the outermost environment, set the incoming arguments. |
133 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { | 133 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { |
134 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); | 134 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); |
135 } | 135 } |
136 | 136 |
137 const DeoptInfo& deopt_info = DeoptInfo::Handle(builder->CreateDeoptInfo()); | 137 const DeoptInfo& deopt_info = DeoptInfo::Handle(builder->CreateDeoptInfo()); |
138 return deopt_info.raw(); | 138 return deopt_info.raw(); |
139 } | 139 } |
140 | 140 |
141 | 141 |
142 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler, | 142 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler, |
143 intptr_t stub_ix) { | 143 intptr_t stub_ix) { |
144 // Calls do not need stubs, they share a deoptimization trampoline. | 144 // Calls do not need stubs, they share a deoptimization trampoline. |
145 ASSERT(reason() != kDeoptAtCall); | 145 ASSERT(reason() != kDeoptAtCall); |
146 Assembler* assem = compiler->assembler(); | 146 Assembler* assem = compiler->assembler(); |
147 #define __ assem-> | 147 #define __ assem-> |
148 __ Comment("Deopt stub for id %" Pd "", deopt_id()); | 148 __ Comment("Deopt stub for id %" Pd "", deopt_id()); |
149 __ Bind(entry_label()); | 149 __ Bind(entry_label()); |
150 if (FLAG_trap_on_deoptimization) __ int3(); | 150 if (FLAG_trap_on_deoptimization) __ int3(); |
151 | 151 |
152 ASSERT(deopt_env() != NULL); | 152 ASSERT(deopt_env() != NULL); |
153 | 153 |
154 __ call(&StubCode::DeoptimizeLabel()); | 154 __ CallFromPool(&StubCode::DeoptimizeLabel()); |
155 set_pc_offset(assem->CodeSize()); | 155 set_pc_offset(assem->CodeSize()); |
156 __ int3(); | 156 __ int3(); |
157 #undef __ | 157 #undef __ |
158 } | 158 } |
159 | 159 |
160 | 160 |
161 #define __ assembler()-> | 161 #define __ assembler()-> |
162 | 162 |
163 | 163 |
164 // Fall through if bool_register contains null. | 164 // Fall through if bool_register contains null. |
165 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, | 165 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, |
166 Label* is_true, | 166 Label* is_true, |
167 Label* is_false) { | 167 Label* is_false) { |
168 const Immediate& raw_null = | |
169 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
170 Label fall_through; | 168 Label fall_through; |
171 __ cmpq(bool_register, raw_null); | 169 __ CompareObject(bool_register, Object::Handle(Object::null())); |
srdjan
2013/09/04 22:57:23
Object::Handle() is same as Object::Handle(Object::null()).
zra
2013/09/05 00:23:11
Done.
| |
172 __ j(EQUAL, &fall_through, Assembler::kNearJump); | 170 __ j(EQUAL, &fall_through, Assembler::kNearJump); |
173 __ CompareObject(bool_register, Bool::True()); | 171 __ CompareObject(bool_register, Bool::True()); |
174 __ j(EQUAL, is_true); | 172 __ j(EQUAL, is_true); |
175 __ jmp(is_false); | 173 __ jmp(is_false); |
176 __ Bind(&fall_through); | 174 __ Bind(&fall_through); |
177 } | 175 } |
178 | 176 |
179 | 177 |
180 // Clobbers RCX. | 178 // Clobbers RCX. |
181 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( | 179 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( |
182 TypeTestStubKind test_kind, | 180 TypeTestStubKind test_kind, |
183 Register instance_reg, | 181 Register instance_reg, |
184 Register type_arguments_reg, | 182 Register type_arguments_reg, |
185 Register temp_reg, | 183 Register temp_reg, |
186 Label* is_instance_lbl, | 184 Label* is_instance_lbl, |
187 Label* is_not_instance_lbl) { | 185 Label* is_not_instance_lbl) { |
188 const SubtypeTestCache& type_test_cache = | 186 const SubtypeTestCache& type_test_cache = |
189 SubtypeTestCache::ZoneHandle(SubtypeTestCache::New()); | 187 SubtypeTestCache::ZoneHandle(SubtypeTestCache::New()); |
190 const Immediate& raw_null = | 188 __ LoadObjectFromPool(temp_reg, type_test_cache, |
191 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 189 Assembler::kNotPatchable, PP); |
192 __ LoadObject(temp_reg, type_test_cache); | |
193 __ pushq(temp_reg); // Subtype test cache. | 190 __ pushq(temp_reg); // Subtype test cache. |
194 __ pushq(instance_reg); // Instance. | 191 __ pushq(instance_reg); // Instance. |
195 if (test_kind == kTestTypeOneArg) { | 192 if (test_kind == kTestTypeOneArg) { |
196 ASSERT(type_arguments_reg == kNoRegister); | 193 ASSERT(type_arguments_reg == kNoRegister); |
197 __ pushq(raw_null); | 194 __ PushObject(Object::Handle(Object::null())); |
198 __ call(&StubCode::Subtype1TestCacheLabel()); | 195 __ CallFromPool(&StubCode::Subtype1TestCacheLabel()); |
199 } else if (test_kind == kTestTypeTwoArgs) { | 196 } else if (test_kind == kTestTypeTwoArgs) { |
200 ASSERT(type_arguments_reg == kNoRegister); | 197 ASSERT(type_arguments_reg == kNoRegister); |
201 __ pushq(raw_null); | 198 __ PushObject(Object::Handle(Object::null())); |
202 __ call(&StubCode::Subtype2TestCacheLabel()); | 199 __ CallFromPool(&StubCode::Subtype2TestCacheLabel()); |
203 } else if (test_kind == kTestTypeThreeArgs) { | 200 } else if (test_kind == kTestTypeThreeArgs) { |
204 __ pushq(type_arguments_reg); | 201 __ pushq(type_arguments_reg); |
205 __ call(&StubCode::Subtype3TestCacheLabel()); | 202 __ CallFromPool(&StubCode::Subtype3TestCacheLabel()); |
206 } else { | 203 } else { |
207 UNREACHABLE(); | 204 UNREACHABLE(); |
208 } | 205 } |
209 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False. | 206 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False. |
210 ASSERT(instance_reg != RCX); | 207 ASSERT(instance_reg != RCX); |
211 ASSERT(temp_reg != RCX); | 208 ASSERT(temp_reg != RCX); |
212 __ popq(instance_reg); // Discard. | 209 __ popq(instance_reg); // Discard. |
213 __ popq(instance_reg); // Restore receiver. | 210 __ popq(instance_reg); // Restore receiver. |
214 __ popq(temp_reg); // Discard. | 211 __ popq(temp_reg); // Discard. |
215 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl); | 212 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl); |
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
340 // interfaces. | 337 // interfaces. |
341 // Bool interface can be implemented only by core class Bool. | 338 // Bool interface can be implemented only by core class Bool. |
342 if (type.IsBoolType()) { | 339 if (type.IsBoolType()) { |
343 __ cmpl(kClassIdReg, Immediate(kBoolCid)); | 340 __ cmpl(kClassIdReg, Immediate(kBoolCid)); |
344 __ j(EQUAL, is_instance_lbl); | 341 __ j(EQUAL, is_instance_lbl); |
345 __ jmp(is_not_instance_lbl); | 342 __ jmp(is_not_instance_lbl); |
346 return false; | 343 return false; |
347 } | 344 } |
348 if (type.IsFunctionType()) { | 345 if (type.IsFunctionType()) { |
349 // Check if instance is a closure. | 346 // Check if instance is a closure. |
350 const Immediate& raw_null = | |
351 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
352 __ LoadClassById(R13, kClassIdReg); | 347 __ LoadClassById(R13, kClassIdReg); |
353 __ movq(R13, FieldAddress(R13, Class::signature_function_offset())); | 348 __ movq(R13, FieldAddress(R13, Class::signature_function_offset())); |
354 __ cmpq(R13, raw_null); | 349 __ CompareObject(R13, Object::Handle(Object::null())); |
355 __ j(NOT_EQUAL, is_instance_lbl); | 350 __ j(NOT_EQUAL, is_instance_lbl); |
356 } | 351 } |
357 // Custom checking for numbers (Smi, Mint, Bigint and Double). | 352 // Custom checking for numbers (Smi, Mint, Bigint and Double). |
358 // Note that instance is not Smi (checked above). | 353 // Note that instance is not Smi (checked above). |
359 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) { | 354 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) { |
360 GenerateNumberTypeCheck( | 355 GenerateNumberTypeCheck( |
361 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); | 356 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); |
362 return false; | 357 return false; |
363 } | 358 } |
364 if (type.IsStringType()) { | 359 if (type.IsStringType()) { |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
407 // RAX: instance (preserved). | 402 // RAX: instance (preserved). |
408 // Clobbers RDI, RDX, R10. | 403 // Clobbers RDI, RDX, R10. |
409 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( | 404 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( |
410 intptr_t token_pos, | 405 intptr_t token_pos, |
411 const AbstractType& type, | 406 const AbstractType& type, |
412 Label* is_instance_lbl, | 407 Label* is_instance_lbl, |
413 Label* is_not_instance_lbl) { | 408 Label* is_not_instance_lbl) { |
414 __ Comment("UninstantiatedTypeTest"); | 409 __ Comment("UninstantiatedTypeTest"); |
415 ASSERT(!type.IsInstantiated()); | 410 ASSERT(!type.IsInstantiated()); |
416 // Skip check if destination is a dynamic type. | 411 // Skip check if destination is a dynamic type. |
417 const Immediate& raw_null = | |
418 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
419 if (type.IsTypeParameter()) { | 412 if (type.IsTypeParameter()) { |
420 const TypeParameter& type_param = TypeParameter::Cast(type); | 413 const TypeParameter& type_param = TypeParameter::Cast(type); |
421 // Load instantiator (or null) and instantiator type arguments on stack. | 414 // Load instantiator (or null) and instantiator type arguments on stack. |
422 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. | 415 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. |
423 // RDX: instantiator type arguments. | 416 // RDX: instantiator type arguments. |
424 // Check if type argument is dynamic. | 417 // Check if type argument is dynamic. |
425 __ cmpq(RDX, raw_null); | 418 __ CompareObject(RDX, Object::Handle(Object::null())); |
426 __ j(EQUAL, is_instance_lbl); | 419 __ j(EQUAL, is_instance_lbl); |
427 // Can handle only type arguments that are instances of TypeArguments. | 420 // Can handle only type arguments that are instances of TypeArguments. |
428 // (runtime checks canonicalize type arguments). | 421 // (runtime checks canonicalize type arguments). |
429 Label fall_through; | 422 Label fall_through; |
430 __ CompareClassId(RDX, kTypeArgumentsCid); | 423 __ CompareClassId(RDX, kTypeArgumentsCid); |
431 __ j(NOT_EQUAL, &fall_through); | 424 __ j(NOT_EQUAL, &fall_through); |
432 __ movq(RDI, | 425 __ movq(RDI, |
433 FieldAddress(RDX, TypeArguments::type_at_offset(type_param.index()))); | 426 FieldAddress(RDX, TypeArguments::type_at_offset(type_param.index()))); |
434 // RDI: Concrete type of type. | 427 // RDI: Concrete type of type. |
435 // Check if type argument is dynamic. | 428 // Check if type argument is dynamic. |
436 __ CompareObject(RDI, Type::ZoneHandle(Type::DynamicType())); | 429 __ CompareObject(RDI, Type::ZoneHandle(Type::DynamicType())); |
437 __ j(EQUAL, is_instance_lbl); | 430 __ j(EQUAL, is_instance_lbl); |
438 __ cmpq(RDI, raw_null); | 431 __ CompareObject(RDI, Object::Handle(Object::null())); |
439 __ j(EQUAL, is_instance_lbl); | 432 __ j(EQUAL, is_instance_lbl); |
440 const Type& object_type = Type::ZoneHandle(Type::ObjectType()); | 433 const Type& object_type = Type::ZoneHandle(Type::ObjectType()); |
441 __ CompareObject(RDI, object_type); | 434 __ CompareObject(RDI, object_type); |
442 __ j(EQUAL, is_instance_lbl); | 435 __ j(EQUAL, is_instance_lbl); |
443 | 436 |
444 // For Smi check quickly against int and num interfaces. | 437 // For Smi check quickly against int and num interfaces. |
445 Label not_smi; | 438 Label not_smi; |
446 __ testq(RAX, Immediate(kSmiTagMask)); // Value is Smi? | 439 __ testq(RAX, Immediate(kSmiTagMask)); // Value is Smi? |
447 __ j(NOT_ZERO, ¬_smi, Assembler::kNearJump); | 440 __ j(NOT_ZERO, ¬_smi, Assembler::kNearJump); |
448 __ CompareObject(RDI, Type::ZoneHandle(Type::IntType())); | 441 __ CompareObject(RDI, Type::ZoneHandle(Type::IntType())); |
(...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
568 // Clobbers RCX and RDX. | 561 // Clobbers RCX and RDX. |
569 // Returns: | 562 // Returns: |
570 // - true or false in RAX. | 563 // - true or false in RAX. |
571 void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos, | 564 void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos, |
572 intptr_t deopt_id, | 565 intptr_t deopt_id, |
573 const AbstractType& type, | 566 const AbstractType& type, |
574 bool negate_result, | 567 bool negate_result, |
575 LocationSummary* locs) { | 568 LocationSummary* locs) { |
576 ASSERT(type.IsFinalized() && !type.IsMalformed() && !type.IsMalbounded()); | 569 ASSERT(type.IsFinalized() && !type.IsMalformed() && !type.IsMalbounded()); |
577 | 570 |
578 const Immediate& raw_null = | |
579 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
580 Label is_instance, is_not_instance; | 571 Label is_instance, is_not_instance; |
581 __ pushq(RCX); // Store instantiator on stack. | 572 __ pushq(RCX); // Store instantiator on stack. |
582 __ pushq(RDX); // Store instantiator type arguments. | 573 __ pushq(RDX); // Store instantiator type arguments. |
583 // If type is instantiated and non-parameterized, we can inline code | 574 // If type is instantiated and non-parameterized, we can inline code |
584 // checking whether the tested instance is a Smi. | 575 // checking whether the tested instance is a Smi. |
585 if (type.IsInstantiated()) { | 576 if (type.IsInstantiated()) { |
586 // A null object is only an instance of Object and dynamic, which has | 577 // A null object is only an instance of Object and dynamic, which has |
587 // already been checked above (if the type is instantiated). So we can | 578 // already been checked above (if the type is instantiated). So we can |
588 // return false here if the instance is null (and if the type is | 579 // return false here if the instance is null (and if the type is |
589 // instantiated). | 580 // instantiated). |
590 // We can only inline this null check if the type is instantiated at compile | 581 // We can only inline this null check if the type is instantiated at compile |
591 // time, since an uninstantiated type at compile time could be Object or | 582 // time, since an uninstantiated type at compile time could be Object or |
592 // dynamic at run time. | 583 // dynamic at run time. |
593 __ cmpq(RAX, raw_null); | 584 __ CompareObject(RAX, Object::Handle(Object::null())); |
594 __ j(EQUAL, &is_not_instance); | 585 __ j(EQUAL, &is_not_instance); |
595 } | 586 } |
596 | 587 |
597 // Generate inline instanceof test. | 588 // Generate inline instanceof test. |
598 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(); | 589 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(); |
599 test_cache = GenerateInlineInstanceof(token_pos, type, | 590 test_cache = GenerateInlineInstanceof(token_pos, type, |
600 &is_instance, &is_not_instance); | 591 &is_instance, &is_not_instance); |
601 | 592 |
602 // test_cache is null if there is no fall-through. | 593 // test_cache is null if there is no fall-through. |
603 Label done; | 594 Label done; |
604 if (!test_cache.IsNull()) { | 595 if (!test_cache.IsNull()) { |
605 // Generate runtime call. | 596 // Generate runtime call. |
606 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. | 597 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. |
607 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. | 598 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. |
608 __ PushObject(Object::ZoneHandle()); // Make room for the result. | 599 __ PushObject(Object::ZoneHandle()); // Make room for the result. |
609 __ pushq(RAX); // Push the instance. | 600 __ pushq(RAX); // Push the instance. |
610 __ PushObject(type); // Push the type. | 601 __ PushObject(type); // Push the type. |
611 __ pushq(RCX); // TODO(srdjan): Pass instantiator instead of null. | 602 __ pushq(RCX); // TODO(srdjan): Pass instantiator instead of null. |
612 __ pushq(RDX); // Instantiator type arguments. | 603 __ pushq(RDX); // Instantiator type arguments. |
613 __ LoadObject(RAX, test_cache); | 604 __ LoadObjectFromPool(RAX, test_cache, Assembler::kNotPatchable, PP); |
614 __ pushq(RAX); | 605 __ pushq(RAX); |
615 GenerateCallRuntime(token_pos, | 606 GenerateCallRuntime(token_pos, |
616 deopt_id, | 607 deopt_id, |
617 kInstanceofRuntimeEntry, | 608 kInstanceofRuntimeEntry, |
618 5, | 609 5, |
619 locs); | 610 locs); |
620 // Pop the parameters supplied to the runtime entry. The result of the | 611 // Pop the parameters supplied to the runtime entry. The result of the |
621 // instanceof runtime call will be left as the result of the operation. | 612 // instanceof runtime call will be left as the result of the operation. |
622 __ Drop(5); | 613 __ Drop(5); |
623 if (negate_result) { | 614 if (negate_result) { |
624 __ popq(RDX); | 615 __ popq(RDX); |
625 __ LoadObject(RAX, Bool::True()); | 616 __ LoadObjectFromPool(RAX, Bool::True(), Assembler::kNotPatchable, PP); |
626 __ cmpq(RDX, RAX); | 617 __ cmpq(RDX, RAX); |
627 __ j(NOT_EQUAL, &done, Assembler::kNearJump); | 618 __ j(NOT_EQUAL, &done, Assembler::kNearJump); |
628 __ LoadObject(RAX, Bool::False()); | 619 __ LoadObjectFromPool(RAX, Bool::False(), Assembler::kNotPatchable, PP); |
629 } else { | 620 } else { |
630 __ popq(RAX); | 621 __ popq(RAX); |
631 } | 622 } |
632 __ jmp(&done, Assembler::kNearJump); | 623 __ jmp(&done, Assembler::kNearJump); |
633 } | 624 } |
634 __ Bind(&is_not_instance); | 625 __ Bind(&is_not_instance); |
635 __ LoadObject(RAX, Bool::Get(negate_result)); | 626 __ LoadObjectFromPool(RAX, Bool::Get(negate_result), |
627 Assembler::kNotPatchable, PP); | |
636 __ jmp(&done, Assembler::kNearJump); | 628 __ jmp(&done, Assembler::kNearJump); |
637 | 629 |
638 __ Bind(&is_instance); | 630 __ Bind(&is_instance); |
639 __ LoadObject(RAX, Bool::Get(!negate_result)); | 631 __ LoadObjectFromPool(RAX, Bool::Get(!negate_result), |
632 Assembler::kNotPatchable, PP); | |
640 __ Bind(&done); | 633 __ Bind(&done); |
641 __ popq(RDX); // Remove pushed instantiator type arguments. | 634 __ popq(RDX); // Remove pushed instantiator type arguments. |
642 __ popq(RCX); // Remove pushed instantiator. | 635 __ popq(RCX); // Remove pushed instantiator. |
643 } | 636 } |
644 | 637 |
645 | 638 |
646 // Optimize assignable type check by adding inlined tests for: | 639 // Optimize assignable type check by adding inlined tests for: |
647 // - NULL -> return NULL. | 640 // - NULL -> return NULL. |
648 // - Smi -> compile time subtype check (only if dst class is not parameterized). | 641 // - Smi -> compile time subtype check (only if dst class is not parameterized). |
649 // - Class equality (only if class is not parameterized). | 642 // - Class equality (only if class is not parameterized). |
(...skipping 12 matching lines...) Expand all Loading... | |
662 LocationSummary* locs) { | 655 LocationSummary* locs) { |
663 ASSERT(token_pos >= 0); | 656 ASSERT(token_pos >= 0); |
664 ASSERT(!dst_type.IsNull()); | 657 ASSERT(!dst_type.IsNull()); |
665 ASSERT(dst_type.IsFinalized()); | 658 ASSERT(dst_type.IsFinalized()); |
666 // Assignable check is skipped in FlowGraphBuilder, not here. | 659 // Assignable check is skipped in FlowGraphBuilder, not here. |
667 ASSERT(dst_type.IsMalformed() || dst_type.IsMalbounded() || | 660 ASSERT(dst_type.IsMalformed() || dst_type.IsMalbounded() || |
668 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); | 661 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); |
669 __ pushq(RCX); // Store instantiator. | 662 __ pushq(RCX); // Store instantiator. |
670 __ pushq(RDX); // Store instantiator type arguments. | 663 __ pushq(RDX); // Store instantiator type arguments. |
671 // A null object is always assignable and is returned as result. | 664 // A null object is always assignable and is returned as result. |
672 const Immediate& raw_null = | |
673 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
674 Label is_assignable, runtime_call; | 665 Label is_assignable, runtime_call; |
675 __ cmpq(RAX, raw_null); | 666 __ CompareObject(RAX, Object::Handle(Object::null())); |
676 __ j(EQUAL, &is_assignable); | 667 __ j(EQUAL, &is_assignable); |
677 | 668 |
678 if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) { | 669 if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) { |
679 // If type checks are not eliminated during the graph building then | 670 // If type checks are not eliminated during the graph building then |
680 // a transition sentinel can be seen here. | 671 // a transition sentinel can be seen here. |
681 __ CompareObject(RAX, Object::transition_sentinel()); | 672 __ CompareObject(RAX, Object::transition_sentinel()); |
682 __ j(EQUAL, &is_assignable); | 673 __ j(EQUAL, &is_assignable); |
683 } | 674 } |
684 | 675 |
685 // Generate throw new TypeError() if the type is malformed or malbounded. | 676 // Generate throw new TypeError() if the type is malformed or malbounded. |
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
718 | 709 |
719 __ Bind(&runtime_call); | 710 __ Bind(&runtime_call); |
720 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. | 711 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. |
721 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. | 712 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. |
722 __ PushObject(Object::ZoneHandle()); // Make room for the result. | 713 __ PushObject(Object::ZoneHandle()); // Make room for the result. |
723 __ pushq(RAX); // Push the source object. | 714 __ pushq(RAX); // Push the source object. |
724 __ PushObject(dst_type); // Push the type of the destination. | 715 __ PushObject(dst_type); // Push the type of the destination. |
725 __ pushq(RCX); // Instantiator. | 716 __ pushq(RCX); // Instantiator. |
726 __ pushq(RDX); // Instantiator type arguments. | 717 __ pushq(RDX); // Instantiator type arguments. |
727 __ PushObject(dst_name); // Push the name of the destination. | 718 __ PushObject(dst_name); // Push the name of the destination. |
728 __ LoadObject(RAX, test_cache); | 719 __ LoadObjectFromPool(RAX, test_cache, Assembler::kNotPatchable, PP); |
729 __ pushq(RAX); | 720 __ pushq(RAX); |
730 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); | 721 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); |
731 // Pop the parameters supplied to the runtime entry. The result of the | 722 // Pop the parameters supplied to the runtime entry. The result of the |
732 // type check runtime call is the checked value. | 723 // type check runtime call is the checked value. |
733 __ Drop(6); | 724 __ Drop(6); |
734 __ popq(RAX); | 725 __ popq(RAX); |
735 | 726 |
736 __ Bind(&is_assignable); | 727 __ Bind(&is_assignable); |
737 __ popq(RDX); // Remove pushed instantiator type arguments. | 728 __ popq(RDX); // Remove pushed instantiator type arguments. |
738 __ popq(RCX); // Remove pushed instantiator. | 729 __ popq(RCX); // Remove pushed instantiator. |
(...skipping 24 matching lines...) Expand all Loading... | |
763 | 754 |
764 void FlowGraphCompiler::EmitTrySyncMove(intptr_t dest_offset, | 755 void FlowGraphCompiler::EmitTrySyncMove(intptr_t dest_offset, |
765 Location loc, | 756 Location loc, |
766 bool* push_emitted) { | 757 bool* push_emitted) { |
767 const Address dest(RBP, dest_offset); | 758 const Address dest(RBP, dest_offset); |
768 if (loc.IsConstant()) { | 759 if (loc.IsConstant()) { |
769 if (!*push_emitted) { | 760 if (!*push_emitted) { |
770 __ pushq(RAX); | 761 __ pushq(RAX); |
771 *push_emitted = true; | 762 *push_emitted = true; |
772 } | 763 } |
773 __ LoadObject(RAX, loc.constant()); | 764 __ LoadObjectFromPool(RAX, loc.constant(), Assembler::kNotPatchable, PP); |
774 __ movq(dest, RAX); | 765 __ movq(dest, RAX); |
775 } else if (loc.IsRegister()) { | 766 } else if (loc.IsRegister()) { |
776 if (*push_emitted && loc.reg() == RAX) { | 767 if (*push_emitted && loc.reg() == RAX) { |
777 __ movq(RAX, Address(RSP, 0)); | 768 __ movq(RAX, Address(RSP, 0)); |
778 __ movq(dest, RAX); | 769 __ movq(dest, RAX); |
779 } else { | 770 } else { |
780 __ movq(dest, loc.reg()); | 771 __ movq(dest, loc.reg()); |
781 } | 772 } |
782 } else { | 773 } else { |
783 Address src = loc.ToStackSlotAddress(); | 774 Address src = loc.ToStackSlotAddress(); |
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
893 const Address argument_addr(RBX, RCX, TIMES_8, 0); | 884 const Address argument_addr(RBX, RCX, TIMES_8, 0); |
894 const Address copy_addr(RDI, RCX, TIMES_8, 0); | 885 const Address copy_addr(RDI, RCX, TIMES_8, 0); |
895 __ Bind(&loop); | 886 __ Bind(&loop); |
896 __ movq(RAX, argument_addr); | 887 __ movq(RAX, argument_addr); |
897 __ movq(copy_addr, RAX); | 888 __ movq(copy_addr, RAX); |
898 __ Bind(&loop_condition); | 889 __ Bind(&loop_condition); |
899 __ decq(RCX); | 890 __ decq(RCX); |
900 __ j(POSITIVE, &loop, Assembler::kNearJump); | 891 __ j(POSITIVE, &loop, Assembler::kNearJump); |
901 | 892 |
902 // Copy or initialize optional named arguments. | 893 // Copy or initialize optional named arguments. |
903 const Immediate& raw_null = | |
904 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
905 Label all_arguments_processed; | 894 Label all_arguments_processed; |
906 #ifdef DEBUG | 895 #ifdef DEBUG |
907 const bool check_correct_named_args = true; | 896 const bool check_correct_named_args = true; |
908 #else | 897 #else |
909 const bool check_correct_named_args = function.IsClosureFunction(); | 898 const bool check_correct_named_args = function.IsClosureFunction(); |
910 #endif | 899 #endif |
911 if (num_opt_named_params > 0) { | 900 if (num_opt_named_params > 0) { |
912 // Start by alphabetically sorting the names of the optional parameters. | 901 // Start by alphabetically sorting the names of the optional parameters. |
913 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; | 902 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; |
914 int* opt_param_position = new int[num_opt_named_params]; | 903 int* opt_param_position = new int[num_opt_named_params]; |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
956 __ addq(RDI, Immediate(ArgumentsDescriptor::named_entry_size())); | 945 __ addq(RDI, Immediate(ArgumentsDescriptor::named_entry_size())); |
957 __ negq(RAX); | 946 __ negq(RAX); |
958 Address argument_addr(RBX, RAX, TIMES_4, 0); // RAX is a negative Smi. | 947 Address argument_addr(RBX, RAX, TIMES_4, 0); // RAX is a negative Smi. |
959 __ movq(RAX, argument_addr); | 948 __ movq(RAX, argument_addr); |
960 __ jmp(&assign_optional_parameter, Assembler::kNearJump); | 949 __ jmp(&assign_optional_parameter, Assembler::kNearJump); |
961 __ Bind(&load_default_value); | 950 __ Bind(&load_default_value); |
962 // Load RAX with default argument. | 951 // Load RAX with default argument. |
963 const Object& value = Object::ZoneHandle( | 952 const Object& value = Object::ZoneHandle( |
964 parsed_function().default_parameter_values().At( | 953 parsed_function().default_parameter_values().At( |
965 param_pos - num_fixed_params)); | 954 param_pos - num_fixed_params)); |
966 __ LoadObject(RAX, value); | 955 __ LoadObjectFromPool(RAX, value, Assembler::kNotPatchable, PP); |
967 __ Bind(&assign_optional_parameter); | 956 __ Bind(&assign_optional_parameter); |
968 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. | 957 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. |
969 // We do not use the final allocation index of the variable here, i.e. | 958 // We do not use the final allocation index of the variable here, i.e. |
970 // scope->VariableAt(i)->index(), because captured variables still need | 959 // scope->VariableAt(i)->index(), because captured variables still need |
971 // to be copied to the context that is not yet allocated. | 960 // to be copied to the context that is not yet allocated. |
972 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; | 961 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; |
973 const Address param_addr(RBP, computed_param_pos * kWordSize); | 962 const Address param_addr(RBP, computed_param_pos * kWordSize); |
974 __ movq(param_addr, RAX); | 963 __ movq(param_addr, RAX); |
975 } | 964 } |
976 delete[] opt_param; | 965 delete[] opt_param; |
977 delete[] opt_param_position; | 966 delete[] opt_param_position; |
978 if (check_correct_named_args) { | 967 if (check_correct_named_args) { |
979 // Check that RDI now points to the null terminator in the arguments | 968 // Check that RDI now points to the null terminator in the arguments |
980 // descriptor. | 969 // descriptor. |
981 __ cmpq(Address(RDI, 0), raw_null); | 970 __ LoadObjectFromPool(TMP, Object::Handle(Object::null()), |
971 Assembler::kNotPatchable, PP); | |
972 __ cmpq(Address(RDI, 0), TMP); | |
982 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); | 973 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); |
983 } | 974 } |
984 } else { | 975 } else { |
985 ASSERT(num_opt_pos_params > 0); | 976 ASSERT(num_opt_pos_params > 0); |
986 __ movq(RCX, | 977 __ movq(RCX, |
987 FieldAddress(R10, ArgumentsDescriptor::positional_count_offset())); | 978 FieldAddress(R10, ArgumentsDescriptor::positional_count_offset())); |
988 __ SmiUntag(RCX); | 979 __ SmiUntag(RCX); |
989 for (int i = 0; i < num_opt_pos_params; i++) { | 980 for (int i = 0; i < num_opt_pos_params; i++) { |
990 Label next_parameter; | 981 Label next_parameter; |
991 // Handle this optional positional parameter only if k or fewer positional | 982 // Handle this optional positional parameter only if k or fewer positional |
992 // arguments have been passed, where k is param_pos, the position of this | 983 // arguments have been passed, where k is param_pos, the position of this |
993 // optional parameter in the formal parameter list. | 984 // optional parameter in the formal parameter list. |
994 const int param_pos = num_fixed_params + i; | 985 const int param_pos = num_fixed_params + i; |
995 __ cmpq(RCX, Immediate(param_pos)); | 986 __ cmpq(RCX, Immediate(param_pos)); |
996 __ j(GREATER, &next_parameter, Assembler::kNearJump); | 987 __ j(GREATER, &next_parameter, Assembler::kNearJump); |
997 // Load RAX with default argument. | 988 // Load RAX with default argument. |
998 const Object& value = Object::ZoneHandle( | 989 const Object& value = Object::ZoneHandle( |
999 parsed_function().default_parameter_values().At(i)); | 990 parsed_function().default_parameter_values().At(i)); |
1000 __ LoadObject(RAX, value); | 991 __ LoadObjectFromPool(RAX, value, Assembler::kNotPatchable, PP); |
1001 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. | 992 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. |
1002 // We do not use the final allocation index of the variable here, i.e. | 993 // We do not use the final allocation index of the variable here, i.e. |
1003 // scope->VariableAt(i)->index(), because captured variables still need | 994 // scope->VariableAt(i)->index(), because captured variables still need |
1004 // to be copied to the context that is not yet allocated. | 995 // to be copied to the context that is not yet allocated. |
1005 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; | 996 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; |
1006 const Address param_addr(RBP, computed_param_pos * kWordSize); | 997 const Address param_addr(RBP, computed_param_pos * kWordSize); |
1007 __ movq(param_addr, RAX); | 998 __ movq(param_addr, RAX); |
1008 __ Bind(&next_parameter); | 999 __ Bind(&next_parameter); |
1009 } | 1000 } |
1010 if (check_correct_named_args) { | 1001 if (check_correct_named_args) { |
1011 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1002 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
1012 __ SmiUntag(RBX); | 1003 __ SmiUntag(RBX); |
1013 // Check that RCX equals RBX, i.e. no named arguments passed. | 1004 // Check that RCX equals RBX, i.e. no named arguments passed. |
1014 __ cmpq(RCX, RBX); | 1005 __ cmpq(RCX, RBX); |
1015 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); | 1006 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); |
1016 } | 1007 } |
1017 } | 1008 } |
1018 | 1009 |
1019 __ Bind(&wrong_num_arguments); | 1010 __ Bind(&wrong_num_arguments); |
1020 if (function.IsClosureFunction()) { | 1011 if (function.IsClosureFunction()) { |
1021 // Invoke noSuchMethod function passing "call" as the original name. | 1012 // Invoke noSuchMethod function passing "call" as the original name. |
1022 const int kNumArgsChecked = 1; | 1013 const int kNumArgsChecked = 1; |
1023 const ICData& ic_data = ICData::ZoneHandle( | 1014 const ICData& ic_data = ICData::ZoneHandle( |
1024 ICData::New(function, Symbols::Call(), Object::empty_array(), | 1015 ICData::New(function, Symbols::Call(), Object::empty_array(), |
1025 Isolate::kNoDeoptId, kNumArgsChecked)); | 1016 Isolate::kNoDeoptId, kNumArgsChecked)); |
1026 __ LoadObject(RBX, ic_data); | 1017 __ LoadObjectFromPool(RBX, ic_data, Assembler::kNotPatchable, PP); |
1027 __ LeaveFrame(); // The arguments are still on the stack. | 1018 __ LeaveFrameWithPP(); // The arguments are still on the stack. |
1028 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); | 1019 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); |
1029 // The noSuchMethod call may return to the caller, but not here. | 1020 // The noSuchMethod call may return to the caller, but not here. |
1030 __ int3(); | 1021 __ int3(); |
1031 } else if (check_correct_named_args) { | 1022 } else if (check_correct_named_args) { |
1032 __ Stop("Wrong arguments"); | 1023 __ Stop("Wrong arguments"); |
1033 } | 1024 } |
1034 | 1025 |
1035 __ Bind(&all_arguments_processed); | 1026 __ Bind(&all_arguments_processed); |
1036 // Nullify originally passed arguments only after they have been copied and | 1027 // Nullify originally passed arguments only after they have been copied and |
1037 // checked, otherwise noSuchMethod would not see their original values. | 1028 // checked, otherwise noSuchMethod would not see their original values. |
1038 // This step can be skipped in case we decide that formal parameters are | 1029 // This step can be skipped in case we decide that formal parameters are |
1039 // implicitly final, since garbage collecting the unmodified value is not | 1030 // implicitly final, since garbage collecting the unmodified value is not |
1040 // an issue anymore. | 1031 // an issue anymore. |
1041 | 1032 |
1042 // R10 : arguments descriptor array. | 1033 // R10 : arguments descriptor array. |
1043 __ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); | 1034 __ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); |
1044 __ SmiUntag(RCX); | 1035 __ SmiUntag(RCX); |
1036 __ LoadObjectFromPool(R12, Object::Handle(Object::null()), | |
1037 Assembler::kNotPatchable, PP); | |
1045 Label null_args_loop, null_args_loop_condition; | 1038 Label null_args_loop, null_args_loop_condition; |
1046 __ jmp(&null_args_loop_condition, Assembler::kNearJump); | 1039 __ jmp(&null_args_loop_condition, Assembler::kNearJump); |
1047 const Address original_argument_addr( | 1040 const Address original_argument_addr( |
1048 RBP, RCX, TIMES_8, (kParamEndSlotFromFp + 1) * kWordSize); | 1041 RBP, RCX, TIMES_8, (kParamEndSlotFromFp + 1) * kWordSize); |
1049 __ Bind(&null_args_loop); | 1042 __ Bind(&null_args_loop); |
1050 __ movq(original_argument_addr, raw_null); | 1043 __ movq(original_argument_addr, R12); |
1051 __ Bind(&null_args_loop_condition); | 1044 __ Bind(&null_args_loop_condition); |
1052 __ decq(RCX); | 1045 __ decq(RCX); |
1053 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); | 1046 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); |
1054 } | 1047 } |
1055 | 1048 |
1056 | 1049 |
1057 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { | 1050 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { |
1058 // TOS: return address. | 1051 // TOS: return address. |
1059 // +1 : receiver. | 1052 // +1 : receiver. |
1060 // Sequence node has one return node, its input is load field node. | 1053 // Sequence node has one return node, its input is load field node. |
1061 __ movq(RAX, Address(RSP, 1 * kWordSize)); | 1054 __ movq(RAX, Address(RSP, 1 * kWordSize)); |
1062 __ movq(RAX, FieldAddress(RAX, offset)); | 1055 __ movq(RAX, FieldAddress(RAX, offset)); |
1063 __ ret(); | 1056 __ ret(); |
1064 } | 1057 } |
1065 | 1058 |
1066 | 1059 |
1067 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { | 1060 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { |
1068 // TOS: return address. | 1061 // TOS: return address. |
1069 // +1 : value | 1062 // +1 : value |
1070 // +2 : receiver. | 1063 // +2 : receiver. |
1071 // Sequence node has one store node and one return NULL node. | 1064 // Sequence node has one store node and one return NULL node. |
1072 __ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver. | 1065 __ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver. |
1073 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value. | 1066 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value. |
1074 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); | 1067 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); |
1075 const Immediate& raw_null = | 1068 __ LoadObjectFromPool(RAX, Object::Handle(Object::null()), |
1076 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 1069 Assembler::kNotPatchable, PP); |
1077 __ movq(RAX, raw_null); | |
1078 __ ret(); | 1070 __ ret(); |
1079 } | 1071 } |
1080 | 1072 |
1081 | 1073 |
1082 void FlowGraphCompiler::EmitFrameEntry() { | 1074 void FlowGraphCompiler::EmitFrameEntry() { |
1083 const Function& function = parsed_function().function(); | 1075 const Function& function = parsed_function().function(); |
1076 Register new_pp = kNoRegister; | |
1077 Register new_pc = kNoRegister; | |
1084 if (CanOptimizeFunction() && | 1078 if (CanOptimizeFunction() && |
1085 function.is_optimizable() && | 1079 function.is_optimizable() && |
1086 (!is_optimizing() || may_reoptimize())) { | 1080 (!is_optimizing() || may_reoptimize())) { |
1087 const Register function_reg = RDI; | 1081 const Register function_reg = RDI; |
1088 __ LoadObject(function_reg, function); | 1082 new_pp = R13; |
1083 new_pc = R12; | |
1084 | |
1085 Label next; | |
1086 __ nop(4); // Need a fixed size sequence on frame entry. | |
1087 __ call(&next); | |
1088 __ Bind(&next); | |
1089 | |
1090 const intptr_t object_pool_pc_dist = | |
1091 Instructions::HeaderSize() - Instructions::object_pool_offset() + | |
1092 __ CodeSize(); | |
1093 const intptr_t offset = | |
1094 Assembler::kEntryPointToPcMarkerOffset - __ CodeSize(); | |
1095 __ popq(new_pc); | |
1096 if (offset != 0) { | |
1097 __ addq(new_pc, Immediate(offset)); | |
1098 } | |
1099 | |
1100 // Load callee's pool pointer. | |
1101 __ movq(new_pp, Address(new_pc, -object_pool_pc_dist - offset)); | |
1102 | |
1103 // Load function object using the callee's pool pointer. | |
1104 __ LoadObjectFromPool(function_reg, function, | |
1105 Assembler::kPatchable, new_pp); | |
1106 | |
1089 // Patch point is after the eventually inlined function object. | 1107 // Patch point is after the eventually inlined function object. |
1090 AddCurrentDescriptor(PcDescriptors::kEntryPatch, | 1108 AddCurrentDescriptor(PcDescriptors::kEntryPatch, |
1091 Isolate::kNoDeoptId, | 1109 Isolate::kNoDeoptId, |
1092 0); // No token position. | 1110 0); // No token position. |
1093 if (is_optimizing()) { | 1111 if (is_optimizing()) { |
1094 // Reoptimization of an optimized function is triggered by counting in | 1112 // Reoptimization of an optimized function is triggered by counting in |
1095 // IC stubs, but not at the entry of the function. | 1113 // IC stubs, but not at the entry of the function. |
1096 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()), | 1114 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()), |
1097 Immediate(FLAG_reoptimization_counter_threshold)); | 1115 Immediate(FLAG_reoptimization_counter_threshold)); |
1098 } else { | 1116 } else { |
1099 __ incq(FieldAddress(function_reg, Function::usage_counter_offset())); | 1117 __ incq(FieldAddress(function_reg, Function::usage_counter_offset())); |
1100 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()), | 1118 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()), |
1101 Immediate(FLAG_optimization_counter_threshold)); | 1119 Immediate(FLAG_optimization_counter_threshold)); |
1102 } | 1120 } |
1103 ASSERT(function_reg == RDI); | 1121 ASSERT(function_reg == RDI); |
1104 __ j(GREATER_EQUAL, &StubCode::OptimizeFunctionLabel()); | 1122 __ ConditionalJumpFromPool(GREATER_EQUAL, |
1123 &StubCode::OptimizeFunctionLabel(), R13); | |
1105 } else if (!flow_graph().IsCompiledForOsr()) { | 1124 } else if (!flow_graph().IsCompiledForOsr()) { |
1125 // We have to load the PP here too because a load of an external label | |
1126 // may be patched at the AddCurrentDescriptor below. | |
1127 new_pp = R13; | |
1128 new_pc = R12; | |
1129 | |
1130 Label next; | |
1131 __ nop(4); // Need a fixed size sequence on frame entry. | |
1132 __ call(&next); | |
1133 __ Bind(&next); | |
1134 | |
1135 const intptr_t object_pool_pc_dist = | |
1136 Instructions::HeaderSize() - Instructions::object_pool_offset() + | |
1137 __ CodeSize(); | |
1138 const intptr_t offset = | |
1139 Assembler::kEntryPointToPcMarkerOffset - __ CodeSize(); | |
1140 __ popq(new_pc); | |
1141 if (offset != 0) { | |
1142 __ addq(new_pc, Immediate(offset)); | |
1143 } | |
1144 | |
1145 // Load callee's pool pointer. | |
1146 __ movq(new_pp, Address(new_pc, -object_pool_pc_dist - offset)); | |
1106 AddCurrentDescriptor(PcDescriptors::kEntryPatch, | 1147 AddCurrentDescriptor(PcDescriptors::kEntryPatch, |
1107 Isolate::kNoDeoptId, | 1148 Isolate::kNoDeoptId, |
1108 0); // No token position. | 1149 0); // No token position. |
1109 } | 1150 } |
1110 __ Comment("Enter frame"); | 1151 __ Comment("Enter frame"); |
1111 if (flow_graph().IsCompiledForOsr()) { | 1152 if (flow_graph().IsCompiledForOsr()) { |
1112 intptr_t extra_slots = StackSize() | 1153 intptr_t extra_slots = StackSize() |
1113 - flow_graph().num_stack_locals() | 1154 - flow_graph().num_stack_locals() |
1114 - flow_graph().num_copied_params(); | 1155 - flow_graph().num_copied_params(); |
1115 ASSERT(extra_slots >= 0); | 1156 ASSERT(extra_slots >= 0); |
1116 __ EnterOsrFrame(extra_slots * kWordSize); | 1157 __ EnterOsrFrame(extra_slots * kWordSize, new_pp, new_pc); |
1117 } else { | 1158 } else { |
1118 ASSERT(StackSize() >= 0); | 1159 ASSERT(StackSize() >= 0); |
1119 __ EnterDartFrame(StackSize() * kWordSize); | 1160 __ EnterDartFrame(StackSize() * kWordSize, new_pp, new_pc); |
1120 } | 1161 } |
1121 } | 1162 } |
1122 | 1163 |
1123 | 1164 |
1124 void FlowGraphCompiler::CompileGraph() { | 1165 void FlowGraphCompiler::CompileGraph() { |
1125 InitCompiler(); | 1166 InitCompiler(); |
1126 | 1167 |
1127 TryIntrinsify(); | 1168 TryIntrinsify(); |
1128 | 1169 |
1129 EmitFrameEntry(); | 1170 EmitFrameEntry(); |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1163 // Invoke noSuchMethod function passing the original function name. | 1204 // Invoke noSuchMethod function passing the original function name. |
1164 // For closure functions, use "call" as the original name. | 1205 // For closure functions, use "call" as the original name. |
1165 const String& name = | 1206 const String& name = |
1166 String::Handle(function.IsClosureFunction() | 1207 String::Handle(function.IsClosureFunction() |
1167 ? Symbols::Call().raw() | 1208 ? Symbols::Call().raw() |
1168 : function.name()); | 1209 : function.name()); |
1169 const int kNumArgsChecked = 1; | 1210 const int kNumArgsChecked = 1; |
1170 const ICData& ic_data = ICData::ZoneHandle( | 1211 const ICData& ic_data = ICData::ZoneHandle( |
1171 ICData::New(function, name, Object::empty_array(), | 1212 ICData::New(function, name, Object::empty_array(), |
1172 Isolate::kNoDeoptId, kNumArgsChecked)); | 1213 Isolate::kNoDeoptId, kNumArgsChecked)); |
1173 __ LoadObject(RBX, ic_data); | 1214 __ LoadObjectFromPool(RBX, ic_data, Assembler::kNotPatchable, PP); |
1174 __ LeaveFrame(); // The arguments are still on the stack. | 1215 __ LeaveFrameWithPP(); // The arguments are still on the stack. |
1175 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); | 1216 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); |
1176 // The noSuchMethod call may return to the caller, but not here. | 1217 // The noSuchMethod call may return to the caller, but not here. |
1177 __ int3(); | 1218 __ int3(); |
1178 } else { | 1219 } else { |
1179 __ Stop("Wrong number of arguments"); | 1220 __ Stop("Wrong number of arguments"); |
1180 } | 1221 } |
1181 __ Bind(&correct_num_arguments); | 1222 __ Bind(&correct_num_arguments); |
1182 } | 1223 } |
1183 } else if (!flow_graph().IsCompiledForOsr()) { | 1224 } else if (!flow_graph().IsCompiledForOsr()) { |
1184 CopyParameters(); | 1225 CopyParameters(); |
1185 } | 1226 } |
1186 | 1227 |
1187 // In unoptimized code, initialize (non-argument) stack allocated slots to | 1228 // In unoptimized code, initialize (non-argument) stack allocated slots to |
1188 // null. | 1229 // null. |
1189 if (!is_optimizing() && (num_locals > 0)) { | 1230 if (!is_optimizing() && (num_locals > 0)) { |
1190 __ Comment("Initialize spill slots"); | 1231 __ Comment("Initialize spill slots"); |
1191 const intptr_t slot_base = parsed_function().first_stack_local_index(); | 1232 const intptr_t slot_base = parsed_function().first_stack_local_index(); |
1192 const Immediate& raw_null = | 1233 __ LoadObjectFromPool(RAX, Object::Handle(Object::null()), |
1193 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 1234 Assembler::kNotPatchable, PP); |
1194 __ movq(RAX, raw_null); | |
1195 for (intptr_t i = 0; i < num_locals; ++i) { | 1235 for (intptr_t i = 0; i < num_locals; ++i) { |
1196 // Subtract index i (locals lie at lower addresses than RBP). | 1236 // Subtract index i (locals lie at lower addresses than RBP). |
1197 __ movq(Address(RBP, (slot_base - i) * kWordSize), RAX); | 1237 __ movq(Address(RBP, (slot_base - i) * kWordSize), RAX); |
1198 } | 1238 } |
1199 } | 1239 } |
1200 | 1240 |
1201 if (FLAG_print_scopes) { | 1241 if (FLAG_print_scopes) { |
1202 // Print the function scope (again) after generating the prologue in order | 1242 // Print the function scope (again) after generating the prologue in order |
1203 // to see annotations such as allocation indices of locals. | 1243 // to see annotations such as allocation indices of locals. |
1204 if (FLAG_print_ast) { | 1244 if (FLAG_print_ast) { |
1205 // Second printing. | 1245 // Second printing. |
1206 OS::Print("Annotated "); | 1246 OS::Print("Annotated "); |
1207 } | 1247 } |
1208 AstPrinter::PrintFunctionScope(parsed_function()); | 1248 AstPrinter::PrintFunctionScope(parsed_function()); |
1209 } | 1249 } |
1210 | 1250 |
1211 ASSERT(!block_order().is_empty()); | 1251 ASSERT(!block_order().is_empty()); |
1212 VisitBlocks(); | 1252 VisitBlocks(); |
1213 | 1253 |
1214 __ int3(); | 1254 __ int3(); |
1215 GenerateDeferredCode(); | 1255 GenerateDeferredCode(); |
1216 // Emit function patching code. This will be swapped with the first 13 bytes | 1256 // Emit function patching code. This will be swapped with the first 13 bytes |
1217 // at entry point. | 1257 // at entry point. |
1218 AddCurrentDescriptor(PcDescriptors::kPatchCode, | 1258 AddCurrentDescriptor(PcDescriptors::kPatchCode, |
1219 Isolate::kNoDeoptId, | 1259 Isolate::kNoDeoptId, |
1220 0); // No token position. | 1260 0); // No token position. |
1221 __ jmp(&StubCode::FixCallersTargetLabel()); | 1261 // This is patched up to a point in FrameEntry where the PP for the |
1262 // current function is in R13 instead of PP. | |
1263 __ JumpPatchable(&StubCode::FixCallersTargetLabel(), R13); | |
1264 | |
1265 // TOOD(zra): Is this descriptor used? | |
1222 AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump, | 1266 AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump, |
1223 Isolate::kNoDeoptId, | 1267 Isolate::kNoDeoptId, |
1224 0); // No token position. | 1268 0); // No token position. |
1225 __ jmp(&StubCode::DeoptimizeLazyLabel()); | 1269 __ JumpFromPool(&StubCode::DeoptimizeLazyLabel(), PP); |
1226 } | 1270 } |
1227 | 1271 |
1228 | 1272 |
1229 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, | 1273 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, |
1230 const ExternalLabel* label, | 1274 const ExternalLabel* label, |
1231 PcDescriptors::Kind kind, | 1275 PcDescriptors::Kind kind, |
1232 LocationSummary* locs) { | 1276 LocationSummary* locs) { |
1233 __ call(label); | 1277 __ CallFromPool(label); |
1234 AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos); | 1278 AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos); |
1235 RecordSafepoint(locs); | 1279 RecordSafepoint(locs); |
1236 } | 1280 } |
1237 | 1281 |
1238 | 1282 |
1239 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, | 1283 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, |
1240 intptr_t token_pos, | 1284 intptr_t token_pos, |
1241 const ExternalLabel* label, | 1285 const ExternalLabel* label, |
1242 PcDescriptors::Kind kind, | 1286 PcDescriptors::Kind kind, |
1243 LocationSummary* locs) { | 1287 LocationSummary* locs) { |
1244 __ call(label); | 1288 __ CallPatchable(label); |
1245 AddCurrentDescriptor(kind, deopt_id, token_pos); | 1289 AddCurrentDescriptor(kind, deopt_id, token_pos); |
1246 RecordSafepoint(locs); | 1290 RecordSafepoint(locs); |
1247 // Marks either the continuation point in unoptimized code or the | 1291 // Marks either the continuation point in unoptimized code or the |
1248 // deoptimization point in optimized code, after call. | 1292 // deoptimization point in optimized code, after call. |
1249 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id); | 1293 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id); |
1250 if (is_optimizing()) { | 1294 if (is_optimizing()) { |
1251 AddDeoptIndexAtCall(deopt_id_after, token_pos); | 1295 AddDeoptIndexAtCall(deopt_id_after, token_pos); |
1252 } else { | 1296 } else { |
1253 // Add deoptimization continuation point after the call and before the | 1297 // Add deoptimization continuation point after the call and before the |
1254 // arguments are removed. | 1298 // arguments are removed. |
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1339 __ j(NOT_ZERO, ¬_smi, Assembler::kNearJump); | 1383 __ j(NOT_ZERO, ¬_smi, Assembler::kNearJump); |
1340 __ movq(RAX, Immediate(Smi::RawValue(kSmiCid))); | 1384 __ movq(RAX, Immediate(Smi::RawValue(kSmiCid))); |
1341 __ jmp(&load_cache); | 1385 __ jmp(&load_cache); |
1342 | 1386 |
1343 __ Bind(¬_smi); | 1387 __ Bind(¬_smi); |
1344 __ LoadClassId(RAX, RAX); | 1388 __ LoadClassId(RAX, RAX); |
1345 __ SmiTag(RAX); | 1389 __ SmiTag(RAX); |
1346 | 1390 |
1347 // RAX: class ID of the receiver (smi). | 1391 // RAX: class ID of the receiver (smi). |
1348 __ Bind(&load_cache); | 1392 __ Bind(&load_cache); |
1349 __ LoadObject(RBX, cache); | 1393 __ LoadObjectFromPool(RBX, cache, Assembler::kNotPatchable, PP); |
1350 __ movq(RDI, FieldAddress(RBX, MegamorphicCache::buckets_offset())); | 1394 __ movq(RDI, FieldAddress(RBX, MegamorphicCache::buckets_offset())); |
1351 __ movq(RBX, FieldAddress(RBX, MegamorphicCache::mask_offset())); | 1395 __ movq(RBX, FieldAddress(RBX, MegamorphicCache::mask_offset())); |
1352 // RDI: cache buckets array. | 1396 // RDI: cache buckets array. |
1353 // RBX: mask. | 1397 // RBX: mask. |
1354 __ movq(RCX, RAX); | 1398 __ movq(RCX, RAX); |
1355 | 1399 |
1356 Label loop, update, call_target_function; | 1400 Label loop, update, call_target_function; |
1357 __ jmp(&loop); | 1401 __ jmp(&loop); |
1358 | 1402 |
1359 __ Bind(&update); | 1403 __ Bind(&update); |
(...skipping 11 matching lines...) Expand all Loading... | |
1371 __ j(NOT_EQUAL, &update, Assembler::kNearJump); | 1415 __ j(NOT_EQUAL, &update, Assembler::kNearJump); |
1372 | 1416 |
1373 __ Bind(&call_target_function); | 1417 __ Bind(&call_target_function); |
1374 // Call the target found in the cache. For a class id match, this is a | 1418 // Call the target found in the cache. For a class id match, this is a |
1375 // proper target for the given name and arguments descriptor. If the | 1419 // proper target for the given name and arguments descriptor. If the |
1376 // illegal class id was found, the target is a cache miss handler that can | 1420 // illegal class id was found, the target is a cache miss handler that can |
1377 // be invoked as a normal Dart function. | 1421 // be invoked as a normal Dart function. |
1378 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize)); | 1422 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize)); |
1379 __ movq(RAX, FieldAddress(RAX, Function::code_offset())); | 1423 __ movq(RAX, FieldAddress(RAX, Function::code_offset())); |
1380 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); | 1424 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); |
1381 __ LoadObject(RBX, ic_data); | 1425 __ LoadObjectFromPool(RBX, ic_data, Assembler::kNotPatchable, PP); |
1382 __ LoadObject(R10, arguments_descriptor); | 1426 __ LoadObjectFromPool(R10, arguments_descriptor, |
1427 Assembler::kNotPatchable, PP); | |
1383 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); | 1428 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
1384 __ call(RAX); | 1429 __ call(RAX); |
1385 AddCurrentDescriptor(PcDescriptors::kOther, Isolate::kNoDeoptId, token_pos); | 1430 AddCurrentDescriptor(PcDescriptors::kOther, Isolate::kNoDeoptId, token_pos); |
1386 RecordSafepoint(locs); | 1431 RecordSafepoint(locs); |
1387 AddDeoptIndexAtCall(Isolate::ToDeoptAfter(deopt_id), token_pos); | 1432 AddDeoptIndexAtCall(Isolate::ToDeoptAfter(deopt_id), token_pos); |
1388 __ Drop(argument_count); | 1433 __ Drop(argument_count); |
1389 } | 1434 } |
1390 | 1435 |
1391 | 1436 |
1392 void FlowGraphCompiler::EmitOptimizedStaticCall( | 1437 void FlowGraphCompiler::EmitOptimizedStaticCall( |
(...skipping 29 matching lines...) Expand all Loading... | |
1422 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { | 1467 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { |
1423 ASSERT(!needs_number_check); | 1468 ASSERT(!needs_number_check); |
1424 __ testq(reg, reg); | 1469 __ testq(reg, reg); |
1425 return; | 1470 return; |
1426 } | 1471 } |
1427 | 1472 |
1428 if (needs_number_check) { | 1473 if (needs_number_check) { |
1429 __ pushq(reg); | 1474 __ pushq(reg); |
1430 __ PushObject(obj); | 1475 __ PushObject(obj); |
1431 if (is_optimizing()) { | 1476 if (is_optimizing()) { |
1432 __ call(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); | 1477 __ CallPatchable(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); |
1433 } else { | 1478 } else { |
1434 __ call(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); | 1479 __ CallPatchable(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); |
1435 } | 1480 } |
1436 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, | 1481 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, |
1437 Isolate::kNoDeoptId, | 1482 Isolate::kNoDeoptId, |
1438 token_pos); | 1483 token_pos); |
1439 __ popq(reg); // Discard constant. | 1484 __ popq(reg); // Discard constant. |
1440 __ popq(reg); // Restore 'reg'. | 1485 __ popq(reg); // Restore 'reg'. |
1441 return; | 1486 return; |
1442 } | 1487 } |
1443 | 1488 |
1444 __ CompareObject(reg, obj); | 1489 __ CompareObject(reg, obj); |
1445 } | 1490 } |
1446 | 1491 |
1447 | 1492 |
1448 void FlowGraphCompiler::EmitEqualityRegRegCompare(Register left, | 1493 void FlowGraphCompiler::EmitEqualityRegRegCompare(Register left, |
1449 Register right, | 1494 Register right, |
1450 bool needs_number_check, | 1495 bool needs_number_check, |
1451 intptr_t token_pos) { | 1496 intptr_t token_pos) { |
1452 if (needs_number_check) { | 1497 if (needs_number_check) { |
1453 __ pushq(left); | 1498 __ pushq(left); |
1454 __ pushq(right); | 1499 __ pushq(right); |
1455 if (is_optimizing()) { | 1500 if (is_optimizing()) { |
1456 __ call(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); | 1501 __ CallPatchable(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); |
1457 } else { | 1502 } else { |
1458 __ call(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); | 1503 __ CallPatchable(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); |
1459 } | 1504 } |
1460 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, | 1505 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, |
1461 Isolate::kNoDeoptId, | 1506 Isolate::kNoDeoptId, |
1462 token_pos); | 1507 token_pos); |
1463 // Stub returns result in flags (result of a cmpl, we need ZF computed). | 1508 // Stub returns result in flags (result of a cmpl, we need ZF computed). |
1464 __ popq(right); | 1509 __ popq(right); |
1465 __ popq(left); | 1510 __ popq(left); |
1466 } else { | 1511 } else { |
1467 __ cmpl(left, right); | 1512 __ cmpl(left, right); |
1468 } | 1513 } |
1469 } | 1514 } |
1470 | 1515 |
1471 | 1516 |
1472 // Implement equality spec: if any of the arguments is null do identity check. | 1517 // Implement equality spec: if any of the arguments is null do identity check. |
1473 // Fallthrough calls super equality. | 1518 // Fallthrough calls super equality. |
1474 void FlowGraphCompiler::EmitSuperEqualityCallPrologue(Register result, | 1519 void FlowGraphCompiler::EmitSuperEqualityCallPrologue(Register result, |
1475 Label* skip_call) { | 1520 Label* skip_call) { |
1476 const Immediate& raw_null = | 1521 __ LoadObjectFromPool(TMP, Object::Handle(Object::null()), |
1477 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 1522 Assembler::kNotPatchable, PP); |
1478 Label check_identity, fall_through; | 1523 Label check_identity, fall_through; |
1479 __ cmpq(Address(RSP, 0 * kWordSize), raw_null); | 1524 __ cmpq(Address(RSP, 0 * kWordSize), TMP); |
1480 __ j(EQUAL, &check_identity, Assembler::kNearJump); | 1525 __ j(EQUAL, &check_identity, Assembler::kNearJump); |
1481 __ cmpq(Address(RSP, 1 * kWordSize), raw_null); | 1526 __ cmpq(Address(RSP, 1 * kWordSize), TMP); |
1482 __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump); | 1527 __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump); |
1483 | 1528 |
1484 __ Bind(&check_identity); | 1529 __ Bind(&check_identity); |
1485 __ popq(result); | 1530 __ popq(result); |
1486 __ cmpq(result, Address(RSP, 0 * kWordSize)); | 1531 __ cmpq(result, Address(RSP, 0 * kWordSize)); |
1487 Label is_false; | 1532 Label is_false; |
1488 __ j(NOT_EQUAL, &is_false, Assembler::kNearJump); | 1533 __ j(NOT_EQUAL, &is_false, Assembler::kNearJump); |
1489 __ LoadObject(result, Bool::True()); | 1534 __ LoadObjectFromPool(result, Bool::True(), Assembler::kNotPatchable, PP); |
1490 __ Drop(1); | 1535 __ Drop(1); |
1491 __ jmp(skip_call); | 1536 __ jmp(skip_call); |
1492 __ Bind(&is_false); | 1537 __ Bind(&is_false); |
1493 __ LoadObject(result, Bool::False()); | 1538 __ LoadObjectFromPool(result, Bool::False(), Assembler::kNotPatchable, PP); |
1494 __ Drop(1); | 1539 __ Drop(1); |
1495 __ jmp(skip_call); | 1540 __ jmp(skip_call); |
1496 __ Bind(&fall_through); | 1541 __ Bind(&fall_through); |
1497 } | 1542 } |
1498 | 1543 |
1499 | 1544 |
1500 // This function must be in sync with FlowGraphCompiler::RecordSafepoint. | 1545 // This function must be in sync with FlowGraphCompiler::RecordSafepoint. |
1501 void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) { | 1546 void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) { |
1502 // TODO(vegorov): consider saving only caller save (volatile) registers. | 1547 // TODO(vegorov): consider saving only caller save (volatile) registers. |
1503 const intptr_t xmm_regs_count = locs->live_registers()->fpu_regs_count(); | 1548 const intptr_t xmm_regs_count = locs->live_registers()->fpu_regs_count(); |
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1566 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); | 1611 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); |
1567 Label match_found; | 1612 Label match_found; |
1568 const intptr_t len = ic_data.NumberOfChecks(); | 1613 const intptr_t len = ic_data.NumberOfChecks(); |
1569 GrowableArray<CidTarget> sorted(len); | 1614 GrowableArray<CidTarget> sorted(len); |
1570 SortICDataByCount(ic_data, &sorted); | 1615 SortICDataByCount(ic_data, &sorted); |
1571 ASSERT(class_id_reg != R10); | 1616 ASSERT(class_id_reg != R10); |
1572 ASSERT(len > 0); // Why bother otherwise. | 1617 ASSERT(len > 0); // Why bother otherwise. |
1573 const Array& arguments_descriptor = | 1618 const Array& arguments_descriptor = |
1574 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, | 1619 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, |
1575 argument_names)); | 1620 argument_names)); |
1576 __ LoadObject(R10, arguments_descriptor); | 1621 __ LoadObjectFromPool(R10, arguments_descriptor, |
1622 Assembler::kNotPatchable, PP); | |
1577 for (intptr_t i = 0; i < len; i++) { | 1623 for (intptr_t i = 0; i < len; i++) { |
1578 const bool is_last_check = (i == (len - 1)); | 1624 const bool is_last_check = (i == (len - 1)); |
1579 Label next_test; | 1625 Label next_test; |
1580 assembler()->cmpl(class_id_reg, Immediate(sorted[i].cid)); | 1626 assembler()->cmpl(class_id_reg, Immediate(sorted[i].cid)); |
1581 if (is_last_check) { | 1627 if (is_last_check) { |
1582 assembler()->j(NOT_EQUAL, deopt); | 1628 assembler()->j(NOT_EQUAL, deopt); |
1583 } else { | 1629 } else { |
1584 assembler()->j(NOT_EQUAL, &next_test); | 1630 assembler()->j(NOT_EQUAL, &next_test); |
1585 } | 1631 } |
1586 // Do not use the code from the function, but let the code be patched so | 1632 // Do not use the code from the function, but let the code be patched so |
(...skipping 21 matching lines...) Expand all Loading... | |
1608 BranchInstr* branch) { | 1654 BranchInstr* branch) { |
1609 ASSERT(branch != NULL); | 1655 ASSERT(branch != NULL); |
1610 assembler()->comisd(left, right); | 1656 assembler()->comisd(left, right); |
1611 BlockEntryInstr* nan_result = (true_condition == NOT_EQUAL) ? | 1657 BlockEntryInstr* nan_result = (true_condition == NOT_EQUAL) ? |
1612 branch->true_successor() : branch->false_successor(); | 1658 branch->true_successor() : branch->false_successor(); |
1613 assembler()->j(PARITY_EVEN, GetJumpLabel(nan_result)); | 1659 assembler()->j(PARITY_EVEN, GetJumpLabel(nan_result)); |
1614 branch->EmitBranchOnCondition(this, true_condition); | 1660 branch->EmitBranchOnCondition(this, true_condition); |
1615 } | 1661 } |
1616 | 1662 |
1617 | 1663 |
1618 | |
1619 void FlowGraphCompiler::EmitDoubleCompareBool(Condition true_condition, | 1664 void FlowGraphCompiler::EmitDoubleCompareBool(Condition true_condition, |
1620 FpuRegister left, | 1665 FpuRegister left, |
1621 FpuRegister right, | 1666 FpuRegister right, |
1622 Register result) { | 1667 Register result) { |
1623 assembler()->comisd(left, right); | 1668 assembler()->comisd(left, right); |
1624 Label is_false, is_true, done; | 1669 Label is_false, is_true, done; |
1625 assembler()->j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN false; | 1670 assembler()->j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN false; |
1626 assembler()->j(true_condition, &is_true, Assembler::kNearJump); | 1671 assembler()->j(true_condition, &is_true, Assembler::kNearJump); |
1627 assembler()->Bind(&is_false); | 1672 assembler()->Bind(&is_false); |
1628 assembler()->LoadObject(result, Bool::False()); | 1673 assembler()->LoadObjectFromPool(result, Bool::False(), |
1674 Assembler::kNotPatchable, PP); | |
1629 assembler()->jmp(&done); | 1675 assembler()->jmp(&done); |
1630 assembler()->Bind(&is_true); | 1676 assembler()->Bind(&is_true); |
1631 assembler()->LoadObject(result, Bool::True()); | 1677 assembler()->LoadObjectFromPool(result, Bool::True(), |
1678 Assembler::kNotPatchable, PP); | |
1632 assembler()->Bind(&done); | 1679 assembler()->Bind(&done); |
1633 } | 1680 } |
1634 | 1681 |
1635 | 1682 |
1636 FieldAddress FlowGraphCompiler::ElementAddressForIntIndex(intptr_t cid, | 1683 FieldAddress FlowGraphCompiler::ElementAddressForIntIndex(intptr_t cid, |
1637 intptr_t index_scale, | 1684 intptr_t index_scale, |
1638 Register array, | 1685 Register array, |
1639 intptr_t index) { | 1686 intptr_t index) { |
1640 const int64_t disp = | 1687 const int64_t disp = |
1641 static_cast<int64_t>(index) * index_scale + DataOffsetFor(cid); | 1688 static_cast<int64_t>(index) * index_scale + DataOffsetFor(cid); |
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1742 __ movups(XMM0, source.ToStackSlotAddress()); | 1789 __ movups(XMM0, source.ToStackSlotAddress()); |
1743 __ movups(destination.ToStackSlotAddress(), XMM0); | 1790 __ movups(destination.ToStackSlotAddress(), XMM0); |
1744 } | 1791 } |
1745 } else { | 1792 } else { |
1746 ASSERT(source.IsConstant()); | 1793 ASSERT(source.IsConstant()); |
1747 if (destination.IsRegister()) { | 1794 if (destination.IsRegister()) { |
1748 const Object& constant = source.constant(); | 1795 const Object& constant = source.constant(); |
1749 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) { | 1796 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) { |
1750 __ xorq(destination.reg(), destination.reg()); | 1797 __ xorq(destination.reg(), destination.reg()); |
1751 } else { | 1798 } else { |
1752 __ LoadObject(destination.reg(), constant); | 1799 __ LoadObjectFromPool(destination.reg(), constant, |
1800 Assembler::kNotPatchable, PP); | |
1753 } | 1801 } |
1754 } else { | 1802 } else { |
1755 ASSERT(destination.IsStackSlot()); | 1803 ASSERT(destination.IsStackSlot()); |
1756 StoreObject(destination.ToStackSlotAddress(), source.constant()); | 1804 StoreObject(destination.ToStackSlotAddress(), source.constant()); |
1757 } | 1805 } |
1758 } | 1806 } |
1759 | 1807 |
1760 move->Eliminate(); | 1808 move->Eliminate(); |
1761 } | 1809 } |
1762 | 1810 |
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1891 __ movups(reg, Address(RSP, 0)); | 1939 __ movups(reg, Address(RSP, 0)); |
1892 __ addq(RSP, Immediate(kFpuRegisterSize)); | 1940 __ addq(RSP, Immediate(kFpuRegisterSize)); |
1893 } | 1941 } |
1894 | 1942 |
1895 | 1943 |
1896 #undef __ | 1944 #undef __ |
1897 | 1945 |
1898 } // namespace dart | 1946 } // namespace dart |
1899 | 1947 |
1900 #endif // defined TARGET_ARCH_X64 | 1948 #endif // defined TARGET_ARCH_X64 |
OLD | NEW |