Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(695)

Side by Side Diff: runtime/vm/flow_graph_compiler_x64.cc

Issue 22825023: Uses an object pool on x64 (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 7 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64.
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/flow_graph_compiler.h" 8 #include "vm/flow_graph_compiler.h"
9 9
10 #include "vm/ast_printer.h" 10 #include "vm/ast_printer.h"
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
59 intptr_t slot_ix = 0; 59 intptr_t slot_ix = 0;
60 Environment* current = deopt_env_; 60 Environment* current = deopt_env_;
61 61
62 // Emit all kMaterializeObject instructions describing objects to be 62 // Emit all kMaterializeObject instructions describing objects to be
63 // materialized on the deoptimization as a prefix to the deoptimization info. 63 // materialized on the deoptimization as a prefix to the deoptimization info.
64 EmitMaterializations(deopt_env_, builder); 64 EmitMaterializations(deopt_env_, builder);
65 65
66 // The real frame starts here. 66 // The real frame starts here.
67 builder->MarkFrameStart(); 67 builder->MarkFrameStart();
68 68
69 // Callee's PC marker is not used anymore. Pass Function::null() to set to 0. 69 // Current PP, FP, and PC.
70 builder->AddPp(current->function(), slot_ix++);
70 builder->AddPcMarker(Function::Handle(), slot_ix++); 71 builder->AddPcMarker(Function::Handle(), slot_ix++);
71
72 // Current FP and PC.
73 builder->AddCallerFp(slot_ix++); 72 builder->AddCallerFp(slot_ix++);
74 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++); 73 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++);
75 74
76 // Emit all values that are needed for materialization as a part of the 75 // Emit all values that are needed for materialization as a part of the
77 // expression stack for the bottom-most frame. This guarantees that GC 76 // expression stack for the bottom-most frame. This guarantees that GC
78 // will be able to find them during materialization. 77 // will be able to find them during materialization.
79 slot_ix = builder->EmitMaterializationArguments(slot_ix); 78 slot_ix = builder->EmitMaterializationArguments(slot_ix);
80 79
81 // For the innermost environment, set outgoing arguments and the locals. 80 // For the innermost environment, set outgoing arguments and the locals.
82 for (intptr_t i = current->Length() - 1; 81 for (intptr_t i = current->Length() - 1;
83 i >= current->fixed_parameter_count(); 82 i >= current->fixed_parameter_count();
84 i--) { 83 i--) {
85 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++); 84 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
86 } 85 }
87 86
88 // Current PC marker and caller FP.
89 builder->AddPcMarker(current->function(), slot_ix++);
90 builder->AddCallerFp(slot_ix++);
91
92 Environment* previous = current; 87 Environment* previous = current;
93 current = current->outer(); 88 current = current->outer();
94 while (current != NULL) { 89 while (current != NULL) {
90 // PP, FP, and PC.
91 builder->AddPp(current->function(), slot_ix++);
92 builder->AddPcMarker(previous->function(), slot_ix++);
93 builder->AddCallerFp(slot_ix++);
94
95 // For any outer environment the deopt id is that of the call instruction 95 // For any outer environment the deopt id is that of the call instruction
96 // which is recorded in the outer environment. 96 // which is recorded in the outer environment.
97 builder->AddReturnAddress(current->function(), 97 builder->AddReturnAddress(current->function(),
98 Isolate::ToDeoptAfter(current->deopt_id()), 98 Isolate::ToDeoptAfter(current->deopt_id()),
99 slot_ix++); 99 slot_ix++);
100 100
101 // The values of outgoing arguments can be changed from the inlined call so 101 // The values of outgoing arguments can be changed from the inlined call so
102 // we must read them from the previous environment. 102 // we must read them from the previous environment.
103 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { 103 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) {
104 builder->AddCopy(previous->ValueAt(i), 104 builder->AddCopy(previous->ValueAt(i),
105 previous->LocationAt(i), 105 previous->LocationAt(i),
106 slot_ix++); 106 slot_ix++);
107 } 107 }
108 108
109 // Set the locals, note that outgoing arguments are not in the environment. 109 // Set the locals, note that outgoing arguments are not in the environment.
110 for (intptr_t i = current->Length() - 1; 110 for (intptr_t i = current->Length() - 1;
111 i >= current->fixed_parameter_count(); 111 i >= current->fixed_parameter_count();
112 i--) { 112 i--) {
113 builder->AddCopy(current->ValueAt(i), 113 builder->AddCopy(current->ValueAt(i),
114 current->LocationAt(i), 114 current->LocationAt(i),
115 slot_ix++); 115 slot_ix++);
116 } 116 }
117 117
118 // PC marker and caller FP.
119 builder->AddPcMarker(current->function(), slot_ix++);
120 builder->AddCallerFp(slot_ix++);
121
122 // Iterate on the outer environment. 118 // Iterate on the outer environment.
123 previous = current; 119 previous = current;
124 current = current->outer(); 120 current = current->outer();
125 } 121 }
126 // The previous pointer is now the outermost environment. 122 // The previous pointer is now the outermost environment.
127 ASSERT(previous != NULL); 123 ASSERT(previous != NULL);
128 124
129 // For the outermost environment, set caller PC. 125 // For the outermost environment, set caller PC, caller PP, and caller FP.
126 builder->AddCallerPp(slot_ix++);
127 // PC marker.
128 builder->AddPcMarker(previous->function(), slot_ix++);
129 builder->AddCallerFp(slot_ix++);
130 builder->AddCallerPc(slot_ix++); 130 builder->AddCallerPc(slot_ix++);
131 131
132 // For the outermost environment, set the incoming arguments. 132 // For the outermost environment, set the incoming arguments.
133 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { 133 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) {
134 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); 134 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++);
135 } 135 }
136 136
137 const DeoptInfo& deopt_info = DeoptInfo::Handle(builder->CreateDeoptInfo()); 137 const DeoptInfo& deopt_info = DeoptInfo::Handle(builder->CreateDeoptInfo());
138 return deopt_info.raw(); 138 return deopt_info.raw();
139 } 139 }
140 140
141 141
// Emits the out-of-line deoptimization stub for this deopt point: binds the
// stub's entry label and transfers control to the shared Deoptimize stub.
// The stub_ix parameter is unused in the visible body.
void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
                                             intptr_t stub_ix) {
  // Calls do not need stubs, they share a deoptimization trampoline.
  ASSERT(reason() != kDeoptAtCall);
  Assembler* assem = compiler->assembler();
#define __ assem->
  __ Comment("Deopt stub for id %" Pd "", deopt_id());
  __ Bind(entry_label());
  // Optionally break into the debugger on every deoptimization.
  if (FLAG_trap_on_deoptimization) __ int3();

  // A deoptimization environment must exist to describe the frame state.
  ASSERT(deopt_env() != NULL);

  // Call the shared Deoptimize stub; the call goes through the object pool
  // register PP (this CL introduces an object pool on x64).
  __ Call(&StubCode::DeoptimizeLabel(), PP);
  // Remember the PC offset just past the call -- presumably used later to map
  // the return address back to this deopt info; confirm against the callers
  // of pc_offset().
  set_pc_offset(assem->CodeSize());
  // Trap if the Deoptimize stub ever returns here.
  __ int3();
#undef __
}
159 159
160 160
161 #define __ assembler()-> 161 #define __ assembler()->
162 162
163 163
// Dispatches on the Bool value in bool_register:
// - jumps to is_true if it is Bool::True(),
// - jumps to is_false for any other non-null value,
// - falls through if bool_register contains null.
void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
                                           Label* is_true,
                                           Label* is_false) {
  Label fall_through;
  // Object::Handle() is the null handle; this replaces the former raw-null
  // immediate compare (see the pre-patch column of this diff).
  __ CompareObject(bool_register, Object::Handle());
  __ j(EQUAL, &fall_through, Assembler::kNearJump);
  __ CompareObject(bool_register, Bool::True());
  __ j(EQUAL, is_true);
  // Anything that is neither null nor True is treated as false.
  __ jmp(is_false);
  __ Bind(&fall_through);
}
178 176
179 177
180 // Clobbers RCX. 178 // Clobbers RCX.
181 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( 179 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub(
182 TypeTestStubKind test_kind, 180 TypeTestStubKind test_kind,
183 Register instance_reg, 181 Register instance_reg,
184 Register type_arguments_reg, 182 Register type_arguments_reg,
185 Register temp_reg, 183 Register temp_reg,
186 Label* is_instance_lbl, 184 Label* is_instance_lbl,
187 Label* is_not_instance_lbl) { 185 Label* is_not_instance_lbl) {
188 const SubtypeTestCache& type_test_cache = 186 const SubtypeTestCache& type_test_cache =
189 SubtypeTestCache::ZoneHandle(SubtypeTestCache::New()); 187 SubtypeTestCache::ZoneHandle(SubtypeTestCache::New());
190 const Immediate& raw_null = 188 __ LoadObject(temp_reg, type_test_cache, PP);
191 Immediate(reinterpret_cast<intptr_t>(Object::null()));
192 __ LoadObject(temp_reg, type_test_cache);
193 __ pushq(temp_reg); // Subtype test cache. 189 __ pushq(temp_reg); // Subtype test cache.
194 __ pushq(instance_reg); // Instance. 190 __ pushq(instance_reg); // Instance.
195 if (test_kind == kTestTypeOneArg) { 191 if (test_kind == kTestTypeOneArg) {
196 ASSERT(type_arguments_reg == kNoRegister); 192 ASSERT(type_arguments_reg == kNoRegister);
197 __ pushq(raw_null); 193 __ PushObject(Object::Handle());
198 __ call(&StubCode::Subtype1TestCacheLabel()); 194 __ Call(&StubCode::Subtype1TestCacheLabel(), PP);
199 } else if (test_kind == kTestTypeTwoArgs) { 195 } else if (test_kind == kTestTypeTwoArgs) {
200 ASSERT(type_arguments_reg == kNoRegister); 196 ASSERT(type_arguments_reg == kNoRegister);
201 __ pushq(raw_null); 197 __ PushObject(Object::Handle());
202 __ call(&StubCode::Subtype2TestCacheLabel()); 198 __ Call(&StubCode::Subtype2TestCacheLabel(), PP);
203 } else if (test_kind == kTestTypeThreeArgs) { 199 } else if (test_kind == kTestTypeThreeArgs) {
204 __ pushq(type_arguments_reg); 200 __ pushq(type_arguments_reg);
205 __ call(&StubCode::Subtype3TestCacheLabel()); 201 __ Call(&StubCode::Subtype3TestCacheLabel(), PP);
206 } else { 202 } else {
207 UNREACHABLE(); 203 UNREACHABLE();
208 } 204 }
209 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False. 205 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False.
210 ASSERT(instance_reg != RCX); 206 ASSERT(instance_reg != RCX);
211 ASSERT(temp_reg != RCX); 207 ASSERT(temp_reg != RCX);
212 __ popq(instance_reg); // Discard. 208 __ popq(instance_reg); // Discard.
213 __ popq(instance_reg); // Restore receiver. 209 __ popq(instance_reg); // Restore receiver.
214 __ popq(temp_reg); // Discard. 210 __ popq(temp_reg); // Discard.
215 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl); 211 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl);
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after
340 // interfaces. 336 // interfaces.
341 // Bool interface can be implemented only by core class Bool. 337 // Bool interface can be implemented only by core class Bool.
342 if (type.IsBoolType()) { 338 if (type.IsBoolType()) {
343 __ cmpl(kClassIdReg, Immediate(kBoolCid)); 339 __ cmpl(kClassIdReg, Immediate(kBoolCid));
344 __ j(EQUAL, is_instance_lbl); 340 __ j(EQUAL, is_instance_lbl);
345 __ jmp(is_not_instance_lbl); 341 __ jmp(is_not_instance_lbl);
346 return false; 342 return false;
347 } 343 }
348 if (type.IsFunctionType()) { 344 if (type.IsFunctionType()) {
349 // Check if instance is a closure. 345 // Check if instance is a closure.
350 const Immediate& raw_null =
351 Immediate(reinterpret_cast<intptr_t>(Object::null()));
352 __ LoadClassById(R13, kClassIdReg); 346 __ LoadClassById(R13, kClassIdReg);
353 __ movq(R13, FieldAddress(R13, Class::signature_function_offset())); 347 __ movq(R13, FieldAddress(R13, Class::signature_function_offset()));
354 __ cmpq(R13, raw_null); 348 __ CompareObject(R13, Object::Handle());
355 __ j(NOT_EQUAL, is_instance_lbl); 349 __ j(NOT_EQUAL, is_instance_lbl);
356 } 350 }
357 // Custom checking for numbers (Smi, Mint, Bigint and Double). 351 // Custom checking for numbers (Smi, Mint, Bigint and Double).
358 // Note that instance is not Smi (checked above). 352 // Note that instance is not Smi (checked above).
359 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) { 353 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) {
360 GenerateNumberTypeCheck( 354 GenerateNumberTypeCheck(
361 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); 355 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl);
362 return false; 356 return false;
363 } 357 }
364 if (type.IsStringType()) { 358 if (type.IsStringType()) {
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after
407 // RAX: instance (preserved). 401 // RAX: instance (preserved).
408 // Clobbers RDI, RDX, R10. 402 // Clobbers RDI, RDX, R10.
409 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( 403 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
410 intptr_t token_pos, 404 intptr_t token_pos,
411 const AbstractType& type, 405 const AbstractType& type,
412 Label* is_instance_lbl, 406 Label* is_instance_lbl,
413 Label* is_not_instance_lbl) { 407 Label* is_not_instance_lbl) {
414 __ Comment("UninstantiatedTypeTest"); 408 __ Comment("UninstantiatedTypeTest");
415 ASSERT(!type.IsInstantiated()); 409 ASSERT(!type.IsInstantiated());
416 // Skip check if destination is a dynamic type. 410 // Skip check if destination is a dynamic type.
417 const Immediate& raw_null =
418 Immediate(reinterpret_cast<intptr_t>(Object::null()));
419 if (type.IsTypeParameter()) { 411 if (type.IsTypeParameter()) {
420 const TypeParameter& type_param = TypeParameter::Cast(type); 412 const TypeParameter& type_param = TypeParameter::Cast(type);
421 // Load instantiator (or null) and instantiator type arguments on stack. 413 // Load instantiator (or null) and instantiator type arguments on stack.
422 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. 414 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments.
423 // RDX: instantiator type arguments. 415 // RDX: instantiator type arguments.
424 // Check if type argument is dynamic. 416 // Check if type argument is dynamic.
425 __ cmpq(RDX, raw_null); 417 __ CompareObject(RDX, Object::Handle());
426 __ j(EQUAL, is_instance_lbl); 418 __ j(EQUAL, is_instance_lbl);
427 // Can handle only type arguments that are instances of TypeArguments. 419 // Can handle only type arguments that are instances of TypeArguments.
428 // (runtime checks canonicalize type arguments). 420 // (runtime checks canonicalize type arguments).
429 Label fall_through; 421 Label fall_through;
430 __ CompareClassId(RDX, kTypeArgumentsCid); 422 __ CompareClassId(RDX, kTypeArgumentsCid);
431 __ j(NOT_EQUAL, &fall_through); 423 __ j(NOT_EQUAL, &fall_through);
432 __ movq(RDI, 424 __ movq(RDI,
433 FieldAddress(RDX, TypeArguments::type_at_offset(type_param.index()))); 425 FieldAddress(RDX, TypeArguments::type_at_offset(type_param.index())));
434 // RDI: Concrete type of type. 426 // RDI: Concrete type of type.
435 // Check if type argument is dynamic. 427 // Check if type argument is dynamic.
436 __ CompareObject(RDI, Type::ZoneHandle(Type::DynamicType())); 428 __ CompareObject(RDI, Type::ZoneHandle(Type::DynamicType()));
437 __ j(EQUAL, is_instance_lbl); 429 __ j(EQUAL, is_instance_lbl);
438 __ cmpq(RDI, raw_null); 430 __ CompareObject(RDI, Object::Handle());
439 __ j(EQUAL, is_instance_lbl); 431 __ j(EQUAL, is_instance_lbl);
440 const Type& object_type = Type::ZoneHandle(Type::ObjectType()); 432 const Type& object_type = Type::ZoneHandle(Type::ObjectType());
441 __ CompareObject(RDI, object_type); 433 __ CompareObject(RDI, object_type);
442 __ j(EQUAL, is_instance_lbl); 434 __ j(EQUAL, is_instance_lbl);
443 435
444 // For Smi check quickly against int and num interfaces. 436 // For Smi check quickly against int and num interfaces.
445 Label not_smi; 437 Label not_smi;
446 __ testq(RAX, Immediate(kSmiTagMask)); // Value is Smi? 438 __ testq(RAX, Immediate(kSmiTagMask)); // Value is Smi?
447 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump); 439 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump);
448 __ CompareObject(RDI, Type::ZoneHandle(Type::IntType())); 440 __ CompareObject(RDI, Type::ZoneHandle(Type::IntType()));
(...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after
// Clobbers RCX and RDX.
// Returns:
// - true or false in RAX.
// NOTE(review): the lines skipped just above this chunk likely document the
// inputs; the code below reads the instance from RAX and the instantiator /
// instantiator type arguments from RCX / RDX -- confirm against callers.
void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos,
                                           intptr_t deopt_id,
                                           const AbstractType& type,
                                           bool negate_result,
                                           LocationSummary* locs) {
  ASSERT(type.IsFinalized() && !type.IsMalformed() && !type.IsMalbounded());

  Label is_instance, is_not_instance;
  __ pushq(RCX);  // Store instantiator on stack.
  __ pushq(RDX);  // Store instantiator type arguments.
  // If type is instantiated and non-parameterized, we can inline code
  // checking whether the tested instance is a Smi.
  if (type.IsInstantiated()) {
    // A null object is only an instance of Object and dynamic, which has
    // already been checked above (if the type is instantiated). So we can
    // return false here if the instance is null (and if the type is
    // instantiated).
    // We can only inline this null check if the type is instantiated at compile
    // time, since an uninstantiated type at compile time could be Object or
    // dynamic at run time.
    __ CompareObject(RAX, Object::Handle());  // Null check via null handle.
    __ j(EQUAL, &is_not_instance);
  }

  // Generate inline instanceof test.
  SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle();
  test_cache = GenerateInlineInstanceof(token_pos, type,
                                        &is_instance, &is_not_instance);

  // test_cache is null if there is no fall-through.
  Label done;
  if (!test_cache.IsNull()) {
    // Generate runtime call.
    __ movq(RDX, Address(RSP, 0));  // Get instantiator type arguments.
    __ movq(RCX, Address(RSP, kWordSize));  // Get instantiator.
    __ PushObject(Object::ZoneHandle());  // Make room for the result.
    __ pushq(RAX);  // Push the instance.
    __ PushObject(type);  // Push the type.
    __ pushq(RCX);  // TODO(srdjan): Pass instantiator instead of null.
    __ pushq(RDX);  // Instantiator type arguments.
    // Object loads now go through the object pool register PP.
    __ LoadObject(RAX, test_cache, PP);
    __ pushq(RAX);
    GenerateCallRuntime(token_pos,
                        deopt_id,
                        kInstanceofRuntimeEntry,
                        5,
                        locs);
    // Pop the parameters supplied to the runtime entry. The result of the
    // instanceof runtime call will be left as the result of the operation.
    __ Drop(5);
    if (negate_result) {
      // Invert the Bool left by the runtime call: True -> False, else True.
      __ popq(RDX);
      __ LoadObject(RAX, Bool::True(), PP);
      __ cmpq(RDX, RAX);
      __ j(NOT_EQUAL, &done, Assembler::kNearJump);
      __ LoadObject(RAX, Bool::False(), PP);
    } else {
      __ popq(RAX);
    }
    __ jmp(&done, Assembler::kNearJump);
  }
  __ Bind(&is_not_instance);
  __ LoadObject(RAX, Bool::Get(negate_result), PP);
  __ jmp(&done, Assembler::kNearJump);

  __ Bind(&is_instance);
  __ LoadObject(RAX, Bool::Get(!negate_result), PP);
  __ Bind(&done);
  __ popq(RDX);  // Remove pushed instantiator type arguments.
  __ popq(RCX);  // Remove pushed instantiator.
}
644 634
645 635
646 // Optimize assignable type check by adding inlined tests for: 636 // Optimize assignable type check by adding inlined tests for:
647 // - NULL -> return NULL. 637 // - NULL -> return NULL.
648 // - Smi -> compile time subtype check (only if dst class is not parameterized). 638 // - Smi -> compile time subtype check (only if dst class is not parameterized).
649 // - Class equality (only if class is not parameterized). 639 // - Class equality (only if class is not parameterized).
(...skipping 12 matching lines...) Expand all
662 LocationSummary* locs) { 652 LocationSummary* locs) {
663 ASSERT(token_pos >= 0); 653 ASSERT(token_pos >= 0);
664 ASSERT(!dst_type.IsNull()); 654 ASSERT(!dst_type.IsNull());
665 ASSERT(dst_type.IsFinalized()); 655 ASSERT(dst_type.IsFinalized());
666 // Assignable check is skipped in FlowGraphBuilder, not here. 656 // Assignable check is skipped in FlowGraphBuilder, not here.
667 ASSERT(dst_type.IsMalformed() || dst_type.IsMalbounded() || 657 ASSERT(dst_type.IsMalformed() || dst_type.IsMalbounded() ||
668 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); 658 (!dst_type.IsDynamicType() && !dst_type.IsObjectType()));
669 __ pushq(RCX); // Store instantiator. 659 __ pushq(RCX); // Store instantiator.
670 __ pushq(RDX); // Store instantiator type arguments. 660 __ pushq(RDX); // Store instantiator type arguments.
671 // A null object is always assignable and is returned as result. 661 // A null object is always assignable and is returned as result.
672 const Immediate& raw_null =
673 Immediate(reinterpret_cast<intptr_t>(Object::null()));
674 Label is_assignable, runtime_call; 662 Label is_assignable, runtime_call;
675 __ cmpq(RAX, raw_null); 663 __ CompareObject(RAX, Object::Handle());
676 __ j(EQUAL, &is_assignable); 664 __ j(EQUAL, &is_assignable);
677 665
678 if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) { 666 if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) {
679 // If type checks are not eliminated during the graph building then 667 // If type checks are not eliminated during the graph building then
680 // a transition sentinel can be seen here. 668 // a transition sentinel can be seen here.
681 __ CompareObject(RAX, Object::transition_sentinel()); 669 __ CompareObject(RAX, Object::transition_sentinel());
682 __ j(EQUAL, &is_assignable); 670 __ j(EQUAL, &is_assignable);
683 } 671 }
684 672
685 // Generate throw new TypeError() if the type is malformed or malbounded. 673 // Generate throw new TypeError() if the type is malformed or malbounded.
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
718 706
719 __ Bind(&runtime_call); 707 __ Bind(&runtime_call);
720 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. 708 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments.
721 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. 709 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator.
722 __ PushObject(Object::ZoneHandle()); // Make room for the result. 710 __ PushObject(Object::ZoneHandle()); // Make room for the result.
723 __ pushq(RAX); // Push the source object. 711 __ pushq(RAX); // Push the source object.
724 __ PushObject(dst_type); // Push the type of the destination. 712 __ PushObject(dst_type); // Push the type of the destination.
725 __ pushq(RCX); // Instantiator. 713 __ pushq(RCX); // Instantiator.
726 __ pushq(RDX); // Instantiator type arguments. 714 __ pushq(RDX); // Instantiator type arguments.
727 __ PushObject(dst_name); // Push the name of the destination. 715 __ PushObject(dst_name); // Push the name of the destination.
728 __ LoadObject(RAX, test_cache); 716 __ LoadObject(RAX, test_cache, PP);
729 __ pushq(RAX); 717 __ pushq(RAX);
730 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); 718 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs);
731 // Pop the parameters supplied to the runtime entry. The result of the 719 // Pop the parameters supplied to the runtime entry. The result of the
732 // type check runtime call is the checked value. 720 // type check runtime call is the checked value.
733 __ Drop(6); 721 __ Drop(6);
734 __ popq(RAX); 722 __ popq(RAX);
735 723
736 __ Bind(&is_assignable); 724 __ Bind(&is_assignable);
737 __ popq(RDX); // Remove pushed instantiator type arguments. 725 __ popq(RDX); // Remove pushed instantiator type arguments.
738 __ popq(RCX); // Remove pushed instantiator. 726 __ popq(RCX); // Remove pushed instantiator.
(...skipping 24 matching lines...) Expand all
763 751
764 void FlowGraphCompiler::EmitTrySyncMove(intptr_t dest_offset, 752 void FlowGraphCompiler::EmitTrySyncMove(intptr_t dest_offset,
765 Location loc, 753 Location loc,
766 bool* push_emitted) { 754 bool* push_emitted) {
767 const Address dest(RBP, dest_offset); 755 const Address dest(RBP, dest_offset);
768 if (loc.IsConstant()) { 756 if (loc.IsConstant()) {
769 if (!*push_emitted) { 757 if (!*push_emitted) {
770 __ pushq(RAX); 758 __ pushq(RAX);
771 *push_emitted = true; 759 *push_emitted = true;
772 } 760 }
773 __ LoadObject(RAX, loc.constant()); 761 __ LoadObject(RAX, loc.constant(), PP);
774 __ movq(dest, RAX); 762 __ movq(dest, RAX);
775 } else if (loc.IsRegister()) { 763 } else if (loc.IsRegister()) {
776 if (*push_emitted && loc.reg() == RAX) { 764 if (*push_emitted && loc.reg() == RAX) {
777 __ movq(RAX, Address(RSP, 0)); 765 __ movq(RAX, Address(RSP, 0));
778 __ movq(dest, RAX); 766 __ movq(dest, RAX);
779 } else { 767 } else {
780 __ movq(dest, loc.reg()); 768 __ movq(dest, loc.reg());
781 } 769 }
782 } else { 770 } else {
783 Address src = loc.ToStackSlotAddress(); 771 Address src = loc.ToStackSlotAddress();
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after
893 const Address argument_addr(RBX, RCX, TIMES_8, 0); 881 const Address argument_addr(RBX, RCX, TIMES_8, 0);
894 const Address copy_addr(RDI, RCX, TIMES_8, 0); 882 const Address copy_addr(RDI, RCX, TIMES_8, 0);
895 __ Bind(&loop); 883 __ Bind(&loop);
896 __ movq(RAX, argument_addr); 884 __ movq(RAX, argument_addr);
897 __ movq(copy_addr, RAX); 885 __ movq(copy_addr, RAX);
898 __ Bind(&loop_condition); 886 __ Bind(&loop_condition);
899 __ decq(RCX); 887 __ decq(RCX);
900 __ j(POSITIVE, &loop, Assembler::kNearJump); 888 __ j(POSITIVE, &loop, Assembler::kNearJump);
901 889
902 // Copy or initialize optional named arguments. 890 // Copy or initialize optional named arguments.
903 const Immediate& raw_null =
904 Immediate(reinterpret_cast<intptr_t>(Object::null()));
905 Label all_arguments_processed; 891 Label all_arguments_processed;
906 #ifdef DEBUG 892 #ifdef DEBUG
907 const bool check_correct_named_args = true; 893 const bool check_correct_named_args = true;
908 #else 894 #else
909 const bool check_correct_named_args = function.IsClosureFunction(); 895 const bool check_correct_named_args = function.IsClosureFunction();
910 #endif 896 #endif
911 if (num_opt_named_params > 0) { 897 if (num_opt_named_params > 0) {
912 // Start by alphabetically sorting the names of the optional parameters. 898 // Start by alphabetically sorting the names of the optional parameters.
913 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; 899 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params];
914 int* opt_param_position = new int[num_opt_named_params]; 900 int* opt_param_position = new int[num_opt_named_params];
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
956 __ addq(RDI, Immediate(ArgumentsDescriptor::named_entry_size())); 942 __ addq(RDI, Immediate(ArgumentsDescriptor::named_entry_size()));
957 __ negq(RAX); 943 __ negq(RAX);
958 Address argument_addr(RBX, RAX, TIMES_4, 0); // RAX is a negative Smi. 944 Address argument_addr(RBX, RAX, TIMES_4, 0); // RAX is a negative Smi.
959 __ movq(RAX, argument_addr); 945 __ movq(RAX, argument_addr);
960 __ jmp(&assign_optional_parameter, Assembler::kNearJump); 946 __ jmp(&assign_optional_parameter, Assembler::kNearJump);
961 __ Bind(&load_default_value); 947 __ Bind(&load_default_value);
962 // Load RAX with default argument. 948 // Load RAX with default argument.
963 const Object& value = Object::ZoneHandle( 949 const Object& value = Object::ZoneHandle(
964 parsed_function().default_parameter_values().At( 950 parsed_function().default_parameter_values().At(
965 param_pos - num_fixed_params)); 951 param_pos - num_fixed_params));
966 __ LoadObject(RAX, value); 952 __ LoadObject(RAX, value, PP);
967 __ Bind(&assign_optional_parameter); 953 __ Bind(&assign_optional_parameter);
968 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. 954 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos].
969 // We do not use the final allocation index of the variable here, i.e. 955 // We do not use the final allocation index of the variable here, i.e.
970 // scope->VariableAt(i)->index(), because captured variables still need 956 // scope->VariableAt(i)->index(), because captured variables still need
971 // to be copied to the context that is not yet allocated. 957 // to be copied to the context that is not yet allocated.
972 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; 958 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos;
973 const Address param_addr(RBP, computed_param_pos * kWordSize); 959 const Address param_addr(RBP, computed_param_pos * kWordSize);
974 __ movq(param_addr, RAX); 960 __ movq(param_addr, RAX);
975 } 961 }
976 delete[] opt_param; 962 delete[] opt_param;
977 delete[] opt_param_position; 963 delete[] opt_param_position;
978 if (check_correct_named_args) { 964 if (check_correct_named_args) {
979 // Check that RDI now points to the null terminator in the arguments 965 // Check that RDI now points to the null terminator in the arguments
980 // descriptor. 966 // descriptor.
981 __ cmpq(Address(RDI, 0), raw_null); 967 __ LoadObject(TMP, Object::Handle(), PP);
968 __ cmpq(Address(RDI, 0), TMP);
982 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); 969 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump);
983 } 970 }
984 } else { 971 } else {
985 ASSERT(num_opt_pos_params > 0); 972 ASSERT(num_opt_pos_params > 0);
986 __ movq(RCX, 973 __ movq(RCX,
987 FieldAddress(R10, ArgumentsDescriptor::positional_count_offset())); 974 FieldAddress(R10, ArgumentsDescriptor::positional_count_offset()));
988 __ SmiUntag(RCX); 975 __ SmiUntag(RCX);
989 for (int i = 0; i < num_opt_pos_params; i++) { 976 for (int i = 0; i < num_opt_pos_params; i++) {
990 Label next_parameter; 977 Label next_parameter;
991 // Handle this optional positional parameter only if k or fewer positional 978 // Handle this optional positional parameter only if k or fewer positional
992 // arguments have been passed, where k is param_pos, the position of this 979 // arguments have been passed, where k is param_pos, the position of this
993 // optional parameter in the formal parameter list. 980 // optional parameter in the formal parameter list.
994 const int param_pos = num_fixed_params + i; 981 const int param_pos = num_fixed_params + i;
995 __ cmpq(RCX, Immediate(param_pos)); 982 __ cmpq(RCX, Immediate(param_pos));
996 __ j(GREATER, &next_parameter, Assembler::kNearJump); 983 __ j(GREATER, &next_parameter, Assembler::kNearJump);
997 // Load RAX with default argument. 984 // Load RAX with default argument.
998 const Object& value = Object::ZoneHandle( 985 const Object& value = Object::ZoneHandle(
999 parsed_function().default_parameter_values().At(i)); 986 parsed_function().default_parameter_values().At(i));
1000 __ LoadObject(RAX, value); 987 __ LoadObject(RAX, value, PP);
1001 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. 988 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos].
1002 // We do not use the final allocation index of the variable here, i.e. 989 // We do not use the final allocation index of the variable here, i.e.
1003 // scope->VariableAt(i)->index(), because captured variables still need 990 // scope->VariableAt(i)->index(), because captured variables still need
1004 // to be copied to the context that is not yet allocated. 991 // to be copied to the context that is not yet allocated.
1005 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; 992 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos;
1006 const Address param_addr(RBP, computed_param_pos * kWordSize); 993 const Address param_addr(RBP, computed_param_pos * kWordSize);
1007 __ movq(param_addr, RAX); 994 __ movq(param_addr, RAX);
1008 __ Bind(&next_parameter); 995 __ Bind(&next_parameter);
1009 } 996 }
1010 if (check_correct_named_args) { 997 if (check_correct_named_args) {
1011 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 998 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
1012 __ SmiUntag(RBX); 999 __ SmiUntag(RBX);
1013 // Check that RCX equals RBX, i.e. no named arguments passed. 1000 // Check that RCX equals RBX, i.e. no named arguments passed.
1014 __ cmpq(RCX, RBX); 1001 __ cmpq(RCX, RBX);
1015 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); 1002 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump);
1016 } 1003 }
1017 } 1004 }
1018 1005
1019 __ Bind(&wrong_num_arguments); 1006 __ Bind(&wrong_num_arguments);
1020 if (function.IsClosureFunction()) { 1007 if (function.IsClosureFunction()) {
1021 // Invoke noSuchMethod function passing "call" as the original name. 1008 // Invoke noSuchMethod function passing "call" as the original name.
1022 const int kNumArgsChecked = 1; 1009 const int kNumArgsChecked = 1;
1023 const ICData& ic_data = ICData::ZoneHandle( 1010 const ICData& ic_data = ICData::ZoneHandle(
1024 ICData::New(function, Symbols::Call(), Object::empty_array(), 1011 ICData::New(function, Symbols::Call(), Object::empty_array(),
1025 Isolate::kNoDeoptId, kNumArgsChecked)); 1012 Isolate::kNoDeoptId, kNumArgsChecked));
1026 __ LoadObject(RBX, ic_data); 1013 __ LoadObject(RBX, ic_data, PP);
1027 __ LeaveFrame(); // The arguments are still on the stack. 1014 __ LeaveFrameWithPP(); // The arguments are still on the stack.
1028 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); 1015 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel());
1029 // The noSuchMethod call may return to the caller, but not here. 1016 // The noSuchMethod call may return to the caller, but not here.
1030 __ int3(); 1017 __ int3();
1031 } else if (check_correct_named_args) { 1018 } else if (check_correct_named_args) {
1032 __ Stop("Wrong arguments"); 1019 __ Stop("Wrong arguments");
1033 } 1020 }
1034 1021
1035 __ Bind(&all_arguments_processed); 1022 __ Bind(&all_arguments_processed);
1036 // Nullify originally passed arguments only after they have been copied and 1023 // Nullify originally passed arguments only after they have been copied and
1037 // checked, otherwise noSuchMethod would not see their original values. 1024 // checked, otherwise noSuchMethod would not see their original values.
1038 // This step can be skipped in case we decide that formal parameters are 1025 // This step can be skipped in case we decide that formal parameters are
1039 // implicitly final, since garbage collecting the unmodified value is not 1026 // implicitly final, since garbage collecting the unmodified value is not
1040 // an issue anymore. 1027 // an issue anymore.
1041 1028
1042 // R10 : arguments descriptor array. 1029 // R10 : arguments descriptor array.
1043 __ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 1030 __ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
1044 __ SmiUntag(RCX); 1031 __ SmiUntag(RCX);
1032 __ LoadObject(R12, Object::Handle(), PP);
1045 Label null_args_loop, null_args_loop_condition; 1033 Label null_args_loop, null_args_loop_condition;
1046 __ jmp(&null_args_loop_condition, Assembler::kNearJump); 1034 __ jmp(&null_args_loop_condition, Assembler::kNearJump);
1047 const Address original_argument_addr( 1035 const Address original_argument_addr(
1048 RBP, RCX, TIMES_8, (kParamEndSlotFromFp + 1) * kWordSize); 1036 RBP, RCX, TIMES_8, (kParamEndSlotFromFp + 1) * kWordSize);
1049 __ Bind(&null_args_loop); 1037 __ Bind(&null_args_loop);
1050 __ movq(original_argument_addr, raw_null); 1038 __ movq(original_argument_addr, R12);
1051 __ Bind(&null_args_loop_condition); 1039 __ Bind(&null_args_loop_condition);
1052 __ decq(RCX); 1040 __ decq(RCX);
1053 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); 1041 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump);
1054 } 1042 }
1055 1043
1056 1044
// Emits the intrinsified body of an instance-field getter: loads the
// receiver from the caller's stack and returns the field at 'offset'
// without setting up a frame.
void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) {
  // TOS: return address.
  // +1 : receiver.
  // Sequence node has one return node, its input is load field node.
  __ movq(RAX, Address(RSP, 1 * kWordSize));  // Load receiver.
  __ movq(RAX, FieldAddress(RAX, offset));    // Load the field into RAX (result).
  __ ret();
}
1065 1053
1066 1054
// Emits the intrinsified body of an instance-field setter: stores the value
// argument into the receiver's field at 'offset' (with write barrier) and
// returns null, without setting up a frame.
void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) {
  // TOS: return address.
  // +1 : value
  // +2 : receiver.
  // Sequence node has one store node and one return NULL node.
  __ movq(RAX, Address(RSP, 2 * kWordSize));  // Receiver.
  __ movq(RBX, Address(RSP, 1 * kWordSize));  // Value.
  // Store with write barrier so the GC sees the new reference.
  __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX);
  // Return null. Object::Handle() is the null object, loaded through the
  // object pool (PP) — this replaces the former raw_null immediate.
  __ LoadObject(RAX, Object::Handle(), PP);
  __ ret();
}
1080 1066
1081 1067
// Emits the function prologue. With the x64 object pool, the callee must
// discover its own pool pointer (PP): a call/pop pair materializes the
// current PC, from which the pool pointer is loaded at a fixed offset from
// the entry point. The discovered values are handed to the frame-entry
// macros in new_pp/new_pc. When usage-counter-based (re)optimization checks
// are needed, the function object is also loaded to bump/compare its counter.
void FlowGraphCompiler::EmitFrameEntry() {
  const Function& function = parsed_function().function();
  // kNoRegister signals the frame-entry macros that no PC/PP discovery
  // sequence was emitted (OSR entry path).
  Register new_pp = kNoRegister;
  Register new_pc = kNoRegister;
  if (CanOptimizeFunction() &&
      function.is_optimizable() &&
      (!is_optimizing() || may_reoptimize())) {
    const Register function_reg = RDI;
    new_pp = R13;
    new_pc = R12;

    Label next;
    __ nop(4);  // Need a fixed size sequence on frame entry.
    // call/pop trick: pushes the address of 'next' so it can be popped
    // into new_pc, giving us the current PC.
    __ call(&next);
    __ Bind(&next);

    // Distance from the current code position back to the Instructions
    // object's embedded object pool field.
    const intptr_t object_pool_pc_dist =
        Instructions::HeaderSize() - Instructions::object_pool_offset() +
        __ CodeSize();
    // Adjustment from the popped PC to the canonical PC marker position.
    const intptr_t offset =
        Assembler::kEntryPointToPcMarkerOffset - __ CodeSize();
    __ popq(new_pc);
    if (offset != 0) {
      __ addq(new_pc, Immediate(offset));
    }

    // Load callee's pool pointer.
    __ movq(new_pp, Address(new_pc, -object_pool_pc_dist - offset));

    // Load function object using the callee's pool pointer.
    __ LoadObject(function_reg, function, new_pp);

    // Patch point is after the eventually inlined function object.
    AddCurrentDescriptor(PcDescriptors::kEntryPatch,
                         Isolate::kNoDeoptId,
                         0);  // No token position.
    if (is_optimizing()) {
      // Reoptimization of an optimized function is triggered by counting in
      // IC stubs, but not at the entry of the function.
      __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()),
              Immediate(FLAG_reoptimization_counter_threshold));
    } else {
      __ incq(FieldAddress(function_reg, Function::usage_counter_offset()));
      __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()),
              Immediate(FLAG_optimization_counter_threshold));
    }
    ASSERT(function_reg == RDI);
    // Pool-based jump (uses R13, the freshly loaded PP) to the optimizer.
    __ J(GREATER_EQUAL, &StubCode::OptimizeFunctionLabel(), R13);
  } else if (!flow_graph().IsCompiledForOsr()) {
    // We have to load the PP here too because a load of an external label
    // may be patched at the AddCurrentDescriptor below.
    // NOTE(review): this PC/PP discovery sequence duplicates the one in the
    // branch above — consider factoring into a shared helper.
    new_pp = R13;
    new_pc = R12;

    Label next;
    __ nop(4);  // Need a fixed size sequence on frame entry.
    __ call(&next);
    __ Bind(&next);

    const intptr_t object_pool_pc_dist =
        Instructions::HeaderSize() - Instructions::object_pool_offset() +
        __ CodeSize();
    const intptr_t offset =
        Assembler::kEntryPointToPcMarkerOffset - __ CodeSize();
    __ popq(new_pc);
    if (offset != 0) {
      __ addq(new_pc, Immediate(offset));
    }

    // Load callee's pool pointer.
    __ movq(new_pp, Address(new_pc, -object_pool_pc_dist - offset));
    AddCurrentDescriptor(PcDescriptors::kEntryPatch,
                         Isolate::kNoDeoptId,
                         0);  // No token position.
  }
  __ Comment("Enter frame");
  if (flow_graph().IsCompiledForOsr()) {
    // OSR frame: only the extra (non-local, non-copied-param) slots are new.
    intptr_t extra_slots = StackSize()
        - flow_graph().num_stack_locals()
        - flow_graph().num_copied_params();
    ASSERT(extra_slots >= 0);
    __ EnterOsrFrame(extra_slots * kWordSize, new_pp, new_pc);
  } else {
    ASSERT(StackSize() >= 0);
    __ EnterDartFrameWithInfo(StackSize() * kWordSize, new_pp, new_pc);
  }
}
1122 1155
1123 1156
1124 void FlowGraphCompiler::CompileGraph() { 1157 void FlowGraphCompiler::CompileGraph() {
1125 InitCompiler(); 1158 InitCompiler();
1126 1159
1127 TryIntrinsify(); 1160 TryIntrinsify();
1128 1161
1129 EmitFrameEntry(); 1162 EmitFrameEntry();
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
1163 // Invoke noSuchMethod function passing the original function name. 1196 // Invoke noSuchMethod function passing the original function name.
1164 // For closure functions, use "call" as the original name. 1197 // For closure functions, use "call" as the original name.
1165 const String& name = 1198 const String& name =
1166 String::Handle(function.IsClosureFunction() 1199 String::Handle(function.IsClosureFunction()
1167 ? Symbols::Call().raw() 1200 ? Symbols::Call().raw()
1168 : function.name()); 1201 : function.name());
1169 const int kNumArgsChecked = 1; 1202 const int kNumArgsChecked = 1;
1170 const ICData& ic_data = ICData::ZoneHandle( 1203 const ICData& ic_data = ICData::ZoneHandle(
1171 ICData::New(function, name, Object::empty_array(), 1204 ICData::New(function, name, Object::empty_array(),
1172 Isolate::kNoDeoptId, kNumArgsChecked)); 1205 Isolate::kNoDeoptId, kNumArgsChecked));
1173 __ LoadObject(RBX, ic_data); 1206 __ LoadObject(RBX, ic_data, PP);
1174 __ LeaveFrame(); // The arguments are still on the stack. 1207 __ LeaveFrameWithPP(); // The arguments are still on the stack.
1175 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); 1208 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel());
1176 // The noSuchMethod call may return to the caller, but not here. 1209 // The noSuchMethod call may return to the caller, but not here.
1177 __ int3(); 1210 __ int3();
1178 } else { 1211 } else {
1179 __ Stop("Wrong number of arguments"); 1212 __ Stop("Wrong number of arguments");
1180 } 1213 }
1181 __ Bind(&correct_num_arguments); 1214 __ Bind(&correct_num_arguments);
1182 } 1215 }
1183 } else if (!flow_graph().IsCompiledForOsr()) { 1216 } else if (!flow_graph().IsCompiledForOsr()) {
1184 CopyParameters(); 1217 CopyParameters();
1185 } 1218 }
1186 1219
1187 // In unoptimized code, initialize (non-argument) stack allocated slots to 1220 // In unoptimized code, initialize (non-argument) stack allocated slots to
1188 // null. 1221 // null.
1189 if (!is_optimizing() && (num_locals > 0)) { 1222 if (!is_optimizing() && (num_locals > 0)) {
1190 __ Comment("Initialize spill slots"); 1223 __ Comment("Initialize spill slots");
1191 const intptr_t slot_base = parsed_function().first_stack_local_index(); 1224 const intptr_t slot_base = parsed_function().first_stack_local_index();
1192 const Immediate& raw_null = 1225 __ LoadObject(RAX, Object::Handle(), PP);
1193 Immediate(reinterpret_cast<intptr_t>(Object::null()));
1194 __ movq(RAX, raw_null);
1195 for (intptr_t i = 0; i < num_locals; ++i) { 1226 for (intptr_t i = 0; i < num_locals; ++i) {
1196 // Subtract index i (locals lie at lower addresses than RBP). 1227 // Subtract index i (locals lie at lower addresses than RBP).
1197 __ movq(Address(RBP, (slot_base - i) * kWordSize), RAX); 1228 __ movq(Address(RBP, (slot_base - i) * kWordSize), RAX);
1198 } 1229 }
1199 } 1230 }
1200 1231
1201 if (FLAG_print_scopes) { 1232 if (FLAG_print_scopes) {
1202 // Print the function scope (again) after generating the prologue in order 1233 // Print the function scope (again) after generating the prologue in order
1203 // to see annotations such as allocation indices of locals. 1234 // to see annotations such as allocation indices of locals.
1204 if (FLAG_print_ast) { 1235 if (FLAG_print_ast) {
1205 // Second printing. 1236 // Second printing.
1206 OS::Print("Annotated "); 1237 OS::Print("Annotated ");
1207 } 1238 }
1208 AstPrinter::PrintFunctionScope(parsed_function()); 1239 AstPrinter::PrintFunctionScope(parsed_function());
1209 } 1240 }
1210 1241
1211 ASSERT(!block_order().is_empty()); 1242 ASSERT(!block_order().is_empty());
1212 VisitBlocks(); 1243 VisitBlocks();
1213 1244
1214 __ int3(); 1245 __ int3();
1215 GenerateDeferredCode(); 1246 GenerateDeferredCode();
1216 // Emit function patching code. This will be swapped with the first 13 bytes 1247 // Emit function patching code. This will be swapped with the first 13 bytes
1217 // at entry point. 1248 // at entry point.
1218 AddCurrentDescriptor(PcDescriptors::kPatchCode, 1249 AddCurrentDescriptor(PcDescriptors::kPatchCode,
1219 Isolate::kNoDeoptId, 1250 Isolate::kNoDeoptId,
1220 0); // No token position. 1251 0); // No token position.
1221 __ jmp(&StubCode::FixCallersTargetLabel()); 1252 // This is patched up to a point in FrameEntry where the PP for the
1253 // current function is in R13 instead of PP.
1254 __ JmpPatchable(&StubCode::FixCallersTargetLabel(), R13);
1255
 1256 // TODO(zra): Is this descriptor used?
1222 AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump, 1257 AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump,
1223 Isolate::kNoDeoptId, 1258 Isolate::kNoDeoptId,
1224 0); // No token position. 1259 0); // No token position.
1225 __ jmp(&StubCode::DeoptimizeLazyLabel()); 1260 __ Jmp(&StubCode::DeoptimizeLazyLabel(), PP);
1226 } 1261 }
1227 1262
1228 1263
// Emits a (non-patchable) call to 'label' through the current pool pointer
// and records the PC descriptor of 'kind' plus safepoint information for
// the call site. Used for calls that need no deopt-id bookkeeping.
void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
                                     const ExternalLabel* label,
                                     PcDescriptors::Kind kind,
                                     LocationSummary* locs) {
  __ Call(label, PP);
  // Descriptor/safepoint must be recorded at the PC just after the call.
  AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos);
  RecordSafepoint(locs);
}
1237 1272
1238 1273
1239 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, 1274 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
1240 intptr_t token_pos, 1275 intptr_t token_pos,
1241 const ExternalLabel* label, 1276 const ExternalLabel* label,
1242 PcDescriptors::Kind kind, 1277 PcDescriptors::Kind kind,
1243 LocationSummary* locs) { 1278 LocationSummary* locs) {
1244 __ call(label); 1279 __ CallPatchable(label);
1245 AddCurrentDescriptor(kind, deopt_id, token_pos); 1280 AddCurrentDescriptor(kind, deopt_id, token_pos);
1246 RecordSafepoint(locs); 1281 RecordSafepoint(locs);
1247 // Marks either the continuation point in unoptimized code or the 1282 // Marks either the continuation point in unoptimized code or the
1248 // deoptimization point in optimized code, after call. 1283 // deoptimization point in optimized code, after call.
1249 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id); 1284 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id);
1250 if (is_optimizing()) { 1285 if (is_optimizing()) {
1251 AddDeoptIndexAtCall(deopt_id_after, token_pos); 1286 AddDeoptIndexAtCall(deopt_id_after, token_pos);
1252 } else { 1287 } else {
1253 // Add deoptimization continuation point after the call and before the 1288 // Add deoptimization continuation point after the call and before the
1254 // arguments are removed. 1289 // arguments are removed.
(...skipping 18 matching lines...) Expand all
1273 AddDeoptIndexAtCall(deopt_id_after, token_pos); 1308 AddDeoptIndexAtCall(deopt_id_after, token_pos);
1274 } else { 1309 } else {
1275 // Add deoptimization continuation point after the call and before the 1310 // Add deoptimization continuation point after the call and before the
1276 // arguments are removed. 1311 // arguments are removed.
1277 AddCurrentDescriptor(PcDescriptors::kDeopt, deopt_id_after, token_pos); 1312 AddCurrentDescriptor(PcDescriptors::kDeopt, deopt_id_after, token_pos);
1278 } 1313 }
1279 } 1314 }
1280 } 1315 }
1281 1316
1282 1317
1318 void FlowGraphCompiler::EmitUnoptimizedStaticCall(
1319 const Function& target_function,
1320 const Array& arguments_descriptor,
1321 intptr_t argument_count,
1322 intptr_t deopt_id,
1323 intptr_t token_pos,
1324 LocationSummary* locs) {
1325 // TODO(srdjan): Improve performance of function recognition.
1326 MethodRecognizer::Kind recognized_kind =
1327 MethodRecognizer::RecognizeKind(target_function);
1328 int num_args_checked = 0;
1329 if ((recognized_kind == MethodRecognizer::kMathMin) ||
1330 (recognized_kind == MethodRecognizer::kMathMax)) {
1331 num_args_checked = 2;
1332 }
1333 const ICData& ic_data = ICData::ZoneHandle(
1334 ICData::New(parsed_function().function(), // Caller function.
1335 String::Handle(target_function.name()),
1336 arguments_descriptor,
1337 deopt_id,
1338 num_args_checked)); // No arguments checked.
1339 ic_data.AddTarget(target_function);
1340 uword label_address = 0;
1341 if (ic_data.num_args_tested() == 0) {
1342 label_address = StubCode::ZeroArgsUnoptimizedStaticCallEntryPoint();
1343 } else if (ic_data.num_args_tested() == 2) {
1344 label_address = StubCode::TwoArgsUnoptimizedStaticCallEntryPoint();
1345 } else {
1346 UNIMPLEMENTED();
1347 }
1348 ExternalLabel target_label("StaticCallICStub", label_address);
1349 __ LoadObject(RBX, ic_data, PP);
1350 GenerateDartCall(deopt_id,
1351 token_pos,
1352 &target_label,
1353 PcDescriptors::kUnoptStaticCall,
1354 locs);
1355 __ Drop(argument_count);
1356 }
1357
1358
// Emits an instance call from optimized code: loads the caller function and
// the ICData for the IC stub, performs a patchable Dart call to
// 'target_label', and drops the pushed arguments.
void FlowGraphCompiler::EmitOptimizedInstanceCall(
    ExternalLabel* target_label,
    const ICData& ic_data,
    intptr_t argument_count,
    intptr_t deopt_id,
    intptr_t token_pos,
    LocationSummary* locs) {
  // Each ICData propagated from unoptimized to optimized code contains the
  // function that corresponds to the Dart function of that IC call. Due
  // to inlining in optimized code, that function may not correspond to the
  // top-level function (parsed_function().function()) which could be
  // reoptimized and which counter needs to be incremented.
  // Pass the function explicitly, it is used in IC stub.
  __ LoadObject(RDI, parsed_function().function(), PP);
  __ LoadObject(RBX, ic_data, PP);
  GenerateDartCall(deopt_id,
                   token_pos,
                   target_label,
                   PcDescriptors::kIcCall,
                   locs);
  __ Drop(argument_count);
}
1305 1381
1306 1382
// Emits an instance call from unoptimized code: loads the ICData for the IC
// stub (RBX is the ICData register expected by the stub), performs a
// patchable Dart call to 'target_label', and drops the pushed arguments.
void FlowGraphCompiler::EmitInstanceCall(ExternalLabel* target_label,
                                         const ICData& ic_data,
                                         intptr_t argument_count,
                                         intptr_t deopt_id,
                                         intptr_t token_pos,
                                         LocationSummary* locs) {
  __ LoadObject(RBX, ic_data, PP);
  GenerateDartCall(deopt_id,
                   token_pos,
                   target_label,
                   PcDescriptors::kIcCall,
                   locs);
  __ Drop(argument_count);
}
1321 1397
1322 1398
1323 void FlowGraphCompiler::EmitMegamorphicInstanceCall( 1399 void FlowGraphCompiler::EmitMegamorphicInstanceCall(
(...skipping 15 matching lines...) Expand all
1339 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump); 1415 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump);
1340 __ movq(RAX, Immediate(Smi::RawValue(kSmiCid))); 1416 __ movq(RAX, Immediate(Smi::RawValue(kSmiCid)));
1341 __ jmp(&load_cache); 1417 __ jmp(&load_cache);
1342 1418
1343 __ Bind(&not_smi); 1419 __ Bind(&not_smi);
1344 __ LoadClassId(RAX, RAX); 1420 __ LoadClassId(RAX, RAX);
1345 __ SmiTag(RAX); 1421 __ SmiTag(RAX);
1346 1422
1347 // RAX: class ID of the receiver (smi). 1423 // RAX: class ID of the receiver (smi).
1348 __ Bind(&load_cache); 1424 __ Bind(&load_cache);
1349 __ LoadObject(RBX, cache); 1425 __ LoadObject(RBX, cache, PP);
1350 __ movq(RDI, FieldAddress(RBX, MegamorphicCache::buckets_offset())); 1426 __ movq(RDI, FieldAddress(RBX, MegamorphicCache::buckets_offset()));
1351 __ movq(RBX, FieldAddress(RBX, MegamorphicCache::mask_offset())); 1427 __ movq(RBX, FieldAddress(RBX, MegamorphicCache::mask_offset()));
1352 // RDI: cache buckets array. 1428 // RDI: cache buckets array.
1353 // RBX: mask. 1429 // RBX: mask.
1354 __ movq(RCX, RAX); 1430 __ movq(RCX, RAX);
1355 1431
1356 Label loop, update, call_target_function; 1432 Label loop, update, call_target_function;
1357 __ jmp(&loop); 1433 __ jmp(&loop);
1358 1434
1359 __ Bind(&update); 1435 __ Bind(&update);
(...skipping 11 matching lines...) Expand all
1371 __ j(NOT_EQUAL, &update, Assembler::kNearJump); 1447 __ j(NOT_EQUAL, &update, Assembler::kNearJump);
1372 1448
1373 __ Bind(&call_target_function); 1449 __ Bind(&call_target_function);
1374 // Call the target found in the cache. For a class id match, this is a 1450 // Call the target found in the cache. For a class id match, this is a
1375 // proper target for the given name and arguments descriptor. If the 1451 // proper target for the given name and arguments descriptor. If the
1376 // illegal class id was found, the target is a cache miss handler that can 1452 // illegal class id was found, the target is a cache miss handler that can
1377 // be invoked as a normal Dart function. 1453 // be invoked as a normal Dart function.
1378 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize)); 1454 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize));
1379 __ movq(RAX, FieldAddress(RAX, Function::code_offset())); 1455 __ movq(RAX, FieldAddress(RAX, Function::code_offset()));
1380 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); 1456 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset()));
1381 __ LoadObject(RBX, ic_data); 1457 __ LoadObject(RBX, ic_data, PP);
1382 __ LoadObject(R10, arguments_descriptor); 1458 __ LoadObject(R10, arguments_descriptor, PP);
1383 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); 1459 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
1384 __ call(RAX); 1460 __ call(RAX);
1385 AddCurrentDescriptor(PcDescriptors::kOther, Isolate::kNoDeoptId, token_pos); 1461 AddCurrentDescriptor(PcDescriptors::kOther, Isolate::kNoDeoptId, token_pos);
1386 RecordSafepoint(locs); 1462 RecordSafepoint(locs);
1387 AddDeoptIndexAtCall(Isolate::ToDeoptAfter(deopt_id), token_pos); 1463 AddDeoptIndexAtCall(Isolate::ToDeoptAfter(deopt_id), token_pos);
1388 __ Drop(argument_count); 1464 __ Drop(argument_count);
1389 } 1465 }
1390 1466
1391 1467
// Emits a static call from optimized code: loads the arguments descriptor
// into R10 (expected by the callee), calls through the patchable
// CallStaticFunction stub, records 'function' as the static call target for
// later patching, and drops the pushed arguments.
void FlowGraphCompiler::EmitOptimizedStaticCall(
    const Function& function,
    const Array& arguments_descriptor,
    intptr_t argument_count,
    intptr_t deopt_id,
    intptr_t token_pos,
    LocationSummary* locs) {
  __ LoadObject(R10, arguments_descriptor, PP);
  // Do not use the code from the function, but let the code be patched so that
  // we can record the outgoing edges to other code.
  GenerateDartCall(deopt_id,
                   token_pos,
                   &StubCode::CallStaticFunctionLabel(),
                   PcDescriptors::kOptStaticCall,
                   locs);
  AddStaticCallTarget(function);
  __ Drop(argument_count);
}
(...skipping 12 matching lines...) Expand all
1422 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { 1498 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) {
1423 ASSERT(!needs_number_check); 1499 ASSERT(!needs_number_check);
1424 __ testq(reg, reg); 1500 __ testq(reg, reg);
1425 return; 1501 return;
1426 } 1502 }
1427 1503
1428 if (needs_number_check) { 1504 if (needs_number_check) {
1429 __ pushq(reg); 1505 __ pushq(reg);
1430 __ PushObject(obj); 1506 __ PushObject(obj);
1431 if (is_optimizing()) { 1507 if (is_optimizing()) {
1432 __ call(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); 1508 __ CallPatchable(&StubCode::OptimizedIdenticalWithNumberCheckLabel());
1433 } else { 1509 } else {
1434 __ call(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); 1510 __ CallPatchable(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
1435 } 1511 }
1436 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, 1512 AddCurrentDescriptor(PcDescriptors::kRuntimeCall,
1437 Isolate::kNoDeoptId, 1513 Isolate::kNoDeoptId,
1438 token_pos); 1514 token_pos);
1439 __ popq(reg); // Discard constant. 1515 __ popq(reg); // Discard constant.
1440 __ popq(reg); // Restore 'reg'. 1516 __ popq(reg); // Restore 'reg'.
1441 return; 1517 return;
1442 } 1518 }
1443 1519
1444 __ CompareObject(reg, obj); 1520 __ CompareObject(reg, obj);
1445 } 1521 }
1446 1522
1447 1523
1448 void FlowGraphCompiler::EmitEqualityRegRegCompare(Register left, 1524 void FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
1449 Register right, 1525 Register right,
1450 bool needs_number_check, 1526 bool needs_number_check,
1451 intptr_t token_pos) { 1527 intptr_t token_pos) {
1452 if (needs_number_check) { 1528 if (needs_number_check) {
1453 __ pushq(left); 1529 __ pushq(left);
1454 __ pushq(right); 1530 __ pushq(right);
1455 if (is_optimizing()) { 1531 if (is_optimizing()) {
1456 __ call(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); 1532 __ CallPatchable(&StubCode::OptimizedIdenticalWithNumberCheckLabel());
1457 } else { 1533 } else {
1458 __ call(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); 1534 __ CallPatchable(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
1459 } 1535 }
1460 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, 1536 AddCurrentDescriptor(PcDescriptors::kRuntimeCall,
1461 Isolate::kNoDeoptId, 1537 Isolate::kNoDeoptId,
1462 token_pos); 1538 token_pos);
1463 // Stub returns result in flags (result of a cmpl, we need ZF computed). 1539 // Stub returns result in flags (result of a cmpl, we need ZF computed).
1464 __ popq(right); 1540 __ popq(right);
1465 __ popq(left); 1541 __ popq(left);
1466 } else { 1542 } else {
1467 __ cmpl(left, right); 1543 __ cmpl(left, right);
1468 } 1544 }
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
1538 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); 1614 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0));
1539 Label match_found; 1615 Label match_found;
1540 const intptr_t len = ic_data.NumberOfChecks(); 1616 const intptr_t len = ic_data.NumberOfChecks();
1541 GrowableArray<CidTarget> sorted(len); 1617 GrowableArray<CidTarget> sorted(len);
1542 SortICDataByCount(ic_data, &sorted); 1618 SortICDataByCount(ic_data, &sorted);
1543 ASSERT(class_id_reg != R10); 1619 ASSERT(class_id_reg != R10);
1544 ASSERT(len > 0); // Why bother otherwise. 1620 ASSERT(len > 0); // Why bother otherwise.
1545 const Array& arguments_descriptor = 1621 const Array& arguments_descriptor =
1546 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, 1622 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
1547 argument_names)); 1623 argument_names));
1548 __ LoadObject(R10, arguments_descriptor); 1624 __ LoadObject(R10, arguments_descriptor, PP);
1549 for (intptr_t i = 0; i < len; i++) { 1625 for (intptr_t i = 0; i < len; i++) {
1550 const bool is_last_check = (i == (len - 1)); 1626 const bool is_last_check = (i == (len - 1));
1551 Label next_test; 1627 Label next_test;
1552 assembler()->cmpl(class_id_reg, Immediate(sorted[i].cid)); 1628 assembler()->cmpl(class_id_reg, Immediate(sorted[i].cid));
1553 if (is_last_check) { 1629 if (is_last_check) {
1554 assembler()->j(NOT_EQUAL, deopt); 1630 assembler()->j(NOT_EQUAL, deopt);
1555 } else { 1631 } else {
1556 assembler()->j(NOT_EQUAL, &next_test); 1632 assembler()->j(NOT_EQUAL, &next_test);
1557 } 1633 }
1558 // Do not use the code from the function, but let the code be patched so 1634 // Do not use the code from the function, but let the code be patched so
(...skipping 21 matching lines...) Expand all
1580 BranchInstr* branch) { 1656 BranchInstr* branch) {
1581 ASSERT(branch != NULL); 1657 ASSERT(branch != NULL);
1582 assembler()->comisd(left, right); 1658 assembler()->comisd(left, right);
1583 BlockEntryInstr* nan_result = (true_condition == NOT_EQUAL) ? 1659 BlockEntryInstr* nan_result = (true_condition == NOT_EQUAL) ?
1584 branch->true_successor() : branch->false_successor(); 1660 branch->true_successor() : branch->false_successor();
1585 assembler()->j(PARITY_EVEN, GetJumpLabel(nan_result)); 1661 assembler()->j(PARITY_EVEN, GetJumpLabel(nan_result));
1586 branch->EmitBranchOnCondition(this, true_condition); 1662 branch->EmitBranchOnCondition(this, true_condition);
1587 } 1663 }
1588 1664
1589 1665
1590
1591 void FlowGraphCompiler::EmitDoubleCompareBool(Condition true_condition, 1666 void FlowGraphCompiler::EmitDoubleCompareBool(Condition true_condition,
1592 FpuRegister left, 1667 FpuRegister left,
1593 FpuRegister right, 1668 FpuRegister right,
1594 Register result) { 1669 Register result) {
1595 assembler()->comisd(left, right); 1670 assembler()->comisd(left, right);
1596 Label is_false, is_true, done; 1671 Label is_false, is_true, done;
1597 assembler()->j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN false; 1672 assembler()->j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN false;
1598 assembler()->j(true_condition, &is_true, Assembler::kNearJump); 1673 assembler()->j(true_condition, &is_true, Assembler::kNearJump);
1599 assembler()->Bind(&is_false); 1674 assembler()->Bind(&is_false);
1600 assembler()->LoadObject(result, Bool::False()); 1675 assembler()->LoadObject(result, Bool::False(), PP);
1601 assembler()->jmp(&done); 1676 assembler()->jmp(&done);
1602 assembler()->Bind(&is_true); 1677 assembler()->Bind(&is_true);
1603 assembler()->LoadObject(result, Bool::True()); 1678 assembler()->LoadObject(result, Bool::True(), PP);
1604 assembler()->Bind(&done); 1679 assembler()->Bind(&done);
1605 } 1680 }
1606 1681
1607 1682
1608 FieldAddress FlowGraphCompiler::ElementAddressForIntIndex(intptr_t cid, 1683 FieldAddress FlowGraphCompiler::ElementAddressForIntIndex(intptr_t cid,
1609 intptr_t index_scale, 1684 intptr_t index_scale,
1610 Register array, 1685 Register array,
1611 intptr_t index) { 1686 intptr_t index) {
1612 const int64_t disp = 1687 const int64_t disp =
1613 static_cast<int64_t>(index) * index_scale + DataOffsetFor(cid); 1688 static_cast<int64_t>(index) * index_scale + DataOffsetFor(cid);
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after
1714 __ movups(XMM0, source.ToStackSlotAddress()); 1789 __ movups(XMM0, source.ToStackSlotAddress());
1715 __ movups(destination.ToStackSlotAddress(), XMM0); 1790 __ movups(destination.ToStackSlotAddress(), XMM0);
1716 } 1791 }
1717 } else { 1792 } else {
1718 ASSERT(source.IsConstant()); 1793 ASSERT(source.IsConstant());
1719 if (destination.IsRegister()) { 1794 if (destination.IsRegister()) {
1720 const Object& constant = source.constant(); 1795 const Object& constant = source.constant();
1721 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) { 1796 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) {
1722 __ xorq(destination.reg(), destination.reg()); 1797 __ xorq(destination.reg(), destination.reg());
1723 } else { 1798 } else {
1724 __ LoadObject(destination.reg(), constant); 1799 __ LoadObject(destination.reg(), constant, PP);
1725 } 1800 }
1726 } else { 1801 } else {
1727 ASSERT(destination.IsStackSlot()); 1802 ASSERT(destination.IsStackSlot());
1728 StoreObject(destination.ToStackSlotAddress(), source.constant()); 1803 StoreObject(destination.ToStackSlotAddress(), source.constant());
1729 } 1804 }
1730 } 1805 }
1731 1806
1732 move->Eliminate(); 1807 move->Eliminate();
1733 } 1808 }
1734 1809
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after
1863 __ movups(reg, Address(RSP, 0)); 1938 __ movups(reg, Address(RSP, 0));
1864 __ addq(RSP, Immediate(kFpuRegisterSize)); 1939 __ addq(RSP, Immediate(kFpuRegisterSize));
1865 } 1940 }
1866 1941
1867 1942
1868 #undef __ 1943 #undef __
1869 1944
1870 } // namespace dart 1945 } // namespace dart
1871 1946
1872 #endif // defined TARGET_ARCH_X64 1947 #endif // defined TARGET_ARCH_X64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698