Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(726)

Side by Side Diff: runtime/vm/flow_graph_compiler_x64.cc

Issue 22825023: Uses an object pool on x64 (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 7 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64.
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/flow_graph_compiler.h" 8 #include "vm/flow_graph_compiler.h"
9 9
10 #include "vm/ast_printer.h" 10 #include "vm/ast_printer.h"
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
59 intptr_t slot_ix = 0; 59 intptr_t slot_ix = 0;
60 Environment* current = deopt_env_; 60 Environment* current = deopt_env_;
61 61
62 // Emit all kMaterializeObject instructions describing objects to be 62 // Emit all kMaterializeObject instructions describing objects to be
63 // materialized on the deoptimization as a prefix to the deoptimization info. 63 // materialized on the deoptimization as a prefix to the deoptimization info.
64 EmitMaterializations(deopt_env_, builder); 64 EmitMaterializations(deopt_env_, builder);
65 65
66 // The real frame starts here. 66 // The real frame starts here.
67 builder->MarkFrameStart(); 67 builder->MarkFrameStart();
68 68
69 // Callee's PC marker is not used anymore. Pass Function::null() to set to 0. 69 // Current PP, FP, and PC.
70 builder->AddPp(current->function(), slot_ix++);
70 builder->AddPcMarker(Function::Handle(), slot_ix++); 71 builder->AddPcMarker(Function::Handle(), slot_ix++);
71
72 // Current FP and PC.
73 builder->AddCallerFp(slot_ix++); 72 builder->AddCallerFp(slot_ix++);
74 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++); 73 builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++);
75 74
76 // Emit all values that are needed for materialization as a part of the 75 // Emit all values that are needed for materialization as a part of the
77 // expression stack for the bottom-most frame. This guarantees that GC 76 // expression stack for the bottom-most frame. This guarantees that GC
78 // will be able to find them during materialization. 77 // will be able to find them during materialization.
79 slot_ix = builder->EmitMaterializationArguments(slot_ix); 78 slot_ix = builder->EmitMaterializationArguments(slot_ix);
80 79
81 // For the innermost environment, set outgoing arguments and the locals. 80 // For the innermost environment, set outgoing arguments and the locals.
82 for (intptr_t i = current->Length() - 1; 81 for (intptr_t i = current->Length() - 1;
83 i >= current->fixed_parameter_count(); 82 i >= current->fixed_parameter_count();
84 i--) { 83 i--) {
85 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++); 84 builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
86 } 85 }
87 86
88 // Current PC marker and caller FP.
89 builder->AddPcMarker(current->function(), slot_ix++);
90 builder->AddCallerFp(slot_ix++);
91
92 Environment* previous = current; 87 Environment* previous = current;
93 current = current->outer(); 88 current = current->outer();
94 while (current != NULL) { 89 while (current != NULL) {
90 // PP, FP, and PC.
91 builder->AddPp(current->function(), slot_ix++);
92 builder->AddPcMarker(previous->function(), slot_ix++);
93 builder->AddCallerFp(slot_ix++);
94
95 // For any outer environment the deopt id is that of the call instruction 95 // For any outer environment the deopt id is that of the call instruction
96 // which is recorded in the outer environment. 96 // which is recorded in the outer environment.
97 builder->AddReturnAddress(current->function(), 97 builder->AddReturnAddress(current->function(),
98 Isolate::ToDeoptAfter(current->deopt_id()), 98 Isolate::ToDeoptAfter(current->deopt_id()),
99 slot_ix++); 99 slot_ix++);
100 100
101 // The values of outgoing arguments can be changed from the inlined call so 101 // The values of outgoing arguments can be changed from the inlined call so
102 // we must read them from the previous environment. 102 // we must read them from the previous environment.
103 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { 103 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) {
104 builder->AddCopy(previous->ValueAt(i), 104 builder->AddCopy(previous->ValueAt(i),
105 previous->LocationAt(i), 105 previous->LocationAt(i),
106 slot_ix++); 106 slot_ix++);
107 } 107 }
108 108
109 // Set the locals, note that outgoing arguments are not in the environment. 109 // Set the locals, note that outgoing arguments are not in the environment.
110 for (intptr_t i = current->Length() - 1; 110 for (intptr_t i = current->Length() - 1;
111 i >= current->fixed_parameter_count(); 111 i >= current->fixed_parameter_count();
112 i--) { 112 i--) {
113 builder->AddCopy(current->ValueAt(i), 113 builder->AddCopy(current->ValueAt(i),
114 current->LocationAt(i), 114 current->LocationAt(i),
115 slot_ix++); 115 slot_ix++);
116 } 116 }
117 117
118 // PC marker and caller FP.
119 builder->AddPcMarker(current->function(), slot_ix++);
120 builder->AddCallerFp(slot_ix++);
121
122 // Iterate on the outer environment. 118 // Iterate on the outer environment.
123 previous = current; 119 previous = current;
124 current = current->outer(); 120 current = current->outer();
125 } 121 }
126 // The previous pointer is now the outermost environment. 122 // The previous pointer is now the outermost environment.
127 ASSERT(previous != NULL); 123 ASSERT(previous != NULL);
128 124
129 // For the outermost environment, set caller PC. 125 // For the outermost environment, set caller PC, caller PP, and caller FP.
126 builder->AddCallerPp(slot_ix++);
127 // PC marker.
128 builder->AddPcMarker(previous->function(), slot_ix++);
129 builder->AddCallerFp(slot_ix++);
130 builder->AddCallerPc(slot_ix++); 130 builder->AddCallerPc(slot_ix++);
131 131
132 // For the outermost environment, set the incoming arguments. 132 // For the outermost environment, set the incoming arguments.
133 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { 133 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) {
134 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); 134 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++);
135 } 135 }
136 136
137 const DeoptInfo& deopt_info = DeoptInfo::Handle(builder->CreateDeoptInfo()); 137 const DeoptInfo& deopt_info = DeoptInfo::Handle(builder->CreateDeoptInfo());
138 return deopt_info.raw(); 138 return deopt_info.raw();
139 } 139 }
140 140
141
Florian Schneider 2013/09/04 09:39:47 Accidental change?
zra 2013/09/04 21:00:41 Done.
142 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler, 141 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
143 intptr_t stub_ix) { 142 intptr_t stub_ix) {
144 // Calls do not need stubs, they share a deoptimization trampoline. 143 // Calls do not need stubs, they share a deoptimization trampoline.
145 ASSERT(reason() != kDeoptAtCall); 144 ASSERT(reason() != kDeoptAtCall);
146 Assembler* assem = compiler->assembler(); 145 Assembler* assem = compiler->assembler();
147 #define __ assem-> 146 #define __ assem->
148 __ Comment("Deopt stub for id %" Pd "", deopt_id()); 147 __ Comment("Deopt stub for id %" Pd "", deopt_id());
149 __ Bind(entry_label()); 148 __ Bind(entry_label());
150 if (FLAG_trap_on_deoptimization) __ int3(); 149 if (FLAG_trap_on_deoptimization) __ int3();
151 150
152 ASSERT(deopt_env() != NULL); 151 ASSERT(deopt_env() != NULL);
153 152
154 __ call(&StubCode::DeoptimizeLabel()); 153 __ CallFromPool(&StubCode::DeoptimizeLabel());
155 set_pc_offset(assem->CodeSize()); 154 set_pc_offset(assem->CodeSize());
156 __ int3(); 155 __ int3();
157 #undef __ 156 #undef __
158 } 157 }
159 158
160 159
161 #define __ assembler()-> 160 #define __ assembler()->
162 161
163 162
164 // Fall through if bool_register contains null. 163 // Fall through if bool_register contains null.
165 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, 164 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
166 Label* is_true, 165 Label* is_true,
167 Label* is_false) { 166 Label* is_false) {
168 const Immediate& raw_null =
169 Immediate(reinterpret_cast<intptr_t>(Object::null()));
170 Label fall_through; 167 Label fall_through;
171 __ cmpq(bool_register, raw_null); 168 __ CompareObject(bool_register, Object::Handle(Object::null()));
172 __ j(EQUAL, &fall_through, Assembler::kNearJump); 169 __ j(EQUAL, &fall_through, Assembler::kNearJump);
173 __ CompareObject(bool_register, Bool::True()); 170 __ CompareObject(bool_register, Bool::True());
174 __ j(EQUAL, is_true); 171 __ j(EQUAL, is_true);
175 __ jmp(is_false); 172 __ jmp(is_false);
176 __ Bind(&fall_through); 173 __ Bind(&fall_through);
177 } 174 }
178 175
179 176
180 // Clobbers RCX. 177 // Clobbers RCX.
181 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( 178 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub(
182 TypeTestStubKind test_kind, 179 TypeTestStubKind test_kind,
183 Register instance_reg, 180 Register instance_reg,
184 Register type_arguments_reg, 181 Register type_arguments_reg,
185 Register temp_reg, 182 Register temp_reg,
186 Label* is_instance_lbl, 183 Label* is_instance_lbl,
187 Label* is_not_instance_lbl) { 184 Label* is_not_instance_lbl) {
188 const SubtypeTestCache& type_test_cache = 185 const SubtypeTestCache& type_test_cache =
189 SubtypeTestCache::ZoneHandle(SubtypeTestCache::New()); 186 SubtypeTestCache::ZoneHandle(SubtypeTestCache::New());
190 const Immediate& raw_null = 187 __ LoadObject(temp_reg, type_test_cache, Assembler::kNotPatchable);
191 Immediate(reinterpret_cast<intptr_t>(Object::null()));
192 __ LoadObject(temp_reg, type_test_cache);
193 __ pushq(temp_reg); // Subtype test cache. 188 __ pushq(temp_reg); // Subtype test cache.
194 __ pushq(instance_reg); // Instance. 189 __ pushq(instance_reg); // Instance.
195 if (test_kind == kTestTypeOneArg) { 190 if (test_kind == kTestTypeOneArg) {
196 ASSERT(type_arguments_reg == kNoRegister); 191 ASSERT(type_arguments_reg == kNoRegister);
197 __ pushq(raw_null); 192 __ PushObject(Object::Handle(Object::null()));
198 __ call(&StubCode::Subtype1TestCacheLabel()); 193 __ CallFromPool(&StubCode::Subtype1TestCacheLabel());
Florian Schneider 2013/09/04 09:39:47 Are there any direct calls to ExternalLabel left?
zra 2013/09/04 21:00:41 Yes, there is at least the call in RuntimeEntry::C
199 } else if (test_kind == kTestTypeTwoArgs) { 194 } else if (test_kind == kTestTypeTwoArgs) {
200 ASSERT(type_arguments_reg == kNoRegister); 195 ASSERT(type_arguments_reg == kNoRegister);
201 __ pushq(raw_null); 196 __ PushObject(Object::Handle(Object::null()));
202 __ call(&StubCode::Subtype2TestCacheLabel()); 197 __ CallFromPool(&StubCode::Subtype2TestCacheLabel());
203 } else if (test_kind == kTestTypeThreeArgs) { 198 } else if (test_kind == kTestTypeThreeArgs) {
204 __ pushq(type_arguments_reg); 199 __ pushq(type_arguments_reg);
205 __ call(&StubCode::Subtype3TestCacheLabel()); 200 __ CallFromPool(&StubCode::Subtype3TestCacheLabel());
206 } else { 201 } else {
207 UNREACHABLE(); 202 UNREACHABLE();
208 } 203 }
209 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False. 204 // Result is in RCX: null -> not found, otherwise Bool::True or Bool::False.
210 ASSERT(instance_reg != RCX); 205 ASSERT(instance_reg != RCX);
211 ASSERT(temp_reg != RCX); 206 ASSERT(temp_reg != RCX);
212 __ popq(instance_reg); // Discard. 207 __ popq(instance_reg); // Discard.
213 __ popq(instance_reg); // Restore receiver. 208 __ popq(instance_reg); // Restore receiver.
214 __ popq(temp_reg); // Discard. 209 __ popq(temp_reg); // Discard.
215 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl); 210 GenerateBoolToJump(RCX, is_instance_lbl, is_not_instance_lbl);
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after
340 // interfaces. 335 // interfaces.
341 // Bool interface can be implemented only by core class Bool. 336 // Bool interface can be implemented only by core class Bool.
342 if (type.IsBoolType()) { 337 if (type.IsBoolType()) {
343 __ cmpl(kClassIdReg, Immediate(kBoolCid)); 338 __ cmpl(kClassIdReg, Immediate(kBoolCid));
344 __ j(EQUAL, is_instance_lbl); 339 __ j(EQUAL, is_instance_lbl);
345 __ jmp(is_not_instance_lbl); 340 __ jmp(is_not_instance_lbl);
346 return false; 341 return false;
347 } 342 }
348 if (type.IsFunctionType()) { 343 if (type.IsFunctionType()) {
349 // Check if instance is a closure. 344 // Check if instance is a closure.
350 const Immediate& raw_null =
351 Immediate(reinterpret_cast<intptr_t>(Object::null()));
352 __ LoadClassById(R13, kClassIdReg); 345 __ LoadClassById(R13, kClassIdReg);
353 __ movq(R13, FieldAddress(R13, Class::signature_function_offset())); 346 __ movq(R13, FieldAddress(R13, Class::signature_function_offset()));
354 __ cmpq(R13, raw_null); 347 __ CompareObject(R13, Object::Handle(Object::null()));
355 __ j(NOT_EQUAL, is_instance_lbl); 348 __ j(NOT_EQUAL, is_instance_lbl);
356 } 349 }
357 // Custom checking for numbers (Smi, Mint, Bigint and Double). 350 // Custom checking for numbers (Smi, Mint, Bigint and Double).
358 // Note that instance is not Smi (checked above). 351 // Note that instance is not Smi (checked above).
359 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) { 352 if (type.IsSubtypeOf(Type::Handle(Type::Number()), NULL)) {
360 GenerateNumberTypeCheck( 353 GenerateNumberTypeCheck(
361 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); 354 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl);
362 return false; 355 return false;
363 } 356 }
364 if (type.IsStringType()) { 357 if (type.IsStringType()) {
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after
407 // RAX: instance (preserved). 400 // RAX: instance (preserved).
408 // Clobbers RDI, RDX, R10. 401 // Clobbers RDI, RDX, R10.
409 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( 402 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
410 intptr_t token_pos, 403 intptr_t token_pos,
411 const AbstractType& type, 404 const AbstractType& type,
412 Label* is_instance_lbl, 405 Label* is_instance_lbl,
413 Label* is_not_instance_lbl) { 406 Label* is_not_instance_lbl) {
414 __ Comment("UninstantiatedTypeTest"); 407 __ Comment("UninstantiatedTypeTest");
415 ASSERT(!type.IsInstantiated()); 408 ASSERT(!type.IsInstantiated());
416 // Skip check if destination is a dynamic type. 409 // Skip check if destination is a dynamic type.
417 const Immediate& raw_null =
418 Immediate(reinterpret_cast<intptr_t>(Object::null()));
419 if (type.IsTypeParameter()) { 410 if (type.IsTypeParameter()) {
420 const TypeParameter& type_param = TypeParameter::Cast(type); 411 const TypeParameter& type_param = TypeParameter::Cast(type);
421 // Load instantiator (or null) and instantiator type arguments on stack. 412 // Load instantiator (or null) and instantiator type arguments on stack.
422 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. 413 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments.
423 // RDX: instantiator type arguments. 414 // RDX: instantiator type arguments.
424 // Check if type argument is dynamic. 415 // Check if type argument is dynamic.
425 __ cmpq(RDX, raw_null); 416 __ CompareObject(RDX, Object::Handle(Object::null()));
426 __ j(EQUAL, is_instance_lbl); 417 __ j(EQUAL, is_instance_lbl);
427 // Can handle only type arguments that are instances of TypeArguments. 418 // Can handle only type arguments that are instances of TypeArguments.
428 // (runtime checks canonicalize type arguments). 419 // (runtime checks canonicalize type arguments).
429 Label fall_through; 420 Label fall_through;
430 __ CompareClassId(RDX, kTypeArgumentsCid); 421 __ CompareClassId(RDX, kTypeArgumentsCid);
431 __ j(NOT_EQUAL, &fall_through); 422 __ j(NOT_EQUAL, &fall_through);
432 __ movq(RDI, 423 __ movq(RDI,
433 FieldAddress(RDX, TypeArguments::type_at_offset(type_param.index()))); 424 FieldAddress(RDX, TypeArguments::type_at_offset(type_param.index())));
434 // RDI: Concrete type of type. 425 // RDI: Concrete type of type.
435 // Check if type argument is dynamic. 426 // Check if type argument is dynamic.
436 __ CompareObject(RDI, Type::ZoneHandle(Type::DynamicType())); 427 __ CompareObject(RDI, Type::ZoneHandle(Type::DynamicType()));
437 __ j(EQUAL, is_instance_lbl); 428 __ j(EQUAL, is_instance_lbl);
438 __ cmpq(RDI, raw_null); 429 __ CompareObject(RDI, Object::Handle(Object::null()));
439 __ j(EQUAL, is_instance_lbl); 430 __ j(EQUAL, is_instance_lbl);
440 const Type& object_type = Type::ZoneHandle(Type::ObjectType()); 431 const Type& object_type = Type::ZoneHandle(Type::ObjectType());
441 __ CompareObject(RDI, object_type); 432 __ CompareObject(RDI, object_type);
442 __ j(EQUAL, is_instance_lbl); 433 __ j(EQUAL, is_instance_lbl);
443 434
444 // For Smi check quickly against int and num interfaces. 435 // For Smi check quickly against int and num interfaces.
445 Label not_smi; 436 Label not_smi;
446 __ testq(RAX, Immediate(kSmiTagMask)); // Value is Smi? 437 __ testq(RAX, Immediate(kSmiTagMask)); // Value is Smi?
447 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump); 438 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump);
448 __ CompareObject(RDI, Type::ZoneHandle(Type::IntType())); 439 __ CompareObject(RDI, Type::ZoneHandle(Type::IntType()));
(...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after
568 // Clobbers RCX and RDX. 559 // Clobbers RCX and RDX.
569 // Returns: 560 // Returns:
570 // - true or false in RAX. 561 // - true or false in RAX.
571 void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos, 562 void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos,
572 intptr_t deopt_id, 563 intptr_t deopt_id,
573 const AbstractType& type, 564 const AbstractType& type,
574 bool negate_result, 565 bool negate_result,
575 LocationSummary* locs) { 566 LocationSummary* locs) {
576 ASSERT(type.IsFinalized() && !type.IsMalformed() && !type.IsMalbounded()); 567 ASSERT(type.IsFinalized() && !type.IsMalformed() && !type.IsMalbounded());
577 568
578 const Immediate& raw_null =
579 Immediate(reinterpret_cast<intptr_t>(Object::null()));
580 Label is_instance, is_not_instance; 569 Label is_instance, is_not_instance;
581 __ pushq(RCX); // Store instantiator on stack. 570 __ pushq(RCX); // Store instantiator on stack.
582 __ pushq(RDX); // Store instantiator type arguments. 571 __ pushq(RDX); // Store instantiator type arguments.
583 // If type is instantiated and non-parameterized, we can inline code 572 // If type is instantiated and non-parameterized, we can inline code
584 // checking whether the tested instance is a Smi. 573 // checking whether the tested instance is a Smi.
585 if (type.IsInstantiated()) { 574 if (type.IsInstantiated()) {
586 // A null object is only an instance of Object and dynamic, which has 575 // A null object is only an instance of Object and dynamic, which has
587 // already been checked above (if the type is instantiated). So we can 576 // already been checked above (if the type is instantiated). So we can
588 // return false here if the instance is null (and if the type is 577 // return false here if the instance is null (and if the type is
589 // instantiated). 578 // instantiated).
590 // We can only inline this null check if the type is instantiated at compile 579 // We can only inline this null check if the type is instantiated at compile
591 // time, since an uninstantiated type at compile time could be Object or 580 // time, since an uninstantiated type at compile time could be Object or
592 // dynamic at run time. 581 // dynamic at run time.
593 __ cmpq(RAX, raw_null); 582 __ CompareObject(RAX, Object::Handle(Object::null()));
594 __ j(EQUAL, &is_not_instance); 583 __ j(EQUAL, &is_not_instance);
595 } 584 }
596 585
597 // Generate inline instanceof test. 586 // Generate inline instanceof test.
598 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(); 587 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle();
599 test_cache = GenerateInlineInstanceof(token_pos, type, 588 test_cache = GenerateInlineInstanceof(token_pos, type,
600 &is_instance, &is_not_instance); 589 &is_instance, &is_not_instance);
601 590
602 // test_cache is null if there is no fall-through. 591 // test_cache is null if there is no fall-through.
603 Label done; 592 Label done;
604 if (!test_cache.IsNull()) { 593 if (!test_cache.IsNull()) {
605 // Generate runtime call. 594 // Generate runtime call.
606 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. 595 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments.
607 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. 596 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator.
608 __ PushObject(Object::ZoneHandle()); // Make room for the result. 597 __ PushObject(Object::ZoneHandle()); // Make room for the result.
609 __ pushq(RAX); // Push the instance. 598 __ pushq(RAX); // Push the instance.
610 __ PushObject(type); // Push the type. 599 __ PushObject(type); // Push the type.
611 __ pushq(RCX); // TODO(srdjan): Pass instantiator instead of null. 600 __ pushq(RCX); // TODO(srdjan): Pass instantiator instead of null.
612 __ pushq(RDX); // Instantiator type arguments. 601 __ pushq(RDX); // Instantiator type arguments.
613 __ LoadObject(RAX, test_cache); 602 __ LoadObject(RAX, test_cache, Assembler::kNotPatchable);
614 __ pushq(RAX); 603 __ pushq(RAX);
615 GenerateCallRuntime(token_pos, 604 GenerateCallRuntime(token_pos,
616 deopt_id, 605 deopt_id,
617 kInstanceofRuntimeEntry, 606 kInstanceofRuntimeEntry,
618 locs); 607 locs);
619 // Pop the parameters supplied to the runtime entry. The result of the 608 // Pop the parameters supplied to the runtime entry. The result of the
620 // instanceof runtime call will be left as the result of the operation. 609 // instanceof runtime call will be left as the result of the operation.
621 __ Drop(5); 610 __ Drop(5);
622 if (negate_result) { 611 if (negate_result) {
623 __ popq(RDX); 612 __ popq(RDX);
624 __ LoadObject(RAX, Bool::True()); 613 __ LoadObject(RAX, Bool::True(), Assembler::kNotPatchable);
625 __ cmpq(RDX, RAX); 614 __ cmpq(RDX, RAX);
626 __ j(NOT_EQUAL, &done, Assembler::kNearJump); 615 __ j(NOT_EQUAL, &done, Assembler::kNearJump);
627 __ LoadObject(RAX, Bool::False()); 616 __ LoadObject(RAX, Bool::False(), Assembler::kNotPatchable);
628 } else { 617 } else {
629 __ popq(RAX); 618 __ popq(RAX);
630 } 619 }
631 __ jmp(&done, Assembler::kNearJump); 620 __ jmp(&done, Assembler::kNearJump);
632 } 621 }
633 __ Bind(&is_not_instance); 622 __ Bind(&is_not_instance);
634 __ LoadObject(RAX, negate_result ? Bool::True() : Bool::False()); 623 __ LoadObject(RAX, negate_result ? Bool::True() : Bool::False(),
624 Assembler::kNotPatchable);
635 __ jmp(&done, Assembler::kNearJump); 625 __ jmp(&done, Assembler::kNearJump);
636 626
637 __ Bind(&is_instance); 627 __ Bind(&is_instance);
638 __ LoadObject(RAX, negate_result ? Bool::False() : Bool::True()); 628 __ LoadObject(RAX, negate_result ? Bool::False() : Bool::True(),
629 Assembler::kNotPatchable);
639 __ Bind(&done); 630 __ Bind(&done);
640 __ popq(RDX); // Remove pushed instantiator type arguments. 631 __ popq(RDX); // Remove pushed instantiator type arguments.
641 __ popq(RCX); // Remove pushed instantiator. 632 __ popq(RCX); // Remove pushed instantiator.
642 } 633 }
643 634
644 635
645 // Optimize assignable type check by adding inlined tests for: 636 // Optimize assignable type check by adding inlined tests for:
646 // - NULL -> return NULL. 637 // - NULL -> return NULL.
647 // - Smi -> compile time subtype check (only if dst class is not parameterized). 638 // - Smi -> compile time subtype check (only if dst class is not parameterized).
648 // - Class equality (only if class is not parameterized). 639 // - Class equality (only if class is not parameterized).
(...skipping 12 matching lines...) Expand all
661 LocationSummary* locs) { 652 LocationSummary* locs) {
662 ASSERT(token_pos >= 0); 653 ASSERT(token_pos >= 0);
663 ASSERT(!dst_type.IsNull()); 654 ASSERT(!dst_type.IsNull());
664 ASSERT(dst_type.IsFinalized()); 655 ASSERT(dst_type.IsFinalized());
665 // Assignable check is skipped in FlowGraphBuilder, not here. 656 // Assignable check is skipped in FlowGraphBuilder, not here.
666 ASSERT(dst_type.IsMalformed() || dst_type.IsMalbounded() || 657 ASSERT(dst_type.IsMalformed() || dst_type.IsMalbounded() ||
667 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); 658 (!dst_type.IsDynamicType() && !dst_type.IsObjectType()));
668 __ pushq(RCX); // Store instantiator. 659 __ pushq(RCX); // Store instantiator.
669 __ pushq(RDX); // Store instantiator type arguments. 660 __ pushq(RDX); // Store instantiator type arguments.
670 // A null object is always assignable and is returned as result. 661 // A null object is always assignable and is returned as result.
671 const Immediate& raw_null =
672 Immediate(reinterpret_cast<intptr_t>(Object::null()));
673 Label is_assignable, runtime_call; 662 Label is_assignable, runtime_call;
674 __ cmpq(RAX, raw_null); 663 __ CompareObject(RAX, Object::Handle(Object::null()));
675 __ j(EQUAL, &is_assignable); 664 __ j(EQUAL, &is_assignable);
676 665
677 if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) { 666 if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) {
678 // If type checks are not eliminated during the graph building then 667 // If type checks are not eliminated during the graph building then
679 // a transition sentinel can be seen here. 668 // a transition sentinel can be seen here.
680 __ CompareObject(RAX, Object::transition_sentinel()); 669 __ CompareObject(RAX, Object::transition_sentinel());
681 __ j(EQUAL, &is_assignable); 670 __ j(EQUAL, &is_assignable);
682 } 671 }
683 672
684 // Generate throw new TypeError() if the type is malformed or malbounded. 673 // Generate throw new TypeError() if the type is malformed or malbounded.
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
716 705
717 __ Bind(&runtime_call); 706 __ Bind(&runtime_call);
718 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments. 707 __ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments.
719 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator. 708 __ movq(RCX, Address(RSP, kWordSize)); // Get instantiator.
720 __ PushObject(Object::ZoneHandle()); // Make room for the result. 709 __ PushObject(Object::ZoneHandle()); // Make room for the result.
721 __ pushq(RAX); // Push the source object. 710 __ pushq(RAX); // Push the source object.
722 __ PushObject(dst_type); // Push the type of the destination. 711 __ PushObject(dst_type); // Push the type of the destination.
723 __ pushq(RCX); // Instantiator. 712 __ pushq(RCX); // Instantiator.
724 __ pushq(RDX); // Instantiator type arguments. 713 __ pushq(RDX); // Instantiator type arguments.
725 __ PushObject(dst_name); // Push the name of the destination. 714 __ PushObject(dst_name); // Push the name of the destination.
726 __ LoadObject(RAX, test_cache); 715 __ LoadObject(RAX, test_cache, Assembler::kNotPatchable);
727 __ pushq(RAX); 716 __ pushq(RAX);
728 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, locs); 717 GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, locs);
729 // Pop the parameters supplied to the runtime entry. The result of the 718 // Pop the parameters supplied to the runtime entry. The result of the
730 // type check runtime call is the checked value. 719 // type check runtime call is the checked value.
731 __ Drop(6); 720 __ Drop(6);
732 __ popq(RAX); 721 __ popq(RAX);
733 722
734 __ Bind(&is_assignable); 723 __ Bind(&is_assignable);
735 __ popq(RDX); // Remove pushed instantiator type arguments. 724 __ popq(RDX); // Remove pushed instantiator type arguments.
736 __ popq(RCX); // Remove pushed instantiator. 725 __ popq(RCX); // Remove pushed instantiator.
(...skipping 24 matching lines...) Expand all
761 750
762 void FlowGraphCompiler::EmitTrySyncMove(intptr_t dest_offset, 751 void FlowGraphCompiler::EmitTrySyncMove(intptr_t dest_offset,
763 Location loc, 752 Location loc,
764 bool* push_emitted) { 753 bool* push_emitted) {
765 const Address dest(RBP, dest_offset); 754 const Address dest(RBP, dest_offset);
766 if (loc.IsConstant()) { 755 if (loc.IsConstant()) {
767 if (!*push_emitted) { 756 if (!*push_emitted) {
768 __ pushq(RAX); 757 __ pushq(RAX);
769 *push_emitted = true; 758 *push_emitted = true;
770 } 759 }
771 __ LoadObject(RAX, loc.constant()); 760 __ LoadObject(RAX, loc.constant(), Assembler::kNotPatchable);
772 __ movq(dest, RAX); 761 __ movq(dest, RAX);
773 } else if (loc.IsRegister()) { 762 } else if (loc.IsRegister()) {
774 if (*push_emitted && loc.reg() == RAX) { 763 if (*push_emitted && loc.reg() == RAX) {
775 __ movq(RAX, Address(RSP, 0)); 764 __ movq(RAX, Address(RSP, 0));
776 __ movq(dest, RAX); 765 __ movq(dest, RAX);
777 } else { 766 } else {
778 __ movq(dest, loc.reg()); 767 __ movq(dest, loc.reg());
779 } 768 }
780 } else { 769 } else {
781 Address src = loc.ToStackSlotAddress(); 770 Address src = loc.ToStackSlotAddress();
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after
891 const Address argument_addr(RBX, RCX, TIMES_8, 0); 880 const Address argument_addr(RBX, RCX, TIMES_8, 0);
892 const Address copy_addr(RDI, RCX, TIMES_8, 0); 881 const Address copy_addr(RDI, RCX, TIMES_8, 0);
893 __ Bind(&loop); 882 __ Bind(&loop);
894 __ movq(RAX, argument_addr); 883 __ movq(RAX, argument_addr);
895 __ movq(copy_addr, RAX); 884 __ movq(copy_addr, RAX);
896 __ Bind(&loop_condition); 885 __ Bind(&loop_condition);
897 __ decq(RCX); 886 __ decq(RCX);
898 __ j(POSITIVE, &loop, Assembler::kNearJump); 887 __ j(POSITIVE, &loop, Assembler::kNearJump);
899 888
900 // Copy or initialize optional named arguments. 889 // Copy or initialize optional named arguments.
901 const Immediate& raw_null =
902 Immediate(reinterpret_cast<intptr_t>(Object::null()));
903 Label all_arguments_processed; 890 Label all_arguments_processed;
904 #ifdef DEBUG 891 #ifdef DEBUG
905 const bool check_correct_named_args = true; 892 const bool check_correct_named_args = true;
906 #else 893 #else
907 const bool check_correct_named_args = function.IsClosureFunction(); 894 const bool check_correct_named_args = function.IsClosureFunction();
908 #endif 895 #endif
909 if (num_opt_named_params > 0) { 896 if (num_opt_named_params > 0) {
910 // Start by alphabetically sorting the names of the optional parameters. 897 // Start by alphabetically sorting the names of the optional parameters.
911 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params]; 898 LocalVariable** opt_param = new LocalVariable*[num_opt_named_params];
912 int* opt_param_position = new int[num_opt_named_params]; 899 int* opt_param_position = new int[num_opt_named_params];
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
954 __ addq(RDI, Immediate(ArgumentsDescriptor::named_entry_size())); 941 __ addq(RDI, Immediate(ArgumentsDescriptor::named_entry_size()));
955 __ negq(RAX); 942 __ negq(RAX);
956 Address argument_addr(RBX, RAX, TIMES_4, 0); // RAX is a negative Smi. 943 Address argument_addr(RBX, RAX, TIMES_4, 0); // RAX is a negative Smi.
957 __ movq(RAX, argument_addr); 944 __ movq(RAX, argument_addr);
958 __ jmp(&assign_optional_parameter, Assembler::kNearJump); 945 __ jmp(&assign_optional_parameter, Assembler::kNearJump);
959 __ Bind(&load_default_value); 946 __ Bind(&load_default_value);
960 // Load RAX with default argument. 947 // Load RAX with default argument.
961 const Object& value = Object::ZoneHandle( 948 const Object& value = Object::ZoneHandle(
962 parsed_function().default_parameter_values().At( 949 parsed_function().default_parameter_values().At(
963 param_pos - num_fixed_params)); 950 param_pos - num_fixed_params));
964 __ LoadObject(RAX, value); 951 __ LoadObject(RAX, value, Assembler::kNotPatchable);
965 __ Bind(&assign_optional_parameter); 952 __ Bind(&assign_optional_parameter);
966 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. 953 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos].
967 // We do not use the final allocation index of the variable here, i.e. 954 // We do not use the final allocation index of the variable here, i.e.
968 // scope->VariableAt(i)->index(), because captured variables still need 955 // scope->VariableAt(i)->index(), because captured variables still need
969 // to be copied to the context that is not yet allocated. 956 // to be copied to the context that is not yet allocated.
970 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; 957 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos;
971 const Address param_addr(RBP, computed_param_pos * kWordSize); 958 const Address param_addr(RBP, computed_param_pos * kWordSize);
972 __ movq(param_addr, RAX); 959 __ movq(param_addr, RAX);
973 } 960 }
974 delete[] opt_param; 961 delete[] opt_param;
975 delete[] opt_param_position; 962 delete[] opt_param_position;
976 if (check_correct_named_args) { 963 if (check_correct_named_args) {
977 // Check that RDI now points to the null terminator in the arguments 964 // Check that RDI now points to the null terminator in the arguments
978 // descriptor. 965 // descriptor.
979 __ cmpq(Address(RDI, 0), raw_null); 966 __ LoadObject(TMP, Object::Handle(Object::null()),
967 Assembler::kNotPatchable);
968 __ cmpq(Address(RDI, 0), TMP);
980 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); 969 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump);
981 } 970 }
982 } else { 971 } else {
983 ASSERT(num_opt_pos_params > 0); 972 ASSERT(num_opt_pos_params > 0);
984 __ movq(RCX, 973 __ movq(RCX,
985 FieldAddress(R10, ArgumentsDescriptor::positional_count_offset())); 974 FieldAddress(R10, ArgumentsDescriptor::positional_count_offset()));
986 __ SmiUntag(RCX); 975 __ SmiUntag(RCX);
987 for (int i = 0; i < num_opt_pos_params; i++) { 976 for (int i = 0; i < num_opt_pos_params; i++) {
988 Label next_parameter; 977 Label next_parameter;
989 // Handle this optional positional parameter only if k or fewer positional 978 // Handle this optional positional parameter only if k or fewer positional
990 // arguments have been passed, where k is param_pos, the position of this 979 // arguments have been passed, where k is param_pos, the position of this
991 // optional parameter in the formal parameter list. 980 // optional parameter in the formal parameter list.
992 const int param_pos = num_fixed_params + i; 981 const int param_pos = num_fixed_params + i;
993 __ cmpq(RCX, Immediate(param_pos)); 982 __ cmpq(RCX, Immediate(param_pos));
994 __ j(GREATER, &next_parameter, Assembler::kNearJump); 983 __ j(GREATER, &next_parameter, Assembler::kNearJump);
995 // Load RAX with default argument. 984 // Load RAX with default argument.
996 const Object& value = Object::ZoneHandle( 985 const Object& value = Object::ZoneHandle(
997 parsed_function().default_parameter_values().At(i)); 986 parsed_function().default_parameter_values().At(i));
998 __ LoadObject(RAX, value); 987 __ LoadObject(RAX, value, Assembler::kNotPatchable);
999 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos]. 988 // Assign RAX to fp[kFirstLocalSlotFromFp - param_pos].
1000 // We do not use the final allocation index of the variable here, i.e. 989 // We do not use the final allocation index of the variable here, i.e.
1001 // scope->VariableAt(i)->index(), because captured variables still need 990 // scope->VariableAt(i)->index(), because captured variables still need
1002 // to be copied to the context that is not yet allocated. 991 // to be copied to the context that is not yet allocated.
1003 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; 992 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos;
1004 const Address param_addr(RBP, computed_param_pos * kWordSize); 993 const Address param_addr(RBP, computed_param_pos * kWordSize);
1005 __ movq(param_addr, RAX); 994 __ movq(param_addr, RAX);
1006 __ Bind(&next_parameter); 995 __ Bind(&next_parameter);
1007 } 996 }
1008 if (check_correct_named_args) { 997 if (check_correct_named_args) {
1009 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 998 __ movq(RBX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
1010 __ SmiUntag(RBX); 999 __ SmiUntag(RBX);
1011 // Check that RCX equals RBX, i.e. no named arguments passed. 1000 // Check that RCX equals RBX, i.e. no named arguments passed.
1012 __ cmpq(RCX, RBX); 1001 __ cmpq(RCX, RBX);
1013 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump); 1002 __ j(EQUAL, &all_arguments_processed, Assembler::kNearJump);
1014 } 1003 }
1015 } 1004 }
1016 1005
1017 __ Bind(&wrong_num_arguments); 1006 __ Bind(&wrong_num_arguments);
1018 if (function.IsClosureFunction()) { 1007 if (function.IsClosureFunction()) {
1019 // Invoke noSuchMethod function passing "call" as the original name. 1008 // Invoke noSuchMethod function passing "call" as the original name.
1020 const int kNumArgsChecked = 1; 1009 const int kNumArgsChecked = 1;
1021 const ICData& ic_data = ICData::ZoneHandle( 1010 const ICData& ic_data = ICData::ZoneHandle(
1022 ICData::New(function, Symbols::Call(), Object::empty_array(), 1011 ICData::New(function, Symbols::Call(), Object::empty_array(),
1023 Isolate::kNoDeoptId, kNumArgsChecked)); 1012 Isolate::kNoDeoptId, kNumArgsChecked));
1024 __ LoadObject(RBX, ic_data); 1013 __ LoadObject(RBX, ic_data, Assembler::kNotPatchable);
1025 __ LeaveFrame(); // The arguments are still on the stack. 1014 __ LeaveFrame(true); // The arguments are still on the stack.
1026 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); 1015 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel());
1027 // The noSuchMethod call may return to the caller, but not here. 1016 // The noSuchMethod call may return to the caller, but not here.
1028 __ int3(); 1017 __ int3();
1029 } else if (check_correct_named_args) { 1018 } else if (check_correct_named_args) {
1030 __ Stop("Wrong arguments"); 1019 __ Stop("Wrong arguments");
1031 } 1020 }
1032 1021
1033 __ Bind(&all_arguments_processed); 1022 __ Bind(&all_arguments_processed);
1034 // Nullify originally passed arguments only after they have been copied and 1023 // Nullify originally passed arguments only after they have been copied and
1035 // checked, otherwise noSuchMethod would not see their original values. 1024 // checked, otherwise noSuchMethod would not see their original values.
1036 // This step can be skipped in case we decide that formal parameters are 1025 // This step can be skipped in case we decide that formal parameters are
1037 // implicitly final, since garbage collecting the unmodified value is not 1026 // implicitly final, since garbage collecting the unmodified value is not
1038 // an issue anymore. 1027 // an issue anymore.
1039 1028
1040 // R10 : arguments descriptor array. 1029 // R10 : arguments descriptor array.
1041 __ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset())); 1030 __ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
1042 __ SmiUntag(RCX); 1031 __ SmiUntag(RCX);
1032 __ LoadObject(R12, Object::Handle(Object::null()), Assembler::kNotPatchable);
1043 Label null_args_loop, null_args_loop_condition; 1033 Label null_args_loop, null_args_loop_condition;
1044 __ jmp(&null_args_loop_condition, Assembler::kNearJump); 1034 __ jmp(&null_args_loop_condition, Assembler::kNearJump);
1045 const Address original_argument_addr( 1035 const Address original_argument_addr(
1046 RBP, RCX, TIMES_8, (kParamEndSlotFromFp + 1) * kWordSize); 1036 RBP, RCX, TIMES_8, (kParamEndSlotFromFp + 1) * kWordSize);
1047 __ Bind(&null_args_loop); 1037 __ Bind(&null_args_loop);
1048 __ movq(original_argument_addr, raw_null); 1038 __ movq(original_argument_addr, R12);
1049 __ Bind(&null_args_loop_condition); 1039 __ Bind(&null_args_loop_condition);
1050 __ decq(RCX); 1040 __ decq(RCX);
1051 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); 1041 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump);
1052 } 1042 }
1053 1043
1054 1044
1055 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { 1045 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) {
1056 // TOS: return address. 1046 // TOS: return address.
1057 // +1 : receiver. 1047 // +1 : receiver.
1058 // Sequence node has one return node, its input is load field node. 1048 // Sequence node has one return node, its input is load field node.
1059 __ movq(RAX, Address(RSP, 1 * kWordSize)); 1049 __ movq(RAX, Address(RSP, 1 * kWordSize));
1060 __ movq(RAX, FieldAddress(RAX, offset)); 1050 __ movq(RAX, FieldAddress(RAX, offset));
1061 __ ret(); 1051 __ ret();
1062 } 1052 }
1063 1053
1064 1054
1065 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { 1055 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) {
1066 // TOS: return address. 1056 // TOS: return address.
1067 // +1 : value 1057 // +1 : value
1068 // +2 : receiver. 1058 // +2 : receiver.
1069 // Sequence node has one store node and one return NULL node. 1059 // Sequence node has one store node and one return NULL node.
1070 __ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver. 1060 __ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver.
1071 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value. 1061 __ movq(RBX, Address(RSP, 1 * kWordSize)); // Value.
1072 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX); 1062 __ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX);
1073 const Immediate& raw_null = 1063 __ LoadObject(RAX, Object::Handle(Object::null()), Assembler::kNotPatchable);
1074 Immediate(reinterpret_cast<intptr_t>(Object::null()));
1075 __ movq(RAX, raw_null);
1076 __ ret(); 1064 __ ret();
1077 } 1065 }
1078 1066
1079 1067
1080 void FlowGraphCompiler::EmitFrameEntry() { 1068 void FlowGraphCompiler::EmitFrameEntry() {
1081 const Function& function = parsed_function().function(); 1069 const Function& function = parsed_function().function();
1070 Register new_pp = kNoRegister;
1071 Register new_pc = kNoRegister;
1082 if (CanOptimizeFunction() && 1072 if (CanOptimizeFunction() &&
1083 function.is_optimizable() && 1073 function.is_optimizable() &&
1084 (!is_optimizing() || may_reoptimize())) { 1074 (!is_optimizing() || may_reoptimize())) {
1085 const Register function_reg = RDI; 1075 const Register function_reg = RDI;
1086 __ LoadObject(function_reg, function); 1076 new_pp = R13;
1077 new_pc = R12;
1078
1079 Label next;
1080 __ nop(4); // Need a fixed size sequence on frame entry.
1081 __ call(&next);
1082 __ Bind(&next);
1083
1084 const intptr_t object_pool_pc_dist =
1085 Instructions::HeaderSize() - Instructions::object_pool_offset() +
1086 __ CodeSize();
1087 const intptr_t offset =
1088 Assembler::kEntryPointToPcMarkerOffset - __ CodeSize();
1089 __ popq(new_pc);
1090 if (offset != 0) {
1091 __ addq(new_pc, Immediate(offset));
1092 }
1093
1094 // Load callee's pool pointer.
1095 __ movq(new_pp, Address(new_pc, -object_pool_pc_dist - offset));
1096
1097 // Load function object using the callee's pool pointer.
1098 __ LoadObject(function_reg, function, Assembler::kNotPatchable, new_pp);
1099
1087 // Patch point is after the eventually inlined function object. 1100 // Patch point is after the eventually inlined function object.
1088 AddCurrentDescriptor(PcDescriptors::kEntryPatch, 1101 AddCurrentDescriptor(PcDescriptors::kEntryPatch,
1089 Isolate::kNoDeoptId, 1102 Isolate::kNoDeoptId,
1090 0); // No token position. 1103 0); // No token position.
1091 if (is_optimizing()) { 1104 if (is_optimizing()) {
1092 // Reoptimization of an optimized function is triggered by counting in 1105 // Reoptimization of an optimized function is triggered by counting in
1093 // IC stubs, but not at the entry of the function. 1106 // IC stubs, but not at the entry of the function.
1094 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()), 1107 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()),
1095 Immediate(FLAG_reoptimization_counter_threshold)); 1108 Immediate(FLAG_reoptimization_counter_threshold));
1096 } else { 1109 } else {
1097 __ incq(FieldAddress(function_reg, Function::usage_counter_offset())); 1110 __ incq(FieldAddress(function_reg, Function::usage_counter_offset()));
1098 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()), 1111 __ cmpq(FieldAddress(function_reg, Function::usage_counter_offset()),
1099 Immediate(FLAG_optimization_counter_threshold)); 1112 Immediate(FLAG_optimization_counter_threshold));
1100 } 1113 }
1101 ASSERT(function_reg == RDI); 1114 ASSERT(function_reg == RDI);
1102 __ j(GREATER_EQUAL, &StubCode::OptimizeFunctionLabel()); 1115 __ JumpFromPool(GREATER_EQUAL, &StubCode::OptimizeFunctionLabel(), R13);
1103 } else if (!flow_graph().IsCompiledForOsr()) { 1116 } else if (!flow_graph().IsCompiledForOsr()) {
1117 // We have to load the PP here too because a load of an external label
1118 // may be patched at the AddCurrentDescriptor below.
1119 new_pp = R13;
1120 new_pc = R12;
1121
1122 Label next;
1123 __ nop(4); // Need a fixed size sequence on frame entry.
1124 __ call(&next);
1125 __ Bind(&next);
1126
1127 const intptr_t object_pool_pc_dist =
1128 Instructions::HeaderSize() - Instructions::object_pool_offset() +
1129 __ CodeSize();
1130 const intptr_t offset =
1131 Assembler::kEntryPointToPcMarkerOffset - __ CodeSize();
1132 __ popq(new_pc);
1133 if (offset != 0) {
1134 __ addq(new_pc, Immediate(offset));
1135 }
1136
1137 // Load callee's pool pointer.
1138 __ movq(new_pp, Address(new_pc, -object_pool_pc_dist - offset));
1104 AddCurrentDescriptor(PcDescriptors::kEntryPatch, 1139 AddCurrentDescriptor(PcDescriptors::kEntryPatch,
1105 Isolate::kNoDeoptId, 1140 Isolate::kNoDeoptId,
1106 0); // No token position. 1141 0); // No token position.
1107 } 1142 }
1108 __ Comment("Enter frame"); 1143 __ Comment("Enter frame");
1109 if (flow_graph().IsCompiledForOsr()) { 1144 if (flow_graph().IsCompiledForOsr()) {
1110 intptr_t extra_slots = StackSize() 1145 intptr_t extra_slots = StackSize()
1111 - flow_graph().num_stack_locals() 1146 - flow_graph().num_stack_locals()
1112 - flow_graph().num_copied_params(); 1147 - flow_graph().num_copied_params();
1113 ASSERT(extra_slots >= 0); 1148 ASSERT(extra_slots >= 0);
1114 __ EnterOsrFrame(extra_slots * kWordSize); 1149 __ EnterOsrFrame(extra_slots * kWordSize, new_pp, new_pc);
1115 } else { 1150 } else {
1116 ASSERT(StackSize() >= 0); 1151 ASSERT(StackSize() >= 0);
1117 __ EnterDartFrame(StackSize() * kWordSize); 1152 __ EnterDartFrame(StackSize() * kWordSize, new_pp, new_pc);
1118 } 1153 }
1119 } 1154 }
1120 1155
1121 1156
1122 void FlowGraphCompiler::CompileGraph() { 1157 void FlowGraphCompiler::CompileGraph() {
1123 InitCompiler(); 1158 InitCompiler();
1124 1159
1125 TryIntrinsify(); 1160 TryIntrinsify();
1126 1161
1127 EmitFrameEntry(); 1162 EmitFrameEntry();
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
1161 // Invoke noSuchMethod function passing the original function name. 1196 // Invoke noSuchMethod function passing the original function name.
1162 // For closure functions, use "call" as the original name. 1197 // For closure functions, use "call" as the original name.
1163 const String& name = 1198 const String& name =
1164 String::Handle(function.IsClosureFunction() 1199 String::Handle(function.IsClosureFunction()
1165 ? Symbols::Call().raw() 1200 ? Symbols::Call().raw()
1166 : function.name()); 1201 : function.name());
1167 const int kNumArgsChecked = 1; 1202 const int kNumArgsChecked = 1;
1168 const ICData& ic_data = ICData::ZoneHandle( 1203 const ICData& ic_data = ICData::ZoneHandle(
1169 ICData::New(function, name, Object::empty_array(), 1204 ICData::New(function, name, Object::empty_array(),
1170 Isolate::kNoDeoptId, kNumArgsChecked)); 1205 Isolate::kNoDeoptId, kNumArgsChecked));
1171 __ LoadObject(RBX, ic_data); 1206 __ LoadObject(RBX, ic_data, Assembler::kNotPatchable);
1172 __ LeaveFrame(); // The arguments are still on the stack. 1207 __ LeaveFrame(true); // The arguments are still on the stack.
1173 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel()); 1208 __ jmp(&StubCode::CallNoSuchMethodFunctionLabel());
1174 // The noSuchMethod call may return to the caller, but not here. 1209 // The noSuchMethod call may return to the caller, but not here.
1175 __ int3(); 1210 __ int3();
1176 } else { 1211 } else {
1177 __ Stop("Wrong number of arguments"); 1212 __ Stop("Wrong number of arguments");
1178 } 1213 }
1179 __ Bind(&correct_num_arguments); 1214 __ Bind(&correct_num_arguments);
1180 } 1215 }
1181 } else if (!flow_graph().IsCompiledForOsr()) { 1216 } else if (!flow_graph().IsCompiledForOsr()) {
1182 CopyParameters(); 1217 CopyParameters();
1183 } 1218 }
1184 1219
1185 // In unoptimized code, initialize (non-argument) stack allocated slots to 1220 // In unoptimized code, initialize (non-argument) stack allocated slots to
1186 // null. 1221 // null.
1187 if (!is_optimizing() && (num_locals > 0)) { 1222 if (!is_optimizing() && (num_locals > 0)) {
1188 __ Comment("Initialize spill slots"); 1223 __ Comment("Initialize spill slots");
1189 const intptr_t slot_base = parsed_function().first_stack_local_index(); 1224 const intptr_t slot_base = parsed_function().first_stack_local_index();
1190 const Immediate& raw_null = 1225 __ LoadObject(RAX, Object::Handle(Object::null()),
1191 Immediate(reinterpret_cast<intptr_t>(Object::null())); 1226 Assembler::kNotPatchable);
1192 __ movq(RAX, raw_null);
1193 for (intptr_t i = 0; i < num_locals; ++i) { 1227 for (intptr_t i = 0; i < num_locals; ++i) {
1194 // Subtract index i (locals lie at lower addresses than RBP). 1228 // Subtract index i (locals lie at lower addresses than RBP).
1195 __ movq(Address(RBP, (slot_base - i) * kWordSize), RAX); 1229 __ movq(Address(RBP, (slot_base - i) * kWordSize), RAX);
1196 } 1230 }
1197 } 1231 }
1198 1232
1199 if (FLAG_print_scopes) { 1233 if (FLAG_print_scopes) {
1200 // Print the function scope (again) after generating the prologue in order 1234 // Print the function scope (again) after generating the prologue in order
1201 // to see annotations such as allocation indices of locals. 1235 // to see annotations such as allocation indices of locals.
1202 if (FLAG_print_ast) { 1236 if (FLAG_print_ast) {
1203 // Second printing. 1237 // Second printing.
1204 OS::Print("Annotated "); 1238 OS::Print("Annotated ");
1205 } 1239 }
1206 AstPrinter::PrintFunctionScope(parsed_function()); 1240 AstPrinter::PrintFunctionScope(parsed_function());
1207 } 1241 }
1208 1242
1209 ASSERT(!block_order().is_empty()); 1243 ASSERT(!block_order().is_empty());
1210 VisitBlocks(); 1244 VisitBlocks();
1211 1245
1212 __ int3(); 1246 __ int3();
1213 GenerateDeferredCode(); 1247 GenerateDeferredCode();
1214 // Emit function patching code. This will be swapped with the first 13 bytes 1248 // Emit function patching code. This will be swapped with the first 13 bytes
1215 // at entry point. 1249 // at entry point.
1216 AddCurrentDescriptor(PcDescriptors::kPatchCode, 1250 AddCurrentDescriptor(PcDescriptors::kPatchCode,
1217 Isolate::kNoDeoptId, 1251 Isolate::kNoDeoptId,
1218 0); // No token position. 1252 0); // No token position.
1219 __ jmp(&StubCode::FixCallersTargetLabel()); 1253 // This is patched up to a point in FrameEntry where the PP for the
1254 // current function is in R13 instead of PP.
1255 __ JumpPatchable(&StubCode::FixCallersTargetLabel(), R13);
1256
1257 // TOOD(zra): Is this descriptor used?
1220 AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump, 1258 AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump,
1221 Isolate::kNoDeoptId, 1259 Isolate::kNoDeoptId,
1222 0); // No token position. 1260 0); // No token position.
1223 __ jmp(&StubCode::DeoptimizeLazyLabel()); 1261 __ JumpFromPool(&StubCode::DeoptimizeLazyLabel());
1224 } 1262 }
1225 1263
1226 1264
1227 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, 1265 void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
1228 const ExternalLabel* label, 1266 const ExternalLabel* label,
1229 PcDescriptors::Kind kind, 1267 PcDescriptors::Kind kind,
1230 LocationSummary* locs) { 1268 LocationSummary* locs) {
1231 __ call(label); 1269 __ CallFromPool(label);
1232 AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos); 1270 AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos);
1233 RecordSafepoint(locs); 1271 RecordSafepoint(locs);
1234 } 1272 }
1235 1273
1236 1274
1237 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, 1275 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
1238 intptr_t token_pos, 1276 intptr_t token_pos,
1239 const ExternalLabel* label, 1277 const ExternalLabel* label,
1240 PcDescriptors::Kind kind, 1278 PcDescriptors::Kind kind,
1241 LocationSummary* locs) { 1279 LocationSummary* locs) {
1242 __ call(label); 1280 __ CallPatchable(label);
1243 AddCurrentDescriptor(kind, deopt_id, token_pos); 1281 AddCurrentDescriptor(kind, deopt_id, token_pos);
1244 RecordSafepoint(locs); 1282 RecordSafepoint(locs);
1245 // Marks either the continuation point in unoptimized code or the 1283 // Marks either the continuation point in unoptimized code or the
1246 // deoptimization point in optimized code, after call. 1284 // deoptimization point in optimized code, after call.
1247 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id); 1285 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id);
1248 if (is_optimizing()) { 1286 if (is_optimizing()) {
1249 AddDeoptIndexAtCall(deopt_id_after, token_pos); 1287 AddDeoptIndexAtCall(deopt_id_after, token_pos);
1250 } else { 1288 } else {
1251 // Add deoptimization continuation point after the call and before the 1289 // Add deoptimization continuation point after the call and before the
1252 // arguments are removed. 1290 // arguments are removed.
(...skipping 30 matching lines...) Expand all
1283 intptr_t argument_count, 1321 intptr_t argument_count,
1284 intptr_t deopt_id, 1322 intptr_t deopt_id,
1285 intptr_t token_pos, 1323 intptr_t token_pos,
1286 LocationSummary* locs) { 1324 LocationSummary* locs) {
1287 // Each ICData propagated from unoptimized to optimized code contains the 1325 // Each ICData propagated from unoptimized to optimized code contains the
1288 // function that corresponds to the Dart function of that IC call. Due 1326 // function that corresponds to the Dart function of that IC call. Due
1289 // to inlining in optimized code, that function may not correspond to the 1327 // to inlining in optimized code, that function may not correspond to the
1290 // top-level function (parsed_function().function()) which could be 1328 // top-level function (parsed_function().function()) which could be
1291 // reoptimized and which counter needs to be incremented. 1329 // reoptimized and which counter needs to be incremented.
1292 // Pass the function explicitly, it is used in IC stub. 1330 // Pass the function explicitly, it is used in IC stub.
1293 __ LoadObject(RDI, parsed_function().function()); 1331 __ LoadObject(RDI, parsed_function().function(), Assembler::kNotPatchable);
1294 __ LoadObject(RBX, ic_data); 1332 __ LoadObject(RBX, ic_data);
1295 GenerateDartCall(deopt_id, 1333 GenerateDartCall(deopt_id,
1296 token_pos, 1334 token_pos,
1297 target_label, 1335 target_label,
1298 PcDescriptors::kIcCall, 1336 PcDescriptors::kIcCall,
1299 locs); 1337 locs);
1300 __ Drop(argument_count); 1338 __ Drop(argument_count);
1301 } 1339 }
1302 1340
1303 1341
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
1336 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump); 1374 __ j(NOT_ZERO, &not_smi, Assembler::kNearJump);
1337 __ movq(RAX, Immediate(Smi::RawValue(kSmiCid))); 1375 __ movq(RAX, Immediate(Smi::RawValue(kSmiCid)));
1338 __ jmp(&load_cache); 1376 __ jmp(&load_cache);
1339 1377
1340 __ Bind(&not_smi); 1378 __ Bind(&not_smi);
1341 __ LoadClassId(RAX, RAX); 1379 __ LoadClassId(RAX, RAX);
1342 __ SmiTag(RAX); 1380 __ SmiTag(RAX);
1343 1381
1344 // RAX: class ID of the receiver (smi). 1382 // RAX: class ID of the receiver (smi).
1345 __ Bind(&load_cache); 1383 __ Bind(&load_cache);
1346 __ LoadObject(RBX, cache); 1384 __ LoadObject(RBX, cache, Assembler::kNotPatchable);
1347 __ movq(RDI, FieldAddress(RBX, MegamorphicCache::buckets_offset())); 1385 __ movq(RDI, FieldAddress(RBX, MegamorphicCache::buckets_offset()));
1348 __ movq(RBX, FieldAddress(RBX, MegamorphicCache::mask_offset())); 1386 __ movq(RBX, FieldAddress(RBX, MegamorphicCache::mask_offset()));
1349 // RDI: cache buckets array. 1387 // RDI: cache buckets array.
1350 // RBX: mask. 1388 // RBX: mask.
1351 __ movq(RCX, RAX); 1389 __ movq(RCX, RAX);
1352 1390
1353 Label loop, update, call_target_function; 1391 Label loop, update, call_target_function;
1354 __ jmp(&loop); 1392 __ jmp(&loop);
1355 1393
1356 __ Bind(&update); 1394 __ Bind(&update);
(...skipping 11 matching lines...) Expand all
1368 __ j(NOT_EQUAL, &update, Assembler::kNearJump); 1406 __ j(NOT_EQUAL, &update, Assembler::kNearJump);
1369 1407
1370 __ Bind(&call_target_function); 1408 __ Bind(&call_target_function);
1371 // Call the target found in the cache. For a class id match, this is a 1409 // Call the target found in the cache. For a class id match, this is a
1372 // proper target for the given name and arguments descriptor. If the 1410 // proper target for the given name and arguments descriptor. If the
1373 // illegal class id was found, the target is a cache miss handler that can 1411 // illegal class id was found, the target is a cache miss handler that can
1374 // be invoked as a normal Dart function. 1412 // be invoked as a normal Dart function.
1375 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize)); 1413 __ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize));
1376 __ movq(RAX, FieldAddress(RAX, Function::code_offset())); 1414 __ movq(RAX, FieldAddress(RAX, Function::code_offset()));
1377 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset())); 1415 __ movq(RAX, FieldAddress(RAX, Code::instructions_offset()));
1378 __ LoadObject(RBX, ic_data); 1416 __ LoadObject(RBX, ic_data, Assembler::kNotPatchable);
1379 __ LoadObject(R10, arguments_descriptor); 1417 __ LoadObject(R10, arguments_descriptor, Assembler::kNotPatchable);
1380 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); 1418 __ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
1381 __ call(RAX); 1419 __ call(RAX);
1382 AddCurrentDescriptor(PcDescriptors::kOther, Isolate::kNoDeoptId, token_pos); 1420 AddCurrentDescriptor(PcDescriptors::kOther, Isolate::kNoDeoptId, token_pos);
1383 RecordSafepoint(locs); 1421 RecordSafepoint(locs);
1384 AddDeoptIndexAtCall(Isolate::ToDeoptAfter(deopt_id), token_pos); 1422 AddDeoptIndexAtCall(Isolate::ToDeoptAfter(deopt_id), token_pos);
1385 __ Drop(argument_count); 1423 __ Drop(argument_count);
1386 } 1424 }
1387 1425
1388 1426
1389 void FlowGraphCompiler::EmitOptimizedStaticCall( 1427 void FlowGraphCompiler::EmitOptimizedStaticCall(
1390 const Function& function, 1428 const Function& function,
1391 const Array& arguments_descriptor, 1429 const Array& arguments_descriptor,
1392 intptr_t argument_count, 1430 intptr_t argument_count,
1393 intptr_t deopt_id, 1431 intptr_t deopt_id,
1394 intptr_t token_pos, 1432 intptr_t token_pos,
1395 LocationSummary* locs) { 1433 LocationSummary* locs) {
1396 __ LoadObject(R10, arguments_descriptor); 1434 __ LoadObject(R10, arguments_descriptor, Assembler::kNotPatchable);
1397 // Do not use the code from the function, but let the code be patched so that 1435 // Do not use the code from the function, but let the code be patched so that
1398 // we can record the outgoing edges to other code. 1436 // we can record the outgoing edges to other code.
1399 GenerateDartCall(deopt_id, 1437 GenerateDartCall(deopt_id,
1400 token_pos, 1438 token_pos,
1401 &StubCode::CallStaticFunctionLabel(), 1439 &StubCode::CallStaticFunctionLabel(),
1402 PcDescriptors::kOptStaticCall, 1440 PcDescriptors::kOptStaticCall,
1403 locs); 1441 locs);
1404 AddStaticCallTarget(function); 1442 AddStaticCallTarget(function);
1405 __ Drop(argument_count); 1443 __ Drop(argument_count);
1406 } 1444 }
(...skipping 12 matching lines...) Expand all
1419 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { 1457 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) {
1420 ASSERT(!needs_number_check); 1458 ASSERT(!needs_number_check);
1421 __ testq(reg, reg); 1459 __ testq(reg, reg);
1422 return; 1460 return;
1423 } 1461 }
1424 1462
1425 if (needs_number_check) { 1463 if (needs_number_check) {
1426 __ pushq(reg); 1464 __ pushq(reg);
1427 __ PushObject(obj); 1465 __ PushObject(obj);
1428 if (is_optimizing()) { 1466 if (is_optimizing()) {
1429 __ call(&StubCode::OptimizedIdenticalWithNumberCheckLabel()); 1467 __ CallPatchable(&StubCode::OptimizedIdenticalWithNumberCheckLabel());
1430 } else { 1468 } else {
1431 __ call(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); 1469 __ CallPatchable(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
1432 } 1470 }
1433 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, 1471 AddCurrentDescriptor(PcDescriptors::kRuntimeCall,
1434 Isolate::kNoDeoptId, 1472 Isolate::kNoDeoptId,
1435 token_pos); 1473 token_pos);
1436 __ popq(reg); // Discard constant. 1474 __ popq(reg); // Discard constant.
1437 __ popq(reg); // Restore 'reg'. 1475 __ popq(reg); // Restore 'reg'.
1438 return; 1476 return;
1439 } 1477 }
1440 1478
1441 __ CompareObject(reg, obj); 1479 __ CompareObject(reg, obj);
1442 } 1480 }
1443 1481
1444 1482
// Emits an identity ("identical") comparison of two registers, leaving the
// result in the condition flags (ZF set iff identical).
//
// If 'needs_number_check' is true, the raw pointer compare is insufficient
// (numbers with equal values may be distinct heap objects), so both operands
// are pushed and a runtime stub performs the number-aware check; the stub
// reports its result through the flags. A kRuntimeCall PC descriptor is
// recorded at 'token_pos' for the stub call. The operands are restored by the
// matching pops, so 'left' and 'right' are preserved in both paths.
void FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
                                                  Register right,
                                                  bool needs_number_check,
                                                  intptr_t token_pos) {
  if (needs_number_check) {
    // Stub expects both operands on the stack.
    __ pushq(left);
    __ pushq(right);
    // CallPatchable routes the call through the object pool so the call site
    // can be patched; the optimized/unoptimized stubs differ in how they
    // handle deoptimization bookkeeping.
    if (is_optimizing()) {
      __ CallPatchable(&StubCode::OptimizedIdenticalWithNumberCheckLabel());
    } else {
      __ CallPatchable(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
    }
    AddCurrentDescriptor(PcDescriptors::kRuntimeCall,
                         Isolate::kNoDeoptId,
                         token_pos);
    // Stub returns result in flags (result of a cmpl, we need ZF computed).
    // The pops below must not disturb the flags (pop does not affect RFLAGS).
    __ popq(right);
    __ popq(left);
  } else {
    // NOTE(review): 32-bit compare of what appear to be 64-bit tagged
    // pointers/Smis — presumably safe only if the compared values fit in the
    // low 32 bits; confirm against the stub's documented "cmpl" contract.
    __ cmpl(left, right);
  }
}
1467 1505
1468 1506
// Implement equality spec: if any of the arguments is null do identity check.
// Fallthrough calls super equality.
//
// On entry the two equality operands are on the stack: the second operand at
// [RSP + 0] and the first at [RSP + kWordSize]. If neither is null, control
// falls through (operands left on the stack) so the caller can emit the super
// equality call. If either is null, both operands are consumed, 'result' is
// set to Bool::True()/Bool::False() by identity, and control jumps to
// 'skip_call'. TMP is clobbered (holds the null object for the comparisons).
void FlowGraphCompiler::EmitSuperEqualityCallPrologue(Register result,
                                                      Label* skip_call) {
  // Load null via the object pool (kNotPatchable: plain load, no patch slot).
  __ LoadObject(TMP, Object::Handle(Object::null()), Assembler::kNotPatchable);
  Label check_identity, fall_through;
  __ cmpq(Address(RSP, 0 * kWordSize), TMP);   // Second operand null?
  __ j(EQUAL, &check_identity, Assembler::kNearJump);
  __ cmpq(Address(RSP, 1 * kWordSize), TMP);   // First operand null?
  __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump);

  // At least one operand is null: equality degenerates to identity.
  __ Bind(&check_identity);
  __ popq(result);                             // result := second operand.
  __ cmpq(result, Address(RSP, 0 * kWordSize));  // Compare with first operand.
  Label is_false;
  __ j(NOT_EQUAL, &is_false, Assembler::kNearJump);
  __ LoadObject(result, Bool::True(), Assembler::kNotPatchable);
  __ Drop(1);                                  // Discard remaining operand.
  __ jmp(skip_call);
  __ Bind(&is_false);
  __ LoadObject(result, Bool::False(), Assembler::kNotPatchable);
  __ Drop(1);                                  // Discard remaining operand.
  __ jmp(skip_call);
  __ Bind(&fall_through);
}
1495 1532
1496 1533
1497 // This function must be in sync with FlowGraphCompiler::RecordSafepoint. 1534 // This function must be in sync with FlowGraphCompiler::RecordSafepoint.
1498 void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) { 1535 void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) {
1499 // TODO(vegorov): consider saving only caller save (volatile) registers. 1536 // TODO(vegorov): consider saving only caller save (volatile) registers.
1500 const intptr_t xmm_regs_count = locs->live_registers()->fpu_regs_count(); 1537 const intptr_t xmm_regs_count = locs->live_registers()->fpu_regs_count();
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
1563 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); 1600 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0));
1564 Label match_found; 1601 Label match_found;
1565 const intptr_t len = ic_data.NumberOfChecks(); 1602 const intptr_t len = ic_data.NumberOfChecks();
1566 GrowableArray<CidTarget> sorted(len); 1603 GrowableArray<CidTarget> sorted(len);
1567 SortICDataByCount(ic_data, &sorted); 1604 SortICDataByCount(ic_data, &sorted);
1568 ASSERT(class_id_reg != R10); 1605 ASSERT(class_id_reg != R10);
1569 ASSERT(len > 0); // Why bother otherwise. 1606 ASSERT(len > 0); // Why bother otherwise.
1570 const Array& arguments_descriptor = 1607 const Array& arguments_descriptor =
1571 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, 1608 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
1572 argument_names)); 1609 argument_names));
1573 __ LoadObject(R10, arguments_descriptor); 1610 __ LoadObject(R10, arguments_descriptor, Assembler::kNotPatchable);
1574 for (intptr_t i = 0; i < len; i++) { 1611 for (intptr_t i = 0; i < len; i++) {
1575 const bool is_last_check = (i == (len - 1)); 1612 const bool is_last_check = (i == (len - 1));
1576 Label next_test; 1613 Label next_test;
1577 assembler()->cmpl(class_id_reg, Immediate(sorted[i].cid)); 1614 assembler()->cmpl(class_id_reg, Immediate(sorted[i].cid));
1578 if (is_last_check) { 1615 if (is_last_check) {
1579 assembler()->j(NOT_EQUAL, deopt); 1616 assembler()->j(NOT_EQUAL, deopt);
1580 } else { 1617 } else {
1581 assembler()->j(NOT_EQUAL, &next_test); 1618 assembler()->j(NOT_EQUAL, &next_test);
1582 } 1619 }
1583 // Do not use the code from the function, but let the code be patched so 1620 // Do not use the code from the function, but let the code be patched so
(...skipping 21 matching lines...) Expand all
1605 BranchInstr* branch) { 1642 BranchInstr* branch) {
1606 ASSERT(branch != NULL); 1643 ASSERT(branch != NULL);
1607 assembler()->comisd(left, right); 1644 assembler()->comisd(left, right);
1608 BlockEntryInstr* nan_result = (true_condition == NOT_EQUAL) ? 1645 BlockEntryInstr* nan_result = (true_condition == NOT_EQUAL) ?
1609 branch->true_successor() : branch->false_successor(); 1646 branch->true_successor() : branch->false_successor();
1610 assembler()->j(PARITY_EVEN, GetJumpLabel(nan_result)); 1647 assembler()->j(PARITY_EVEN, GetJumpLabel(nan_result));
1611 branch->EmitBranchOnCondition(this, true_condition); 1648 branch->EmitBranchOnCondition(this, true_condition);
1612 } 1649 }
1613 1650
1614 1651
1615
// Compares two doubles and materializes the outcome as a Bool object in
// 'result': Bool::True() if 'true_condition' holds, Bool::False() otherwise.
//
// comisd sets PF on an unordered result (either operand NaN); the PARITY_EVEN
// branch therefore forces any NaN comparison to false before the ordinary
// condition is tested.
void FlowGraphCompiler::EmitDoubleCompareBool(Condition true_condition,
                                              FpuRegister left,
                                              FpuRegister right,
                                              Register result) {
  assembler()->comisd(left, right);
  Label is_false, is_true, done;
  assembler()->j(PARITY_EVEN, &is_false, Assembler::kNearJump);  // NaN false;
  assembler()->j(true_condition, &is_true, Assembler::kNearJump);
  assembler()->Bind(&is_false);
  // Bool constants are loaded via the object pool; kNotPatchable emits a
  // plain (non-patchable) load.
  assembler()->LoadObject(result, Bool::False(), Assembler::kNotPatchable);
  assembler()->jmp(&done);
  assembler()->Bind(&is_true);
  assembler()->LoadObject(result, Bool::True(), Assembler::kNotPatchable);
  assembler()->Bind(&done);
}
1631 1667
1632 1668
1633 FieldAddress FlowGraphCompiler::ElementAddressForIntIndex(intptr_t cid, 1669 FieldAddress FlowGraphCompiler::ElementAddressForIntIndex(intptr_t cid,
1634 intptr_t index_scale, 1670 intptr_t index_scale,
1635 Register array, 1671 Register array,
1636 intptr_t index) { 1672 intptr_t index) {
1637 const int64_t disp = 1673 const int64_t disp =
1638 static_cast<int64_t>(index) * index_scale + DataOffsetFor(cid); 1674 static_cast<int64_t>(index) * index_scale + DataOffsetFor(cid);
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after
1739 __ movups(XMM0, source.ToStackSlotAddress()); 1775 __ movups(XMM0, source.ToStackSlotAddress());
1740 __ movups(destination.ToStackSlotAddress(), XMM0); 1776 __ movups(destination.ToStackSlotAddress(), XMM0);
1741 } 1777 }
1742 } else { 1778 } else {
1743 ASSERT(source.IsConstant()); 1779 ASSERT(source.IsConstant());
1744 if (destination.IsRegister()) { 1780 if (destination.IsRegister()) {
1745 const Object& constant = source.constant(); 1781 const Object& constant = source.constant();
1746 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) { 1782 if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) {
1747 __ xorq(destination.reg(), destination.reg()); 1783 __ xorq(destination.reg(), destination.reg());
1748 } else { 1784 } else {
1749 __ LoadObject(destination.reg(), constant); 1785 __ LoadObject(destination.reg(), constant, Assembler::kNotPatchable);
1750 } 1786 }
1751 } else { 1787 } else {
1752 ASSERT(destination.IsStackSlot()); 1788 ASSERT(destination.IsStackSlot());
1753 StoreObject(destination.ToStackSlotAddress(), source.constant()); 1789 StoreObject(destination.ToStackSlotAddress(), source.constant());
1754 } 1790 }
1755 } 1791 }
1756 1792
1757 move->Eliminate(); 1793 move->Eliminate();
1758 } 1794 }
1759 1795
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after
1888 __ movups(reg, Address(RSP, 0)); 1924 __ movups(reg, Address(RSP, 0));
1889 __ addq(RSP, Immediate(kFpuRegisterSize)); 1925 __ addq(RSP, Immediate(kFpuRegisterSize));
1890 } 1926 }
1891 1927
1892 1928
1893 #undef __ 1929 #undef __
1894 1930
1895 } // namespace dart 1931 } // namespace dart
1896 1932
1897 #endif // defined TARGET_ARCH_X64 1933 #endif // defined TARGET_ARCH_X64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698