Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_compiler_dbc.cc

Issue 1992963002: Enable optimizer pipeline for DBC. (Closed)
Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 4 years, 7 months ago
 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_DBC.
 #if defined(TARGET_ARCH_DBC)
 
 #include "vm/flow_graph_compiler.h"
 
 #include "vm/ast_printer.h"
(...skipping 72 matching lines...)
 
 void FlowGraphCompiler::ExitIntrinsicMode() {
   ASSERT(intrinsic_mode());
   intrinsic_mode_ = false;
 }
 
 
 RawTypedData* CompilerDeoptInfo::CreateDeoptInfo(FlowGraphCompiler* compiler,
                                                  DeoptInfoBuilder* builder,
                                                  const Array& deopt_table) {
-  UNIMPLEMENTED();
-  return TypedData::null();
+  if (deopt_env_ == NULL) {
+    ++builder->current_info_number_;
+    return TypedData::null();
+  }
+
+  intptr_t stack_height = compiler->StackSize();
+  AllocateIncomingParametersRecursive(deopt_env_, &stack_height);
+
+  intptr_t slot_ix = 0;
+  Environment* current = deopt_env_;
+
+  // Emit all kMaterializeObject instructions describing objects to be
+  // materialized at deoptimization as a prefix to the deoptimization info.
+  EmitMaterializations(deopt_env_, builder);
+
+  // The real frame starts here.
+  builder->MarkFrameStart();
+
+  Zone* zone = compiler->zone();
+
+  builder->AddCallerFp(slot_ix++);
+  builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++);
+  builder->AddPcMarker(Function::ZoneHandle(zone), slot_ix++);
+  builder->AddConstant(Function::ZoneHandle(zone), slot_ix++);
+
+  // Emit all values that are needed for materialization as a part of the
+  // expression stack for the bottom-most frame. This guarantees that GC
+  // will be able to find them during materialization.
+  slot_ix = builder->EmitMaterializationArguments(slot_ix);
+
+  // For the innermost environment, set outgoing arguments and the locals.
+  for (intptr_t i = current->Length() - 1;
+       i >= current->fixed_parameter_count();
+       i--) {
+    builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
+  }
+
+  builder->AddCallerFp(slot_ix++);
+
+  Environment* previous = current;
+  current = current->outer();
+  while (current != NULL) {
+    // For any outer environment the deopt id is that of the call instruction
+    // which is recorded in the outer environment.
+    builder->AddReturnAddress(
+        current->function(),
+        Thread::ToDeoptAfter(current->deopt_id()),
+        slot_ix++);
+
+    builder->AddPcMarker(previous->function(), slot_ix++);
+    builder->AddConstant(previous->function(), slot_ix++);
+
+    // The values of outgoing arguments can be changed from the inlined call,
+    // so we must read them from the previous environment.
+    for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) {
+      builder->AddCopy(previous->ValueAt(i),
+                       previous->LocationAt(i),
+                       slot_ix++);
+    }
+
+    // Set the locals; note that outgoing arguments are not in the environment.
+    for (intptr_t i = current->Length() - 1;
+         i >= current->fixed_parameter_count();
+         i--) {
+      builder->AddCopy(current->ValueAt(i),
+                       current->LocationAt(i),
+                       slot_ix++);
+    }
+
+    builder->AddCallerFp(slot_ix++);
+
+    // Iterate on the outer environment.
+    previous = current;
+    current = current->outer();
+  }
+  // The previous pointer is now the outermost environment.
+  ASSERT(previous != NULL);
+
+  // For the outermost environment, set the caller PC.
+  builder->AddCallerPc(slot_ix++);
+
+  builder->AddPcMarker(previous->function(), slot_ix++);
+  builder->AddConstant(previous->function(), slot_ix++);
+
+  // For the outermost environment, set the incoming arguments.
+  for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) {
+    builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++);
+  }
+
+  return builder->CreateDeoptInfo(deopt_table);
 }
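
The loop above flattens the chain of inlined environments into one linear slot description: the innermost frame comes first, every frame contributes caller-fp, return-address, pc-marker, and constant-pool slots followed by its locals, and only the outermost frame contributes the caller PC and the incoming arguments. The following is a minimal, self-contained sketch of that ordering only (all names are invented for illustration, not VM API; materialized objects and the outgoing-argument copies are elided):

    // Sketch only: prints the slot order for a two-deep inlined frame.
    #include <cstdio>
    #include <string>
    #include <vector>

    struct FrameDesc {
      std::string name;  // hypothetical function name
      int params;        // fixed parameter count
      int locals;        // locals + expression stack entries
    };

    int main() {
      // Innermost first, mirroring the deopt_env_->outer() walk above.
      std::vector<FrameDesc> frames = {{"inlined_leaf", 1, 2},
                                       {"outer_caller", 2, 1}};
      int slot = 0;
      for (const FrameDesc& f : frames) {
        std::printf("slot %d: caller fp\n", slot++);
        std::printf("slot %d: return address in %s\n", slot++, f.name.c_str());
        std::printf("slot %d: pc marker\n", slot++);
        std::printf("slot %d: constant pool\n", slot++);
        for (int l = f.locals - 1; l >= 0; --l) {
          std::printf("slot %d: local %d of %s\n", slot++, l, f.name.c_str());
        }
      }
      // Only the outermost frame describes caller PC and incoming arguments.
      std::printf("slot %d: caller pc\n", slot++);
      for (int p = frames.back().params - 1; p >= 0; --p) {
        std::printf("slot %d: incoming argument %d\n", slot++, p);
      }
      return 0;
    }
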
 
 
+void FlowGraphCompiler::RecordAfterCall(Instruction* instr) {
+  RecordSafepoint(instr->locs());
+  // Marks either the continuation point in unoptimized code or the
+  // deoptimization point in optimized code, after the call.
+  const intptr_t deopt_id_after = Thread::ToDeoptAfter(instr->deopt_id());
+  if (is_optimizing()) {
+    // The Return/ReturnTOS instruction drops incoming arguments, so
+    // we have to drop outgoing arguments from the innermost environment.
+    // On all other architectures the caller drops outgoing arguments itself,
+    // hence the difference.
+    pending_deoptimization_env_->DropArguments(instr->ArgumentCount());
+    AddDeoptIndexAtCall(deopt_id_after, instr->token_pos());
+  } else {
+    // Add a deoptimization continuation point after the call and before the
+    // arguments are removed.
+    // In optimized code this descriptor is needed for exception handling.
+    AddCurrentDescriptor(RawPcDescriptors::kDeopt,
+                         deopt_id_after,
+                         instr->token_pos());
+  }
+}
+
+
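Both branches key off Thread::ToDeoptAfter, which suggests that deopt ids come in before/after pairs per call site. A toy sketch of that pairing, under the assumption (not confirmed by this patch) that the "after" id is the "before" id plus one:

    #include <cassert>

    const int kDeoptIdStep = 2;  // assumed: each call site reserves a pair

    int ToDeoptAfter(int deopt_id_before) {
      return deopt_id_before + 1;  // assumed encoding, illustration only
    }

    int main() {
      int next_id = 0;
      int before = next_id;  // recorded at the call instruction itself
      next_id += kDeoptIdStep;
      int after = ToDeoptAfter(before);
      // Unoptimized code records `after` as a continuation descriptor;
      // optimized code records it as the deopt point at the call.
      assert(after == before + 1);
      return 0;
    }
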
 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
                                              intptr_t stub_ix) {
   UNIMPLEMENTED();
 }
 
 
 #define __ assembler()->
 
 
 void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos,
                                                  intptr_t deopt_id,
                                                  const AbstractType& dst_type,
                                                  const String& dst_name,
                                                  LocationSummary* locs) {
-  ASSERT(!is_optimizing());
   SubtypeTestCache& test_cache = SubtypeTestCache::Handle();
   if (!dst_type.IsVoidType() && dst_type.IsInstantiated()) {
     test_cache = SubtypeTestCache::New();
   }
 
+  if (is_optimizing()) {
+    __ Push(locs->in(0).reg());
+    __ Push(locs->in(1).reg());
+  }
   __ PushConstant(dst_type);
   __ PushConstant(dst_name);
   __ AssertAssignable(__ AddConstant(test_cache));
+  RecordSafepoint(locs);
   AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id, token_pos);
+  if (is_optimizing()) {
+    // AssertAssignable keeps the instance on the stack as its result;
+    // all other arguments are popped.
+    // In optimized code we need to drop it, because optimized code
+    // expects the result in a register and it is already there
+    // (locs()->in(0).reg() == locs()->out(0).reg()).
+    __ Drop1();
+  }
 }
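
A toy stack model (illustrative only, not VM code) of why the optimized path pushes its register inputs and later emits Drop1: AssertAssignable is a stack-based bytecode, so register operands must be spilled around it, and the stack copy of the instance it leaves behind is redundant once the value already lives in the result register:

    #include <cassert>
    #include <vector>

    int main() {
      std::vector<int> stack;
      int r0 = 42;  // instance; locs->in(0).reg() == locs->out(0).reg()
      int r1 = 7;   // instantiator type arguments, locs->in(1).reg()

      // Optimized prologue: materialize register inputs on the stack.
      stack.push_back(r0);
      stack.push_back(r1);

      // AssertAssignable pops its arguments, leaving the instance on top.
      stack.pop_back();             // type arguments consumed
      int instance = stack.back();  // instance survives as the result

      // Optimized epilogue: the result is already in r0, so drop the copy.
      assert(instance == r0);
      stack.pop_back();             // __ Drop1()
      assert(stack.empty());
      return 0;
    }
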
 
 
 void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) {
   if (!is_optimizing()) {
     Definition* defn = instr->AsDefinition();
     if ((defn != NULL) &&
         (defn->tag() != Instruction::kPushArgument) &&
         (defn->tag() != Instruction::kStoreIndexed) &&
         (defn->tag() != Instruction::kStoreStaticField) &&
(...skipping 29 matching lines...)
   const int num_opt_pos_params = function.NumOptionalPositionalParameters();
   const int num_opt_named_params = function.NumOptionalNamedParameters();
   const int num_params =
       num_fixed_params + num_opt_pos_params + num_opt_named_params;
   const bool has_optional_params = (num_opt_pos_params != 0) ||
       (num_opt_named_params != 0);
   const int num_locals = parsed_function().num_stack_locals();
   const intptr_t context_index =
       -parsed_function().current_context_var()->index() - 1;
 
+  if (CanOptimizeFunction() &&
+      function.IsOptimizable() &&
+      (!is_optimizing() || may_reoptimize())) {
+    __ HotCheck(!is_optimizing(), GetOptimizationThreshold());
+  }
+
   if (has_optional_params) {
-    __ EntryOpt(num_fixed_params, num_opt_pos_params, num_opt_named_params);
+    __ EntryOptional(num_fixed_params,
+                     num_opt_pos_params,
+                     num_opt_named_params);
+  } else if (!is_optimizing()) {
+    __ Entry(num_fixed_params, num_locals, context_index);
   } else {
-    __ Entry(num_fixed_params, num_locals, context_index);
+    __ EntryOptimized(num_fixed_params,
+                      flow_graph_.graph_entry()->spill_slot_count());
   }
 
   if (num_opt_named_params != 0) {
     LocalScope* scope = parsed_function().node_sequence()->scope();
 
     // Start by alphabetically sorting the names of the optional parameters.
     LocalVariable** opt_param =
         zone()->Alloc<LocalVariable*>(num_opt_named_params);
     int* opt_param_position = zone()->Alloc<int>(num_opt_named_params);
     for (int pos = num_fixed_params; pos < num_params; pos++) {
(...skipping 20 matching lines...)
       __ LoadConstant(param_pos, value);
     }
   } else if (num_opt_pos_params != 0) {
     for (intptr_t i = 0; i < num_opt_pos_params; i++) {
       const Object& value = parsed_function().DefaultParameterValueAt(i);
       __ LoadConstant(num_fixed_params + i, value);
     }
   }
 
 
-  ASSERT(num_locals > 0);  // There is always at least context_var.
   if (has_optional_params) {
-    ASSERT(!is_optimizing());
-    __ Frame(num_locals);  // Reserve space for locals.
+    if (!is_optimizing()) {
+      ASSERT(num_locals > 0);  // There is always at least context_var.
+      __ Frame(num_locals);  // Reserve space for locals.
+    } else if (flow_graph_.graph_entry()->spill_slot_count() >
+               flow_graph_.num_copied_params()) {
+      __ Frame(flow_graph_.graph_entry()->spill_slot_count() -
+               flow_graph_.num_copied_params());
+    }
   }
 
   if (function.IsClosureFunction()) {
-    Register reg = context_index;
+    Register reg = is_optimizing() ? flow_graph_.num_copied_params()
+                                   : context_index;
     Register closure_reg = reg;
     LocalScope* scope = parsed_function().node_sequence()->scope();
     LocalVariable* local = scope->VariableAt(0);
     if (local->index() > 0) {
       __ Move(reg, -local->index());
     } else {
       closure_reg = -local->index() - 1;
     }
     __ LoadField(reg, closure_reg, Closure::context_offset() / kWordSize);
   } else if (has_optional_params && !is_optimizing()) {
     __ LoadConstant(context_index,
         Object::Handle(isolate()->object_store()->empty_context()));
   }
 }
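
The prologue selection above is three-way. A compact sketch of just that decision (enum and function names invented here, not VM API):

    #include <cstdio>

    enum EntryKind { kEntryOptional, kEntry, kEntryOptimized };

    EntryKind SelectEntry(bool has_optional_params, bool is_optimizing) {
      if (has_optional_params) return kEntryOptional;  // dominates both modes
      if (!is_optimizing) return kEntry;               // checked entry
      return kEntryOptimized;  // frame sized by spill-slot count
    }

    int main() {
      std::printf("%d %d %d\n",
                  SelectEntry(true, true),    // optional params win
                  SelectEntry(false, false),  // unoptimized entry
                  SelectEntry(false, true));  // optimized entry
      return 0;
    }

Note that functions with optional parameters keep the EntryOptional prologue even when optimizing, which is why the later __ Frame sizing subtracts num_copied_params from the spill-slot count.
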
 
 
 void FlowGraphCompiler::CompileGraph() {
   InitCompiler();
 
   if (TryIntrinsify()) {
     // Skip regular code generation.
     return;
   }
 
   EmitFrameEntry();
   VisitBlocks();
 }
 
 
 #undef __
 #define __ compiler_->assembler()->
 
 
 void ParallelMoveResolver::EmitMove(int index) {
-  UNIMPLEMENTED();
+  MoveOperands* move = moves_[index];
+  const Location source = move->src();
+  const Location destination = move->dest();
+  if (source.IsStackSlot() && destination.IsRegister()) {
+    // Only allow access to the arguments.
+    ASSERT(source.base_reg() == FPREG);
+    ASSERT(source.stack_index() < 0);
+    __ Move(destination.reg(), -kParamEndSlotFromFp + source.stack_index());
+  } else if (source.IsRegister() && destination.IsRegister()) {
+    __ Move(destination.reg(), source.reg());
+  } else if (source.IsConstant() && destination.IsRegister()) {
+    __ LoadConstant(destination.reg(), source.constant());
+  } else {
+    compiler_->Bailout("Unsupported move");
+  }
+
+  move->Eliminate();
 }
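
An illustrative model (invented types, not VM API) of the move kinds accepted above: the destination must always be a register, and the only memory source allowed is an FP-relative incoming-argument slot, i.e. a negative stack index:

    #include <cstdio>
    #include <stdexcept>

    enum LocKind { kRegister, kStackSlot, kConstant };

    struct Loc {
      LocKind kind;
      int index;  // register number, FP-relative slot, or constant id
    };

    void EmitMove(const Loc& dst, const Loc& src) {
      if (dst.kind != kRegister) throw std::runtime_error("Unsupported move");
      switch (src.kind) {
        case kStackSlot:
          // Mirrors the ASSERTs above: only incoming arguments below FP.
          if (src.index >= 0) throw std::runtime_error("only incoming args");
          std::printf("Move r%d <- fp[%d]\n", dst.index, src.index);
          break;
        case kRegister:
          std::printf("Move r%d <- r%d\n", dst.index, src.index);
          break;
        case kConstant:
          std::printf("LoadConstant r%d <- c%d\n", dst.index, src.index);
          break;
      }
    }

    int main() {
      EmitMove({kRegister, 0}, {kStackSlot, -2});  // incoming argument
      EmitMove({kRegister, 1}, {kRegister, 0});
      EmitMove({kRegister, 2}, {kConstant, 5});
      return 0;
    }
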
 
 
 void ParallelMoveResolver::EmitSwap(int index) {
-  UNIMPLEMENTED();
+  MoveOperands* move = moves_[index];
+  const Location source = move->src();
+  const Location destination = move->dest();
+  ASSERT(source.IsRegister() && destination.IsRegister());
+  __ Swap(destination.reg(), source.reg());
+
+  // The swap of source and destination has executed a move from source to
+  // destination.
+  move->Eliminate();
+
+  // Any unperformed (including pending) move with a source of either
+  // this move's source or destination needs to have its source
+  // changed to reflect the state of affairs after the swap.
+  for (int i = 0; i < moves_.length(); ++i) {
+    const MoveOperands& other_move = *moves_[i];
+    if (other_move.Blocks(source)) {
+      moves_[i]->set_src(destination);
+    } else if (other_move.Blocks(destination)) {
+      moves_[i]->set_src(source);
+    }
+  }
 }
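
A minimal standalone model of the retargeting loop above (not VM code): after a swap breaks a move cycle, any unperformed move that reads one of the swapped registers must be redirected to where its value now lives:

    #include <cassert>
    #include <utility>

    struct Move {
      int src;
      int dst;
    };

    int main() {
      int regs[3] = {10, 20, 30};
      // Two pending moves form a dependence: r1 -> r0 and r0 -> r2.
      // Performing r1 -> r0 first would clobber r0, so resolve it as a swap.
      std::swap(regs[0], regs[1]);  // executes r1 -> r0, preserving old r0
      const int swapped_src = 1, swapped_dst = 0;

      // The remaining move r0 -> r2 still names r0 as its source, but old
      // r0 now lives in r1, so retarget it (the set_src calls above).
      Move pending = {0, 2};
      if (pending.src == swapped_dst) pending.src = swapped_src;
      else if (pending.src == swapped_src) pending.src = swapped_dst;

      // Execute what is left: the move now reads r1 instead of r0.
      regs[pending.dst] = regs[pending.src];
      assert(regs[0] == 20);  // got old r1 via the swap
      assert(regs[2] == 10);  // got old r0, read from r1 after retargeting
      return 0;
    }
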
 
 
 void ParallelMoveResolver::MoveMemoryToMemory(const Address& dst,
                                               const Address& src) {
   UNREACHABLE();
 }
 
 
 void ParallelMoveResolver::StoreObject(const Address& dst, const Object& obj) {
(...skipping 48 matching lines...)
 void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) {
   UNIMPLEMENTED();
 }
 
 
 #undef __
 
 }  // namespace dart
 
 #endif  // defined TARGET_ARCH_DBC