Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(140)

Side by Side Diff: runtime/vm/flow_graph_compiler.cc

Issue 2734323003: Re-landing of "replace TrySync with Metadata". (Closed)
Patch Set: Address review comments Created 3 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « runtime/vm/flow_graph_compiler.h ('k') | runtime/vm/flow_graph_compiler_arm.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_XXX. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_XXX.
6 6
7 #include "vm/flow_graph_compiler.h" 7 #include "vm/flow_graph_compiler.h"
8 8
9 #include "vm/bit_vector.h" 9 #include "vm/bit_vector.h"
10 #include "vm/cha.h" 10 #include "vm/cha.h"
(...skipping 178 matching lines...) Expand 10 before | Expand all | Expand 10 after
189 zone_(Thread::Current()->zone()), 189 zone_(Thread::Current()->zone()),
190 assembler_(assembler), 190 assembler_(assembler),
191 parsed_function_(parsed_function), 191 parsed_function_(parsed_function),
192 flow_graph_(*flow_graph), 192 flow_graph_(*flow_graph),
193 block_order_(*flow_graph->CodegenBlockOrder(is_optimizing)), 193 block_order_(*flow_graph->CodegenBlockOrder(is_optimizing)),
194 current_block_(NULL), 194 current_block_(NULL),
195 exception_handlers_list_(NULL), 195 exception_handlers_list_(NULL),
196 pc_descriptors_list_(NULL), 196 pc_descriptors_list_(NULL),
197 stackmap_table_builder_(NULL), 197 stackmap_table_builder_(NULL),
198 code_source_map_builder_(NULL), 198 code_source_map_builder_(NULL),
199 catch_entry_state_maps_builder_(NULL),
199 block_info_(block_order_.length()), 200 block_info_(block_order_.length()),
200 deopt_infos_(), 201 deopt_infos_(),
201 static_calls_target_table_(), 202 static_calls_target_table_(),
202 is_optimizing_(is_optimizing), 203 is_optimizing_(is_optimizing),
203 may_reoptimize_(false), 204 may_reoptimize_(false),
204 intrinsic_mode_(false), 205 intrinsic_mode_(false),
205 double_class_( 206 double_class_(
206 Class::ZoneHandle(isolate()->object_store()->double_class())), 207 Class::ZoneHandle(isolate()->object_store()->double_class())),
207 mint_class_(Class::ZoneHandle(isolate()->object_store()->mint_class())), 208 mint_class_(Class::ZoneHandle(isolate()->object_store()->mint_class())),
208 float32x4_class_( 209 float32x4_class_(
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after
260 bool FlowGraphCompiler::IsPotentialUnboxedField(const Field& field) { 261 bool FlowGraphCompiler::IsPotentialUnboxedField(const Field& field) {
261 return field.is_unboxing_candidate() && 262 return field.is_unboxing_candidate() &&
262 (FlowGraphCompiler::IsUnboxedField(field) || 263 (FlowGraphCompiler::IsUnboxedField(field) ||
263 (!field.is_final() && (field.guarded_cid() == kIllegalCid))); 264 (!field.is_final() && (field.guarded_cid() == kIllegalCid)));
264 } 265 }
265 266
266 267
267 void FlowGraphCompiler::InitCompiler() { 268 void FlowGraphCompiler::InitCompiler() {
268 pc_descriptors_list_ = new (zone()) DescriptorList(64); 269 pc_descriptors_list_ = new (zone()) DescriptorList(64);
269 exception_handlers_list_ = new (zone()) ExceptionHandlerList(); 270 exception_handlers_list_ = new (zone()) ExceptionHandlerList();
271 catch_entry_state_maps_builder_ = new (zone()) CatchEntryStateMapBuilder();
270 block_info_.Clear(); 272 block_info_.Clear();
271 // Conservative detection of leaf routines used to remove the stack check 273 // Conservative detection of leaf routines used to remove the stack check
272 // on function entry. 274 // on function entry.
273 bool is_leaf = is_optimizing() && !flow_graph().IsCompiledForOsr(); 275 bool is_leaf = is_optimizing() && !flow_graph().IsCompiledForOsr();
274 // Initialize block info and search optimized (non-OSR) code for calls 276 // Initialize block info and search optimized (non-OSR) code for calls
275 // indicating a non-leaf routine and calls without IC data indicating 277 // indicating a non-leaf routine and calls without IC data indicating
276 // possible reoptimization. 278 // possible reoptimization.
277 for (int i = 0; i < block_order_.length(); ++i) { 279 for (int i = 0; i < block_order_.length(); ++i) {
278 block_info_.Add(new (zone()) BlockInfo()); 280 block_info_.Add(new (zone()) BlockInfo());
279 if (is_optimizing() && !flow_graph().IsCompiledForOsr()) { 281 if (is_optimizing() && !flow_graph().IsCompiledForOsr()) {
(...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after
405 nonempty_label = GetJumpLabel(block); 407 nonempty_label = GetJumpLabel(block);
406 } 408 }
407 } 409 }
408 410
409 ASSERT(block_order()[0]->IsGraphEntry()); 411 ASSERT(block_order()[0]->IsGraphEntry());
410 BlockInfo* block_info = block_info_[block_order()[0]->postorder_number()]; 412 BlockInfo* block_info = block_info_[block_order()[0]->postorder_number()];
411 block_info->set_next_nonempty_label(nonempty_label); 413 block_info->set_next_nonempty_label(nonempty_label);
412 } 414 }
413 415
414 416
417 void FlowGraphCompiler::EmitCatchEntryState(Environment* env,
418 intptr_t try_index) {
419 #if defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
420 env = env ? env : pending_deoptimization_env_;
421 try_index = try_index != CatchClauseNode::kInvalidTryIndex
422 ? try_index
423 : CurrentTryIndex();
424 if (is_optimizing() && env != NULL &&
425 (try_index != CatchClauseNode::kInvalidTryIndex)) {
426 env = env->Outermost();
427 CatchBlockEntryInstr* catch_block =
428 flow_graph().graph_entry()->GetCatchEntry(try_index);
429 const GrowableArray<Definition*>* idefs =
430 catch_block->initial_definitions();
431 catch_entry_state_maps_builder_->NewMapping(assembler()->CodeSize());
432 // Parameters first.
433 intptr_t i = 0;
434 const intptr_t num_non_copied_params = flow_graph().num_non_copied_params();
435 for (; i < num_non_copied_params; ++i) {
436 // Don't sync captured parameters. They are not in the environment.
437 if (flow_graph().captured_parameters()->Contains(i)) continue;
438 if ((*idefs)[i]->IsConstant()) continue; // Common constants.
439 Location src = env->LocationAt(i);
440 intptr_t dest_index = i - num_non_copied_params;
441 if (!src.IsStackSlot()) {
442 ASSERT(src.IsConstant());
443 // Skip dead locations.
444 if (src.constant().raw() == Symbols::OptimizedOut().raw()) {
445 continue;
446 }
447 intptr_t id =
448 assembler()->object_pool_wrapper().FindObject(src.constant());
449 catch_entry_state_maps_builder_->AppendConstant(id, dest_index);
450 continue;
451 }
452 if (src.stack_index() != dest_index) {
453 catch_entry_state_maps_builder_->AppendMove(src.stack_index(),
454 dest_index);
455 }
456 }
457
458 // Process locals. Skip exception_var and stacktrace_var.
459 intptr_t local_base = kFirstLocalSlotFromFp + num_non_copied_params;
460 intptr_t ex_idx = local_base - catch_block->exception_var().index();
461 intptr_t st_idx = local_base - catch_block->stacktrace_var().index();
462 for (; i < flow_graph().variable_count(); ++i) {
463 // Don't sync captured parameters. They are not in the environment.
464 if (flow_graph().captured_parameters()->Contains(i)) continue;
465 if (i == ex_idx || i == st_idx) continue;
466 if ((*idefs)[i]->IsConstant()) continue; // Common constants.
467 Location src = env->LocationAt(i);
468 if (src.IsInvalid()) continue;
469 intptr_t dest_index = i - num_non_copied_params;
470 if (!src.IsStackSlot()) {
471 ASSERT(src.IsConstant());
472 // Skip dead locations.
473 if (src.constant().raw() == Symbols::OptimizedOut().raw()) {
474 continue;
475 }
476 intptr_t id =
477 assembler()->object_pool_wrapper().FindObject(src.constant());
478 catch_entry_state_maps_builder_->AppendConstant(id, dest_index);
479 continue;
480 }
481 if (src.stack_index() != dest_index) {
482 catch_entry_state_maps_builder_->AppendMove(src.stack_index(),
483 dest_index);
484 }
485 }
486 catch_entry_state_maps_builder_->EndMapping();
487 }
488 #endif // defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
489 }
490
491
492 void FlowGraphCompiler::EmitCallsiteMetaData(TokenPosition token_pos,
493 intptr_t deopt_id,
494 RawPcDescriptors::Kind kind,
495 LocationSummary* locs) {
496 AddCurrentDescriptor(kind, deopt_id, token_pos);
497 RecordSafepoint(locs);
498 EmitCatchEntryState();
499 }
500
501
415 void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) { 502 void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) {
416 if (!is_optimizing()) { 503 if (!is_optimizing()) {
417 if (instr->CanBecomeDeoptimizationTarget() && !instr->IsGoto()) { 504 if (instr->CanBecomeDeoptimizationTarget() && !instr->IsGoto()) {
418 // Instructions that can be deoptimization targets need to record kDeopt 505 // Instructions that can be deoptimization targets need to record kDeopt
419 // PcDescriptor corresponding to their deopt id. GotoInstr records its 506 // PcDescriptor corresponding to their deopt id. GotoInstr records its
420 // own so that it can control the placement. 507 // own so that it can control the placement.
421 AddCurrentDescriptor(RawPcDescriptors::kDeopt, instr->deopt_id(), 508 AddCurrentDescriptor(RawPcDescriptors::kDeopt, instr->deopt_id(),
422 instr->token_pos()); 509 instr->token_pos());
423 } 510 }
424 AllocateRegistersLocally(instr); 511 AllocateRegistersLocally(instr);
425 } else if (instr->MayThrow() &&
426 (CurrentTryIndex() != CatchClauseNode::kInvalidTryIndex)) {
427 // Optimized try-block: Sync locals to fixed stack locations.
428 EmitTrySync(instr, CurrentTryIndex());
429 } 512 }
430 } 513 }
431 514
432 515
433 void FlowGraphCompiler::EmitSourceLine(Instruction* instr) { 516 void FlowGraphCompiler::EmitSourceLine(Instruction* instr) {
434 if (!instr->token_pos().IsReal() || (instr->env() == NULL)) { 517 if (!instr->token_pos().IsReal() || (instr->env() == NULL)) {
435 return; 518 return;
436 } 519 }
437 const Script& script = 520 const Script& script =
438 Script::Handle(zone(), instr->env()->function().script()); 521 Script::Handle(zone(), instr->env()->function().script());
(...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after
537 620
538 set_current_block(NULL); 621 set_current_block(NULL);
539 } 622 }
540 623
541 624
542 void FlowGraphCompiler::Bailout(const char* reason) { 625 void FlowGraphCompiler::Bailout(const char* reason) {
543 parsed_function_.Bailout("FlowGraphCompiler", reason); 626 parsed_function_.Bailout("FlowGraphCompiler", reason);
544 } 627 }
545 628
546 629
547 void FlowGraphCompiler::EmitTrySync(Instruction* instr, intptr_t try_index) {
548 ASSERT(is_optimizing());
549 Environment* env = instr->env()->Outermost();
550 CatchBlockEntryInstr* catch_block =
551 flow_graph().graph_entry()->GetCatchEntry(try_index);
552 const GrowableArray<Definition*>* idefs = catch_block->initial_definitions();
553
554 // Construct a ParallelMove instruction for parameters and locals. Skip the
555 // special locals exception_var and stacktrace_var since they will be filled
556 // when an exception is thrown. Constant locations are known to be the same
557 // at all instructions that may throw, and do not need to be materialized.
558
559 // Parameters first.
560 intptr_t i = 0;
561 const intptr_t num_non_copied_params = flow_graph().num_non_copied_params();
562 ParallelMoveInstr* move_instr = new (zone()) ParallelMoveInstr();
563 for (; i < num_non_copied_params; ++i) {
564 // Don't sync captured parameters. They are not in the environment.
565 if (flow_graph().captured_parameters()->Contains(i)) continue;
566 if ((*idefs)[i]->IsConstant()) continue; // Common constants
567 Location src = env->LocationAt(i);
568 #if defined(TARGET_ARCH_DBC)
569 intptr_t dest_index = kNumberOfCpuRegisters - 1 - i;
570 Location dest = Location::RegisterLocation(dest_index);
571 // Update safepoint bitmap to indicate that the target location
572 // now contains a pointer. With DBC parameters are copied into
573 // the locals area.
574 instr->locs()->SetStackBit(dest_index);
575 #else
576 intptr_t dest_index = i - num_non_copied_params;
577 Location dest = Location::StackSlot(dest_index);
578 #endif
579 move_instr->AddMove(dest, src);
580 }
581
582 // Process locals. Skip exception_var and stacktrace_var.
583 intptr_t local_base = kFirstLocalSlotFromFp + num_non_copied_params;
584 intptr_t ex_idx = local_base - catch_block->exception_var().index();
585 intptr_t st_idx = local_base - catch_block->stacktrace_var().index();
586 for (; i < flow_graph().variable_count(); ++i) {
587 // Don't sync captured parameters. They are not in the environment.
588 if (flow_graph().captured_parameters()->Contains(i)) continue;
589 if (i == ex_idx || i == st_idx) continue;
590 if ((*idefs)[i]->IsConstant()) continue;
591 Location src = env->LocationAt(i);
592 ASSERT(!src.IsFpuRegister());
593 ASSERT(!src.IsDoubleStackSlot());
594 #if defined(TARGET_ARCH_DBC)
595 intptr_t dest_index = kNumberOfCpuRegisters - 1 - i;
596 Location dest = Location::RegisterLocation(dest_index);
597 #else
598 intptr_t dest_index = i - num_non_copied_params;
599 Location dest = Location::StackSlot(dest_index);
600 #endif
601 move_instr->AddMove(dest, src);
602 // Update safepoint bitmap to indicate that the target location
603 // now contains a pointer.
604 instr->locs()->SetStackBit(dest_index);
605 }
606 parallel_move_resolver()->EmitNativeCode(move_instr);
607 }
608
609
610 intptr_t FlowGraphCompiler::StackSize() const { 630 intptr_t FlowGraphCompiler::StackSize() const {
611 if (is_optimizing_) { 631 if (is_optimizing_) {
612 return flow_graph_.graph_entry()->spill_slot_count(); 632 return flow_graph_.graph_entry()->spill_slot_count();
613 } else { 633 } else {
614 return parsed_function_.num_stack_locals() + 634 return parsed_function_.num_stack_locals() +
615 parsed_function_.num_copied_params(); 635 parsed_function_.num_copied_params();
616 } 636 }
617 } 637 }
618 638
619 639
(...skipping 388 matching lines...) Expand 10 before | Expand all | Expand 10 after
1008 info.set_kind(RawLocalVarDescriptors::kSavedCurrentContext); 1028 info.set_kind(RawLocalVarDescriptors::kSavedCurrentContext);
1009 info.scope_id = 0; 1029 info.scope_id = 0;
1010 info.begin_pos = TokenPosition::kMinSource; 1030 info.begin_pos = TokenPosition::kMinSource;
1011 info.end_pos = TokenPosition::kMinSource; 1031 info.end_pos = TokenPosition::kMinSource;
1012 info.set_index(parsed_function().current_context_var()->index()); 1032 info.set_index(parsed_function().current_context_var()->index());
1013 var_descs.SetVar(0, Symbols::CurrentContextVar(), &info); 1033 var_descs.SetVar(0, Symbols::CurrentContextVar(), &info);
1014 } 1034 }
1015 code.set_var_descriptors(var_descs); 1035 code.set_var_descriptors(var_descs);
1016 } 1036 }
1017 1037
1038 void FlowGraphCompiler::FinalizeCatchEntryStateMap(const Code& code) {
1039 #if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
1040 TypedData& maps = TypedData::Handle(
1041 catch_entry_state_maps_builder_->FinalizeCatchEntryStateMap());
1042 code.set_catch_entry_state_maps(maps);
1043 #else
1044 code.set_variables(Smi::Handle(Smi::New(flow_graph().variable_count())));
1045 #endif
1046 }
1018 1047
1019 void FlowGraphCompiler::FinalizeStaticCallTargetsTable(const Code& code) { 1048 void FlowGraphCompiler::FinalizeStaticCallTargetsTable(const Code& code) {
1020 ASSERT(code.static_calls_target_table() == Array::null()); 1049 ASSERT(code.static_calls_target_table() == Array::null());
1021 const Array& targets = 1050 const Array& targets =
1022 Array::Handle(zone(), Array::New((static_calls_target_table_.length() * 1051 Array::Handle(zone(), Array::New((static_calls_target_table_.length() *
1023 Code::kSCallTableEntryLength), 1052 Code::kSCallTableEntryLength),
1024 Heap::kOld)); 1053 Heap::kOld));
1025 Smi& smi_offset = Smi::Handle(zone()); 1054 Smi& smi_offset = Smi::Handle(zone());
1026 for (intptr_t i = 0; i < static_calls_target_table_.length(); i++) { 1055 for (intptr_t i = 0; i < static_calls_target_table_.length(); i++) {
1027 const intptr_t target_ix = Code::kSCallTableEntryLength * i; 1056 const intptr_t target_ix = Code::kSCallTableEntryLength * i;
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after
1116 // before any deoptimization point. 1145 // before any deoptimization point.
1117 ASSERT(!intrinsic_slow_path_label_.IsBound()); 1146 ASSERT(!intrinsic_slow_path_label_.IsBound());
1118 assembler()->Bind(&intrinsic_slow_path_label_); 1147 assembler()->Bind(&intrinsic_slow_path_label_);
1119 return complete; 1148 return complete;
1120 } 1149 }
1121 1150
1122 1151
1123 // DBC is very different from other architectures in how it performs instance 1152 // DBC is very different from other architectures in how it performs instance
1124 // and static calls because it does not use stubs. 1153 // and static calls because it does not use stubs.
1125 #if !defined(TARGET_ARCH_DBC) 1154 #if !defined(TARGET_ARCH_DBC)
1155 void FlowGraphCompiler::GenerateCallWithDeopt(TokenPosition token_pos,
1156 intptr_t deopt_id,
1157 const StubEntry& stub_entry,
1158 RawPcDescriptors::Kind kind,
1159 LocationSummary* locs) {
1160 GenerateCall(token_pos, stub_entry, kind, locs);
1161 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
1162 if (is_optimizing()) {
1163 AddDeoptIndexAtCall(deopt_id_after);
1164 } else {
1165 // Add deoptimization continuation point after the call and before the
1166 // arguments are removed.
1167 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
1168 }
1169 }
1170
1171
1126 void FlowGraphCompiler::GenerateInstanceCall(intptr_t deopt_id, 1172 void FlowGraphCompiler::GenerateInstanceCall(intptr_t deopt_id,
1127 TokenPosition token_pos, 1173 TokenPosition token_pos,
1128 intptr_t argument_count, 1174 intptr_t argument_count,
1129 LocationSummary* locs, 1175 LocationSummary* locs,
1130 const ICData& ic_data_in) { 1176 const ICData& ic_data_in) {
1131 ICData& ic_data = ICData::ZoneHandle(ic_data_in.Original()); 1177 ICData& ic_data = ICData::ZoneHandle(ic_data_in.Original());
1132 if (FLAG_precompiled_mode) { 1178 if (FLAG_precompiled_mode) {
1133 ic_data = ic_data.AsUnaryClassChecks(); 1179 ic_data = ic_data.AsUnaryClassChecks();
1134 EmitSwitchableInstanceCall(ic_data, argument_count, deopt_id, token_pos, 1180 EmitSwitchableInstanceCall(ic_data, argument_count, deopt_id, token_pos,
1135 locs); 1181 locs);
(...skipping 750 matching lines...) Expand 10 before | Expand all | Expand 10 after
1886 1932
1887 1933
1888 void FlowGraphCompiler::FrameStateClear() { 1934 void FlowGraphCompiler::FrameStateClear() {
1889 ASSERT(!is_optimizing()); 1935 ASSERT(!is_optimizing());
1890 frame_state_.TruncateTo(0); 1936 frame_state_.TruncateTo(0);
1891 } 1937 }
1892 #endif // defined(DEBUG) && !defined(TARGET_ARCH_DBC) 1938 #endif // defined(DEBUG) && !defined(TARGET_ARCH_DBC)
1893 1939
1894 1940
1895 } // namespace dart 1941 } // namespace dart
OLDNEW
« no previous file with comments | « runtime/vm/flow_graph_compiler.h ('k') | runtime/vm/flow_graph_compiler_arm.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698