OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_XXX. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_XXX. |
6 | 6 |
7 #include "vm/flow_graph_compiler.h" | 7 #include "vm/flow_graph_compiler.h" |
8 | 8 |
9 #include "vm/bit_vector.h" | 9 #include "vm/bit_vector.h" |
10 #include "vm/cha.h" | 10 #include "vm/cha.h" |
(...skipping 178 matching lines...)
189 zone_(Thread::Current()->zone()), | 189 zone_(Thread::Current()->zone()), |
190 assembler_(assembler), | 190 assembler_(assembler), |
191 parsed_function_(parsed_function), | 191 parsed_function_(parsed_function), |
192 flow_graph_(*flow_graph), | 192 flow_graph_(*flow_graph), |
193 block_order_(*flow_graph->CodegenBlockOrder(is_optimizing)), | 193 block_order_(*flow_graph->CodegenBlockOrder(is_optimizing)), |
194 current_block_(NULL), | 194 current_block_(NULL), |
195 exception_handlers_list_(NULL), | 195 exception_handlers_list_(NULL), |
196 pc_descriptors_list_(NULL), | 196 pc_descriptors_list_(NULL), |
197 stackmap_table_builder_(NULL), | 197 stackmap_table_builder_(NULL), |
198 code_source_map_builder_(NULL), | 198 code_source_map_builder_(NULL), |
199 saved_code_size_(0), | |
200 block_info_(block_order_.length()), | 199 block_info_(block_order_.length()), |
201 deopt_infos_(), | 200 deopt_infos_(), |
202 static_calls_target_table_(), | 201 static_calls_target_table_(), |
203 is_optimizing_(is_optimizing), | 202 is_optimizing_(is_optimizing), |
204 may_reoptimize_(false), | 203 may_reoptimize_(false), |
205 intrinsic_mode_(false), | 204 intrinsic_mode_(false), |
206 double_class_( | 205 double_class_( |
207 Class::ZoneHandle(isolate()->object_store()->double_class())), | 206 Class::ZoneHandle(isolate()->object_store()->double_class())), |
208 mint_class_(Class::ZoneHandle(isolate()->object_store()->mint_class())), | 207 mint_class_(Class::ZoneHandle(isolate()->object_store()->mint_class())), |
209 float32x4_class_( | 208 float32x4_class_( |
210 Class::ZoneHandle(isolate()->object_store()->float32x4_class())), | 209 Class::ZoneHandle(isolate()->object_store()->float32x4_class())), |
211 float64x2_class_( | 210 float64x2_class_( |
212 Class::ZoneHandle(isolate()->object_store()->float64x2_class())), | 211 Class::ZoneHandle(isolate()->object_store()->float64x2_class())), |
213 int32x4_class_( | 212 int32x4_class_( |
214 Class::ZoneHandle(isolate()->object_store()->int32x4_class())), | 213 Class::ZoneHandle(isolate()->object_store()->int32x4_class())), |
215 list_class_(Class::ZoneHandle(Library::Handle(Library::CoreLibrary()) | 214 list_class_(Class::ZoneHandle(Library::Handle(Library::CoreLibrary()) |
216 .LookupClass(Symbols::List()))), | 215 .LookupClass(Symbols::List()))), |
217 parallel_move_resolver_(this), | 216 parallel_move_resolver_(this), |
218 pending_deoptimization_env_(NULL), | 217 pending_deoptimization_env_(NULL), |
219 deopt_id_to_ic_data_(NULL), | 218 deopt_id_to_ic_data_(NULL), |
220 edge_counters_array_(Array::ZoneHandle()), | 219 edge_counters_array_(Array::ZoneHandle()) { |
221 inlined_code_intervals_(Array::ZoneHandle(Object::empty_array().raw())), | |
222 inline_id_to_function_(inline_id_to_function), | |
223 inline_id_to_token_pos_(inline_id_to_token_pos), | |
224 caller_inline_id_(caller_inline_id) { | |
225 ASSERT(flow_graph->parsed_function().function().raw() == | 220 ASSERT(flow_graph->parsed_function().function().raw() == |
226 parsed_function.function().raw()); | 221 parsed_function.function().raw()); |
227 if (!is_optimizing) { | 222 if (!is_optimizing) { |
228 const intptr_t len = thread()->deopt_id(); | 223 const intptr_t len = thread()->deopt_id(); |
229 deopt_id_to_ic_data_ = new (zone()) ZoneGrowableArray<const ICData*>(len); | 224 deopt_id_to_ic_data_ = new (zone()) ZoneGrowableArray<const ICData*>(len); |
230 deopt_id_to_ic_data_->SetLength(len); | 225 deopt_id_to_ic_data_->SetLength(len); |
231 for (intptr_t i = 0; i < len; i++) { | 226 for (intptr_t i = 0; i < len; i++) { |
232 (*deopt_id_to_ic_data_)[i] = NULL; | 227 (*deopt_id_to_ic_data_)[i] = NULL; |
233 } | 228 } |
234 // TODO(fschneider): Abstract iteration into ICDataArrayIterator. | 229 // TODO(fschneider): Abstract iteration into ICDataArrayIterator. |
235 const Array& old_saved_ic_data = | 230 const Array& old_saved_ic_data = |
236 Array::Handle(zone(), flow_graph->function().ic_data_array()); | 231 Array::Handle(zone(), flow_graph->function().ic_data_array()); |
237 const intptr_t saved_len = | 232 const intptr_t saved_len = |
238 old_saved_ic_data.IsNull() ? 0 : old_saved_ic_data.Length(); | 233 old_saved_ic_data.IsNull() ? 0 : old_saved_ic_data.Length(); |
239 for (intptr_t i = 1; i < saved_len; i++) { | 234 for (intptr_t i = 1; i < saved_len; i++) { |
240 ICData& ic_data = ICData::ZoneHandle(zone()); | 235 ICData& ic_data = ICData::ZoneHandle(zone()); |
241 ic_data ^= old_saved_ic_data.At(i); | 236 ic_data ^= old_saved_ic_data.At(i); |
242 (*deopt_id_to_ic_data_)[ic_data.deopt_id()] = &ic_data; | 237 (*deopt_id_to_ic_data_)[ic_data.deopt_id()] = &ic_data; |
243 } | 238 } |
244 } | 239 } |
245 ASSERT(assembler != NULL); | 240 ASSERT(assembler != NULL); |
246 ASSERT(!list_class_.IsNull()); | 241 ASSERT(!list_class_.IsNull()); |
| 242 |
| 243 code_source_map_builder_ = new (zone_) CodeSourceMapBuilder( |
| 244 caller_inline_id, inline_id_to_token_pos, inline_id_to_function); |
247 } | 245 } |
248 | 246 |
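Note on the constructor hunk above: the per-compiler inlining bookkeeping (saved_code_size_, inlined_code_intervals_, inline_id_to_function_, inline_id_to_token_pos_, caller_inline_id_) is replaced by a single zone-allocated CodeSourceMapBuilder created at the end of the constructor. The builder's declaration is not part of this diff, so the sketch below is inferred purely from its call sites in this CL (constructor arguments, StartInliningInterval, Begin/EndCodeSourceRange, InliningIdToFunction, Finalize); the parameter types and base class are assumptions, not the actual header.

    // Sketch of the interface implied by this CL's call sites; not the real header.
    class CodeSourceMapBuilder : public ZoneAllocated {  // base class assumed
     public:
      // Argument types assumed from the FlowGraphCompiler fields they replace.
      CodeSourceMapBuilder(
          const GrowableArray<intptr_t>& caller_inline_id,
          const GrowableArray<TokenPosition>& inline_id_to_token_pos,
          const GrowableArray<const Function*>& inline_id_to_function);

      // Called per instruction while emitting code; records that code from
      // pc_offset onward belongs to inlining_id (see VisitBlocks below).
      void StartInliningInterval(intptr_t pc_offset, intptr_t inlining_id);

      // Bracket the code emitted for one instruction so it can be attributed
      // to a token position (see Begin/EndCodeSourceRange below).
      void BeginCodeSourceRange(intptr_t pc_offset);
      void EndCodeSourceRange(intptr_t pc_offset, TokenPosition token_pos);

      // Consumed by FinalizeCodeSourceMap below.
      RawArray* InliningIdToFunction();
      RawCodeSourceMap* Finalize();
    };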
249 | 247 |
250 bool FlowGraphCompiler::IsUnboxedField(const Field& field) { | 248 bool FlowGraphCompiler::IsUnboxedField(const Field& field) { |
251 bool valid_class = | 249 bool valid_class = |
252 (SupportsUnboxedDoubles() && (field.guarded_cid() == kDoubleCid)) || | 250 (SupportsUnboxedDoubles() && (field.guarded_cid() == kDoubleCid)) || |
253 (SupportsUnboxedSimd128() && (field.guarded_cid() == kFloat32x4Cid)) || | 251 (SupportsUnboxedSimd128() && (field.guarded_cid() == kFloat32x4Cid)) || |
254 (SupportsUnboxedSimd128() && (field.guarded_cid() == kFloat64x2Cid)); | 252 (SupportsUnboxedSimd128() && (field.guarded_cid() == kFloat64x2Cid)); |
255 return field.is_unboxing_candidate() && !field.is_final() && | 253 return field.is_unboxing_candidate() && !field.is_final() && |
256 !field.is_nullable() && valid_class; | 254 !field.is_nullable() && valid_class; |
(...skipping 199 matching lines...)
456 !loop_it.Done(); loop_it.Advance()) { | 454 !loop_it.Done(); loop_it.Advance()) { |
457 if (loop_it.Current() == block.preorder_number()) { | 455 if (loop_it.Current() == block.preorder_number()) { |
458 assembler->Comment(" Loop %" Pd "", loop_id); | 456 assembler->Comment(" Loop %" Pd "", loop_id); |
459 } | 457 } |
460 } | 458 } |
461 } | 459 } |
462 } | 460 } |
463 } | 461 } |
464 | 462 |
465 | 463 |
466 // We collect intervals while generating code. | |
467 struct IntervalStruct { | |
468 // 'start' is the pc-offsets where the inlined code started. | |
469 // 'pos' is the token position where the inlined call occurred. |
470 intptr_t start; | |
471 TokenPosition pos; | |
472 intptr_t inlining_id; | |
473 IntervalStruct(intptr_t s, TokenPosition tp, intptr_t id) | |
474 : start(s), pos(tp), inlining_id(id) {} | |
475 void Dump() { | |
476 THR_Print("start: 0x%" Px " iid: %" Pd " pos: %s", start, inlining_id, | |
477 pos.ToCString()); | |
478 } | |
479 }; | |
480 | |
481 | |
482 void FlowGraphCompiler::VisitBlocks() { | 464 void FlowGraphCompiler::VisitBlocks() { |
483 CompactBlocks(); | 465 CompactBlocks(); |
484 const ZoneGrowableArray<BlockEntryInstr*>* loop_headers = NULL; | 466 const ZoneGrowableArray<BlockEntryInstr*>* loop_headers = NULL; |
485 if (Assembler::EmittingComments()) { | 467 if (Assembler::EmittingComments()) { |
486 // 'loop_headers' were cleared, recompute. | 468 // 'loop_headers' were cleared, recompute. |
487 loop_headers = flow_graph().ComputeLoops(); | 469 loop_headers = flow_graph().ComputeLoops(); |
488 ASSERT(loop_headers != NULL); | 470 ASSERT(loop_headers != NULL); |
489 } | 471 } |
490 | 472 |
491 // For collecting intervals of inlined code. | |
492 GrowableArray<IntervalStruct> intervals; | |
493 intptr_t prev_offset = 0; | |
494 intptr_t prev_inlining_id = 0; | |
495 TokenPosition prev_inlining_pos = parsed_function_.function().token_pos(); | |
496 intptr_t max_inlining_id = 0; | |
497 for (intptr_t i = 0; i < block_order().length(); ++i) { | 473 for (intptr_t i = 0; i < block_order().length(); ++i) { |
498 // Compile the block entry. | 474 // Compile the block entry. |
499 BlockEntryInstr* entry = block_order()[i]; | 475 BlockEntryInstr* entry = block_order()[i]; |
500 assembler()->Comment("B%" Pd "", entry->block_id()); | 476 assembler()->Comment("B%" Pd "", entry->block_id()); |
501 set_current_block(entry); | 477 set_current_block(entry); |
502 | 478 |
503 if (WasCompacted(entry)) { | 479 if (WasCompacted(entry)) { |
504 continue; | 480 continue; |
505 } | 481 } |
506 | 482 |
507 #if defined(DEBUG) && !defined(TARGET_ARCH_DBC) | 483 #if defined(DEBUG) && !defined(TARGET_ARCH_DBC) |
508 if (!is_optimizing()) { | 484 if (!is_optimizing()) { |
509 FrameStateClear(); | 485 FrameStateClear(); |
510 } | 486 } |
511 #endif | 487 #endif |
512 | 488 |
513 LoopInfoComment(assembler(), *entry, *loop_headers); | 489 LoopInfoComment(assembler(), *entry, *loop_headers); |
514 | 490 |
515 entry->set_offset(assembler()->CodeSize()); | 491 entry->set_offset(assembler()->CodeSize()); |
516 BeginCodeSourceRange(); | 492 BeginCodeSourceRange(); |
517 ASSERT(pending_deoptimization_env_ == NULL); | 493 ASSERT(pending_deoptimization_env_ == NULL); |
518 pending_deoptimization_env_ = entry->env(); | 494 pending_deoptimization_env_ = entry->env(); |
519 entry->EmitNativeCode(this); | 495 entry->EmitNativeCode(this); |
520 pending_deoptimization_env_ = NULL; | 496 pending_deoptimization_env_ = NULL; |
521 EndCodeSourceRange(entry->token_pos()); | 497 EndCodeSourceRange(entry->token_pos()); |
522 // Compile all successors until an exit, branch, or a block entry. | 498 // Compile all successors until an exit, branch, or a block entry. |
523 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) { | 499 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) { |
524 Instruction* instr = it.Current(); | 500 Instruction* instr = it.Current(); |
525 // Compose intervals. | 501 // Compose intervals. |
526 if (instr->has_inlining_id() && is_optimizing()) { | 502 code_source_map_builder_->StartInliningInterval(assembler()->CodeSize(), |
527 if (prev_inlining_id != instr->inlining_id()) { | 503 instr->inlining_id()); |
528 intervals.Add( | |
529 IntervalStruct(prev_offset, prev_inlining_pos, prev_inlining_id)); | |
530 prev_offset = assembler()->CodeSize(); | |
531 prev_inlining_id = instr->inlining_id(); | |
532 if (prev_inlining_id < inline_id_to_token_pos_.length()) { | |
533 prev_inlining_pos = inline_id_to_token_pos_[prev_inlining_id]; | |
534 } else { | |
535 // We will add this token position later when generating the | |
536 // profile. | |
537 prev_inlining_pos = TokenPosition::kNoSource; | |
538 } | |
539 if (prev_inlining_id > max_inlining_id) { | |
540 max_inlining_id = prev_inlining_id; | |
541 } | |
542 } | |
543 } | |
544 if (FLAG_code_comments || FLAG_disassemble || | 504 if (FLAG_code_comments || FLAG_disassemble || |
545 FLAG_disassemble_optimized) { | 505 FLAG_disassemble_optimized) { |
546 if (FLAG_source_lines) { | 506 if (FLAG_source_lines) { |
547 EmitSourceLine(instr); | 507 EmitSourceLine(instr); |
548 } | 508 } |
549 EmitComment(instr); | 509 EmitComment(instr); |
550 } | 510 } |
551 if (instr->IsParallelMove()) { | 511 if (instr->IsParallelMove()) { |
552 parallel_move_resolver_.EmitNativeCode(instr->AsParallelMove()); | 512 parallel_move_resolver_.EmitNativeCode(instr->AsParallelMove()); |
553 } else { | 513 } else { |
(...skipping 12 matching lines...)
566 FrameStateUpdateWith(instr); | 526 FrameStateUpdateWith(instr); |
567 } | 527 } |
568 #endif | 528 #endif |
569 } | 529 } |
570 | 530 |
571 #if defined(DEBUG) && !defined(TARGET_ARCH_DBC) | 531 #if defined(DEBUG) && !defined(TARGET_ARCH_DBC) |
572 ASSERT(is_optimizing() || FrameStateIsSafeToCall()); | 532 ASSERT(is_optimizing() || FrameStateIsSafeToCall()); |
573 #endif | 533 #endif |
574 } | 534 } |
575 | 535 |
576 if (is_optimizing()) { | |
577 LogBlock lb; | |
578 intervals.Add( | |
579 IntervalStruct(prev_offset, prev_inlining_pos, prev_inlining_id)); | |
580 inlined_code_intervals_ = | |
581 Array::New(intervals.length() * Code::kInlIntNumEntries, Heap::kOld); | |
582 Smi& start_h = Smi::Handle(); | |
583 Smi& caller_inline_id = Smi::Handle(); | |
584 Smi& inline_id = Smi::Handle(); | |
585 for (intptr_t i = 0; i < intervals.length(); i++) { | |
586 if (FLAG_trace_inlining_intervals && is_optimizing()) { | |
587 const Function& function = | |
588 *inline_id_to_function_.At(intervals[i].inlining_id); | |
589 intervals[i].Dump(); | |
590 THR_Print(" parent iid %" Pd " %s\n", | |
591 caller_inline_id_[intervals[i].inlining_id], | |
592 function.ToQualifiedCString()); | |
593 } | |
594 | |
595 const intptr_t id = intervals[i].inlining_id; | |
596 start_h = Smi::New(intervals[i].start); | |
597 inline_id = Smi::New(id); | |
598 caller_inline_id = Smi::New(caller_inline_id_[intervals[i].inlining_id]); | |
599 | |
600 const intptr_t p = i * Code::kInlIntNumEntries; | |
601 inlined_code_intervals_.SetAt(p + Code::kInlIntStart, start_h); | |
602 inlined_code_intervals_.SetAt(p + Code::kInlIntInliningId, inline_id); | |
603 } | |
604 } | |
605 set_current_block(NULL); | 536 set_current_block(NULL); |
606 if (FLAG_trace_inlining_intervals && is_optimizing()) { | |
607 LogBlock lb; | |
608 THR_Print("Intervals:\n"); | |
609 for (intptr_t cc = 0; cc < caller_inline_id_.length(); cc++) { | |
610 THR_Print(" iid: %" Pd " caller iid: %" Pd "\n", cc, | |
611 caller_inline_id_[cc]); | |
612 } | |
613 Smi& temp = Smi::Handle(); | |
614 for (intptr_t i = 0; i < inlined_code_intervals_.Length(); | |
615 i += Code::kInlIntNumEntries) { | |
616 temp ^= inlined_code_intervals_.At(i + Code::kInlIntStart); | |
617 ASSERT(!temp.IsNull()); | |
618 THR_Print("% " Pd " start: 0x%" Px " ", i, temp.Value()); | |
619 temp ^= inlined_code_intervals_.At(i + Code::kInlIntInliningId); | |
620 THR_Print("iid: %" Pd " ", temp.Value()); | |
621 } | |
622 } | |
623 } | 537 } |
624 | 538 |
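For context on the VisitBlocks change: the hand-rolled IntervalStruct collection and the inlined_code_intervals_ array construction at the end of the function are gone; each instruction now just reports its inlining id to the builder. A minimal model of what StartInliningInterval has to do to be equivalent to the deleted loop is sketched below; the members and exact logic are assumptions, since the builder's implementation is not in this diff.

    // Illustrative model only; not the code added by this CL.
    void CodeSourceMapBuilder::StartInliningInterval(intptr_t pc_offset,
                                                     intptr_t inlining_id) {
      if (inlining_id == current_inlining_id_) {
        return;  // Still inside the same inlined function; nothing to record.
      }
      // Close the previous interval and open a new one at pc_offset, mirroring
      // the "prev_inlining_id != instr->inlining_id()" branch that was removed.
      interval_starts_.Add(pc_offset);
      interval_inlining_ids_.Add(inlining_id);
      current_inlining_id_ = inlining_id;
    }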
625 | 539 |
626 void FlowGraphCompiler::Bailout(const char* reason) { | 540 void FlowGraphCompiler::Bailout(const char* reason) { |
627 parsed_function_.Bailout("FlowGraphCompiler", reason); | 541 parsed_function_.Bailout("FlowGraphCompiler", reason); |
628 } | 542 } |
629 | 543 |
630 | 544 |
631 void FlowGraphCompiler::EmitTrySync(Instruction* instr, intptr_t try_index) { | 545 void FlowGraphCompiler::EmitTrySync(Instruction* instr, intptr_t try_index) { |
632 ASSERT(is_optimizing()); | 546 ASSERT(is_optimizing()); |
(...skipping 472 matching lines...)
1105 targets.SetAt(target_ix + Code::kSCallTableCodeEntry, | 1019 targets.SetAt(target_ix + Code::kSCallTableCodeEntry, |
1106 *static_calls_target_table_[i]->code); | 1020 *static_calls_target_table_[i]->code); |
1107 } | 1021 } |
1108 } | 1022 } |
1109 code.set_static_calls_target_table(targets); | 1023 code.set_static_calls_target_table(targets); |
1110 INC_STAT(Thread::Current(), total_code_size, | 1024 INC_STAT(Thread::Current(), total_code_size, |
1111 targets.Length() * sizeof(uword)); | 1025 targets.Length() * sizeof(uword)); |
1112 } | 1026 } |
1113 | 1027 |
1114 | 1028 |
| 1029 void FlowGraphCompiler::FinalizeCodeSourceMap(const Code& code) { |
| 1030 #ifdef PRODUCT |
| 1031 // This data is only used by the profiler. |
| 1032 #else |
| 1033 if (FLAG_precompiled_mode) { |
| 1034 // TODO(rmacnak): Include a filtered version of this to produce stack traces |
| 1035 // with inlined frames. |
| 1036 return; |
| 1037 } |
| 1038 |
| 1039 const Array& inlined_id_array = |
| 1040 Array::Handle(zone(), code_source_map_builder_->InliningIdToFunction()); |
| 1041 INC_STAT(Thread::Current(), total_code_size, |
| 1042 inlined_id_array.Length() * sizeof(uword)); |
| 1043 code.set_inlined_id_to_function(inlined_id_array); |
| 1044 |
| 1045 const CodeSourceMap& map = |
| 1046 CodeSourceMap::Handle(code_source_map_builder_->Finalize()); |
| 1047 INC_STAT(Thread::Current(), total_code_size, map.Length() * sizeof(uint8_t)); |
| 1048 code.set_code_source_map(map); |
| 1049 #endif |
| 1050 |
| 1051 #if defined(DEBUG) |
| 1052 // Force simulation through the last pc offset. This checks we can decode |
| 1053 // the whole CodeSourceMap without hitting an unknown opcode, stack underflow, |
| 1054 // etc. |
| 1055 GrowableArray<const Function*> fs; |
| 1056 GrowableArray<TokenPosition> tokens; |
| 1057 code.GetInlinedFunctionsAt(code.Size() - 1, &fs, &tokens); |
| 1058 #endif |
| 1059 } |
| 1060 |
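The new FinalizeCodeSourceMap above attaches two things to the Code object for the profiler (inlining-id-to-function array and the serialized CodeSourceMap), and in DEBUG builds decodes the map once at the last pc offset as a sanity check. The sketch below shows the same accessor used to recover the inline stack at an arbitrary offset; only Code::GetInlinedFunctionsAt, ToQualifiedCString, and ToCString are taken from this file, and the helper itself is hypothetical.

    // Hypothetical helper built on the accessor exercised in the DEBUG block.
    static void PrintInlineStackAt(const Code& code, intptr_t pc_offset) {
      GrowableArray<const Function*> functions;
      GrowableArray<TokenPosition> token_positions;
      code.GetInlinedFunctionsAt(pc_offset, &functions, &token_positions);
      for (intptr_t i = 0; i < functions.length(); i++) {
        // Frame ordering in the output arrays is assumed here.
        THR_Print("  %s @ %s\n", functions[i]->ToQualifiedCString(),
                  token_positions[i].ToCString());
      }
    }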
| 1061 |
1115 // Returns 'true' if regular code generation should be skipped. | 1062 // Returns 'true' if regular code generation should be skipped. |
1116 bool FlowGraphCompiler::TryIntrinsify() { | 1063 bool FlowGraphCompiler::TryIntrinsify() { |
1117 // Intrinsification skips arguments checks, therefore disable if in checked | 1064 // Intrinsification skips arguments checks, therefore disable if in checked |
1118 // mode. | 1065 // mode. |
1119 if (FLAG_intrinsify && !isolate()->type_checks()) { | 1066 if (FLAG_intrinsify && !isolate()->type_checks()) { |
1120 const Class& owner = Class::Handle(parsed_function().function().Owner()); | 1067 const Class& owner = Class::Handle(parsed_function().function().Owner()); |
1121 String& name = String::Handle(parsed_function().function().name()); | 1068 String& name = String::Handle(parsed_function().function().name()); |
1122 | 1069 |
1123 if (parsed_function().function().kind() == RawFunction::kImplicitGetter) { | 1070 if (parsed_function().function().kind() == RawFunction::kImplicitGetter) { |
1124 // TODO(27590) Store Field object inside RawFunction::data_ if possible. | 1071 // TODO(27590) Store Field object inside RawFunction::data_ if possible. |
(...skipping 656 matching lines...)
1781 return int32x4_class(); | 1728 return int32x4_class(); |
1782 case kUnboxedMint: | 1729 case kUnboxedMint: |
1783 return mint_class(); | 1730 return mint_class(); |
1784 default: | 1731 default: |
1785 UNREACHABLE(); | 1732 UNREACHABLE(); |
1786 return Class::ZoneHandle(); | 1733 return Class::ZoneHandle(); |
1787 } | 1734 } |
1788 } | 1735 } |
1789 | 1736 |
1790 | 1737 |
1791 RawArray* FlowGraphCompiler::InliningIdToFunction() const { | |
1792 if (inline_id_to_function_.length() == 0) { | |
1793 return Object::empty_array().raw(); | |
1794 } | |
1795 const Array& res = | |
1796 Array::Handle(Array::New(inline_id_to_function_.length(), Heap::kOld)); | |
1797 for (intptr_t i = 0; i < inline_id_to_function_.length(); i++) { | |
1798 res.SetAt(i, *inline_id_to_function_[i]); | |
1799 } | |
1800 return res.raw(); | |
1801 } | |
1802 | |
1803 | |
1804 RawArray* FlowGraphCompiler::InliningIdToTokenPos() const { | |
1805 if (inline_id_to_token_pos_.length() == 0) { | |
1806 return Object::empty_array().raw(); | |
1807 } | |
1808 const Array& res = Array::Handle( | |
1809 zone(), Array::New(inline_id_to_token_pos_.length(), Heap::kOld)); | |
1810 Smi& smi = Smi::Handle(zone()); | |
1811 for (intptr_t i = 0; i < inline_id_to_token_pos_.length(); i++) { | |
1812 smi = Smi::New(inline_id_to_token_pos_[i].value()); | |
1813 res.SetAt(i, smi); | |
1814 } | |
1815 return res.raw(); | |
1816 } | |
1817 | |
1818 | |
1819 RawArray* FlowGraphCompiler::CallerInliningIdMap() const { | |
1820 if (caller_inline_id_.length() == 0) { | |
1821 return Object::empty_array().raw(); | |
1822 } | |
1823 const Array& res = | |
1824 Array::Handle(Array::New(caller_inline_id_.length(), Heap::kOld)); | |
1825 Smi& smi = Smi::Handle(); | |
1826 for (intptr_t i = 0; i < caller_inline_id_.length(); i++) { | |
1827 smi = Smi::New(caller_inline_id_[i]); | |
1828 res.SetAt(i, smi); | |
1829 } | |
1830 return res.raw(); | |
1831 } | |
1832 | |
1833 | |
1834 void FlowGraphCompiler::BeginCodeSourceRange() { | 1738 void FlowGraphCompiler::BeginCodeSourceRange() { |
1835 #if !defined(PRODUCT) | 1739 code_source_map_builder_->BeginCodeSourceRange(assembler()->CodeSize()); |
1836 // Remember how many bytes of code we emitted so far. This function | |
1837 // is called before we call into an instruction's EmitNativeCode. | |
1838 saved_code_size_ = assembler()->CodeSize(); | |
1839 #endif // !defined(PRODUCT) | |
1840 } | 1740 } |
1841 | 1741 |
1842 | 1742 |
1843 bool FlowGraphCompiler::EndCodeSourceRange(TokenPosition token_pos) { | 1743 void FlowGraphCompiler::EndCodeSourceRange(TokenPosition token_pos) { |
1844 #if !defined(PRODUCT) | 1744 code_source_map_builder_->EndCodeSourceRange(assembler()->CodeSize(), |
1845 // This function is called after each instructions' EmitNativeCode. | 1745 token_pos); |
1846 if (saved_code_size_ < assembler()->CodeSize()) { | |
1847 // We emitted more code, now associate the emitted code chunk with | |
1848 // |token_pos|. | |
1849 code_source_map_builder()->AddEntry(saved_code_size_, token_pos); | |
1850 BeginCodeSourceRange(); | |
1851 return true; | |
1852 } | |
1853 #endif // !defined(PRODUCT) | |
1854 return false; | |
1855 } | 1746 } |
1856 | 1747 |
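Begin/EndCodeSourceRange now forward unconditionally to the builder, and EndCodeSourceRange no longer returns a bool. Assuming the builder preserves the old "only record when code was actually emitted" behavior, the pair reduces to roughly the following; this is a sketch of equivalent logic under that assumption, not the builder's actual implementation, though AddEntry is the call the deleted code used.

    // Sketch of equivalent behavior; members are assumed, not shown in this diff.
    void CodeSourceMapBuilder::BeginCodeSourceRange(intptr_t pc_offset) {
      range_start_ = pc_offset;  // Where this instruction's code begins.
    }

    void CodeSourceMapBuilder::EndCodeSourceRange(intptr_t pc_offset,
                                                  TokenPosition token_pos) {
      if (pc_offset > range_start_) {
        // Code was emitted for the instruction; attribute [range_start_,
        // pc_offset) to token_pos, as the old saved_code_size_ check did.
        AddEntry(range_start_, token_pos);
      }
    }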
1857 | 1748 |
1858 #if !defined(TARGET_ARCH_DBC) | 1749 #if !defined(TARGET_ARCH_DBC) |
1859 // DBC emits calls very differently from other architectures due to its | 1750 // DBC emits calls very differently from other architectures due to its |
1860 // interpreted nature. | 1751 // interpreted nature. |
1861 void FlowGraphCompiler::EmitPolymorphicInstanceCall(const ICData& ic_data, | 1752 void FlowGraphCompiler::EmitPolymorphicInstanceCall(const ICData& ic_data, |
1862 intptr_t argument_count, | 1753 intptr_t argument_count, |
1863 const Array& argument_names, | 1754 const Array& argument_names, |
1864 intptr_t deopt_id, | 1755 intptr_t deopt_id, |
(...skipping 94 matching lines...)
1959 | 1850 |
1960 | 1851 |
1961 void FlowGraphCompiler::FrameStateClear() { | 1852 void FlowGraphCompiler::FrameStateClear() { |
1962 ASSERT(!is_optimizing()); | 1853 ASSERT(!is_optimizing()); |
1963 frame_state_.TruncateTo(0); | 1854 frame_state_.TruncateTo(0); |
1964 } | 1855 } |
1965 #endif // defined(DEBUG) && !defined(TARGET_ARCH_DBC) | 1856 #endif // defined(DEBUG) && !defined(TARGET_ARCH_DBC) |
1966 | 1857 |
1967 | 1858 |
1968 } // namespace dart | 1859 } // namespace dart |