Chromium Code Reviews

Side by Side Diff: src/compiler/code-generator.cc

Issue 2229243003: [turbofan] Split CodeGenerator::GenerateCode into AssembleCode and FinishCodeObject. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 4 months ago
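
The patch turns the single `GenerateCode()` entry point into a two-phase API: `AssembleCode()` emits the instruction blocks and records whether assembly succeeded, and `FinishCodeObject()` later builds the `Handle<Code>` from the assembled output. The sketch below is a hypothetical caller-side illustration of that flow, not code from this patch: the wrapper function name and the way `frame`, `linkage`, and `code` are obtained are assumptions, while the `CodeGenerator` calls themselves match the signatures introduced in the diff. Presumably the split lets a caller detect assembly failure before committing to building the final code object.

```cpp
#include "src/compiler/code-generator.h"

namespace v8 {
namespace internal {
namespace compiler {

// Hypothetical driver showing the intended call order after the split.
Handle<Code> GenerateCodeInTwoPhases(Zone* zone, CompilationInfo* info,
                                     Frame* frame, Linkage* linkage,
                                     InstructionSequence* code) {
  // Construction no longer needs the frame, linkage, or instruction
  // sequence up front; only the zone and compilation info are required.
  CodeGenerator generator(zone, info);
  // Frame, linkage, and instructions are supplied later via Initialize().
  generator.Initialize(frame, linkage, code);

  // Phase 1: assemble all instruction blocks; returns false on failure
  // instead of an empty Handle<Code> as the old GenerateCode() did.
  if (!generator.AssembleCode()) return Handle<Code>::null();

  // Phase 2: emit out-of-line code, deopt exits, jump tables, and
  // safepoints, then allocate and populate the final Code object.
  return generator.FinishCodeObject();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8
```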
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler/code-generator.h" 5 #include "src/compiler/code-generator.h"
6 6
7 #include "src/address-map.h" 7 #include "src/address-map.h"
8 #include "src/base/adapters.h" 8 #include "src/base/adapters.h"
9 #include "src/compiler/code-generator-impl.h" 9 #include "src/compiler/code-generator-impl.h"
10 #include "src/compiler/linkage.h" 10 #include "src/compiler/linkage.h"
(...skipping 14 matching lines...)
25 Label** targets() const { return targets_; } 25 Label** targets() const { return targets_; }
26 size_t target_count() const { return target_count_; } 26 size_t target_count() const { return target_count_; }
27 27
28 private: 28 private:
29 Label label_; 29 Label label_;
30 JumpTable* const next_; 30 JumpTable* const next_;
31 Label** const targets_; 31 Label** const targets_;
32 size_t const target_count_; 32 size_t const target_count_;
33 }; 33 };
34 34
35 CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage, 35 CodeGenerator::CodeGenerator(Zone* zone, CompilationInfo* info)
36 InstructionSequence* code, CompilationInfo* info)
37 : frame_access_state_(nullptr), 36 : frame_access_state_(nullptr),
38 linkage_(linkage), 37 linkage_(nullptr),
39 code_(code), 38 code_(nullptr),
40 unwinding_info_writer_(zone()), 39 unwinding_info_writer_(zone),
41 info_(info), 40 info_(info),
42 labels_(zone()->NewArray<Label>(code->InstructionBlockCount())), 41 labels_(nullptr),
43 current_block_(RpoNumber::Invalid()), 42 current_block_(RpoNumber::Invalid()),
44 current_source_position_(SourcePosition::Unknown()), 43 current_source_position_(SourcePosition::Unknown()),
45 masm_(info->isolate(), nullptr, 0, CodeObjectRequired::kNo), 44 masm_(info->isolate(), nullptr, 0, CodeObjectRequired::kNo),
46 resolver_(this), 45 resolver_(this),
47 safepoints_(code->zone()), 46 safepoints_(zone),
48 handlers_(code->zone()), 47 handlers_(zone),
49 deoptimization_exits_(code->zone()), 48 deoptimization_exits_(zone),
50 deoptimization_states_(code->zone()), 49 deoptimization_states_(zone),
51 deoptimization_literals_(code->zone()), 50 deoptimization_literals_(zone),
52 inlined_function_count_(0), 51 inlined_function_count_(0),
53 translations_(code->zone()), 52 translations_(zone),
54 last_lazy_deopt_pc_(0), 53 last_lazy_deopt_pc_(0),
55 jump_tables_(nullptr), 54 jump_tables_(nullptr),
56 ools_(nullptr), 55 ools_(nullptr),
57 osr_pc_offset_(-1), 56 osr_pc_offset_(-1),
58 source_position_table_builder_(info->isolate(), code->zone(), 57 source_position_table_builder_(info->isolate(), zone,
59 info->SourcePositionRecordingMode()) { 58 info->SourcePositionRecordingMode()),
59 assemble_code_successful_(false) {}
60
61 void CodeGenerator::Initialize(Frame* frame, Linkage* linkage,
62 InstructionSequence* code) {
63 linkage_ = linkage;
64 code_ = code;
65 labels_ = zone()->NewArray<Label>(code->InstructionBlockCount());
60 for (int i = 0; i < code->InstructionBlockCount(); ++i) { 66 for (int i = 0; i < code->InstructionBlockCount(); ++i) {
61 new (&labels_[i]) Label; 67 new (&labels_[i]) Label;
62 } 68 }
63 CreateFrameAccessState(frame); 69
70 // Create the FrameAccessState object. The Frame is immutable from here on.
71 FinishFrame(frame);
72 frame_access_state_ = new (code->zone()) FrameAccessState(frame);
64 } 73 }
65 74
66 void CodeGenerator::CreateFrameAccessState(Frame* frame) { 75 bool CodeGenerator::AssembleCode() {
67 FinishFrame(frame); 76 DCHECK(!assemble_code_successful());
68 frame_access_state_ = new (code()->zone()) FrameAccessState(frame);
69 }
70
71 Handle<Code> CodeGenerator::GenerateCode() {
72 CompilationInfo* info = this->info();
73
74 // Open a frame scope to indicate that there is a frame on the stack. The 77 // Open a frame scope to indicate that there is a frame on the stack. The
Michael Starzinger 2016/08/11 14:16:26 nit: Empty newline after DCHECK for readability.
ahaas 2016/08/11 14:54:19 Done.
75 // MANUAL indicates that the scope shouldn't actually generate code to set up 78 // MANUAL indicates that the scope shouldn't actually generate code to set up
76 // the frame (that is done in AssemblePrologue). 79 // the frame (that is done in AssemblePrologue).
77 FrameScope frame_scope(masm(), StackFrame::MANUAL); 80 FrameScope frame_scope(masm(), StackFrame::MANUAL);
78 81
79 // Place function entry hook if requested to do so. 82 // Place function entry hook if requested to do so.
80 if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) { 83 if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
81 ProfileEntryHookStub::MaybeCallEntryHook(masm()); 84 ProfileEntryHookStub::MaybeCallEntryHook(masm());
82 } 85 }
83 // Architecture-specific, linkage-specific prologue. 86 // Architecture-specific, linkage-specific prologue.
84 info->set_prologue_offset(masm()->pc_offset()); 87 info()->set_prologue_offset(masm()->pc_offset());
85 88
86 // Define deoptimization literals for all inlined functions. 89 // Define deoptimization literals for all inlined functions.
87 DCHECK_EQ(0u, deoptimization_literals_.size()); 90 DCHECK_EQ(0u, deoptimization_literals_.size());
88 for (const CompilationInfo::InlinedFunctionHolder& inlined : 91 for (const CompilationInfo::InlinedFunctionHolder& inlined :
89 info->inlined_functions()) { 92 info()->inlined_functions()) {
90 if (!inlined.shared_info.is_identical_to(info->shared_info())) { 93 if (!inlined.shared_info.is_identical_to(info()->shared_info())) {
91 DefineDeoptimizationLiteral(inlined.shared_info); 94 DefineDeoptimizationLiteral(inlined.shared_info);
92 } 95 }
93 } 96 }
94 inlined_function_count_ = deoptimization_literals_.size(); 97 inlined_function_count_ = deoptimization_literals_.size();
95 98
96 // Define deoptimization literals for all unoptimized code objects of inlined 99 // Define deoptimization literals for all unoptimized code objects of inlined
97 // functions. This ensures unoptimized code is kept alive by optimized code. 100 // functions. This ensures unoptimized code is kept alive by optimized code.
98 for (const CompilationInfo::InlinedFunctionHolder& inlined : 101 for (const CompilationInfo::InlinedFunctionHolder& inlined :
99 info->inlined_functions()) { 102 info()->inlined_functions()) {
100 if (!inlined.shared_info.is_identical_to(info->shared_info())) { 103 if (!inlined.shared_info.is_identical_to(info()->shared_info())) {
101 DefineDeoptimizationLiteral(inlined.inlined_code_object_root); 104 DefineDeoptimizationLiteral(inlined.inlined_code_object_root);
102 } 105 }
103 } 106 }
104 107
105 unwinding_info_writer_.SetNumberOfInstructionBlocks( 108 unwinding_info_writer_.SetNumberOfInstructionBlocks(
106 code()->InstructionBlockCount()); 109 code()->InstructionBlockCount());
107 110
108 // Assemble all non-deferred blocks, followed by deferred ones. 111 // Assemble all non-deferred blocks, followed by deferred ones.
109 for (int deferred = 0; deferred < 2; ++deferred) { 112 for (int deferred = 0; deferred < 2; ++deferred) {
110 for (const InstructionBlock* block : code()->instruction_blocks()) { 113 for (const InstructionBlock* block : code()->instruction_blocks()) {
(...skipping 49 matching lines...)
160 } 163 }
161 } 164 }
162 165
163 CodeGenResult result; 166 CodeGenResult result;
164 if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) { 167 if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) {
165 ConstantPoolUnavailableScope constant_pool_unavailable(masm()); 168 ConstantPoolUnavailableScope constant_pool_unavailable(masm());
166 result = AssembleBlock(block); 169 result = AssembleBlock(block);
167 } else { 170 } else {
168 result = AssembleBlock(block); 171 result = AssembleBlock(block);
169 } 172 }
170 if (result != kSuccess) return Handle<Code>(); 173 if (result != kSuccess) {
174 assemble_code_successful_ = false;
175 return false;
176 }
171 unwinding_info_writer_.EndInstructionBlock(block); 177 unwinding_info_writer_.EndInstructionBlock(block);
172 } 178 }
173 } 179 }
174 180
181 assemble_code_successful_ = true;
182 return true;
183 }
184
185 Handle<Code> CodeGenerator::FinishCodeObject() {
186 if (!assemble_code_successful_) {
187 return Handle<Code>::null();
188 }
189
175 // Assemble all out-of-line code. 190 // Assemble all out-of-line code.
176 if (ools_) { 191 if (ools_) {
177 masm()->RecordComment("-- Out of line code --"); 192 masm()->RecordComment("-- Out of line code --");
178 for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) { 193 for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
179 masm()->bind(ool->entry()); 194 masm()->bind(ool->entry());
180 ool->Generate(); 195 ool->Generate();
181 if (ool->exit()->is_bound()) masm()->jmp(ool->exit()); 196 if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
182 } 197 }
183 } 198 }
184 199
185 // Assemble all eager deoptimization exits. 200 // Assemble all eager deoptimization exits.
186 for (DeoptimizationExit* exit : deoptimization_exits_) { 201 for (DeoptimizationExit* exit : deoptimization_exits_) {
187 masm()->bind(exit->label()); 202 masm()->bind(exit->label());
188 AssembleDeoptimizerCall(exit->deoptimization_id(), Deoptimizer::EAGER); 203 AssembleDeoptimizerCall(exit->deoptimization_id(), Deoptimizer::EAGER);
189 } 204 }
190 205
191 // Ensure there is space for lazy deoptimization in the code. 206 // Ensure there is space for lazy deoptimization in the code.
192 if (info->ShouldEnsureSpaceForLazyDeopt()) { 207 if (info()->ShouldEnsureSpaceForLazyDeopt()) {
193 int target_offset = masm()->pc_offset() + Deoptimizer::patch_size(); 208 int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
194 while (masm()->pc_offset() < target_offset) { 209 while (masm()->pc_offset() < target_offset) {
195 masm()->nop(); 210 masm()->nop();
196 } 211 }
197 } 212 }
198 213
199 FinishCode(masm()); 214 FinishCode(masm());
200 215
201 // Emit the jump tables. 216 // Emit the jump tables.
202 if (jump_tables_) { 217 if (jump_tables_) {
203 masm()->Align(kPointerSize); 218 masm()->Align(kPointerSize);
204 for (JumpTable* table = jump_tables_; table; table = table->next()) { 219 for (JumpTable* table = jump_tables_; table; table = table->next()) {
205 masm()->bind(table->label()); 220 masm()->bind(table->label());
206 AssembleJumpTable(table->targets(), table->target_count()); 221 AssembleJumpTable(table->targets(), table->target_count());
207 } 222 }
208 } 223 }
209 224
210 safepoints()->Emit(masm(), frame()->GetTotalFrameSlotCount()); 225 safepoints()->Emit(masm(), frame()->GetTotalFrameSlotCount());
211 226
212 unwinding_info_writer_.Finish(masm()->pc_offset()); 227 unwinding_info_writer_.Finish(masm()->pc_offset());
213 228
214 Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue( 229 Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
215 masm(), unwinding_info_writer_.eh_frame_writer(), info, Handle<Object>()); 230 masm(), unwinding_info_writer_.eh_frame_writer(), info(),
231 Handle<Object>());
216 result->set_is_turbofanned(true); 232 result->set_is_turbofanned(true);
217 result->set_stack_slots(frame()->GetTotalFrameSlotCount()); 233 result->set_stack_slots(frame()->GetTotalFrameSlotCount());
218 result->set_safepoint_table_offset(safepoints()->GetCodeOffset()); 234 result->set_safepoint_table_offset(safepoints()->GetCodeOffset());
219 Handle<ByteArray> source_positions = 235 Handle<ByteArray> source_positions =
220 source_position_table_builder_.ToSourcePositionTable(); 236 source_position_table_builder_.ToSourcePositionTable();
221 result->set_source_position_table(*source_positions); 237 result->set_source_position_table(*source_positions);
222 source_position_table_builder_.EndJitLogging(AbstractCode::cast(*result)); 238 source_position_table_builder_.EndJitLogging(AbstractCode::cast(*result));
223 239
224 // Emit exception handler table. 240 // Emit exception handler table.
225 if (!handlers_.empty()) { 241 if (!handlers_.empty()) {
226 Handle<HandlerTable> table = 242 Handle<HandlerTable> table =
227 Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray( 243 Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
228 HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())), 244 HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
229 TENURED)); 245 TENURED));
230 for (size_t i = 0; i < handlers_.size(); ++i) { 246 for (size_t i = 0; i < handlers_.size(); ++i) {
231 table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset); 247 table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
232 table->SetReturnHandler(static_cast<int>(i), handlers_[i].handler->pos()); 248 table->SetReturnHandler(static_cast<int>(i), handlers_[i].handler->pos());
233 } 249 }
234 result->set_handler_table(*table); 250 result->set_handler_table(*table);
235 } 251 }
236 252
237 PopulateDeoptimizationData(result); 253 PopulateDeoptimizationData(result);
238 254
239 // Ensure there is space for lazy deoptimization in the relocation info. 255 // Ensure there is space for lazy deoptimization in the relocation info.
240 if (info->ShouldEnsureSpaceForLazyDeopt()) { 256 if (info()->ShouldEnsureSpaceForLazyDeopt()) {
241 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result); 257 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
242 } 258 }
243 259
244 return result; 260 return result;
245 } 261 }
246 262
247
248 bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const { 263 bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
249 return code() 264 return code()
250 ->InstructionBlockAt(current_block_) 265 ->InstructionBlockAt(current_block_)
251 ->ao_number() 266 ->ao_number()
252 .IsNext(code()->InstructionBlockAt(block)->ao_number()); 267 .IsNext(code()->InstructionBlockAt(block)->ao_number());
253 } 268 }
254 269
255 270
256 void CodeGenerator::RecordSafepoint(ReferenceMap* references, 271 void CodeGenerator::RecordSafepoint(ReferenceMap* references,
257 Safepoint::Kind kind, int arguments, 272 Safepoint::Kind kind, int arguments,
(...skipping 678 matching lines...)
936 : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) { 951 : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) {
937 gen->ools_ = this; 952 gen->ools_ = this;
938 } 953 }
939 954
940 955
941 OutOfLineCode::~OutOfLineCode() {} 956 OutOfLineCode::~OutOfLineCode() {}
942 957
943 } // namespace compiler 958 } // namespace compiler
944 } // namespace internal 959 } // namespace internal
945 } // namespace v8 960 } // namespace v8
