OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/instruction-selector.h" | 5 #include "src/compiler/instruction-selector.h" |
6 | 6 |
7 #include "src/compiler/instruction-selector-impl.h" | 7 #include "src/compiler/instruction-selector-impl.h" |
8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
9 #include "src/compiler/node-properties-inl.h" | 9 #include "src/compiler/node-properties-inl.h" |
10 #include "src/compiler/pipeline.h" | 10 #include "src/compiler/pipeline.h" |
(...skipping 233 matching lines...) | |
244 | 244 |
245 void InstructionSelector::MarkAsRepresentation(MachineType rep, Node* node) { | 245 void InstructionSelector::MarkAsRepresentation(MachineType rep, Node* node) { |
246 DCHECK_NOT_NULL(node); | 246 DCHECK_NOT_NULL(node); |
247 if (RepresentationOf(rep) == kRepFloat64) MarkAsDouble(node); | 247 if (RepresentationOf(rep) == kRepFloat64) MarkAsDouble(node); |
248 if (RepresentationOf(rep) == kRepTagged) MarkAsReference(node); | 248 if (RepresentationOf(rep) == kRepTagged) MarkAsReference(node); |
249 } | 249 } |
250 | 250 |
251 | 251 |
252 // TODO(bmeurer): Get rid of the CallBuffer business and make | 252 // TODO(bmeurer): Get rid of the CallBuffer business and make |
253 // InstructionSelector::VisitCall platform independent instead. | 253 // InstructionSelector::VisitCall platform independent instead. |
254 CallBuffer::CallBuffer(Zone* zone, CallDescriptor* d) | 254 CallBuffer::CallBuffer(Zone* zone, CallDescriptor* d, |
255 : output_count(0), | 255 FrameStateDescriptor* frame_desc) |
256 descriptor(d), | 256 : descriptor(d), frame_state_descriptor(frame_desc) { |
257 output_nodes(zone->NewArray<Node*>(d->ReturnCount())), | 257 output_nodes.reserve(d->ReturnCount()); |
258 outputs(zone->NewArray<InstructionOperand*>(d->ReturnCount())), | 258 outputs.reserve(d->ReturnCount()); |
259 fixed_and_control_args( | 259 pushed_nodes.reserve(input_count()); |
260 zone->NewArray<InstructionOperand*>(input_count() + control_count())), | 260 instruction_args.reserve(input_count() + control_count() + |
261 fixed_count(0), | 261 frame_state_value_count()); |
262 pushed_nodes(zone->NewArray<Node*>(input_count())), | |
263 pushed_count(0) { | |
264 if (d->ReturnCount() > 1) { | |
265 memset(output_nodes, 0, sizeof(Node*) * d->ReturnCount()); // NOLINT | |
266 } | |
267 memset(pushed_nodes, 0, sizeof(Node*) * input_count()); // NOLINT | |
268 } | 262 } |
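
The new constructor drops the zone-allocated fixed arrays and the hand-maintained counters (output_count, fixed_count, pushed_count) in favor of growable vectors that are merely reserve()d to their expected sizes. A minimal standalone sketch of that pattern, using simplified stand-in types rather than the real V8 classes:

```cpp
#include <cstddef>
#include <vector>

// Stand-ins for the V8 node/operand types; only pointers are stored, so
// forward declarations are enough for this sketch.
struct Node;
struct InstructionOperand;

// Hypothetical, simplified CallBuffer: reserve() sizes the vectors up front,
// while push_back() later keeps the element counts implicit.
struct CallBufferSketch {
  std::vector<Node*> output_nodes;                    // call results
  std::vector<InstructionOperand*> outputs;           // output operands
  std::vector<Node*> pushed_nodes;                    // stack arguments
  std::vector<InstructionOperand*> instruction_args;  // remaining call inputs

  CallBufferSketch(size_t return_count, size_t input_count,
                   size_t control_count, size_t frame_state_value_count) {
    output_nodes.reserve(return_count);
    outputs.reserve(return_count);
    pushed_nodes.reserve(input_count);
    instruction_args.reserve(input_count + control_count +
                             frame_state_value_count);
  }
};
```

The counts that were previously tracked by hand now simply fall out of the vectors' size().
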
269 | 263 |
270 | 264 |
271 // TODO(bmeurer): Get rid of the CallBuffer business and make | 265 // TODO(bmeurer): Get rid of the CallBuffer business and make |
272 // InstructionSelector::VisitCall platform independent instead. | 266 // InstructionSelector::VisitCall platform independent instead. |
273 void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer, | 267 void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer, |
274 bool call_code_immediate, | 268 bool call_code_immediate, |
275 bool call_address_immediate, | 269 bool call_address_immediate, |
276 BasicBlock* cont_node, | 270 BasicBlock* cont_node, |
277 BasicBlock* deopt_node) { | 271 BasicBlock* deopt_node) { |
278 OperandGenerator g(this); | 272 OperandGenerator g(this); |
279 DCHECK_EQ(call->op()->OutputCount(), buffer->descriptor->ReturnCount()); | 273 DCHECK_EQ(call->op()->OutputCount(), buffer->descriptor->ReturnCount()); |
280 DCHECK_EQ(OperatorProperties::GetValueInputCount(call->op()), | 274 DCHECK_EQ(OperatorProperties::GetValueInputCount(call->op()), |
281 buffer->input_count()); | 275 buffer->input_count() + buffer->frame_state_count()); |
282 | 276 |
283 if (buffer->descriptor->ReturnCount() > 0) { | 277 if (buffer->descriptor->ReturnCount() > 0) { |
284 // Collect the projections that represent multiple outputs from this call. | 278 // Collect the projections that represent multiple outputs from this call. |
285 if (buffer->descriptor->ReturnCount() == 1) { | 279 if (buffer->descriptor->ReturnCount() == 1) { |
286 buffer->output_nodes[0] = call; | 280 buffer->output_nodes.push_back(call); |
287 } else { | 281 } else { |
288 call->CollectProjections(buffer->descriptor->ReturnCount(), | 282 call->CollectProjections(&buffer->output_nodes); |
289 buffer->output_nodes); | 283 DCHECK(buffer->output_nodes.size() <= |
284 static_cast<size_t>(buffer->descriptor->ReturnCount())); | |
290 } | 285 } |
291 | 286 |
292 // Filter out the outputs that aren't live because no projection uses them. | 287 // Filter out the outputs that aren't live because no projection uses them. |
293 for (int i = 0; i < buffer->descriptor->ReturnCount(); i++) { | 288 for (size_t i = 0; i < buffer->output_nodes.size(); i++) { |
294 if (buffer->output_nodes[i] != NULL) { | 289 if (buffer->output_nodes[i] != NULL) { |
295 Node* output = buffer->output_nodes[i]; | 290 Node* output = buffer->output_nodes[i]; |
296 LinkageLocation location = buffer->descriptor->GetReturnLocation(i); | 291 LinkageLocation location = buffer->descriptor->GetReturnLocation(i); |
297 MarkAsRepresentation(location.representation(), output); | 292 MarkAsRepresentation(location.representation(), output); |
298 buffer->outputs[buffer->output_count++] = | 293 buffer->outputs.push_back(g.DefineAsLocation(output, location)); |
299 g.DefineAsLocation(output, location); | |
300 } | 294 } |
301 } | 295 } |
302 } | 296 } |
303 | 297 |
304 buffer->fixed_count = 1; // First argument is always the callee. | 298 // The first argument is always the callee code. |
305 Node* callee = call->InputAt(0); | 299 Node* callee = call->InputAt(0); |
306 switch (buffer->descriptor->kind()) { | 300 switch (buffer->descriptor->kind()) { |
307 case CallDescriptor::kCallCodeObject: | 301 case CallDescriptor::kCallCodeObject: |
308 buffer->fixed_and_control_args[0] = | 302 buffer->instruction_args.push_back( |
309 (call_code_immediate && callee->opcode() == IrOpcode::kHeapConstant) | 303 (call_code_immediate && callee->opcode() == IrOpcode::kHeapConstant) |
310 ? g.UseImmediate(callee) | 304 ? g.UseImmediate(callee) |
311 : g.UseRegister(callee); | 305 : g.UseRegister(callee)); |
312 break; | 306 break; |
313 case CallDescriptor::kCallAddress: | 307 case CallDescriptor::kCallAddress: |
314 buffer->fixed_and_control_args[0] = | 308 buffer->instruction_args.push_back( |
315 (call_address_immediate && | 309 (call_address_immediate && |
316 (callee->opcode() == IrOpcode::kInt32Constant || | 310 (callee->opcode() == IrOpcode::kInt32Constant || |
317 callee->opcode() == IrOpcode::kInt64Constant)) | 311 callee->opcode() == IrOpcode::kInt64Constant)) |
318 ? g.UseImmediate(callee) | 312 ? g.UseImmediate(callee) |
319 : g.UseRegister(callee); | 313 : g.UseRegister(callee)); |
320 break; | 314 break; |
321 case CallDescriptor::kCallJSFunction: | 315 case CallDescriptor::kCallJSFunction: |
322 buffer->fixed_and_control_args[0] = | 316 buffer->instruction_args.push_back( |
323 g.UseLocation(callee, buffer->descriptor->GetInputLocation(0)); | 317 g.UseLocation(callee, buffer->descriptor->GetInputLocation(0))); |
324 break; | 318 break; |
325 } | 319 } |
320 DCHECK_EQ(1, buffer->instruction_args.size()); | |
321 | |
322 // If the call needs a frame state, it must go to offset 1. | |
323 if (buffer->frame_state_descriptor != NULL) { | |
324 int deoptimization_id = | |
325 sequence()->AddDeoptimizationEntry(buffer->frame_state_descriptor); | |
326 buffer->instruction_args.push_back(g.TempImmediate(deoptimization_id)); | |
327 | |
328 Node* frame_state = call->InputAt(buffer->descriptor->InputCount()); | |
329 AddFrameStateInputs(frame_state, &buffer->instruction_args, | |
330 buffer->frame_state_descriptor); | |
331 } | |
332 DCHECK_EQ(1 + buffer->frame_state_value_count(), | |
333 buffer->instruction_args.size()); | |
326 | 334 |
327 int input_count = buffer->input_count(); | 335 int input_count = buffer->input_count(); |
328 | 336 |
329 // Split the arguments into pushed_nodes and fixed_args. Pushed arguments | 337 // Split the arguments into pushed_nodes and instruction_args. Pushed |
330 // require an explicit push instruction before the call and do not appear | 338 // arguments require an explicit push instruction before the call and do |
331 // as arguments to the call. Everything else ends up as an InstructionOperand | 339 // not appear as arguments to the call. Everything else ends up |
332 // argument to the call. | 340 // as an InstructionOperand argument to the call. |
333 InputIter iter(call->inputs().begin()); | 341 InputIter iter(call->inputs().begin()); |
342 int pushed_count = 0; | |
334 for (int index = 0; index < input_count; ++iter, ++index) { | 343 for (int index = 0; index < input_count; ++iter, ++index) { |
335 DCHECK(iter != call->inputs().end()); | 344 DCHECK(iter != call->inputs().end()); |
336 DCHECK(index == iter.index()); | 345 DCHECK(index == iter.index()); |
346 DCHECK((*iter)->op()->opcode() != IrOpcode::kFrameState); | |
337 if (index == 0) continue; // The first argument (callee) is already done. | 347 if (index == 0) continue; // The first argument (callee) is already done. |
338 InstructionOperand* op = | 348 InstructionOperand* op = |
339 g.UseLocation(*iter, buffer->descriptor->GetInputLocation(index)); | 349 g.UseLocation(*iter, buffer->descriptor->GetInputLocation(index)); |
340 if (UnallocatedOperand::cast(op)->HasFixedSlotPolicy()) { | 350 if (UnallocatedOperand::cast(op)->HasFixedSlotPolicy()) { |
341 int stack_index = -UnallocatedOperand::cast(op)->fixed_slot_index() - 1; | 351 int stack_index = -UnallocatedOperand::cast(op)->fixed_slot_index() - 1; |
342 DCHECK(buffer->pushed_nodes[stack_index] == NULL); | 352 if (static_cast<size_t>(stack_index) >= buffer->pushed_nodes.size()) { |
353 buffer->pushed_nodes.resize(stack_index + 1, NULL); | |
354 } | |
355 DCHECK_EQ(NULL, buffer->pushed_nodes[stack_index]); | |
343 buffer->pushed_nodes[stack_index] = *iter; | 356 buffer->pushed_nodes[stack_index] = *iter; |
344 buffer->pushed_count++; | 357 pushed_count++; |
345 } else { | 358 } else { |
346 buffer->fixed_and_control_args[buffer->fixed_count] = op; | 359 buffer->instruction_args.push_back(op); |
347 buffer->fixed_count++; | |
348 } | 360 } |
349 } | 361 } |
362 CHECK_EQ(pushed_count, buffer->pushed_nodes.size()); | |
350 | 363 |
351 // If the call can deoptimize, we add the continuation and deoptimization | 364 // If the call can deoptimize, we add the continuation and deoptimization |
352 // block labels. | 365 // block labels. |
353 if (buffer->descriptor->CanLazilyDeoptimize()) { | 366 if (buffer->descriptor->CanLazilyDeoptimize()) { |
354 DCHECK(cont_node != NULL); | 367 DCHECK(cont_node != NULL); |
355 DCHECK(deopt_node != NULL); | 368 DCHECK(deopt_node != NULL); |
356 buffer->fixed_and_control_args[buffer->fixed_count] = g.Label(cont_node); | 369 buffer->instruction_args.push_back(g.Label(cont_node)); |
357 buffer->fixed_and_control_args[buffer->fixed_count + 1] = | 370 buffer->instruction_args.push_back(g.Label(deopt_node)); |
358 g.Label(deopt_node); | |
359 } else { | 371 } else { |
360 DCHECK(cont_node == NULL); | 372 DCHECK(cont_node == NULL); |
361 DCHECK(deopt_node == NULL); | 373 DCHECK(deopt_node == NULL); |
362 } | 374 } |
363 | 375 |
364 DCHECK(input_count == (buffer->fixed_count + buffer->pushed_count)); | 376 DCHECK(input_count == |
377 (buffer->instruction_args.size() - buffer->control_count() + | |
378 buffer->pushed_nodes.size() - buffer->frame_state_value_count())); | |
365 } | 379 } |
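
After this function runs, instruction_args holds the callee operand, then (when a frame state is attached) the deoptimization id and the flattened frame state values, then the register-passed arguments, and finally the continuation/deopt labels for lazily deoptimizing calls; stack arguments live separately in pushed_nodes. The closing DCHECK rebalances those pieces against input_count. A small worked example with assumed counts:

```cpp
#include <cassert>
#include <cstddef>

// Assumed example: a call with a callee plus 5 value arguments (3 passed in
// registers, 2 on the stack), a frame state contributing 4 values, and lazy
// deoptimization enabled (continuation + deopt labels).
int main() {
  const size_t input_count = 1 + 5;         // callee + value arguments
  const size_t pushed_nodes = 2;            // stack arguments
  const size_t frame_state_values = 1 + 4;  // deopt id + frame state inputs
  const size_t control_count = 2;           // continuation and deopt labels

  const size_t instruction_args =
      1 /* callee */ + frame_state_values + 3 /* register args */ +
      control_count;

  // Mirrors the DCHECK at the end of InitializeCallBuffer:
  // 11 - 2 + 2 - 5 == 6.
  assert(input_count ==
         instruction_args - control_count + pushed_nodes - frame_state_values);
  return 0;
}
```
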
366 | 380 |
367 | 381 |
368 void InstructionSelector::VisitBlock(BasicBlock* block) { | 382 void InstructionSelector::VisitBlock(BasicBlock* block) { |
369 DCHECK_EQ(NULL, current_block_); | 383 DCHECK_EQ(NULL, current_block_); |
370 current_block_ = block; | 384 current_block_ = block; |
371 int current_block_end = static_cast<int>(instructions_.size()); | 385 int current_block_end = static_cast<int>(instructions_.size()); |
372 | 386 |
373 // Generate code for the block control "top down", but schedule the code | 387 // Generate code for the block control "top down", but schedule the code |
374 // "bottom up". | 388 // "bottom up". |
(...skipping 612 matching lines...) | |
987 Emit(kArchRet, NULL); | 1001 Emit(kArchRet, NULL); |
988 } | 1002 } |
989 } | 1003 } |
990 | 1004 |
991 | 1005 |
992 void InstructionSelector::VisitThrow(Node* value) { | 1006 void InstructionSelector::VisitThrow(Node* value) { |
993 UNIMPLEMENTED(); // TODO(titzer) | 1007 UNIMPLEMENTED(); // TODO(titzer) |
994 } | 1008 } |
995 | 1009 |
996 | 1010 |
1011 FrameStateDescriptor* InstructionSelector::GetFrameStateDescriptor( | |
1012 Node* state) { | |
1013 DCHECK(state->op()->opcode() == IrOpcode::kFrameState); | |
1014 BailoutId ast_id = OpParameter<BailoutId>(state); | |
1015 Node* parameters = state->InputAt(0); | |
1016 Node* locals = state->InputAt(1); | |
1017 Node* stack = state->InputAt(2); | |
1018 | |
1019 return new (instruction_zone()) | |
1020 FrameStateDescriptor(ast_id, OpParameter<int>(parameters), | |
1021 OpParameter<int>(locals), OpParameter<int>(stack)); | |
1022 } | |
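
GetFrameStateDescriptor reads the bailout id from the FrameState node itself and the three counts from its count-carrying inputs. A simplified stand-in for the descriptor it builds (the real class lives elsewhere in the compiler and carries more state; the fields below are just what this code reads and what size() needs):

```cpp
// Simplified, assumed shape of a frame state descriptor; not the real
// v8::internal::compiler::FrameStateDescriptor.
struct FrameStateDescriptorSketch {
  int bailout_id;        // deopt point id (ast_id in the code above)
  int parameters_count;  // from the FrameState node's first input
  int locals_count;      // from its second input
  int stack_count;       // from its third input

  // Number of value inputs the frame state contributes to an instruction,
  // matching how descriptor->size() is used by the callers below.
  int size() const { return parameters_count + locals_count + stack_count; }
};
```
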
1023 | |
1024 | |
997 static InstructionOperand* UseOrImmediate(OperandGenerator* g, Node* input) { | 1025 static InstructionOperand* UseOrImmediate(OperandGenerator* g, Node* input) { |
998 switch (input->opcode()) { | 1026 switch (input->opcode()) { |
999 case IrOpcode::kInt32Constant: | 1027 case IrOpcode::kInt32Constant: |
1000 case IrOpcode::kNumberConstant: | 1028 case IrOpcode::kNumberConstant: |
1001 case IrOpcode::kFloat64Constant: | 1029 case IrOpcode::kFloat64Constant: |
1002 case IrOpcode::kHeapConstant: | 1030 case IrOpcode::kHeapConstant: |
1003 return g->UseImmediate(input); | 1031 return g->UseImmediate(input); |
1004 default: | 1032 default: |
1005 return g->Use(input); | 1033 return g->Use(input); |
1006 } | 1034 } |
1007 } | 1035 } |
1008 | 1036 |
1009 | 1037 |
1038 void InstructionSelector::AddFrameStateInputs( | |
1039 Node* state, std::vector<InstructionOperand*>* inputs, | |
Benedikt Meurer (2014/08/21 08:19:29): I don't like hardcoding the std::vector here. But
Jarin (2014/08/21 09:54:42): We just use vector::push_back here, so it will be
1040 FrameStateDescriptor* descriptor) { | |
1041 DCHECK_EQ(IrOpcode::kFrameState, state->op()->opcode()); | |
1042 | |
1043 Node* parameters = state->InputAt(0); | |
1044 Node* locals = state->InputAt(1); | |
1045 Node* stack = state->InputAt(2); | |
1046 | |
1047 DCHECK_EQ(descriptor->parameters_count(), parameters->InputCount()); | |
1048 DCHECK_EQ(descriptor->locals_count(), locals->InputCount()); | |
1049 DCHECK_EQ(descriptor->stack_count(), stack->InputCount()); | |
1050 | |
1051 OperandGenerator g(this); | |
1052 for (int i = 0; i < descriptor->parameters_count(); i++) { | |
1053 inputs->push_back(UseOrImmediate(&g, parameters->InputAt(i))); | |
1054 } | |
1055 for (int i = 0; i < descriptor->locals_count(); i++) { | |
1056 inputs->push_back(UseOrImmediate(&g, locals->InputAt(i))); | |
1057 } | |
1058 for (int i = 0; i < descriptor->stack_count(); i++) { | |
1059 inputs->push_back(UseOrImmediate(&g, stack->InputAt(i))); | |
1060 } | |
1061 } | |
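
AddFrameStateInputs flattens the frame state in a fixed order (parameters, then locals, then the expression stack), which is also the order descriptor->size() accounts for. A minimal, generic sketch of that flattening, with the operand type left abstract since the real code funnels each node through UseOrImmediate():

```cpp
#include <vector>

// Assumed simplification: each frame-state section is already a list of
// operands here; in the selector they come from the FrameState node's three
// inputs and are converted via UseOrImmediate() first.
template <typename Operand>
void FlattenFrameState(const std::vector<Operand>& parameters,
                       const std::vector<Operand>& locals,
                       const std::vector<Operand>& stack,
                       std::vector<Operand>* inputs) {
  // Keep the order parameters, locals, stack so the consumer of the
  // deoptimization entry can index the values by section.
  inputs->insert(inputs->end(), parameters.begin(), parameters.end());
  inputs->insert(inputs->end(), locals.begin(), locals.end());
  inputs->insert(inputs->end(), stack.begin(), stack.end());
}
```
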
1062 | |
1063 | |
1010 void InstructionSelector::VisitDeoptimize(Node* deopt) { | 1064 void InstructionSelector::VisitDeoptimize(Node* deopt) { |
1011 DCHECK(deopt->op()->opcode() == IrOpcode::kDeoptimize); | 1065 DCHECK(deopt->op()->opcode() == IrOpcode::kDeoptimize); |
1012 Node* state = deopt->InputAt(0); | 1066 Node* state = deopt->InputAt(0); |
1013 DCHECK(state->op()->opcode() == IrOpcode::kFrameState); | 1067 FrameStateDescriptor* descriptor = GetFrameStateDescriptor(state); |
1014 BailoutId ast_id = OpParameter<BailoutId>(state); | |
1015 | 1068 |
1016 // Add the inputs. | 1069 std::vector<InstructionOperand*> inputs; |
1017 Node* parameters = state->InputAt(0); | 1070 inputs.reserve(descriptor->size()); |
1018 int parameters_count = OpParameter<int>(parameters); | |
1019 | 1071 |
1020 Node* locals = state->InputAt(1); | 1072 AddFrameStateInputs(state, &inputs, descriptor); |
1021 int locals_count = OpParameter<int>(locals); | |
1022 | |
1023 Node* stack = state->InputAt(2); | |
1024 int stack_count = OpParameter<int>(stack); | |
1025 | |
1026 OperandGenerator g(this); | |
1027 std::vector<InstructionOperand*> inputs; | |
1028 inputs.reserve(parameters_count + locals_count + stack_count); | |
1029 for (int i = 0; i < parameters_count; i++) { | |
1030 inputs.push_back(UseOrImmediate(&g, parameters->InputAt(i))); | |
1031 } | |
1032 for (int i = 0; i < locals_count; i++) { | |
1033 inputs.push_back(UseOrImmediate(&g, locals->InputAt(i))); | |
1034 } | |
1035 for (int i = 0; i < stack_count; i++) { | |
1036 inputs.push_back(UseOrImmediate(&g, stack->InputAt(i))); | |
1037 } | |
1038 | |
1039 FrameStateDescriptor* descriptor = new (instruction_zone()) | |
1040 FrameStateDescriptor(ast_id, parameters_count, locals_count, stack_count); | |
1041 | 1073 |
1042 DCHECK_EQ(descriptor->size(), inputs.size()); | 1074 DCHECK_EQ(descriptor->size(), inputs.size()); |
1043 | 1075 |
1044 int deoptimization_id = sequence()->AddDeoptimizationEntry(descriptor); | 1076 int deoptimization_id = sequence()->AddDeoptimizationEntry(descriptor); |
1045 Emit(kArchDeoptimize | MiscField::encode(deoptimization_id), 0, NULL, | 1077 Emit(kArchDeoptimize | MiscField::encode(deoptimization_id), 0, NULL, |
1046 inputs.size(), &inputs.front(), 0, NULL); | 1078 inputs.size(), &inputs.front(), 0, NULL); |
1047 } | 1079 } |
1048 | 1080 |
1049 | 1081 |
1050 #if !V8_TURBOFAN_BACKEND | 1082 #if !V8_TURBOFAN_BACKEND |
(...skipping 34 matching lines...) | |
1085 | 1117 |
1086 | 1118 |
1087 void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, | 1119 void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation, |
1088 BasicBlock* deoptimization) {} | 1120 BasicBlock* deoptimization) {} |
1089 | 1121 |
1090 #endif // !V8_TURBOFAN_BACKEND | 1122 #endif // !V8_TURBOFAN_BACKEND |
1091 | 1123 |
1092 } // namespace compiler | 1124 } // namespace compiler |
1093 } // namespace internal | 1125 } // namespace internal |
1094 } // namespace v8 | 1126 } // namespace v8 |