OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/address-map.h" | 7 #include "src/address-map.h" |
| 8 #include "src/base/adapters.h" |
8 #include "src/compiler/code-generator-impl.h" | 9 #include "src/compiler/code-generator-impl.h" |
9 #include "src/compiler/linkage.h" | 10 #include "src/compiler/linkage.h" |
10 #include "src/compiler/pipeline.h" | 11 #include "src/compiler/pipeline.h" |
11 #include "src/frames-inl.h" | 12 #include "src/frames-inl.h" |
12 | 13 |
13 namespace v8 { | 14 namespace v8 { |
14 namespace internal { | 15 namespace internal { |
15 namespace compiler { | 16 namespace compiler { |
16 | 17 |
17 class CodeGenerator::JumpTable final : public ZoneObject { | 18 class CodeGenerator::JumpTable final : public ZoneObject { |
(...skipping 295 matching lines...)
313 CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock( | 314 CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock( |
314 const InstructionBlock* block) { | 315 const InstructionBlock* block) { |
315 for (int i = block->code_start(); i < block->code_end(); ++i) { | 316 for (int i = block->code_start(); i < block->code_end(); ++i) { |
316 Instruction* instr = code()->InstructionAt(i); | 317 Instruction* instr = code()->InstructionAt(i); |
317 CodeGenResult result = AssembleInstruction(instr, block); | 318 CodeGenResult result = AssembleInstruction(instr, block); |
318 if (result != kSuccess) return result; | 319 if (result != kSuccess) return result; |
319 } | 320 } |
320 return kSuccess; | 321 return kSuccess; |
321 } | 322 } |
322 | 323 |
| 324 bool CodeGenerator::IsValidPush(InstructionOperand source, |
| 325 CodeGenerator::PushTypeFlags push_type) { |
| 326 if (source.IsImmediate() && |
| 327 ((push_type & CodeGenerator::kImmediatePush) != 0)) { |
| 328 return true; |
| 329 } |
| 330 if ((source.IsRegister() || source.IsStackSlot()) && |
| 331 ((push_type & CodeGenerator::kScalarPush) != 0)) { |
| 332 return true; |
| 333 } |
| 334 if ((source.IsFloatRegister() || source.IsFloatStackSlot()) && |
| 335 ((push_type & CodeGenerator::kFloat32Push) != 0)) { |
| 336 return true; |
| 337 } |
| 338 if ((source.IsDoubleRegister() || source.IsDoubleStackSlot()) && |
| 339 ((push_type & CodeGenerator::kFloat64Push) != 0)) { |
| 340 return true; |
| 341 } |
| 342 return false; |
| 343 } |
| 344 |
| 345 void CodeGenerator::GetPushCompatibleMoves(Instruction* instr, |
| 346 PushTypeFlags push_type, |
| 347 ZoneVector<MoveOperands*>* pushes) { |
| 348 pushes->clear(); |
| 349 for (int i = Instruction::FIRST_GAP_POSITION; |
| 350 i <= Instruction::LAST_GAP_POSITION; ++i) { |
| 351 Instruction::GapPosition inner_pos = |
| 352 static_cast<Instruction::GapPosition>(i); |
| 353 ParallelMove* parallel_move = instr->GetParallelMove(inner_pos); |
| 354 if (parallel_move != nullptr) { |
| 355 for (auto move : *parallel_move) { |
| 356 InstructionOperand source = move->source(); |
| 357 InstructionOperand destination = move->destination(); |
| 358 int first_push_compatible_index = |
| 359 V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0; |
| 360 // If there are any moves from slots that will be overridden by pushes, |
| 361 // then the full gap resolver must be used, since the optimized pushes |
| 362 // don't participate in the parallel move and might clobber values |
| 363 // needed for the gap resolve. |
| 364 if (source.IsStackSlot() && |
| 365 LocationOperand::cast(source).index() >= |
| 366 first_push_compatible_index) { |
| 367 pushes->clear(); |
| 368 return; |
| 369 } |
| 370 // TODO(danno): Right now, only consider moves from the FIRST gap for |
| 371 // pushes. Theoretically, we could extract pushes for both gaps (there |
| 372 // are cases where this happens), but the logic for that would also have |
| 373 // to check to make sure that non-memory inputs to the pushes from the |
| 374 // LAST gap don't get clobbered in the FIRST gap. |
| 375 if (i == Instruction::FIRST_GAP_POSITION) { |
| 376 if (destination.IsStackSlot() && |
| 377 LocationOperand::cast(destination).index() >= |
| 378 first_push_compatible_index) { |
| 379 int index = LocationOperand::cast(destination).index(); |
| 380 if (IsValidPush(source, push_type)) { |
| 381 if (index >= static_cast<int>(pushes->size())) { |
| 382 pushes->resize(index + 1); |
| 383 } |
| 384 (*pushes)[index] = move; |
| 385 } |
| 386 } |
| 387 } |
| 388 } |
| 389 } |
| 390 } |
| 391 |
| 392 // For now, only support a set of contiguous pushes at the end of the list. |
| 393 size_t push_count_upper_bound = pushes->size(); |
| 394 size_t push_begin = push_count_upper_bound; |
| 395 for (auto move : base::Reversed(*pushes)) { |
| 396 if (move == nullptr) break; |
| 397 push_begin--; |
| 398 } |
| 399 size_t push_count = pushes->size() - push_begin; |
| 400 std::copy(pushes->begin() + push_begin, |
| 401 pushes->begin() + push_begin + push_count, pushes->begin()); |
| 402 pushes->resize(push_count); |
| 403 } |
| 404 |
323 CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction( | 405 CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction( |
324 Instruction* instr, const InstructionBlock* block) { | 406 Instruction* instr, const InstructionBlock* block) { |
| 407 int first_unused_stack_slot; |
| 408 bool adjust_stack = |
| 409 GetSlotAboveSPBeforeTailCall(instr, &first_unused_stack_slot); |
| 410 if (adjust_stack) AssembleTailCallBeforeGap(instr, first_unused_stack_slot); |
325 AssembleGaps(instr); | 411 AssembleGaps(instr); |
| 412 if (adjust_stack) AssembleTailCallAfterGap(instr, first_unused_stack_slot); |
326 DCHECK_IMPLIES( | 413 DCHECK_IMPLIES( |
327 block->must_deconstruct_frame(), | 414 block->must_deconstruct_frame(), |
328 instr != code()->InstructionAt(block->last_instruction_index()) || | 415 instr != code()->InstructionAt(block->last_instruction_index()) || |
329 instr->IsRet() || instr->IsJump()); | 416 instr->IsRet() || instr->IsJump()); |
330 if (instr->IsJump() && block->must_deconstruct_frame()) { | 417 if (instr->IsJump() && block->must_deconstruct_frame()) { |
331 AssembleDeconstructFrame(); | 418 AssembleDeconstructFrame(); |
332 } | 419 } |
333 AssembleSourcePosition(instr); | 420 AssembleSourcePosition(instr); |
334 // Assemble architecture-specific code for the instruction. | 421 // Assemble architecture-specific code for the instruction. |
335 CodeGenResult result = AssembleArchInstruction(instr); | 422 CodeGenResult result = AssembleArchInstruction(instr); |
(...skipping 79 matching lines...)
415 base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --", | 502 base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --", |
416 file->ToCString().get(), ln, cn); | 503 file->ToCString().get(), ln, cn); |
417 } else { | 504 } else { |
418 base::OS::SNPrintF(buffer.start(), buffer.length(), | 505 base::OS::SNPrintF(buffer.start(), buffer.length(), |
419 "-- <unknown>:%d:%d --", ln, cn); | 506 "-- <unknown>:%d:%d --", ln, cn); |
420 } | 507 } |
421 masm()->RecordComment(buffer.start()); | 508 masm()->RecordComment(buffer.start()); |
422 } | 509 } |
423 } | 510 } |
424 | 511 |
| 512 bool CodeGenerator::GetSlotAboveSPBeforeTailCall(Instruction* instr, |
| 513 int* slot) { |
| 514 if (instr->IsTailCall()) { |
| 515 InstructionOperandConverter g(this, instr); |
| 516 *slot = g.InputInt32(instr->InputCount() - 1); |
| 517 return true; |
| 518 } else { |
| 519 return false; |
| 520 } |
| 521 } |
425 | 522 |
426 void CodeGenerator::AssembleGaps(Instruction* instr) { | 523 void CodeGenerator::AssembleGaps(Instruction* instr) { |
427 for (int i = Instruction::FIRST_GAP_POSITION; | 524 for (int i = Instruction::FIRST_GAP_POSITION; |
428 i <= Instruction::LAST_GAP_POSITION; i++) { | 525 i <= Instruction::LAST_GAP_POSITION; i++) { |
429 Instruction::GapPosition inner_pos = | 526 Instruction::GapPosition inner_pos = |
430 static_cast<Instruction::GapPosition>(i); | 527 static_cast<Instruction::GapPosition>(i); |
431 ParallelMove* move = instr->GetParallelMove(inner_pos); | 528 ParallelMove* move = instr->GetParallelMove(inner_pos); |
432 if (move != nullptr) resolver()->Resolve(move); | 529 if (move != nullptr) resolver()->Resolve(move); |
433 } | 530 } |
434 } | 531 } |
(...skipping 362 matching lines...)
797 DeoptimizationExit* CodeGenerator::AddDeoptimizationExit( | 894 DeoptimizationExit* CodeGenerator::AddDeoptimizationExit( |
798 Instruction* instr, size_t frame_state_offset) { | 895 Instruction* instr, size_t frame_state_offset) { |
799 int const deoptimization_id = BuildTranslation( | 896 int const deoptimization_id = BuildTranslation( |
800 instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore()); | 897 instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore()); |
801 DeoptimizationExit* const exit = | 898 DeoptimizationExit* const exit = |
802 new (zone()) DeoptimizationExit(deoptimization_id); | 899 new (zone()) DeoptimizationExit(deoptimization_id); |
803 deoptimization_exits_.push_back(exit); | 900 deoptimization_exits_.push_back(exit); |
804 return exit; | 901 return exit; |
805 } | 902 } |
806 | 903 |
807 int CodeGenerator::TailCallFrameStackSlotDelta(int stack_param_delta) { | |
808 // Leave the PC on the stack on platforms that have that as part of their ABI | |
809 int pc_slots = V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0; | |
810 int sp_slot_delta = frame_access_state()->has_frame() | |
811 ? (frame()->GetTotalFrameSlotCount() - pc_slots) | |
812 : 0; | |
813 // Discard only slots that won't be used by new parameters. | |
814 sp_slot_delta += stack_param_delta; | |
815 return sp_slot_delta; | |
816 } | |
817 | |
818 | |
819 OutOfLineCode::OutOfLineCode(CodeGenerator* gen) | 904 OutOfLineCode::OutOfLineCode(CodeGenerator* gen) |
820 : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) { | 905 : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) { |
821 gen->ools_ = this; | 906 gen->ools_ = this; |
822 } | 907 } |
823 | 908 |
824 | 909 |
825 OutOfLineCode::~OutOfLineCode() {} | 910 OutOfLineCode::~OutOfLineCode() {} |
826 | 911 |
827 } // namespace compiler | 912 } // namespace compiler |
828 } // namespace internal | 913 } // namespace internal |
829 } // namespace v8 | 914 } // namespace v8 |
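For reference, a minimal sketch of how an architecture backend could consume the new GetPushCompatibleMoves() helper from its AssembleTailCallBeforeGap() hook. This is not code from this CL: DropStackSlotsTo() and EmitPushFor() are hypothetical placeholders for the arch-specific stack-pointer adjustment and push emission, and the exact flag set a backend passes is an assumption.

// Sketch only, assuming hypothetical backend helpers DropStackSlotsTo(slot)
// and EmitPushFor(source); the real per-arch implementations differ.
void CodeGenerator::AssembleTailCallBeforeGap(Instruction* instr,
                                              int first_unused_stack_slot) {
  CodeGenerator::PushTypeFlags flags(kImmediatePush | kScalarPush);
  ZoneVector<MoveOperands*> pushes(zone());
  GetPushCompatibleMoves(instr, flags, &pushes);
  // Only convert the gap moves into pushes if they end exactly at the first
  // slot the tail call leaves unused; otherwise leave everything to the full
  // gap resolver in AssembleGaps().
  if (!pushes.empty() &&
      (LocationOperand::cast(pushes.back()->destination()).index() + 1 ==
       first_unused_stack_slot)) {
    for (auto move : pushes) {
      int slot = LocationOperand::cast(move->destination()).index();
      DropStackSlotsTo(slot);       // hypothetical: bring sp to the push slot
      EmitPushFor(move->source());  // hypothetical: arch-specific push
      frame_access_state()->IncreaseSPDelta(1);
      move->Eliminate();            // drop the move from the parallel move
    }
  }
  DropStackSlotsTo(first_unused_stack_slot);
}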