Chromium Code Reviews

Index: src/compiler/gap-resolver.cc
diff --git a/src/compiler/gap-resolver.cc b/src/compiler/gap-resolver.cc
index 7c397002cb59e6c51efcfebd896a70561448d4c0..ad6f4dfc147dafc7281e260a4366d942c4283334 100644
--- a/src/compiler/gap-resolver.cc
+++ b/src/compiler/gap-resolver.cc
@@ -8,6 +8,8 @@
 #include <functional>
 #include <set>
+#include "src/base/adapters.h"
+
 namespace v8 {
 namespace internal {
 namespace compiler {
@@ -34,6 +36,88 @@ void GapResolver::Resolve(ParallelMove* moves) const {
 }
 }
+namespace {
+bool ValidPush(InstructionOperand source,
Benedikt Meurer
2016/06/30 07:57:06
Nit: IsValidPush
danno
2016/07/01 07:31:56
Done.
+               GapResolver::PushTypeFlags push_type) {
+  if (source.IsImmediate() &&
+      ((push_type & GapResolver::kImmediatePush) != 0)) {
+    return true;
+  }
+  if ((source.IsRegister() || source.IsStackSlot()) &&
+      ((push_type & GapResolver::kScalarPush) != 0)) {
+    return true;
+  }
+  if ((source.IsFloatRegister() || source.IsFloatStackSlot()) &&
+      ((push_type & GapResolver::kFloat32Push) != 0)) {
+    return true;
+  }
+  if ((source.IsDoubleRegister() || source.IsDoubleStackSlot()) &&
+      ((push_type & GapResolver::kFloat64Push) != 0)) {
+    return true;
+  }
+  return false;
+}
+}  // namespace
Benedikt Meurer
2016/06/30 07:57:06
Nit: empty line (here and in other places for anonymous namespaces)
danno
2016/07/01 07:31:56
Done.
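Aside on the helper above: ValidPush is a plain bitmask dispatch — each operand kind is admitted only when its corresponding bit is set in push_type. A standalone sketch of the same pattern follows; the flag values are assumptions for illustration only (the real PushTypeFlags enum is declared elsewhere in the CL and may differ), not V8 code.

#include <cstdio>

// Assumed flag values for illustration; not the actual V8 definitions.
enum PushTypeFlags {
  kImmediatePush = 0x1,
  kScalarPush = 0x2,
  kFloat32Push = 0x4,
  kFloat64Push = 0x8,
};

// Mirrors the shape of ValidPush: a candidate qualifies only if the bit
// for its operand kind is enabled in the push_type mask.
bool AllowsScalar(int push_type) { return (push_type & kScalarPush) != 0; }

int main() {
  int push_type = kImmediatePush | kScalarPush;
  std::printf("scalar ok: %d\n", AllowsScalar(push_type));           // 1
  std::printf("float64 ok: %d\n", (push_type & kFloat64Push) != 0);  // 0
}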
+
+void GapResolver::GetPushCompatibleMoves(Zone* zone, Instruction* instr,
Mircea Trofin
2016/06/30 15:32:34
Are cycles something to worry about?
danno
2016/07/01 07:31:56
Nope, since the test for valid pushes are moves wh…
+                                         PushTypeFlags push_type,
+                                         ZoneVector<MoveOperands*>* pushes) {
+  pushes->resize(0);
+  for (int i = Instruction::FIRST_GAP_POSITION;
+       i <= Instruction::LAST_GAP_POSITION; ++i) {
+    Instruction::GapPosition inner_pos =
+        static_cast<Instruction::GapPosition>(i);
+    ParallelMove* parallel_move = instr->GetParallelMove(inner_pos);
+    if (parallel_move != nullptr) {
+      for (auto& move : *parallel_move) {
+        InstructionOperand source = move->source();
+        InstructionOperand destination = move->destination();
+        int first_push_compatible_index =
+            V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
+        // If there are any moves from slots that will be overridden by
+        // pushes, then the full gap resolver must be used, since the push
+        // optimization doesn't participate in the parallel move and might
+        // clobber values needed for the gap resolve.
+        if (source.IsStackSlot() &&
+            LocationOperand::cast(source).index() >=
+                first_push_compatible_index) {
+          pushes->resize(0);
+          return;
+        }
+        // TODO(danno): Right now, only consider moves from the FIRST gap for
+        // pushes. Theoretically, we could extract pushes for both gaps (there
+        // are cases where this happens), but the logic for that would also
+        // have to check to make sure that non-memory inputs to the pushes
+        // from the LAST gap don't get clobbered in the FIRST gap.
Mircea Trofin
2016/06/30 15:32:33
The move optimizer should have detected and elided…
danno
2016/07/01 07:31:56
Yes. The problem isn't redundancy, it's the fact that…
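An aside on the bail-out and the TODO above: the hazard is ordering, not redundancy. A push peeled out of the parallel move executes eagerly and can overwrite a stack slot that another pending move still reads. A toy illustration of that hazard, with hypothetical slot indices, not V8 code:

#include <cassert>

int main() {
  int stack[4] = {10, 20, 30, 40};
  // Parallel-move semantics: all sources are read before any destination
  // is written, so a move "slot[1] -> reg" must observe the old value.
  int reg_parallel = stack[1];   // reads 20
  // Eager-push ordering: the push to slot[1] happens first and clobbers
  // the value the other move needed.
  stack[1] = 99;                 // peeled-out push writes the slot
  int reg_clobbered = stack[1];  // reads 99 instead of 20
  assert(reg_parallel == 20 && reg_clobbered == 99);
}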
+        if (i == Instruction::FIRST_GAP_POSITION) {
+          if (destination.IsStackSlot() &&
+              LocationOperand::cast(destination).index() >=
+                  first_push_compatible_index) {
+            int index = LocationOperand::cast(destination).index();
+            if (ValidPush(source, push_type)) {
+              if (index >= static_cast<int>(pushes->size())) {
+                pushes->resize(index + 1);
+              }
+              (*pushes)[index] = move;
Mircea Trofin
2016/06/30 15:32:34
pushes->at(index) = move rather?
Jarin
2016/06/30 16:48:04
No, please do not use vector::at - it does bounds checking.
danno
2016/07/01 07:31:56
Done.
danno
2016/07/01 07:31:56
OK. Not done :-)
+            }
+          }
+        }
+      }
+    }
+  }
+
+  // For now, only support a set of contiguous pushes at the end of the list.
+  size_t push_count_upper_bound = pushes->size();
+  size_t push_begin = push_count_upper_bound;
+  for (auto move : base::Reversed(*pushes)) {
+    if (move == nullptr) break;
+    push_begin--;
+  }
+  size_t push_count = pushes->size() - push_begin;
+  std::copy(pushes->begin() + push_begin,
+            pushes->begin() + push_begin + push_count, pushes->begin());
+  pushes->resize(push_count);
+}
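The trimming loop at the end of GetPushCompatibleMoves walks the sparse pushes vector backwards (base::Reversed, from the newly included src/base/adapters.h, is a reverse-iteration adapter) and keeps only the hole-free run at the tail, compacted to the front. A standalone sketch of the same trim over a plain std::vector, with int* standing in for MoveOperands* as an assumption for illustration:

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <vector>

// Keep only the contiguous non-null run at the end of the vector,
// shifted down to the front, mirroring the logic in the CL above.
void KeepTrailingRun(std::vector<int*>* pushes) {
  size_t push_begin = pushes->size();
  for (auto it = pushes->rbegin(); it != pushes->rend(); ++it) {
    if (*it == nullptr) break;  // the first hole ends the trailing run
    push_begin--;
  }
  size_t push_count = pushes->size() - push_begin;
  std::copy(pushes->begin() + push_begin,
            pushes->begin() + push_begin + push_count, pushes->begin());
  pushes->resize(push_count);
}

int main() {
  int a = 1, b = 2, c = 3;
  std::vector<int*> pushes = {&a, nullptr, &b, &c};  // hole at index 1
  KeepTrailingRun(&pushes);
  std::cout << pushes.size() << "\n";  // 2: only the run {&b, &c} survives
}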
 void GapResolver::PerformMove(ParallelMove* moves, MoveOperands* move) const {
   // Each call to this function performs a move and deletes it from the move