| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "v8.h" | 5 #include "v8.h" |
| 6 | 6 |
| 7 #include "arm64/lithium-gap-resolver-arm64.h" | 7 #include "arm64/lithium-gap-resolver-arm64.h" |
| 8 #include "arm64/lithium-codegen-arm64.h" | 8 #include "arm64/lithium-codegen-arm64.h" |
| 9 | 9 |
| 10 namespace v8 { | 10 namespace v8 { |
| (...skipping 23 matching lines...) |
| 34 | 34 |
| 35 void LGapResolver::Resolve(LParallelMove* parallel_move) { | 35 void LGapResolver::Resolve(LParallelMove* parallel_move) { |
| 36 ASSERT(moves_.is_empty()); | 36 ASSERT(moves_.is_empty()); |
| 37 | 37 |
| 38 // Build up a worklist of moves. | 38 // Build up a worklist of moves. |
| 39 BuildInitialMoveList(parallel_move); | 39 BuildInitialMoveList(parallel_move); |
| 40 | 40 |
| 41 for (int i = 0; i < moves_.length(); ++i) { | 41 for (int i = 0; i < moves_.length(); ++i) { |
| 42 LMoveOperands move = moves_[i]; | 42 LMoveOperands move = moves_[i]; |
| 43 | 43 |
| 44 // Skip constants to perform them last. They don't block other moves | 44 // Skip constants and value regenerations to perform them last. They don't |
| 45 // and skipping such moves with register destinations keeps those | 45 // block other moves and skipping such moves with register destinations |
| 46 // registers free for the whole algorithm. | 46 // keeps those registers free for the whole algorithm. |
| 47 if (!move.IsEliminated() && !move.source()->IsConstantOperand()) { | 47 if (!move.IsEliminated() && |
| 48 !move.UsesRegeneration() && !move.source()->IsConstantOperand()) { |
| 48 root_index_ = i; // Any cycle is found when we reach this move again. | 49 root_index_ = i; // Any cycle is found when we reach this move again. |
| 49 PerformMove(i); | 50 PerformMove(i); |
| 50 if (in_cycle_) RestoreValue(); | 51 if (in_cycle_) RestoreValue(); |
| 51 } | 52 } |
| 52 } | 53 } |
| 53 | 54 |
| 54 // Perform the moves with constant sources. | 55 // Perform the moves with constant sources or that regenerate the result. |
| 55 for (int i = 0; i < moves_.length(); ++i) { | 56 for (int i = 0; i < moves_.length(); ++i) { |
| 56 LMoveOperands move = moves_[i]; | 57 LMoveOperands move = moves_[i]; |
| 57 | 58 |
| 58 if (!move.IsEliminated()) { | 59 if (!move.IsEliminated()) { |
| 59 ASSERT(move.source()->IsConstantOperand()); | 60 ASSERT(move.UsesRegeneration() || move.source()->IsConstantOperand()); |
| 60 EmitMove(i); | 61 EmitMove(i); |
| 61 } | 62 } |
| 62 } | 63 } |
| 63 | 64 |
| 64 if (need_to_restore_root_) { | 65 if (need_to_restore_root_) { |
| 65 ASSERT(kSavedValue.Is(root)); | 66 ASSERT(kSavedValue.Is(root)); |
| 66 __ InitializeRootRegister(); | 67 __ InitializeRootRegister(); |
| 67 need_to_restore_root_ = false; | 68 need_to_restore_root_ = false; |
| 68 } | 69 } |
| 69 | 70 |
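
The ordering above matters: a move whose source is a constant (or can be regenerated) never blocks another move, so deferring it to the second pass keeps its destination register free while cycles among the remaining moves are broken. Below is a minimal standalone sketch of that two-pass idea, using hypothetical simplified types rather than V8's LMoveOperands:

#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for LMoveOperands.
struct Move {
  std::string src, dst;
  bool deferrable;  // stand-in for IsConstantOperand() / UsesRegeneration()
  bool eliminated;
};

int main() {
  // The constant load targets x0, which the second move still reads.
  std::vector<Move> moves = {{"#42", "x0", true, false},
                             {"x0", "x1", false, false}};
  // Pass 1: ordinary moves (cycle handling omitted for brevity).
  for (Move& m : moves) {
    if (!m.eliminated && !m.deferrable) {
      std::cout << "Mov " << m.dst << ", " << m.src << "\n";
      m.eliminated = true;
    }
  }
  // Pass 2: constants/regenerations run last, once x0 is no longer a source.
  for (Move& m : moves) {
    if (!m.eliminated) std::cout << "Mov " << m.dst << ", " << m.src << "\n";
  }
  // Prints "Mov x1, x0" then "Mov x0, #42"; the opposite order would
  // clobber x0 before it is read.
}
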
| (...skipping 139 matching lines...) |
| 209 | 210 |
| 210 in_cycle_ = false; | 211 in_cycle_ = false; |
| 211 saved_destination_ = NULL; | 212 saved_destination_ = NULL; |
| 212 } | 213 } |
| 213 | 214 |
| 214 | 215 |
| 215 void LGapResolver::EmitMove(int index) { | 216 void LGapResolver::EmitMove(int index) { |
| 216 LOperand* source = moves_[index].source(); | 217 LOperand* source = moves_[index].source(); |
| 217 LOperand* destination = moves_[index].destination(); | 218 LOperand* destination = moves_[index].destination(); |
| 218 | 219 |
| 220 if (moves_[index].UsesRegeneration()) { |
| 221 if (source->IsInRegister()) { |
| 222 ASSERT(destination->IsInMemory()); |
| 223 if (source->parent_linstr()->hydrogen_value()->HasPhiUses()) { |
| 224 // Phi insertion does not have access to the constant cache, so phi |
| 225 // inputs won't be replaced by regenerations. If we skipped the spill |
| 226 // operation, the phi would load garbage from the stack. |
| 227 } else { |
| 228 // Skip the store to memory. All gaps loading this value will instead |
| 229 // regenerate it. |
| 230 moves_[index].Eliminate(); |
| 231 return; |
| 232 } |
| 233 } else { |
| 234 ASSERT(source->IsInMemory() && destination->IsInRegister()); |
| 235 ASSERT(destination->parent_linstr()->HasResult()); |
| 236 LTemplateResultInstruction<1>* instr = |
| 237 reinterpret_cast<LTemplateResultInstruction<1>*>( |
| 238 destination->parent_linstr()); |
| 239 ASSERT(instr->PreferRegenerateToSpill()); |
| 240 LOperand* prev_result = instr->result(); |
| 241 instr->set_result(destination); |
| 242 instr->CompileToNative(cgen_); |
| 243 instr->set_result(prev_result); |
| 244 moves_[index].Eliminate(); |
| 245 return; |
| 246 } |
| 247 } |
| 248 |
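
For reference, the swap-emit-restore trick in the hunk above can be isolated as follows. This is a toy sketch with hypothetical types (ConstantInstr, Operand), not V8's LTemplateResultInstruction API: the defining instruction's result operand is temporarily redirected at the move's destination, the instruction is re-emitted, and the original result is restored so later emissions are unaffected.

#include <cstdio>

// Hypothetical stand-ins for V8's lithium operand and instruction types.
struct Operand { const char* name; };

struct ConstantInstr {
  Operand* result_;
  int imm;
  Operand* result() const { return result_; }
  void set_result(Operand* op) { result_ = op; }
  // Re-materializes the constant directly into the current result operand.
  void CompileToNative() { std::printf("Mov %s, #%d\n", result_->name, imm); }
};

// The swap-emit-restore pattern from EmitMove above.
void Regenerate(ConstantInstr* instr, Operand* dst) {
  Operand* saved = instr->result();  // remember the original result slot
  instr->set_result(dst);            // redirect output to the move's target
  instr->CompileToNative();          // emit "Mov dst, #imm" instead of a Ldr
  instr->set_result(saved);          // restore for any later emission
}

int main() {
  Operand x0{"x0"}, x2{"x2"};
  ConstantInstr c{&x0, 42};
  Regenerate(&c, &x2);  // prints: Mov x2, #42
}
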
| 219 // Dispatch on the source and destination operand kinds. Not all | 249 // Dispatch on the source and destination operand kinds. Not all |
| 220 // combinations are possible. | 250 // combinations are possible. |
| 221 | 251 |
| 222 if (source->IsRegister()) { | 252 if (source->IsRegister()) { |
| 223 Register source_register = cgen_->ToRegister(source); | 253 Register source_register = cgen_->ToRegister(source); |
| 224 if (destination->IsRegister()) { | 254 if (destination->IsRegister()) { |
| 225 __ Mov(cgen_->ToRegister(destination), source_register); | 255 __ Mov(cgen_->ToRegister(destination), source_register); |
| 226 } else { | 256 } else { |
| 227 ASSERT(destination->IsStackSlot()); | 257 ASSERT(destination->IsStackSlot()); |
| 228 __ Str(source_register, cgen_->ToMemOperand(destination)); | 258 __ Str(source_register, cgen_->ToMemOperand(destination)); |
| (...skipping 73 matching lines...) |
| 302 LOperand* src = moves_[index].source(); | 332 LOperand* src = moves_[index].source(); |
| 303 LOperand* dst = moves_[index].destination(); | 333 LOperand* dst = moves_[index].destination(); |
| 304 | 334 |
| 305 ASSERT(src->IsStackSlot()); | 335 ASSERT(src->IsStackSlot()); |
| 306 ASSERT(dst->IsStackSlot()); | 336 ASSERT(dst->IsStackSlot()); |
| 307 __ Ldr(temp, cgen_->ToMemOperand(src)); | 337 __ Ldr(temp, cgen_->ToMemOperand(src)); |
| 308 __ Str(temp, cgen_->ToMemOperand(dst)); | 338 __ Str(temp, cgen_->ToMemOperand(dst)); |
| 309 } | 339 } |
| 310 | 340 |
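
EmitStackSlotMove exists because ARM64, like most RISC ISAs, has no memory-to-memory move: the value has to bounce through a scratch register, costing one Ldr plus one Str. A toy model of that constraint, with an ordinary array standing in for the stack frame (assumed semantics, not V8 code):

#include <cstdint>
#include <iostream>

// A stack-slot-to-stack-slot copy must go through an intermediate
// register; there is no single instruction that does it directly.
void StackSlotMove(uint64_t* frame, int src_slot, int dst_slot) {
  uint64_t temp = frame[src_slot];  // Ldr temp, [fp, #src_offset]
  frame[dst_slot] = temp;           // Str temp, [fp, #dst_offset]
}

int main() {
  uint64_t frame[4] = {7, 0, 0, 0};
  StackSlotMove(frame, 0, 3);
  std::cout << frame[3] << "\n";  // prints 7
}
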
| 311 } } // namespace v8::internal | 341 } } // namespace v8::internal |