Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(436)

Side by Side Diff: src/arm/lithium-gap-resolver-arm.cc

Issue 255883002: ARM: don't use r9 anymore for gap resolution (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: rename kSavedValue Created 6 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm/lithium-gap-resolver-arm.h ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 15 matching lines...) Expand all
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #include "arm/lithium-gap-resolver-arm.h" 30 #include "arm/lithium-gap-resolver-arm.h"
31 #include "arm/lithium-codegen-arm.h" 31 #include "arm/lithium-codegen-arm.h"
32 32
33 namespace v8 { 33 namespace v8 {
34 namespace internal { 34 namespace internal {
35 35
36 static const Register kSavedValueRegister = { 9 }; 36 // We use the root register to spill a value while breaking a cycle in parallel
37 // moves. We don't need access to roots while resolving the move list and using
38 // the root register has two advantages:
39 // - It is not in crankshaft allocatable registers list, so it can't interfere
40 // with any of the moves we are resolving.
41 // - We don't need to push it on the stack, as we can reload it with its value
42 // once we have resolved a cycle.
43 #define kSavedValueRegister kRootRegister
44
37 45
38 LGapResolver::LGapResolver(LCodeGen* owner) 46 LGapResolver::LGapResolver(LCodeGen* owner)
39 : cgen_(owner), moves_(32, owner->zone()), root_index_(0), in_cycle_(false), 47 : cgen_(owner), moves_(32, owner->zone()), root_index_(0), in_cycle_(false),
40 saved_destination_(NULL) { } 48 saved_destination_(NULL), need_to_restore_root_(false) { }
49
50
51 #define __ ACCESS_MASM(cgen_->masm())
41 52
42 53
43 void LGapResolver::Resolve(LParallelMove* parallel_move) { 54 void LGapResolver::Resolve(LParallelMove* parallel_move) {
44 ASSERT(moves_.is_empty()); 55 ASSERT(moves_.is_empty());
45 // Build up a worklist of moves. 56 // Build up a worklist of moves.
46 BuildInitialMoveList(parallel_move); 57 BuildInitialMoveList(parallel_move);
47 58
48 for (int i = 0; i < moves_.length(); ++i) { 59 for (int i = 0; i < moves_.length(); ++i) {
49 LMoveOperands move = moves_[i]; 60 LMoveOperands move = moves_[i];
50 // Skip constants to perform them last. They don't block other moves 61 // Skip constants to perform them last. They don't block other moves
51 // and skipping such moves with register destinations keeps those 62 // and skipping such moves with register destinations keeps those
52 // registers free for the whole algorithm. 63 // registers free for the whole algorithm.
53 if (!move.IsEliminated() && !move.source()->IsConstantOperand()) { 64 if (!move.IsEliminated() && !move.source()->IsConstantOperand()) {
54 root_index_ = i; // Any cycle is found when reaching this move again. 65 root_index_ = i; // Any cycle is found when reaching this move again.
55 PerformMove(i); 66 PerformMove(i);
56 if (in_cycle_) { 67 if (in_cycle_) {
57 RestoreValue(); 68 RestoreValue();
58 } 69 }
59 } 70 }
60 } 71 }
61 72
62 // Perform the moves with constant sources. 73 // Perform the moves with constant sources.
63 for (int i = 0; i < moves_.length(); ++i) { 74 for (int i = 0; i < moves_.length(); ++i) {
64 if (!moves_[i].IsEliminated()) { 75 if (!moves_[i].IsEliminated()) {
65 ASSERT(moves_[i].source()->IsConstantOperand()); 76 ASSERT(moves_[i].source()->IsConstantOperand());
66 EmitMove(i); 77 EmitMove(i);
67 } 78 }
68 } 79 }
69 80
81 if (need_to_restore_root_) {
82 ASSERT(kSavedValueRegister.is(kRootRegister));
83 __ InitializeRootRegister();
84 need_to_restore_root_ = false;
85 }
86
70 moves_.Rewind(0); 87 moves_.Rewind(0);
71 } 88 }
72 89
73 90
74 void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) { 91 void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
75 // Perform a linear sweep of the moves to add them to the initial list of 92 // Perform a linear sweep of the moves to add them to the initial list of
76 // moves to perform, ignoring any move that is redundant (the source is 93 // moves to perform, ignoring any move that is redundant (the source is
77 // the same as the destination, the destination is ignored and 94 // the same as the destination, the destination is ignored and
78 // unallocated, or the move was already eliminated). 95 // unallocated, or the move was already eliminated).
79 const ZoneList<LMoveOperands>* moves = parallel_move->move_operands(); 96 const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
148 // No operand should be the destination for more than one move. 165 // No operand should be the destination for more than one move.
149 for (int i = 0; i < moves_.length(); ++i) { 166 for (int i = 0; i < moves_.length(); ++i) {
150 LOperand* destination = moves_[i].destination(); 167 LOperand* destination = moves_[i].destination();
151 for (int j = i + 1; j < moves_.length(); ++j) { 168 for (int j = i + 1; j < moves_.length(); ++j) {
152 SLOW_ASSERT(!destination->Equals(moves_[j].destination())); 169 SLOW_ASSERT(!destination->Equals(moves_[j].destination()));
153 } 170 }
154 } 171 }
155 #endif 172 #endif
156 } 173 }
157 174
158 #define __ ACCESS_MASM(cgen_->masm())
159 175
160 void LGapResolver::BreakCycle(int index) { 176 void LGapResolver::BreakCycle(int index) {
161 // We save in a register the value that should end up in the source of 177 // We save in a register the source of that move and we remember its
162 // moves_[root_index]. After performing all moves in the tree rooted 178 // destination. Then we mark this move as resolved so the cycle is
163 // in that move, we save the value to that source. 179 // broken and we can perform the other moves.
164 ASSERT(moves_[index].destination()->Equals(moves_[root_index_].source())); 180 ASSERT(moves_[index].destination()->Equals(moves_[root_index_].source()));
165 ASSERT(!in_cycle_); 181 ASSERT(!in_cycle_);
166 in_cycle_ = true; 182 in_cycle_ = true;
167 LOperand* source = moves_[index].source(); 183 LOperand* source = moves_[index].source();
168 saved_destination_ = moves_[index].destination(); 184 saved_destination_ = moves_[index].destination();
169 if (source->IsRegister()) { 185 if (source->IsRegister()) {
186 need_to_restore_root_ = true;
170 __ mov(kSavedValueRegister, cgen_->ToRegister(source)); 187 __ mov(kSavedValueRegister, cgen_->ToRegister(source));
171 } else if (source->IsStackSlot()) { 188 } else if (source->IsStackSlot()) {
189 need_to_restore_root_ = true;
172 __ ldr(kSavedValueRegister, cgen_->ToMemOperand(source)); 190 __ ldr(kSavedValueRegister, cgen_->ToMemOperand(source));
173 } else if (source->IsDoubleRegister()) { 191 } else if (source->IsDoubleRegister()) {
174 __ vmov(kScratchDoubleReg, cgen_->ToDoubleRegister(source)); 192 __ vmov(kScratchDoubleReg, cgen_->ToDoubleRegister(source));
175 } else if (source->IsDoubleStackSlot()) { 193 } else if (source->IsDoubleStackSlot()) {
176 __ vldr(kScratchDoubleReg, cgen_->ToMemOperand(source)); 194 __ vldr(kScratchDoubleReg, cgen_->ToMemOperand(source));
177 } else { 195 } else {
178 UNREACHABLE(); 196 UNREACHABLE();
179 } 197 }
180 // This move will be done by restoring the saved value to the destination. 198 // This move will be done by restoring the saved value to the destination.
181 moves_[index].Eliminate(); 199 moves_[index].Eliminate();
182 } 200 }
183 201
184 202
185 void LGapResolver::RestoreValue() { 203 void LGapResolver::RestoreValue() {
186 ASSERT(in_cycle_); 204 ASSERT(in_cycle_);
187 ASSERT(saved_destination_ != NULL); 205 ASSERT(saved_destination_ != NULL);
188 206
189 // Spilled value is in kSavedValueRegister or kSavedDoubleValueRegister.
190 if (saved_destination_->IsRegister()) { 207 if (saved_destination_->IsRegister()) {
191 __ mov(cgen_->ToRegister(saved_destination_), kSavedValueRegister); 208 __ mov(cgen_->ToRegister(saved_destination_), kSavedValueRegister);
192 } else if (saved_destination_->IsStackSlot()) { 209 } else if (saved_destination_->IsStackSlot()) {
193 __ str(kSavedValueRegister, cgen_->ToMemOperand(saved_destination_)); 210 __ str(kSavedValueRegister, cgen_->ToMemOperand(saved_destination_));
194 } else if (saved_destination_->IsDoubleRegister()) { 211 } else if (saved_destination_->IsDoubleRegister()) {
195 __ vmov(cgen_->ToDoubleRegister(saved_destination_), kScratchDoubleReg); 212 __ vmov(cgen_->ToDoubleRegister(saved_destination_), kScratchDoubleReg);
196 } else if (saved_destination_->IsDoubleStackSlot()) { 213 } else if (saved_destination_->IsDoubleStackSlot()) {
197 __ vstr(kScratchDoubleReg, cgen_->ToMemOperand(saved_destination_)); 214 __ vstr(kScratchDoubleReg, cgen_->ToMemOperand(saved_destination_));
198 } else { 215 } else {
199 UNREACHABLE(); 216 UNREACHABLE();
(...skipping 19 matching lines...) Expand all
219 ASSERT(destination->IsStackSlot()); 236 ASSERT(destination->IsStackSlot());
220 __ str(source_register, cgen_->ToMemOperand(destination)); 237 __ str(source_register, cgen_->ToMemOperand(destination));
221 } 238 }
222 } else if (source->IsStackSlot()) { 239 } else if (source->IsStackSlot()) {
223 MemOperand source_operand = cgen_->ToMemOperand(source); 240 MemOperand source_operand = cgen_->ToMemOperand(source);
224 if (destination->IsRegister()) { 241 if (destination->IsRegister()) {
225 __ ldr(cgen_->ToRegister(destination), source_operand); 242 __ ldr(cgen_->ToRegister(destination), source_operand);
226 } else { 243 } else {
227 ASSERT(destination->IsStackSlot()); 244 ASSERT(destination->IsStackSlot());
228 MemOperand destination_operand = cgen_->ToMemOperand(destination); 245 MemOperand destination_operand = cgen_->ToMemOperand(destination);
229 if (in_cycle_) { 246 if (!destination_operand.OffsetIsUint12Encodable()) {
230 if (!destination_operand.OffsetIsUint12Encodable()) { 247 // ip is overwritten while saving the value to the destination.
231 // ip is overwritten while saving the value to the destination. 248 // Therefore we can't use ip. It is OK if the read from the source
232 // Therefore we can't use ip. It is OK if the read from the source 249 // destroys ip, since that happens before the value is read.
233 // destroys ip, since that happens before the value is read. 250 __ vldr(kScratchDoubleReg.low(), source_operand);
234 __ vldr(kScratchDoubleReg.low(), source_operand); 251 __ vstr(kScratchDoubleReg.low(), destination_operand);
235 __ vstr(kScratchDoubleReg.low(), destination_operand);
236 } else {
237 __ ldr(ip, source_operand);
238 __ str(ip, destination_operand);
239 }
240 } else { 252 } else {
241 __ ldr(kSavedValueRegister, source_operand); 253 __ ldr(ip, source_operand);
242 __ str(kSavedValueRegister, destination_operand); 254 __ str(ip, destination_operand);
243 } 255 }
244 } 256 }
245 257
246 } else if (source->IsConstantOperand()) { 258 } else if (source->IsConstantOperand()) {
247 LConstantOperand* constant_source = LConstantOperand::cast(source); 259 LConstantOperand* constant_source = LConstantOperand::cast(source);
248 if (destination->IsRegister()) { 260 if (destination->IsRegister()) {
249 Register dst = cgen_->ToRegister(destination); 261 Register dst = cgen_->ToRegister(destination);
250 Representation r = cgen_->IsSmi(constant_source) 262 Representation r = cgen_->IsSmi(constant_source)
251 ? Representation::Smi() : Representation::Integer32(); 263 ? Representation::Smi() : Representation::Integer32();
252 if (cgen_->IsInteger32(constant_source)) { 264 if (cgen_->IsInteger32(constant_source)) {
253 __ mov(dst, Operand(cgen_->ToRepresentation(constant_source, r))); 265 __ mov(dst, Operand(cgen_->ToRepresentation(constant_source, r)));
254 } else { 266 } else {
255 __ Move(dst, cgen_->ToHandle(constant_source)); 267 __ Move(dst, cgen_->ToHandle(constant_source));
256 } 268 }
257 } else if (destination->IsDoubleRegister()) { 269 } else if (destination->IsDoubleRegister()) {
258 DwVfpRegister result = cgen_->ToDoubleRegister(destination); 270 DwVfpRegister result = cgen_->ToDoubleRegister(destination);
259 double v = cgen_->ToDouble(constant_source); 271 double v = cgen_->ToDouble(constant_source);
260 __ Vmov(result, v, ip); 272 __ Vmov(result, v, ip);
261 } else { 273 } else {
262 ASSERT(destination->IsStackSlot()); 274 ASSERT(destination->IsStackSlot());
263 ASSERT(!in_cycle_); // Constant moves happen after all cycles are gone. 275 ASSERT(!in_cycle_); // Constant moves happen after all cycles are gone.
276 need_to_restore_root_ = true;
264 Representation r = cgen_->IsSmi(constant_source) 277 Representation r = cgen_->IsSmi(constant_source)
265 ? Representation::Smi() : Representation::Integer32(); 278 ? Representation::Smi() : Representation::Integer32();
266 if (cgen_->IsInteger32(constant_source)) { 279 if (cgen_->IsInteger32(constant_source)) {
267 __ mov(kSavedValueRegister, 280 __ mov(kSavedValueRegister,
268 Operand(cgen_->ToRepresentation(constant_source, r))); 281 Operand(cgen_->ToRepresentation(constant_source, r)));
269 } else { 282 } else {
270 __ Move(kSavedValueRegister, 283 __ Move(kSavedValueRegister, cgen_->ToHandle(constant_source));
271 cgen_->ToHandle(constant_source));
272 } 284 }
273 __ str(kSavedValueRegister, cgen_->ToMemOperand(destination)); 285 __ str(kSavedValueRegister, cgen_->ToMemOperand(destination));
274 } 286 }
275 287
276 } else if (source->IsDoubleRegister()) { 288 } else if (source->IsDoubleRegister()) {
277 DwVfpRegister source_register = cgen_->ToDoubleRegister(source); 289 DwVfpRegister source_register = cgen_->ToDoubleRegister(source);
278 if (destination->IsDoubleRegister()) { 290 if (destination->IsDoubleRegister()) {
279 __ vmov(cgen_->ToDoubleRegister(destination), source_register); 291 __ vmov(cgen_->ToDoubleRegister(destination), source_register);
280 } else { 292 } else {
281 ASSERT(destination->IsDoubleStackSlot()); 293 ASSERT(destination->IsDoubleStackSlot());
282 __ vstr(source_register, cgen_->ToMemOperand(destination)); 294 __ vstr(source_register, cgen_->ToMemOperand(destination));
283 } 295 }
284 296
285 } else if (source->IsDoubleStackSlot()) { 297 } else if (source->IsDoubleStackSlot()) {
286 MemOperand source_operand = cgen_->ToMemOperand(source); 298 MemOperand source_operand = cgen_->ToMemOperand(source);
287 if (destination->IsDoubleRegister()) { 299 if (destination->IsDoubleRegister()) {
288 __ vldr(cgen_->ToDoubleRegister(destination), source_operand); 300 __ vldr(cgen_->ToDoubleRegister(destination), source_operand);
289 } else { 301 } else {
290 ASSERT(destination->IsDoubleStackSlot()); 302 ASSERT(destination->IsDoubleStackSlot());
291 MemOperand destination_operand = cgen_->ToMemOperand(destination); 303 MemOperand destination_operand = cgen_->ToMemOperand(destination);
292 if (in_cycle_) { 304 if (in_cycle_) {
293 // kSavedDoubleValueRegister was used to break the cycle, 305 // kScratchDoubleReg was used to break the cycle.
294 // but kSavedValueRegister is free. 306 __ vstm(db_w, sp, kScratchDoubleReg, kScratchDoubleReg);
295 MemOperand source_high_operand = 307 __ vldr(kScratchDoubleReg, source_operand);
296 cgen_->ToHighMemOperand(source); 308 __ vstr(kScratchDoubleReg, destination_operand);
297 MemOperand destination_high_operand = 309 __ vldm(ia_w, sp, kScratchDoubleReg, kScratchDoubleReg);
298 cgen_->ToHighMemOperand(destination);
299 __ ldr(kSavedValueRegister, source_operand);
300 __ str(kSavedValueRegister, destination_operand);
301 __ ldr(kSavedValueRegister, source_high_operand);
302 __ str(kSavedValueRegister, destination_high_operand);
303 } else { 310 } else {
304 __ vldr(kScratchDoubleReg, source_operand); 311 __ vldr(kScratchDoubleReg, source_operand);
305 __ vstr(kScratchDoubleReg, destination_operand); 312 __ vstr(kScratchDoubleReg, destination_operand);
306 } 313 }
307 } 314 }
308 } else { 315 } else {
309 UNREACHABLE(); 316 UNREACHABLE();
310 } 317 }
311 318
312 moves_[index].Eliminate(); 319 moves_[index].Eliminate();
313 } 320 }
314 321
315 322
316 #undef __ 323 #undef __
317 324
318 } } // namespace v8::internal 325 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/arm/lithium-gap-resolver-arm.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698