OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 12 matching lines...) | |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #ifndef V8_LITHIUM_H_ | 28 #ifndef V8_LITHIUM_H_ |
29 #define V8_LITHIUM_H_ | 29 #define V8_LITHIUM_H_ |
30 | 30 |
31 #include "allocation.h" | 31 #include "allocation.h" |
32 #include "hydrogen.h" | 32 #include "hydrogen.h" |
33 #include "once.h" | |
33 #include "safepoint-table.h" | 34 #include "safepoint-table.h" |
34 | 35 |
35 namespace v8 { | 36 namespace v8 { |
36 namespace internal { | 37 namespace internal { |
37 | 38 |
38 class LOperand: public ZoneObject { | 39 class LOperand: public ZoneObject { |
39 public: | 40 public: |
40 enum Kind { | 41 enum Kind { |
41 INVALID, | 42 INVALID, |
42 UNALLOCATED, | 43 UNALLOCATED, |
(...skipping 26 matching lines...) | |
69 ASSERT(this->index() == index); | 70 ASSERT(this->index() == index); |
70 } | 71 } |
71 | 72 |
72 protected: | 73 protected: |
73 static const int kKindFieldWidth = 3; | 74 static const int kKindFieldWidth = 3; |
74 class KindField : public BitField<Kind, 0, kKindFieldWidth> { }; | 75 class KindField : public BitField<Kind, 0, kKindFieldWidth> { }; |
75 | 76 |
76 LOperand(Kind kind, int index) { ConvertTo(kind, index); } | 77 LOperand(Kind kind, int index) { ConvertTo(kind, index); } |
77 | 78 |
78 unsigned value_; | 79 unsigned value_; |
80 | |
81 // Function used by subclasses to populate their cache. | |
82 template <typename T, int Size, LOperand::Kind _Kind> | |
83 static void InitCache(T** cache) { | |
84 *cache = new T[Size]; | |
85 for (int i = 0; i < Size; i++) { | |
86 (*cache)[i].ConvertTo(_Kind, i); | |
87 } | |
88 } | |
79 }; | 89 }; |
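
The InitCache template added above is only half of the pattern: the matching SetUpCache() bodies live in lithium.cc, which is not part of this file's diff. Below is a minimal sketch of how those pieces presumably fit together, using a hypothetical CachedOperand class and std::call_once as a stand-in for the OnceType/CallOnce machinery that the newly included once.h provides; it is an illustration of the technique, not V8's actual implementation.

#include <mutex>

class CachedOperand {
 public:
  static void SetUpCache();
  // Packs the kind into the low 3 bits, matching kKindFieldWidth above.
  void ConvertTo(int kind, int index) { value_ = (index << 3) | kind; }

  // Mirrors LOperand::InitCache<T, Size, Kind>: allocate the backing array on
  // first use and pre-convert every slot to its kind/index pair.
  template <typename T, int Size, int Kind>
  static void InitCache(T** cache) {
    *cache = new T[Size];
    for (int i = 0; i < Size; i++) (*cache)[i].ConvertTo(Kind, i);
  }

 private:
  static const int kNumCachedOperands = 128;
  static CachedOperand* cache;             // header: "static LConstantOperand* cache"
  static std::once_flag init_cache_once_;  // header: "static OnceType init_cache_once_"
  unsigned value_;
};

// Definitions that would live in lithium.cc.
CachedOperand* CachedOperand::cache = nullptr;
std::once_flag CachedOperand::init_cache_once_;

void CachedOperand::SetUpCache() {
  // Safe to call from every Create(): only the very first call allocates.
  // "1" is an arbitrary kind tag for this sketch.
  std::call_once(init_cache_once_,
                 &CachedOperand::InitCache<CachedOperand, kNumCachedOperands, 1>,
                 &cache);
}
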
80 | 90 |
81 | 91 |
82 class LUnallocated: public LOperand { | 92 class LUnallocated: public LOperand { |
83 public: | 93 public: |
84 enum Policy { | 94 enum Policy { |
85 NONE, | 95 NONE, |
86 ANY, | 96 ANY, |
87 FIXED_REGISTER, | 97 FIXED_REGISTER, |
88 FIXED_DOUBLE_REGISTER, | 98 FIXED_DOUBLE_REGISTER, |
(...skipping 149 matching lines...) | |
238 void Eliminate() { source_ = destination_ = NULL; } | 248 void Eliminate() { source_ = destination_ = NULL; } |
239 bool IsEliminated() const { | 249 bool IsEliminated() const { |
240 ASSERT(source_ != NULL || destination_ == NULL); | 250 ASSERT(source_ != NULL || destination_ == NULL); |
241 return source_ == NULL; | 251 return source_ == NULL; |
242 } | 252 } |
243 | 253 |
244 private: | 254 private: |
245 LOperand* source_; | 255 LOperand* source_; |
246 LOperand* destination_; | 256 LOperand* destination_; |
247 }; | 257 }; |
248 | 258 |
fschneider
2012/02/28 16:54:48
Accidental edit?
| |
249 | |
250 class LConstantOperand: public LOperand { | 259 class LConstantOperand: public LOperand { |
251 public: | 260 public: |
252 static LConstantOperand* Create(int index) { | 261 static LConstantOperand* Create(int index) { |
253 ASSERT(index >= 0); | 262 ASSERT(index >= 0); |
263 SetUpCache(); | |
fschneider
2012/02/28 16:54:48
Creation of LOperand is a really frequent operation ...
Philippe
2012/02/29 10:24:45
Done.
| |
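
fschneider's concern is that Create() sits on a very hot path: as written in this patch set, every operand creation runs the once guard inside SetUpCache() before the cache lookup. Whatever "Done" refers to is not visible in this excerpt; the trade-off it addresses looks roughly like the sketch below (hypothetical names, std::call_once standing in for V8's CallOnce).

#include <mutex>

struct Slot { int index; };

static const int kNumCachedSlots = 128;
static Slot* slot_cache = nullptr;
static std::once_flag slot_cache_once;

static void SetUpSlotCache() {
  std::call_once(slot_cache_once, [] {
    slot_cache = new Slot[kNumCachedSlots];
    for (int i = 0; i < kNumCachedSlots; i++) slot_cache[i].index = i;
  });
}

// Patch-set style: the once guard is evaluated on every single Create() call,
// even though it only does real work the first time.
Slot* CreateLazy(int index) {
  SetUpSlotCache();
  return index < kNumCachedSlots ? &slot_cache[index] : new Slot{index};
}

// Alternative the comment points towards: initialize all caches exactly once
// at start-up, keeping Create() a bare bounds check plus array lookup.
Slot* CreateEager(int index) {
  return index < kNumCachedSlots ? &slot_cache[index] : new Slot{index};
}
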
254 if (index < kNumCachedOperands) return &cache[index]; | 264 if (index < kNumCachedOperands) return &cache[index]; |
255 return new LConstantOperand(index); | 265 return new LConstantOperand(index); |
256 } | 266 } |
257 | 267 |
258 static LConstantOperand* cast(LOperand* op) { | 268 static LConstantOperand* cast(LOperand* op) { |
259 ASSERT(op->IsConstantOperand()); | 269 ASSERT(op->IsConstantOperand()); |
260 return reinterpret_cast<LConstantOperand*>(op); | 270 return reinterpret_cast<LConstantOperand*>(op); |
261 } | 271 } |
262 | 272 |
263 static void SetUpCache(); | 273 static void SetUpCache(); |
264 | 274 |
265 private: | 275 private: |
266 static const int kNumCachedOperands = 128; | 276 static const int kNumCachedOperands = 128; |
267 static LConstantOperand cache[]; | 277 static LConstantOperand* cache; |
278 static OnceType init_cache_once_; | |
268 | 279 |
269 LConstantOperand() : LOperand() { } | 280 LConstantOperand() : LOperand() { } |
270 explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { } | 281 explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { } |
282 | |
283 friend void LOperand::InitCache< | |
284 LConstantOperand, kNumCachedOperands, CONSTANT_OPERAND>( | |
285 LConstantOperand** cache); | |
271 }; | 286 }; |
272 | 287 |
273 | 288 |
274 class LArgument: public LOperand { | 289 class LArgument: public LOperand { |
275 public: | 290 public: |
276 explicit LArgument(int index) : LOperand(ARGUMENT, index) { } | 291 explicit LArgument(int index) : LOperand(ARGUMENT, index) { } |
277 | 292 |
278 static LArgument* cast(LOperand* op) { | 293 static LArgument* cast(LOperand* op) { |
279 ASSERT(op->IsArgument()); | 294 ASSERT(op->IsArgument()); |
280 return reinterpret_cast<LArgument*>(op); | 295 return reinterpret_cast<LArgument*>(op); |
281 } | 296 } |
282 }; | 297 }; |
283 | 298 |
284 | 299 |
285 class LStackSlot: public LOperand { | 300 class LStackSlot: public LOperand { |
286 public: | 301 public: |
287 static LStackSlot* Create(int index) { | 302 static LStackSlot* Create(int index) { |
288 ASSERT(index >= 0); | 303 ASSERT(index >= 0); |
304 SetUpCache(); | |
289 if (index < kNumCachedOperands) return &cache[index]; | 305 if (index < kNumCachedOperands) return &cache[index]; |
290 return new LStackSlot(index); | 306 return new LStackSlot(index); |
291 } | 307 } |
292 | 308 |
293 static LStackSlot* cast(LOperand* op) { | 309 static LStackSlot* cast(LOperand* op) { |
294 ASSERT(op->IsStackSlot()); | 310 ASSERT(op->IsStackSlot()); |
295 return reinterpret_cast<LStackSlot*>(op); | 311 return reinterpret_cast<LStackSlot*>(op); |
296 } | 312 } |
297 | 313 |
298 static void SetUpCache(); | 314 static void SetUpCache(); |
299 | 315 |
300 private: | 316 private: |
301 static const int kNumCachedOperands = 128; | 317 static const int kNumCachedOperands = 128; |
302 static LStackSlot cache[]; | 318 static LStackSlot* cache; |
319 static OnceType init_cache_once_; | |
303 | 320 |
304 LStackSlot() : LOperand() { } | 321 LStackSlot() : LOperand() { } |
305 explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { } | 322 explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { } |
323 | |
324 friend void LOperand::InitCache<LStackSlot, kNumCachedOperands, STACK_SLOT>( | |
325 LStackSlot** cache); | |
306 }; | 326 }; |
307 | 327 |
308 | 328 |
309 class LDoubleStackSlot: public LOperand { | 329 class LDoubleStackSlot: public LOperand { |
310 public: | 330 public: |
311 static LDoubleStackSlot* Create(int index) { | 331 static LDoubleStackSlot* Create(int index) { |
312 ASSERT(index >= 0); | 332 ASSERT(index >= 0); |
333 SetUpCache(); | |
313 if (index < kNumCachedOperands) return &cache[index]; | 334 if (index < kNumCachedOperands) return &cache[index]; |
314 return new LDoubleStackSlot(index); | 335 return new LDoubleStackSlot(index); |
315 } | 336 } |
316 | 337 |
317 static LDoubleStackSlot* cast(LOperand* op) { | 338 static LDoubleStackSlot* cast(LOperand* op) { |
318 ASSERT(op->IsStackSlot()); | 339 ASSERT(op->IsStackSlot()); |
319 return reinterpret_cast<LDoubleStackSlot*>(op); | 340 return reinterpret_cast<LDoubleStackSlot*>(op); |
320 } | 341 } |
321 | 342 |
322 static void SetUpCache(); | 343 static void SetUpCache(); |
323 | 344 |
324 private: | 345 private: |
325 static const int kNumCachedOperands = 128; | 346 static const int kNumCachedOperands = 128; |
326 static LDoubleStackSlot cache[]; | 347 static LDoubleStackSlot* cache; |
348 static OnceType init_cache_once_; | |
327 | 349 |
328 LDoubleStackSlot() : LOperand() { } | 350 LDoubleStackSlot() : LOperand() { } |
329 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { } | 351 explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { } |
352 | |
353 friend void LOperand::InitCache< | |
354 LDoubleStackSlot, kNumCachedOperands, DOUBLE_STACK_SLOT>( | |
355 LDoubleStackSlot** cache); | |
330 }; | 356 }; |
331 | 357 |
332 | 358 |
333 class LRegister: public LOperand { | 359 class LRegister: public LOperand { |
334 public: | 360 public: |
335 static LRegister* Create(int index) { | 361 static LRegister* Create(int index) { |
336 ASSERT(index >= 0); | 362 ASSERT(index >= 0); |
363 SetUpCache(); | |
337 if (index < kNumCachedOperands) return &cache[index]; | 364 if (index < kNumCachedOperands) return &cache[index]; |
338 return new LRegister(index); | 365 return new LRegister(index); |
339 } | 366 } |
340 | 367 |
341 static LRegister* cast(LOperand* op) { | 368 static LRegister* cast(LOperand* op) { |
342 ASSERT(op->IsRegister()); | 369 ASSERT(op->IsRegister()); |
343 return reinterpret_cast<LRegister*>(op); | 370 return reinterpret_cast<LRegister*>(op); |
344 } | 371 } |
345 | 372 |
346 static void SetUpCache(); | 373 static void SetUpCache(); |
347 | 374 |
348 private: | 375 private: |
349 static const int kNumCachedOperands = 16; | 376 static const int kNumCachedOperands = 16; |
350 static LRegister cache[]; | 377 static LRegister* cache; |
378 static OnceType init_cache_once_; | |
351 | 379 |
352 LRegister() : LOperand() { } | 380 LRegister() : LOperand() { } |
353 explicit LRegister(int index) : LOperand(REGISTER, index) { } | 381 explicit LRegister(int index) : LOperand(REGISTER, index) { } |
382 | |
383 friend void LOperand::InitCache<LRegister, kNumCachedOperands, REGISTER>( | |
384 LRegister** cache); | |
354 }; | 385 }; |
355 | 386 |
356 | 387 |
357 class LDoubleRegister: public LOperand { | 388 class LDoubleRegister: public LOperand { |
358 public: | 389 public: |
359 static LDoubleRegister* Create(int index) { | 390 static LDoubleRegister* Create(int index) { |
360 ASSERT(index >= 0); | 391 ASSERT(index >= 0); |
392 SetUpCache(); | |
361 if (index < kNumCachedOperands) return &cache[index]; | 393 if (index < kNumCachedOperands) return &cache[index]; |
362 return new LDoubleRegister(index); | 394 return new LDoubleRegister(index); |
363 } | 395 } |
364 | 396 |
365 static LDoubleRegister* cast(LOperand* op) { | 397 static LDoubleRegister* cast(LOperand* op) { |
366 ASSERT(op->IsDoubleRegister()); | 398 ASSERT(op->IsDoubleRegister()); |
367 return reinterpret_cast<LDoubleRegister*>(op); | 399 return reinterpret_cast<LDoubleRegister*>(op); |
368 } | 400 } |
369 | 401 |
370 static void SetUpCache(); | 402 static void SetUpCache(); |
371 | 403 |
372 private: | 404 private: |
373 static const int kNumCachedOperands = 16; | 405 static const int kNumCachedOperands = 16; |
374 static LDoubleRegister cache[]; | 406 static LDoubleRegister* cache; |
407 static OnceType init_cache_once_; | |
375 | 408 |
376 LDoubleRegister() : LOperand() { } | 409 LDoubleRegister() : LOperand() { } |
377 explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { } | 410 explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { } |
411 | |
412 friend void LOperand::InitCache< | |
413 LDoubleRegister, kNumCachedOperands, DOUBLE_REGISTER>( | |
414 LDoubleRegister** cache); | |
378 }; | 415 }; |
379 | 416 |
380 | 417 |
381 class LParallelMove : public ZoneObject { | 418 class LParallelMove : public ZoneObject { |
382 public: | 419 public: |
383 LParallelMove() : move_operands_(4) { } | 420 LParallelMove() : move_operands_(4) { } |
384 | 421 |
385 void AddMove(LOperand* from, LOperand* to) { | 422 void AddMove(LOperand* from, LOperand* to) { |
386 move_operands_.Add(LMoveOperands(from, to)); | 423 move_operands_.Add(LMoveOperands(from, to)); |
387 } | 424 } |
(...skipping 213 matching lines...) | |
601 ShallowIterator current_iterator_; | 638 ShallowIterator current_iterator_; |
602 }; | 639 }; |
603 | 640 |
604 | 641 |
605 int ElementsKindToShiftSize(ElementsKind elements_kind); | 642 int ElementsKindToShiftSize(ElementsKind elements_kind); |
606 | 643 |
607 | 644 |
608 } } // namespace v8::internal | 645 } } // namespace v8::internal |
609 | 646 |
610 #endif // V8_LITHIUM_H_ | 647 #endif // V8_LITHIUM_H_ |