| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 230 matching lines...) |
| 241 | 241 |
| 242 | 242 |
| 243 void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; } | 243 void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; } |
| 244 HValue* hydrogen_value() const { return hydrogen_value_; } | 244 HValue* hydrogen_value() const { return hydrogen_value_; } |
| 245 | 245 |
| 246 virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { } | 246 virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { } |
| 247 | 247 |
| 248 void MarkAsCall() { is_call_ = true; } | 248 void MarkAsCall() { is_call_ = true; } |
| 249 | 249 |
| 250 // Interface to the register allocator and iterators. | 250 // Interface to the register allocator and iterators. |
| 251 bool IsMarkedAsCall() const { return is_call_; } | 251 bool ClobbersTemps() const { return is_call_; } |
| | 252 bool ClobbersRegisters() const { return is_call_; } |
| | 253 virtual bool ClobbersDoubleRegisters() const { |
| | 254 return is_call_ || !CpuFeatures::IsSupported(SSE2); |
| | 255 } |
| 252 | 256 |
| 253 virtual bool HasResult() const = 0; | 257 virtual bool HasResult() const = 0; |
| 254 virtual LOperand* result() = 0; | 258 virtual LOperand* result() = 0; |
| 255 | 259 |
| 256 LOperand* FirstInput() { return InputAt(0); } | 260 LOperand* FirstInput() { return InputAt(0); } |
| 257 LOperand* Output() { return HasResult() ? result() : NULL; } | 261 LOperand* Output() { return HasResult() ? result() : NULL; } |
| 258 | 262 |
| 259 #ifdef DEBUG | 263 #ifdef DEBUG |
| 260 void VerifyCall(); | 264 void VerifyCall(); |
| 261 #endif | 265 #endif |
| (...skipping 85 matching lines...) |
| 347 | 351 |
| 348 private: | 352 private: |
| 349 LParallelMove* parallel_moves_[LAST_INNER_POSITION + 1]; | 353 LParallelMove* parallel_moves_[LAST_INNER_POSITION + 1]; |
| 350 HBasicBlock* block_; | 354 HBasicBlock* block_; |
| 351 }; | 355 }; |
| 352 | 356 |
| 353 | 357 |
| 354 class LInstructionGap: public LGap { | 358 class LInstructionGap: public LGap { |
| 355 public: | 359 public: |
| 356 explicit LInstructionGap(HBasicBlock* block) : LGap(block) { } | 360 explicit LInstructionGap(HBasicBlock* block) : LGap(block) { } |
| | 361 virtual bool ClobbersDoubleRegisters() const { return false; } |
| 357 | 362 |
| 358 DECLARE_CONCRETE_INSTRUCTION(InstructionGap, "gap") | 363 DECLARE_CONCRETE_INSTRUCTION(InstructionGap, "gap") |
| 359 }; | 364 }; |
| 360 | 365 |
| 361 | 366 |
| 362 class LGoto: public LTemplateInstruction<0, 0, 0> { | 367 class LGoto: public LTemplateInstruction<0, 0, 0> { |
| 363 public: | 368 public: |
| 364 explicit LGoto(int block_id) : block_id_(block_id) { } | 369 explicit LGoto(int block_id) : block_id_(block_id) { } |
| 365 | 370 |
| 366 DECLARE_CONCRETE_INSTRUCTION(Goto, "goto") | 371 DECLARE_CONCRETE_INSTRUCTION(Goto, "goto") |
| (...skipping 1017 matching lines...) |
| 1384 "load-external-array-pointer") | 1389 "load-external-array-pointer") |
| 1385 }; | 1390 }; |
| 1386 | 1391 |
| 1387 | 1392 |
| 1388 class LLoadKeyed: public LTemplateInstruction<1, 2, 0> { | 1393 class LLoadKeyed: public LTemplateInstruction<1, 2, 0> { |
| 1389 public: | 1394 public: |
| 1390 LLoadKeyed(LOperand* elements, LOperand* key) { | 1395 LLoadKeyed(LOperand* elements, LOperand* key) { |
| 1391 inputs_[0] = elements; | 1396 inputs_[0] = elements; |
| 1392 inputs_[1] = key; | 1397 inputs_[1] = key; |
| 1393 } | 1398 } |
| 1394 | |
| 1395 LOperand* elements() { return inputs_[0]; } | 1399 LOperand* elements() { return inputs_[0]; } |
| 1396 LOperand* key() { return inputs_[1]; } | 1400 LOperand* key() { return inputs_[1]; } |
| 1397 ElementsKind elements_kind() const { | 1401 ElementsKind elements_kind() const { |
| 1398 return hydrogen()->elements_kind(); | 1402 return hydrogen()->elements_kind(); |
| 1399 } | 1403 } |
| 1400 bool is_external() const { | 1404 bool is_external() const { |
| 1401 return hydrogen()->is_external(); | 1405 return hydrogen()->is_external(); |
| 1402 } | 1406 } |
| 1403 | 1407 |
| | 1408 virtual bool ClobbersDoubleRegisters() const { |
| | 1409 return !IsDoubleOrFloatElementsKind(hydrogen()->elements_kind()); |
| | 1410 } |
| | 1411 |
| 1404 DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed") | 1412 DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed") |
| 1405 DECLARE_HYDROGEN_ACCESSOR(LoadKeyed) | 1413 DECLARE_HYDROGEN_ACCESSOR(LoadKeyed) |
| 1406 | 1414 |
| 1407 uint32_t additional_index() const { return hydrogen()->index_offset(); } | 1415 uint32_t additional_index() const { return hydrogen()->index_offset(); } |
| | 1416 bool key_is_smi() { |
| | 1417 return hydrogen()->key()->representation().IsTagged(); |
| | 1418 } |
| 1408 }; | 1419 }; |
| 1409 | 1420 |
| 1410 | 1421 |
| 1411 inline static bool ExternalArrayOpRequiresTemp( | 1422 inline static bool ExternalArrayOpRequiresTemp( |
| 1412 Representation key_representation, | 1423 Representation key_representation, |
| 1413 ElementsKind elements_kind) { | 1424 ElementsKind elements_kind) { |
| 1414 // Operations that require the key to be divided by two to be converted into | 1425 // Operations that require the key to be divided by two to be converted into |
| 1415 // an index cannot fold the scale operation into a load and need an extra | 1426 // an index cannot fold the scale operation into a load and need an extra |
| 1416 // temp register to do the work. | 1427 // temp register to do the work. |
| 1417 return key_representation.IsTagged() && | 1428 return key_representation.IsTagged() && |
| (...skipping 960 matching lines...) |
| 2378 | 2389 |
| 2379 void MarkSpilledRegister(int allocation_index, LOperand* spill_operand); | 2390 void MarkSpilledRegister(int allocation_index, LOperand* spill_operand); |
| 2380 void MarkSpilledDoubleRegister(int allocation_index, | 2391 void MarkSpilledDoubleRegister(int allocation_index, |
| 2381 LOperand* spill_operand); | 2392 LOperand* spill_operand); |
| 2382 | 2393 |
| 2383 private: | 2394 private: |
| 2384 // Arrays of spill slot operands for registers with an assigned spill | 2395 // Arrays of spill slot operands for registers with an assigned spill |
| 2385 // slot, i.e., that must also be restored to the spill slot on OSR entry. | 2396 // slot, i.e., that must also be restored to the spill slot on OSR entry. |
| 2386 // NULL if the register has no assigned spill slot. Indexed by allocation | 2397 // NULL if the register has no assigned spill slot. Indexed by allocation |
| 2387 // index. | 2398 // index. |
| 2388 LOperand* register_spills_[Register::kNumAllocatableRegisters]; | 2399 LOperand* register_spills_[Register::kMaxNumAllocatableRegisters]; |
| 2389 LOperand* double_register_spills_[DoubleRegister::kNumAllocatableRegisters]; | 2400 LOperand* double_register_spills_[ |
| | 2401 DoubleRegister::kMaxNumAllocatableRegisters]; |
| 2390 }; | 2402 }; |
| 2391 | 2403 |
| 2392 | 2404 |
| 2393 class LStackCheck: public LTemplateInstruction<0, 1, 0> { | 2405 class LStackCheck: public LTemplateInstruction<0, 1, 0> { |
| 2394 public: | 2406 public: |
| 2395 explicit LStackCheck(LOperand* context) { | 2407 explicit LStackCheck(LOperand* context) { |
| 2396 inputs_[0] = context; | 2408 inputs_[0] = context; |
| 2397 } | 2409 } |
| 2398 | 2410 |
| 2399 LOperand* context() { return inputs_[0]; } | 2411 LOperand* context() { return inputs_[0]; } |
| (...skipping 143 matching lines...) |
| 2543 bool is_unused() const { return status_ == UNUSED; } | 2555 bool is_unused() const { return status_ == UNUSED; } |
| 2544 bool is_building() const { return status_ == BUILDING; } | 2556 bool is_building() const { return status_ == BUILDING; } |
| 2545 bool is_done() const { return status_ == DONE; } | 2557 bool is_done() const { return status_ == DONE; } |
| 2546 bool is_aborted() const { return status_ == ABORTED; } | 2558 bool is_aborted() const { return status_ == ABORTED; } |
| 2547 | 2559 |
| 2548 void Abort(const char* reason); | 2560 void Abort(const char* reason); |
| 2549 | 2561 |
| 2550 // Methods for getting operands for Use / Define / Temp. | 2562 // Methods for getting operands for Use / Define / Temp. |
| 2551 LUnallocated* ToUnallocated(Register reg); | 2563 LUnallocated* ToUnallocated(Register reg); |
| 2552 LUnallocated* ToUnallocated(XMMRegister reg); | 2564 LUnallocated* ToUnallocated(XMMRegister reg); |
| | 2565 LUnallocated* ToUnallocated(X87TopOfStackProxyRegister reg); |
| 2553 | 2566 |
| 2554 // Methods for setting up define-use relationships. | 2567 // Methods for setting up define-use relationships. |
| 2555 MUST_USE_RESULT LOperand* Use(HValue* value, LUnallocated* operand); | 2568 MUST_USE_RESULT LOperand* Use(HValue* value, LUnallocated* operand); |
| 2556 MUST_USE_RESULT LOperand* UseFixed(HValue* value, Register fixed_register); | 2569 MUST_USE_RESULT LOperand* UseFixed(HValue* value, Register fixed_register); |
| 2557 MUST_USE_RESULT LOperand* UseFixedDouble(HValue* value, | 2570 MUST_USE_RESULT LOperand* UseFixedDouble(HValue* value, |
| 2558 XMMRegister fixed_register); | 2571 XMMRegister fixed_register); |
| 2559 | 2572 |
| 2560 // A value that is guaranteed to be allocated to a register. | 2573 // A value that is guaranteed to be allocated to a register. |
| 2561 // Operand created by UseRegister is guaranteed to be live until the end of | 2574 // Operand created by UseRegister is guaranteed to be live until the end of |
| 2562 // instruction. This means that the register allocator will not reuse its | 2575 // instruction. This means that the register allocator will not reuse its |
| (...skipping 40 matching lines...) |
| 2603 LInstruction* DefineAsSpilled(LTemplateInstruction<1, I, T>* instr, | 2616 LInstruction* DefineAsSpilled(LTemplateInstruction<1, I, T>* instr, |
| 2604 int index); | 2617 int index); |
| 2605 template<int I, int T> | 2618 template<int I, int T> |
| 2606 LInstruction* DefineSameAsFirst(LTemplateInstruction<1, I, T>* instr); | 2619 LInstruction* DefineSameAsFirst(LTemplateInstruction<1, I, T>* instr); |
| 2607 template<int I, int T> | 2620 template<int I, int T> |
| 2608 LInstruction* DefineFixed(LTemplateInstruction<1, I, T>* instr, | 2621 LInstruction* DefineFixed(LTemplateInstruction<1, I, T>* instr, |
| 2609 Register reg); | 2622 Register reg); |
| 2610 template<int I, int T> | 2623 template<int I, int T> |
| 2611 LInstruction* DefineFixedDouble(LTemplateInstruction<1, I, T>* instr, | 2624 LInstruction* DefineFixedDouble(LTemplateInstruction<1, I, T>* instr, |
| 2612 XMMRegister reg); | 2625 XMMRegister reg); |
| | 2626 template<int I, int T> |
| | 2627 LInstruction* DefineX87TOS(LTemplateInstruction<1, I, T>* instr); |
| 2613 // Assigns an environment to an instruction. An instruction which can | 2628 // Assigns an environment to an instruction. An instruction which can |
| 2614 // deoptimize must have an environment. | 2629 // deoptimize must have an environment. |
| 2615 LInstruction* AssignEnvironment(LInstruction* instr); | 2630 LInstruction* AssignEnvironment(LInstruction* instr); |
| 2616 // Assigns a pointer map to an instruction. An instruction which can | 2631 // Assigns a pointer map to an instruction. An instruction which can |
| 2617 // trigger a GC or a lazy deoptimization must have a pointer map. | 2632 // trigger a GC or a lazy deoptimization must have a pointer map. |
| 2618 LInstruction* AssignPointerMap(LInstruction* instr); | 2633 LInstruction* AssignPointerMap(LInstruction* instr); |
| 2619 | 2634 |
| 2620 enum CanDeoptimize { CAN_DEOPTIMIZE_EAGERLY, CANNOT_DEOPTIMIZE_EAGERLY }; | 2635 enum CanDeoptimize { CAN_DEOPTIMIZE_EAGERLY, CANNOT_DEOPTIMIZE_EAGERLY }; |
| 2621 | 2636 |
| 2622 // Marks a call for the register allocator. Assigns a pointer map to | 2637 // Marks a call for the register allocator. Assigns a pointer map to |
| (...skipping 32 matching lines...) |
| 2655 | 2670 |
| 2656 DISALLOW_COPY_AND_ASSIGN(LChunkBuilder); | 2671 DISALLOW_COPY_AND_ASSIGN(LChunkBuilder); |
| 2657 }; | 2672 }; |
| 2658 | 2673 |
| 2659 #undef DECLARE_HYDROGEN_ACCESSOR | 2674 #undef DECLARE_HYDROGEN_ACCESSOR |
| 2660 #undef DECLARE_CONCRETE_INSTRUCTION | 2675 #undef DECLARE_CONCRETE_INSTRUCTION |
| 2661 | 2676 |
| 2662 } } // namespace v8::internal | 2677 } } // namespace v8::internal |
| 2663 | 2678 |
| 2664 #endif // V8_IA32_LITHIUM_IA32_H_ | 2679 #endif // V8_IA32_LITHIUM_IA32_H_ |
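The core of this change is the new ClobbersDoubleRegisters() query: when SSE2 is unavailable on ia32, double values live on the x87 FPU stack, so by default every call, and every instruction compiled without SSE2, is treated as clobbering the double registers, while LInstructionGap and double-element LLoadKeyed override the query to opt out. The sketch below is a minimal standalone model of how an allocator-style pass might consult such a hook; the LInstr, LLoadKeyedInstr, and MarkDoubleClobbers names are illustrative stand-ins, not V8's actual LInstruction or LAllocator code.

```cpp
// Simplified model of the "clobbers double registers" query added in this
// patch. The types here are illustrative, not V8's real classes.
#include <cstddef>
#include <iostream>
#include <vector>

struct LInstr {
  bool is_call = false;
  bool sse2_supported = true;
  // Mirrors the default added to LInstruction: calls clobber doubles, and
  // without SSE2 any instruction is conservatively assumed to clobber them.
  virtual bool ClobbersDoubleRegisters() const {
    return is_call || !sse2_supported;
  }
  virtual ~LInstr() = default;
};

struct LLoadKeyedInstr : LInstr {
  bool loads_double_elements = false;
  // Mirrors the LLoadKeyed override in this patch: only keyed loads of
  // non-double elements are treated as clobbering double registers.
  bool ClobbersDoubleRegisters() const override {
    return !loads_double_elements;
  }
};

// Illustrative allocator-style pass: report every instruction across which
// double register values cannot be kept live.
void MarkDoubleClobbers(const std::vector<const LInstr*>& code) {
  for (size_t i = 0; i < code.size(); ++i) {
    if (code[i]->ClobbersDoubleRegisters()) {
      std::cout << "instruction " << i
                << ": double values must be spilled around it\n";
    }
  }
}

int main() {
  LInstr call;                 call.is_call = true;
  LInstr plain_no_sse2;        plain_no_sse2.sse2_supported = false;
  LLoadKeyedInstr int_load;    // e.g. a FAST_SMI_ELEMENTS load
  LLoadKeyedInstr double_load; double_load.loads_double_elements = true;
  MarkDoubleClobbers({&call, &plain_no_sse2, &int_load, &double_load});
}
```

Running the sketch prints a line for the call, the no-SSE2 instruction, and the integer-element load, but not for the double-element load, matching the overrides introduced in the diff above.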