| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 232 matching lines...) |
| 243 | 243 |
| 244 | 244 |
| 245 void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; } | 245 void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; } |
| 246 HValue* hydrogen_value() const { return hydrogen_value_; } | 246 HValue* hydrogen_value() const { return hydrogen_value_; } |
| 247 | 247 |
| 248 virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { } | 248 virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { } |
| 249 | 249 |
| 250 void MarkAsCall() { is_call_ = true; } | 250 void MarkAsCall() { is_call_ = true; } |
| 251 | 251 |
| 252 // Interface to the register allocator and iterators. | 252 // Interface to the register allocator and iterators. |
| 253 bool ClobbersTemps() const { return is_call_; } | 253 bool IsMarkedAsCall() const { return is_call_; } |
| 254 bool ClobbersRegisters() const { return is_call_; } | |
| 255 virtual bool ClobbersDoubleRegisters() const { | |
| 256 return is_call_ || !CpuFeatures::IsSupported(SSE2); | |
| 257 } | |
| 258 | 254 |
| 259 virtual bool HasResult() const = 0; | 255 virtual bool HasResult() const = 0; |
| 260 virtual LOperand* result() = 0; | 256 virtual LOperand* result() = 0; |
| 261 | 257 |
| 262 LOperand* FirstInput() { return InputAt(0); } | 258 LOperand* FirstInput() { return InputAt(0); } |
| 263 LOperand* Output() { return HasResult() ? result() : NULL; } | 259 LOperand* Output() { return HasResult() ? result() : NULL; } |
| 264 | 260 |
| 265 #ifdef DEBUG | 261 #ifdef DEBUG |
| 266 void VerifyCall(); | 262 void VerifyCall(); |
| 267 #endif | 263 #endif |
| (...skipping 85 matching lines...) |
| 353 | 349 |
| 354 private: | 350 private: |
| 355 LParallelMove* parallel_moves_[LAST_INNER_POSITION + 1]; | 351 LParallelMove* parallel_moves_[LAST_INNER_POSITION + 1]; |
| 356 HBasicBlock* block_; | 352 HBasicBlock* block_; |
| 357 }; | 353 }; |
| 358 | 354 |
| 359 | 355 |
| 360 class LInstructionGap: public LGap { | 356 class LInstructionGap: public LGap { |
| 361 public: | 357 public: |
| 362 explicit LInstructionGap(HBasicBlock* block) : LGap(block) { } | 358 explicit LInstructionGap(HBasicBlock* block) : LGap(block) { } |
| 363 virtual bool ClobbersDoubleRegisters() const { return false; } | |
| 364 | 359 |
| 365 DECLARE_CONCRETE_INSTRUCTION(InstructionGap, "gap") | 360 DECLARE_CONCRETE_INSTRUCTION(InstructionGap, "gap") |
| 366 }; | 361 }; |
| 367 | 362 |
| 368 | 363 |
| 369 class LGoto: public LTemplateInstruction<0, 0, 0> { | 364 class LGoto: public LTemplateInstruction<0, 0, 0> { |
| 370 public: | 365 public: |
| 371 explicit LGoto(int block_id) : block_id_(block_id) { } | 366 explicit LGoto(int block_id) : block_id_(block_id) { } |
| 372 | 367 |
| 373 DECLARE_CONCRETE_INSTRUCTION(Goto, "goto") | 368 DECLARE_CONCRETE_INSTRUCTION(Goto, "goto") |
| (...skipping 1062 matching lines...) |
| 1436 "load-external-array-pointer") | 1431 "load-external-array-pointer") |
| 1437 }; | 1432 }; |
| 1438 | 1433 |
| 1439 | 1434 |
| 1440 class LLoadKeyed: public LTemplateInstruction<1, 2, 0> { | 1435 class LLoadKeyed: public LTemplateInstruction<1, 2, 0> { |
| 1441 public: | 1436 public: |
| 1442 LLoadKeyed(LOperand* elements, LOperand* key) { | 1437 LLoadKeyed(LOperand* elements, LOperand* key) { |
| 1443 inputs_[0] = elements; | 1438 inputs_[0] = elements; |
| 1444 inputs_[1] = key; | 1439 inputs_[1] = key; |
| 1445 } | 1440 } |
| 1441 |
| 1446 LOperand* elements() { return inputs_[0]; } | 1442 LOperand* elements() { return inputs_[0]; } |
| 1447 LOperand* key() { return inputs_[1]; } | 1443 LOperand* key() { return inputs_[1]; } |
| 1448 ElementsKind elements_kind() const { | 1444 ElementsKind elements_kind() const { |
| 1449 return hydrogen()->elements_kind(); | 1445 return hydrogen()->elements_kind(); |
| 1450 } | 1446 } |
| 1451 bool is_external() const { | 1447 bool is_external() const { |
| 1452 return hydrogen()->is_external(); | 1448 return hydrogen()->is_external(); |
| 1453 } | 1449 } |
| 1454 | 1450 |
| 1455 virtual bool ClobbersDoubleRegisters() const { | |
| 1456 return !IsDoubleOrFloatElementsKind(hydrogen()->elements_kind()); | |
| 1457 } | |
| 1458 | |
| 1459 DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed") | 1451 DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed") |
| 1460 DECLARE_HYDROGEN_ACCESSOR(LoadKeyed) | 1452 DECLARE_HYDROGEN_ACCESSOR(LoadKeyed) |
| 1461 | 1453 |
| 1462 virtual void PrintDataTo(StringStream* stream); | 1454 virtual void PrintDataTo(StringStream* stream); |
| 1463 uint32_t additional_index() const { return hydrogen()->index_offset(); } | 1455 uint32_t additional_index() const { return hydrogen()->index_offset(); } |
| 1464 bool key_is_smi() { | |
| 1465 return hydrogen()->key()->representation().IsTagged(); | |
| 1466 } | |
| 1467 }; | 1456 }; |
| 1468 | 1457 |
| 1469 | 1458 |
| 1470 inline static bool ExternalArrayOpRequiresTemp( | 1459 inline static bool ExternalArrayOpRequiresTemp( |
| 1471 Representation key_representation, | 1460 Representation key_representation, |
| 1472 ElementsKind elements_kind) { | 1461 ElementsKind elements_kind) { |
| 1473 // Operations that require the key to be divided by two to be converted into | 1462 // Operations that require the key to be divided by two to be converted into |
| 1474 // an index cannot fold the scale operation into a load and need an extra | 1463 // an index cannot fold the scale operation into a load and need an extra |
| 1475 // temp register to do the work. | 1464 // temp register to do the work. |
| 1476 return key_representation.IsTagged() && | 1465 return key_representation.IsTagged() && |
| (...skipping 960 matching lines...) |
| 2437 | 2426 |
| 2438 void MarkSpilledRegister(int allocation_index, LOperand* spill_operand); | 2427 void MarkSpilledRegister(int allocation_index, LOperand* spill_operand); |
| 2439 void MarkSpilledDoubleRegister(int allocation_index, | 2428 void MarkSpilledDoubleRegister(int allocation_index, |
| 2440 LOperand* spill_operand); | 2429 LOperand* spill_operand); |
| 2441 | 2430 |
| 2442 private: | 2431 private: |
| 2443 // Arrays of spill slot operands for registers with an assigned spill | 2432 // Arrays of spill slot operands for registers with an assigned spill |
| 2444 // slot, i.e., that must also be restored to the spill slot on OSR entry. | 2433 // slot, i.e., that must also be restored to the spill slot on OSR entry. |
| 2445 // NULL if the register has no assigned spill slot. Indexed by allocation | 2434 // NULL if the register has no assigned spill slot. Indexed by allocation |
| 2446 // index. | 2435 // index. |
| 2447 LOperand* register_spills_[Register::kMaxNumAllocatableRegisters]; | 2436 LOperand* register_spills_[Register::kNumAllocatableRegisters]; |
| 2448 LOperand* double_register_spills_[ | 2437 LOperand* double_register_spills_[DoubleRegister::kNumAllocatableRegisters]; |
| 2449 DoubleRegister::kMaxNumAllocatableRegisters]; | |
| 2450 }; | 2438 }; |
| 2451 | 2439 |
| 2452 | 2440 |
| 2453 class LStackCheck: public LTemplateInstruction<0, 1, 0> { | 2441 class LStackCheck: public LTemplateInstruction<0, 1, 0> { |
| 2454 public: | 2442 public: |
| 2455 explicit LStackCheck(LOperand* context) { | 2443 explicit LStackCheck(LOperand* context) { |
| 2456 inputs_[0] = context; | 2444 inputs_[0] = context; |
| 2457 } | 2445 } |
| 2458 | 2446 |
| 2459 LOperand* context() { return inputs_[0]; } | 2447 LOperand* context() { return inputs_[0]; } |
| (...skipping 143 matching lines...) |
| 2603 bool is_unused() const { return status_ == UNUSED; } | 2591 bool is_unused() const { return status_ == UNUSED; } |
| 2604 bool is_building() const { return status_ == BUILDING; } | 2592 bool is_building() const { return status_ == BUILDING; } |
| 2605 bool is_done() const { return status_ == DONE; } | 2593 bool is_done() const { return status_ == DONE; } |
| 2606 bool is_aborted() const { return status_ == ABORTED; } | 2594 bool is_aborted() const { return status_ == ABORTED; } |
| 2607 | 2595 |
| 2608 void Abort(const char* reason); | 2596 void Abort(const char* reason); |
| 2609 | 2597 |
| 2610 // Methods for getting operands for Use / Define / Temp. | 2598 // Methods for getting operands for Use / Define / Temp. |
| 2611 LUnallocated* ToUnallocated(Register reg); | 2599 LUnallocated* ToUnallocated(Register reg); |
| 2612 LUnallocated* ToUnallocated(XMMRegister reg); | 2600 LUnallocated* ToUnallocated(XMMRegister reg); |
| 2613 LUnallocated* ToUnallocated(X87TopOfStackRegister reg); | |
| 2614 | 2601 |
| 2615 // Methods for setting up define-use relationships. | 2602 // Methods for setting up define-use relationships. |
| 2616 MUST_USE_RESULT LOperand* Use(HValue* value, LUnallocated* operand); | 2603 MUST_USE_RESULT LOperand* Use(HValue* value, LUnallocated* operand); |
| 2617 MUST_USE_RESULT LOperand* UseFixed(HValue* value, Register fixed_register); | 2604 MUST_USE_RESULT LOperand* UseFixed(HValue* value, Register fixed_register); |
| 2618 MUST_USE_RESULT LOperand* UseFixedDouble(HValue* value, | 2605 MUST_USE_RESULT LOperand* UseFixedDouble(HValue* value, |
| 2619 XMMRegister fixed_register); | 2606 XMMRegister fixed_register); |
| 2620 | 2607 |
| 2621 // A value that is guaranteed to be allocated to a register. | 2608 // A value that is guaranteed to be allocated to a register. |
| 2622 // Operand created by UseRegister is guaranteed to be live until the end of | 2609 // Operand created by UseRegister is guaranteed to be live until the end of |
| 2623 // instruction. This means that the register allocator will not reuse its | 2610 // instruction. This means that the register allocator will not reuse its |
| (...skipping 40 matching lines...) |
| 2664 LInstruction* DefineAsSpilled(LTemplateInstruction<1, I, T>* instr, | 2651 LInstruction* DefineAsSpilled(LTemplateInstruction<1, I, T>* instr, |
| 2665 int index); | 2652 int index); |
| 2666 template<int I, int T> | 2653 template<int I, int T> |
| 2667 LInstruction* DefineSameAsFirst(LTemplateInstruction<1, I, T>* instr); | 2654 LInstruction* DefineSameAsFirst(LTemplateInstruction<1, I, T>* instr); |
| 2668 template<int I, int T> | 2655 template<int I, int T> |
| 2669 LInstruction* DefineFixed(LTemplateInstruction<1, I, T>* instr, | 2656 LInstruction* DefineFixed(LTemplateInstruction<1, I, T>* instr, |
| 2670 Register reg); | 2657 Register reg); |
| 2671 template<int I, int T> | 2658 template<int I, int T> |
| 2672 LInstruction* DefineFixedDouble(LTemplateInstruction<1, I, T>* instr, | 2659 LInstruction* DefineFixedDouble(LTemplateInstruction<1, I, T>* instr, |
| 2673 XMMRegister reg); | 2660 XMMRegister reg); |
| 2674 template<int I, int T> | |
| 2675 LInstruction* DefineX87TOS(LTemplateInstruction<1, I, T>* instr); | |
| 2676 // Assigns an environment to an instruction. An instruction which can | 2661 // Assigns an environment to an instruction. An instruction which can |
| 2677 // deoptimize must have an environment. | 2662 // deoptimize must have an environment. |
| 2678 LInstruction* AssignEnvironment(LInstruction* instr); | 2663 LInstruction* AssignEnvironment(LInstruction* instr); |
| 2679 // Assigns a pointer map to an instruction. An instruction which can | 2664 // Assigns a pointer map to an instruction. An instruction which can |
| 2680 // trigger a GC or a lazy deoptimization must have a pointer map. | 2665 // trigger a GC or a lazy deoptimization must have a pointer map. |
| 2681 LInstruction* AssignPointerMap(LInstruction* instr); | 2666 LInstruction* AssignPointerMap(LInstruction* instr); |
| 2682 | 2667 |
| 2683 enum CanDeoptimize { CAN_DEOPTIMIZE_EAGERLY, CANNOT_DEOPTIMIZE_EAGERLY }; | 2668 enum CanDeoptimize { CAN_DEOPTIMIZE_EAGERLY, CANNOT_DEOPTIMIZE_EAGERLY }; |
| 2684 | 2669 |
| 2685 // Marks a call for the register allocator. Assigns a pointer map to | 2670 // Marks a call for the register allocator. Assigns a pointer map to |
| (...skipping 32 matching lines...) |
| 2718 | 2703 |
| 2719 DISALLOW_COPY_AND_ASSIGN(LChunkBuilder); | 2704 DISALLOW_COPY_AND_ASSIGN(LChunkBuilder); |
| 2720 }; | 2705 }; |
| 2721 | 2706 |
| 2722 #undef DECLARE_HYDROGEN_ACCESSOR | 2707 #undef DECLARE_HYDROGEN_ACCESSOR |
| 2723 #undef DECLARE_CONCRETE_INSTRUCTION | 2708 #undef DECLARE_CONCRETE_INSTRUCTION |
| 2724 | 2709 |
| 2725 } } // namespace v8::internal | 2710 } } // namespace v8::internal |
| 2726 | 2711 |
| 2727 #endif // V8_IA32_LITHIUM_IA32_H_ | 2712 #endif // V8_IA32_LITHIUM_IA32_H_ |