| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 231 matching lines...) |
| 242 | 242 |
| 243 | 243 |
| 244 void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; } | 244 void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; } |
| 245 HValue* hydrogen_value() const { return hydrogen_value_; } | 245 HValue* hydrogen_value() const { return hydrogen_value_; } |
| 246 | 246 |
| 247 virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { } | 247 virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { } |
| 248 | 248 |
| 249 void MarkAsCall() { is_call_ = true; } | 249 void MarkAsCall() { is_call_ = true; } |
| 250 | 250 |
| 251 // Interface to the register allocator and iterators. | 251 // Interface to the register allocator and iterators. |
| 252 bool ClobbersTemps() const { return is_call_; } | 252 bool IsMarkedAsCall() const { return is_call_; } |
| 253 bool ClobbersRegisters() const { return is_call_; } | |
| 254 virtual bool ClobbersDoubleRegisters() const { | |
| 255 return is_call_ || !CpuFeatures::IsSupported(SSE2); | |
| 256 } | |
| 257 | 253 |
| 258 virtual bool HasResult() const = 0; | 254 virtual bool HasResult() const = 0; |
| 259 virtual LOperand* result() = 0; | 255 virtual LOperand* result() = 0; |
| 260 | 256 |
| 261 LOperand* FirstInput() { return InputAt(0); } | 257 LOperand* FirstInput() { return InputAt(0); } |
| 262 LOperand* Output() { return HasResult() ? result() : NULL; } | 258 LOperand* Output() { return HasResult() ? result() : NULL; } |
| 263 | 259 |
| 264 #ifdef DEBUG | 260 #ifdef DEBUG |
| 265 void VerifyCall(); | 261 void VerifyCall(); |
| 266 #endif | 262 #endif |
| (...skipping 85 matching lines...) |
| 352 | 348 |
| 353 private: | 349 private: |
| 354 LParallelMove* parallel_moves_[LAST_INNER_POSITION + 1]; | 350 LParallelMove* parallel_moves_[LAST_INNER_POSITION + 1]; |
| 355 HBasicBlock* block_; | 351 HBasicBlock* block_; |
| 356 }; | 352 }; |
| 357 | 353 |
| 358 | 354 |
| 359 class LInstructionGap: public LGap { | 355 class LInstructionGap: public LGap { |
| 360 public: | 356 public: |
| 361 explicit LInstructionGap(HBasicBlock* block) : LGap(block) { } | 357 explicit LInstructionGap(HBasicBlock* block) : LGap(block) { } |
| 362 virtual bool ClobbersDoubleRegisters() const { return false; } | |
| 363 | 358 |
| 364 DECLARE_CONCRETE_INSTRUCTION(InstructionGap, "gap") | 359 DECLARE_CONCRETE_INSTRUCTION(InstructionGap, "gap") |
| 365 }; | 360 }; |
| 366 | 361 |
| 367 | 362 |
| 368 class LGoto: public LTemplateInstruction<0, 0, 0> { | 363 class LGoto: public LTemplateInstruction<0, 0, 0> { |
| 369 public: | 364 public: |
| 370 explicit LGoto(int block_id) : block_id_(block_id) { } | 365 explicit LGoto(int block_id) : block_id_(block_id) { } |
| 371 | 366 |
| 372 DECLARE_CONCRETE_INSTRUCTION(Goto, "goto") | 367 DECLARE_CONCRETE_INSTRUCTION(Goto, "goto") |
| (...skipping 1038 matching lines...) |
| 1411 "load-external-array-pointer") | 1406 "load-external-array-pointer") |
| 1412 }; | 1407 }; |
| 1413 | 1408 |
| 1414 | 1409 |
| 1415 class LLoadKeyed: public LTemplateInstruction<1, 2, 0> { | 1410 class LLoadKeyed: public LTemplateInstruction<1, 2, 0> { |
| 1416 public: | 1411 public: |
| 1417 LLoadKeyed(LOperand* elements, LOperand* key) { | 1412 LLoadKeyed(LOperand* elements, LOperand* key) { |
| 1418 inputs_[0] = elements; | 1413 inputs_[0] = elements; |
| 1419 inputs_[1] = key; | 1414 inputs_[1] = key; |
| 1420 } | 1415 } |
| 1416 |
| 1421 LOperand* elements() { return inputs_[0]; } | 1417 LOperand* elements() { return inputs_[0]; } |
| 1422 LOperand* key() { return inputs_[1]; } | 1418 LOperand* key() { return inputs_[1]; } |
| 1423 ElementsKind elements_kind() const { | 1419 ElementsKind elements_kind() const { |
| 1424 return hydrogen()->elements_kind(); | 1420 return hydrogen()->elements_kind(); |
| 1425 } | 1421 } |
| 1426 bool is_external() const { | 1422 bool is_external() const { |
| 1427 return hydrogen()->is_external(); | 1423 return hydrogen()->is_external(); |
| 1428 } | 1424 } |
| 1429 | 1425 |
| 1430 virtual bool ClobbersDoubleRegisters() const { | |
| 1431 return !IsDoubleOrFloatElementsKind(hydrogen()->elements_kind()); | |
| 1432 } | |
| 1433 | |
| 1434 DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed") | 1426 DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed") |
| 1435 DECLARE_HYDROGEN_ACCESSOR(LoadKeyed) | 1427 DECLARE_HYDROGEN_ACCESSOR(LoadKeyed) |
| 1436 | 1428 |
| 1437 virtual void PrintDataTo(StringStream* stream); | 1429 virtual void PrintDataTo(StringStream* stream); |
| 1438 uint32_t additional_index() const { return hydrogen()->index_offset(); } | 1430 uint32_t additional_index() const { return hydrogen()->index_offset(); } |
| 1439 bool key_is_smi() { | |
| 1440 return hydrogen()->key()->representation().IsTagged(); | |
| 1441 } | |
| 1442 }; | 1431 }; |
| 1443 | 1432 |
| 1444 | 1433 |
| 1445 inline static bool ExternalArrayOpRequiresTemp( | 1434 inline static bool ExternalArrayOpRequiresTemp( |
| 1446 Representation key_representation, | 1435 Representation key_representation, |
| 1447 ElementsKind elements_kind) { | 1436 ElementsKind elements_kind) { |
| 1448 // Operations that require the key to be divided by two to be converted into | 1437 // Operations that require the key to be divided by two to be converted into |
| 1449 // an index cannot fold the scale operation into a load and need an extra | 1438 // an index cannot fold the scale operation into a load and need an extra |
| 1450 // temp register to do the work. | 1439 // temp register to do the work. |
| 1451 return key_representation.IsTagged() && | 1440 return key_representation.IsTagged() && |
| (...skipping 960 matching lines...) |
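[Editor's note] The comment above ExternalArrayOpRequiresTemp is easiest to see with the address arithmetic written out. A minimal standalone sketch follows, assuming the usual ia32 smi tagging where a tagged key stores index << 1; the helper name and the 1-byte element size are illustrative and not taken from this patch.

    #include <cstdint>

    // Hypothetical illustration only.  A tagged (smi) key already carries a
    // factor of two.  For 2-, 4- or 8-byte elements that factor folds into
    // the x86 addressing-mode scale, but a 1-byte external element would
    // need a scale of one half, which cannot be encoded, so the key must
    // first be shifted right into a scratch ("temp") register.
    static inline uintptr_t ExternalByteElementAddress(uintptr_t base,
                                                       intptr_t tagged_key) {
      intptr_t index = tagged_key >> 1;  // untag: smi value is index << 1
      return base + index;               // element size 1: no usable scale
    }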
| 2412 | 2401 |
| 2413 void MarkSpilledRegister(int allocation_index, LOperand* spill_operand); | 2402 void MarkSpilledRegister(int allocation_index, LOperand* spill_operand); |
| 2414 void MarkSpilledDoubleRegister(int allocation_index, | 2403 void MarkSpilledDoubleRegister(int allocation_index, |
| 2415 LOperand* spill_operand); | 2404 LOperand* spill_operand); |
| 2416 | 2405 |
| 2417 private: | 2406 private: |
| 2418 // Arrays of spill slot operands for registers with an assigned spill | 2407 // Arrays of spill slot operands for registers with an assigned spill |
| 2419 // slot, i.e., that must also be restored to the spill slot on OSR entry. | 2408 // slot, i.e., that must also be restored to the spill slot on OSR entry. |
| 2420 // NULL if the register has no assigned spill slot. Indexed by allocation | 2409 // NULL if the register has no assigned spill slot. Indexed by allocation |
| 2421 // index. | 2410 // index. |
| 2422 LOperand* register_spills_[Register::kMaxNumAllocatableRegisters]; | 2411 LOperand* register_spills_[Register::kNumAllocatableRegisters]; |
| 2423 LOperand* double_register_spills_[ | 2412 LOperand* double_register_spills_[DoubleRegister::kNumAllocatableRegisters]; |
| 2424 DoubleRegister::kMaxNumAllocatableRegisters]; | |
| 2425 }; | 2413 }; |
| 2426 | 2414 |
| 2427 | 2415 |
| 2428 class LStackCheck: public LTemplateInstruction<0, 1, 0> { | 2416 class LStackCheck: public LTemplateInstruction<0, 1, 0> { |
| 2429 public: | 2417 public: |
| 2430 explicit LStackCheck(LOperand* context) { | 2418 explicit LStackCheck(LOperand* context) { |
| 2431 inputs_[0] = context; | 2419 inputs_[0] = context; |
| 2432 } | 2420 } |
| 2433 | 2421 |
| 2434 LOperand* context() { return inputs_[0]; } | 2422 LOperand* context() { return inputs_[0]; } |
| (...skipping 143 matching lines...) |
| 2578 bool is_unused() const { return status_ == UNUSED; } | 2566 bool is_unused() const { return status_ == UNUSED; } |
| 2579 bool is_building() const { return status_ == BUILDING; } | 2567 bool is_building() const { return status_ == BUILDING; } |
| 2580 bool is_done() const { return status_ == DONE; } | 2568 bool is_done() const { return status_ == DONE; } |
| 2581 bool is_aborted() const { return status_ == ABORTED; } | 2569 bool is_aborted() const { return status_ == ABORTED; } |
| 2582 | 2570 |
| 2583 void Abort(const char* reason); | 2571 void Abort(const char* reason); |
| 2584 | 2572 |
| 2585 // Methods for getting operands for Use / Define / Temp. | 2573 // Methods for getting operands for Use / Define / Temp. |
| 2586 LUnallocated* ToUnallocated(Register reg); | 2574 LUnallocated* ToUnallocated(Register reg); |
| 2587 LUnallocated* ToUnallocated(XMMRegister reg); | 2575 LUnallocated* ToUnallocated(XMMRegister reg); |
| 2588 LUnallocated* ToUnallocated(X87TopOfStackRegister reg); | |
| 2589 | 2576 |
| 2590 // Methods for setting up define-use relationships. | 2577 // Methods for setting up define-use relationships. |
| 2591 MUST_USE_RESULT LOperand* Use(HValue* value, LUnallocated* operand); | 2578 MUST_USE_RESULT LOperand* Use(HValue* value, LUnallocated* operand); |
| 2592 MUST_USE_RESULT LOperand* UseFixed(HValue* value, Register fixed_register); | 2579 MUST_USE_RESULT LOperand* UseFixed(HValue* value, Register fixed_register); |
| 2593 MUST_USE_RESULT LOperand* UseFixedDouble(HValue* value, | 2580 MUST_USE_RESULT LOperand* UseFixedDouble(HValue* value, |
| 2594 XMMRegister fixed_register); | 2581 XMMRegister fixed_register); |
| 2595 | 2582 |
| 2596 // A value that is guaranteed to be allocated to a register. | 2583 // A value that is guaranteed to be allocated to a register. |
| 2597 // Operand created by UseRegister is guaranteed to be live until the end of | 2584 // Operand created by UseRegister is guaranteed to be live until the end of |
| 2598 // instruction. This means that the register allocator will not reuse its | 2585 // instruction. This means that the register allocator will not reuse its |
| (...skipping 40 matching lines...) |
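[Editor's note] The UseRegister comment above is about operand lifetime. A hedged sketch of the practical consequence, assuming the companion UseRegisterAtStart helper that appears elsewhere in this builder; the bit-not instruction is only a typical example and is not part of this change.

    // Hypothetical sketch.  An operand created by UseRegister stays live
    // until the end of the instruction, so the allocator cannot hand its
    // register to the result.  UseRegisterAtStart only pins the value at
    // the start, which is what lets DefineSameAsFirst reuse the register.
    LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
      LOperand* input = UseRegisterAtStart(instr->value());
      return DefineSameAsFirst(new(zone()) LBitNotI(input));
    }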
| 2639 LInstruction* DefineAsSpilled(LTemplateInstruction<1, I, T>* instr, | 2626 LInstruction* DefineAsSpilled(LTemplateInstruction<1, I, T>* instr, |
| 2640 int index); | 2627 int index); |
| 2641 template<int I, int T> | 2628 template<int I, int T> |
| 2642 LInstruction* DefineSameAsFirst(LTemplateInstruction<1, I, T>* instr); | 2629 LInstruction* DefineSameAsFirst(LTemplateInstruction<1, I, T>* instr); |
| 2643 template<int I, int T> | 2630 template<int I, int T> |
| 2644 LInstruction* DefineFixed(LTemplateInstruction<1, I, T>* instr, | 2631 LInstruction* DefineFixed(LTemplateInstruction<1, I, T>* instr, |
| 2645 Register reg); | 2632 Register reg); |
| 2646 template<int I, int T> | 2633 template<int I, int T> |
| 2647 LInstruction* DefineFixedDouble(LTemplateInstruction<1, I, T>* instr, | 2634 LInstruction* DefineFixedDouble(LTemplateInstruction<1, I, T>* instr, |
| 2648 XMMRegister reg); | 2635 XMMRegister reg); |
| 2649 template<int I, int T> | |
| 2650 LInstruction* DefineX87TOS(LTemplateInstruction<1, I, T>* instr); | |
| 2651 // Assigns an environment to an instruction. An instruction which can | 2636 // Assigns an environment to an instruction. An instruction which can |
| 2652 // deoptimize must have an environment. | 2637 // deoptimize must have an environment. |
| 2653 LInstruction* AssignEnvironment(LInstruction* instr); | 2638 LInstruction* AssignEnvironment(LInstruction* instr); |
| 2654 // Assigns a pointer map to an instruction. An instruction which can | 2639 // Assigns a pointer map to an instruction. An instruction which can |
| 2655 // trigger a GC or a lazy deoptimization must have a pointer map. | 2640 // trigger a GC or a lazy deoptimization must have a pointer map. |
| 2656 LInstruction* AssignPointerMap(LInstruction* instr); | 2641 LInstruction* AssignPointerMap(LInstruction* instr); |
| 2657 | 2642 |
| 2658 enum CanDeoptimize { CAN_DEOPTIMIZE_EAGERLY, CANNOT_DEOPTIMIZE_EAGERLY }; | 2643 enum CanDeoptimize { CAN_DEOPTIMIZE_EAGERLY, CANNOT_DEOPTIMIZE_EAGERLY }; |
| 2659 | 2644 |
| 2660 // Marks a call for the register allocator. Assigns a pointer map to | 2645 // Marks a call for the register allocator. Assigns a pointer map to |
| (...skipping 32 matching lines...) |
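[Editor's note] For orientation on the two comments above: an instruction that can deoptimize gets an environment, and one that can trigger GC or lazy deoptimization gets a pointer map, which MarkAsCall assigns as a side effect. A hedged sketch of how the helpers are typically chained in a DoXxx method; LSomeDeopt, LSomeCall and their hydrogen counterparts are made-up placeholders.

    // Hypothetical sketch, not code from this patch.
    LInstruction* LChunkBuilder::DoSomeDeopt(HSomeDeopt* instr) {
      LOperand* value = UseRegister(instr->value());
      LSomeDeopt* result = new(zone()) LSomeDeopt(value);
      return AssignEnvironment(DefineAsRegister(result));  // may deoptimize
    }

    LInstruction* LChunkBuilder::DoSomeCall(HSomeCall* instr) {
      LOperand* context = UseFixed(instr->context(), esi);
      LSomeCall* result = new(zone()) LSomeCall(context);
      // Calls can trigger GC / lazy deopt: MarkAsCall flags the instruction
      // for the register allocator and assigns the pointer map.
      return MarkAsCall(DefineFixed(result, eax), instr);
    }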
| 2693 | 2678 |
| 2694 DISALLOW_COPY_AND_ASSIGN(LChunkBuilder); | 2679 DISALLOW_COPY_AND_ASSIGN(LChunkBuilder); |
| 2695 }; | 2680 }; |
| 2696 | 2681 |
| 2697 #undef DECLARE_HYDROGEN_ACCESSOR | 2682 #undef DECLARE_HYDROGEN_ACCESSOR |
| 2698 #undef DECLARE_CONCRETE_INSTRUCTION | 2683 #undef DECLARE_CONCRETE_INSTRUCTION |
| 2699 | 2684 |
| 2700 } } // namespace v8::internal | 2685 } } // namespace v8::internal |
| 2701 | 2686 |
| 2702 #endif // V8_IA32_LITHIUM_IA32_H_ | 2687 #endif // V8_IA32_LITHIUM_IA32_H_ |