OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 647 matching lines...)
658 class GenericBinaryOpStub: public CodeStub { | 658 class GenericBinaryOpStub: public CodeStub { |
659 public: | 659 public: |
660 GenericBinaryOpStub(Token::Value op, | 660 GenericBinaryOpStub(Token::Value op, |
661 OverwriteMode mode, | 661 OverwriteMode mode, |
662 GenericBinaryFlags flags) | 662 GenericBinaryFlags flags) |
663 : op_(op), | 663 : op_(op), |
664 mode_(mode), | 664 mode_(mode), |
665 flags_(flags), | 665 flags_(flags), |
666 args_in_registers_(false), | 666 args_in_registers_(false), |
667 args_reversed_(false) { | 667 args_reversed_(false) { |
668 use_sse3_ = CpuFeatures::IsSupported(CpuFeatures::SSE3); | 668 use_sse3_ = CpuFeatures::IsSupported(SSE3); |
669 ASSERT(OpBits::is_valid(Token::NUM_TOKENS)); | 669 ASSERT(OpBits::is_valid(Token::NUM_TOKENS)); |
670 } | 670 } |
671 | 671 |
672 // Generate code to call the stub with the supplied arguments. This will add | 672 // Generate code to call the stub with the supplied arguments. This will add |
673 // code at the call site to prepare arguments either in registers or on the | 673 // code at the call site to prepare arguments either in registers or on the |
674 // stack together with the actual call. | 674 // stack together with the actual call. |
675 void GenerateCall(MacroAssembler* masm, Register left, Register right); | 675 void GenerateCall(MacroAssembler* masm, Register left, Register right); |
676 void GenerateCall(MacroAssembler* masm, Register left, Smi* right); | 676 void GenerateCall(MacroAssembler* masm, Register left, Smi* right); |
677 void GenerateCall(MacroAssembler* masm, Smi* left, Register right); | 677 void GenerateCall(MacroAssembler* masm, Smi* left, Register right); |
678 | 678 |
(...skipping 56 matching lines...)
735 void SetArgsReversed() { args_reversed_ = true; } | 735 void SetArgsReversed() { args_reversed_ = true; } |
736 bool HasSmiCodeInStub() { return (flags_ & NO_SMI_CODE_IN_STUB) == 0; } | 736 bool HasSmiCodeInStub() { return (flags_ & NO_SMI_CODE_IN_STUB) == 0; } |
737 bool HasArgumentsInRegisters() { return args_in_registers_; } | 737 bool HasArgumentsInRegisters() { return args_in_registers_; } |
738 bool HasArgumentsReversed() { return args_reversed_; } | 738 bool HasArgumentsReversed() { return args_reversed_; } |
739 }; | 739 }; |
740 | 740 |
741 | 741 |
742 } } // namespace v8::internal | 742 } } // namespace v8::internal |
743 | 743 |
744 #endif // V8_X64_CODEGEN_X64_H_ | 744 #endif // V8_X64_CODEGEN_X64_H_ |
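For context, a minimal usage sketch of the stub declared above, assuming a MacroAssembler* masm and operands already loaded into rax and rbx (hypothetical call site, not part of this change):

  // Emit an ADD stub call; per the comment on GenerateCall(), argument
  // setup in registers and the actual call are generated at the call site.
  GenericBinaryOpStub stub(Token::ADD, OVERWRITE_LEFT, NO_SMI_CODE_IN_STUB);
  stub.GenerateCall(masm, rax, rbx);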