| OLD | NEW | 
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #ifndef V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ | 5 #ifndef V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ | 
| 6 #define V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ | 6 #define V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ | 
| 7 | 7 | 
| 8 namespace v8 { | 8 namespace v8 { | 
| 9 namespace internal { | 9 namespace internal { | 
| 10 namespace compiler { | 10 namespace compiler { | 
| (...skipping 133 matching lines...) | |
| 144   V(X64Poke)                       \ | 144   V(X64Poke)                       \ | 
| 145   V(X64StackCheck)                 \ | 145   V(X64StackCheck)                 \ | 
| 146   V(X64Xchgb)                      \ | 146   V(X64Xchgb)                      \ | 
| 147   V(X64Xchgw)                      \ | 147   V(X64Xchgw)                      \ | 
| 148   V(X64Xchgl)                      \ | 148   V(X64Xchgl)                      \ | 
| 149   V(X64Int32x4Splat)               \ | 149   V(X64Int32x4Splat)               \ | 
| 150   V(X64Int32x4ExtractLane)         \ | 150   V(X64Int32x4ExtractLane)         \ | 
| 151   V(X64Int32x4ReplaceLane)         \ | 151   V(X64Int32x4ReplaceLane)         \ | 
| 152   V(X64Int32x4Add)                 \ | 152   V(X64Int32x4Add)                 \ | 
| 153   V(X64Int32x4Sub)                 \ | 153   V(X64Int32x4Sub)                 \ | 
|  | 154   V(X64Int32x4Mul)                 \ | 
|  | 155   V(X64Int32x4Min)                 \ | 
|  | 156   V(X64Int32x4Max)                 \ | 
|  | 157   V(X64Int32x4Equal)               \ | 
|  | 158   V(X64Int32x4NotEqual)            \ | 
|  | 159   V(X64Int32x4ShiftLeftByScalar)   \ | 
|  | 160   V(X64Int32x4ShiftRightByScalar)  \ | 
|  | 161   V(X64Uint32x4ShiftRightByScalar) \ | 
|  | 162   V(X64Uint32x4Min)                \ | 
|  | 163   V(X64Uint32x4Max)                \ | 
|  | 164   V(X64Simd32x4Select)             \ | 
| 154   V(X64Simd128Zero) | 165   V(X64Simd128Zero) | 
| 155 | 166 | 
| 156 // Addressing modes represent the "shape" of inputs to an instruction. | 167 // Addressing modes represent the "shape" of inputs to an instruction. | 
| 157 // Many instructions support multiple addressing modes. Addressing modes | 168 // Many instructions support multiple addressing modes. Addressing modes | 
| 158 // are encoded into the InstructionCode of the instruction and tell the | 169 // are encoded into the InstructionCode of the instruction and tell the | 
| 159 // code generator after register allocation which assembler method to call. | 170 // code generator after register allocation which assembler method to call. | 
| 160 // | 171 // | 
| 161 // We use the following local notation for addressing modes: | 172 // We use the following local notation for addressing modes: | 
| 162 // | 173 // | 
| 163 // M = memory operand | 174 // M = memory operand | 
| (...skipping 22 matching lines...) | |
| 186   V(M8I)  /* [      %r2*8 + K] */      \ | 197   V(M8I)  /* [      %r2*8 + K] */      \ | 
| 187   V(Root) /* [%root       + K] */ | 198   V(Root) /* [%root       + K] */ | 
| 188 | 199 | 
| 189 enum X64MemoryProtection { kUnprotected = 0, kProtected = 1 }; | 200 enum X64MemoryProtection { kUnprotected = 0, kProtected = 1 }; | 
| 190 | 201 | 
| 191 }  // namespace compiler | 202 }  // namespace compiler | 
| 192 }  // namespace internal | 203 }  // namespace internal | 
| 193 }  // namespace v8 | 204 }  // namespace v8 | 
| 194 | 205 | 
| 195 #endif  // V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ | 206 #endif  // V8_COMPILER_X64_INSTRUCTION_CODES_X64_H_ | 
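For context, the `TARGET_ARCH_OPCODE_LIST(V)` macro extended in this change is an X-macro: each `V(Name)` entry is expanded once per use site, so adding the new SIMD opcodes here is enough to register them everywhere the list is consumed. Below is a minimal sketch of the usual expansion pattern, with an abbreviated list; the `DECLARE_ARCH_OPCODE` helper and `ArchOpcode` enum names are illustrative, since the real expansion lives in V8's shared instruction-codes header rather than in this file.

```cpp
// Sketch of the X-macro pattern behind TARGET_ARCH_OPCODE_LIST.
// The list is abbreviated and the helper/enum names are illustrative.
#define TARGET_ARCH_OPCODE_LIST(V) \
  V(X64Int32x4Splat)               \
  V(X64Int32x4Add)                 \
  V(X64Simd128Zero)

// Each V(Name) entry becomes one enumerator of the form kName.
#define DECLARE_ARCH_OPCODE(Name) k##Name,
enum ArchOpcode {
  TARGET_ARCH_OPCODE_LIST(DECLARE_ARCH_OPCODE)
  // Expands to: kX64Int32x4Splat, kX64Int32x4Add, kX64Simd128Zero,
};
#undef DECLARE_ARCH_OPCODE
```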