| OLD | NEW | 
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #ifndef V8_COMPILER_PPC_INSTRUCTION_CODES_PPC_H_ | 5 #ifndef V8_COMPILER_PPC_INSTRUCTION_CODES_PPC_H_ | 
| 6 #define V8_COMPILER_PPC_INSTRUCTION_CODES_PPC_H_ | 6 #define V8_COMPILER_PPC_INSTRUCTION_CODES_PPC_H_ | 
| 7 | 7 | 
| 8 namespace v8 { | 8 namespace v8 { | 
| 9 namespace internal { | 9 namespace internal { | 
| 10 namespace compiler { | 10 namespace compiler { | 
| 11 | 11 | 
| 12 // PPC-specific opcodes that specify which assembly sequence to emit. | 12 // PPC-specific opcodes that specify which assembly sequence to emit. | 
| 13 // Most opcodes specify a single instruction. | 13 // Most opcodes specify a single instruction. | 
| 14 #define TARGET_ARCH_OPCODE_LIST(V) \ | 14 #define TARGET_ARCH_OPCODE_LIST(V) \ | 
| 15   V(PPC_And)                       \ | 15   V(PPC_And)                       \ | 
| 16   V(PPC_AndComplement)             \ | 16   V(PPC_AndComplement)             \ | 
| 17   V(PPC_Or)                        \ | 17   V(PPC_Or)                        \ | 
| 18   V(PPC_OrComplement)              \ | 18   V(PPC_OrComplement)              \ | 
| 19   V(PPC_Xor)                       \ | 19   V(PPC_Xor)                       \ | 
| 20   V(PPC_ShiftLeft32)               \ | 20   V(PPC_ShiftLeft32)               \ | 
| 21   V(PPC_ShiftLeft64)               \ | 21   V(PPC_ShiftLeft64)               \ | 
| 22   V(PPC_ShiftRight32)              \ | 22   V(PPC_ShiftRight32)              \ | 
| 23   V(PPC_ShiftRight64)              \ | 23   V(PPC_ShiftRight64)              \ | 
| 24   V(PPC_ShiftRightAlg32)           \ | 24   V(PPC_ShiftRightAlg32)           \ | 
| 25   V(PPC_ShiftRightAlg64)           \ | 25   V(PPC_ShiftRightAlg64)           \ | 
|  | 26   V(PPC_PairShiftLeft)             \ | 
| 26   V(PPC_RotRight32)                \ | 27   V(PPC_RotRight32)                \ | 
| 27   V(PPC_RotRight64)                \ | 28   V(PPC_RotRight64)                \ | 
| 28   V(PPC_Not)                       \ | 29   V(PPC_Not)                       \ | 
| 29   V(PPC_RotLeftAndMask32)          \ | 30   V(PPC_RotLeftAndMask32)          \ | 
| 30   V(PPC_RotLeftAndClear64)         \ | 31   V(PPC_RotLeftAndClear64)         \ | 
| 31   V(PPC_RotLeftAndClearLeft64)     \ | 32   V(PPC_RotLeftAndClearLeft64)     \ | 
| 32   V(PPC_RotLeftAndClearRight64)    \ | 33   V(PPC_RotLeftAndClearRight64)    \ | 
| 33   V(PPC_Add)                       \ | 34   V(PPC_Add)                       \ | 
| 34   V(PPC_AddWithOverflow32)         \ | 35   V(PPC_AddWithOverflow32)         \ | 
| 35   V(PPC_AddDouble)                 \ | 36   V(PPC_AddDouble)                 \ | 
| (...skipping 95 matching lines...) | |
| 131 // MRR = [register + register] | 132 // MRR = [register + register] | 
| 132 #define TARGET_ADDRESSING_MODE_LIST(V) \ | 133 #define TARGET_ADDRESSING_MODE_LIST(V) \ | 
| 133   V(MRI) /* [%r0 + K] */               \ | 134   V(MRI) /* [%r0 + K] */               \ | 
| 134   V(MRR) /* [%r0 + %r1] */ | 135   V(MRR) /* [%r0 + %r1] */ | 
| 135 | 136 | 
| 136 }  // namespace compiler | 137 }  // namespace compiler | 
| 137 }  // namespace internal | 138 }  // namespace internal | 
| 138 }  // namespace v8 | 139 }  // namespace v8 | 
| 139 | 140 | 
| 140 #endif  // V8_COMPILER_PPC_INSTRUCTION_CODES_PPC_H_ | 141 #endif  // V8_COMPILER_PPC_INSTRUCTION_CODES_PPC_H_ | 
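
For context on the pattern this change touches: `TARGET_ARCH_OPCODE_LIST` is an X-macro, so adding `V(PPC_PairShiftLeft)` makes the new opcode visible to every consumer that expands the list (in V8 the corresponding expansion happens in `src/compiler/instruction-codes.h`, which builds the `ArchOpcode` enum). Below is a minimal, illustrative sketch of how such a list is typically turned into an enum; the `DEMO_*` names are assumptions for illustration, not actual V8 identifiers.

```cpp
// Illustrative sketch only (not V8 source): an X-macro opcode list and a
// consumer that defines V to stamp out one enum entry per opcode.
#define DEMO_TARGET_ARCH_OPCODE_LIST(V) \
  V(PPC_And)                            \
  V(PPC_ShiftLeft32)                    \
  V(PPC_PairShiftLeft)

#define DECLARE_ARCH_OPCODE(Name) k##Name,
enum DemoArchOpcode {
  // Expands to: kPPC_And, kPPC_ShiftLeft32, kPPC_PairShiftLeft,
  DEMO_TARGET_ARCH_OPCODE_LIST(DECLARE_ARCH_OPCODE)
  kDemoArchOpcodeCount
};
#undef DECLARE_ARCH_OPCODE
```

Because every consumer re-expands the same list, a one-line addition like the one in this diff is enough to register the new opcode everywhere it is needed; the instruction selector and code generator then add their own handling for `kPPC_PairShiftLeft`.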