OLD | NEW |
1 /* | 1 /* |
2 * x86 identifier recognition and instruction handling | 2 * x86 identifier recognition and instruction handling |
3 * | 3 * |
4 * Copyright (C) 2002-2007 Peter Johnson | 4 * Copyright (C) 2002-2007 Peter Johnson |
5 * | 5 * |
6 * Redistribution and use in source and binary forms, with or without | 6 * Redistribution and use in source and binary forms, with or without |
7 * modification, are permitted provided that the following conditions | 7 * modification, are permitted provided that the following conditions |
8 * are met: | 8 * are met: |
9 * 1. Redistributions of source code must retain the above copyright | 9 * 1. Redistributions of source code must retain the above copyright |
10 * notice, this list of conditions and the following disclaimer. | 10 * notice, this list of conditions and the following disclaimer. |
11 * 2. Redistributions in binary form must reproduce the above copyright | 11 * 2. Redistributions in binary form must reproduce the above copyright |
12 * notice, this list of conditions and the following disclaimer in the | 12 * notice, this list of conditions and the following disclaimer in the |
13 * documentation and/or other materials provided with the distribution. | 13 * documentation and/or other materials provided with the distribution. |
14 * | 14 * |
15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND OTHER CONTRIBUTORS ``AS IS'' | 15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND OTHER CONTRIBUTORS ``AS IS'' |
16 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | 16 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE | 17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE |
18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS BE | 18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS BE |
19 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | 19 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
20 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | 20 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
21 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | 21 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
22 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | 22 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
23 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | 23 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
24 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | 24 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
25 * POSSIBILITY OF SUCH DAMAGE. | 25 * POSSIBILITY OF SUCH DAMAGE. |
26 */ | 26 */ |
27 #include <ctype.h> | 27 #include <ctype.h> |
28 #include <util.h> | 28 #include <util.h> |
29 RCSID("$Id: x86id.c 2130 2008-10-07 05:38:11Z peter $"); | 29 RCSID("$Id: x86id.c 2279 2010-01-19 07:57:43Z peter $"); |
30 | 30 |
31 #include <libyasm.h> | 31 #include <libyasm.h> |
32 #include <libyasm/phash.h> | 32 #include <libyasm/phash.h> |
33 | 33 |
34 #include "modules/arch/x86/x86arch.h" | 34 #include "modules/arch/x86/x86arch.h" |
35 | 35 |
36 | 36 |
37 static const char *cpu_find_reverse(unsigned int cpu0, unsigned int cpu1, | 37 static const char *cpu_find_reverse(unsigned int cpu0, unsigned int cpu1, |
38 unsigned int cpu2); | 38 unsigned int cpu2); |
39 | 39 |
40 /* Opcode modifiers. */ | 40 /* Opcode modifiers. */ |
41 #define MOD_Gap 0 /* Eats a parameter / does nothing */ | 41 #define MOD_Gap 0 /* Eats a parameter / does nothing */ |
42 #define MOD_PreAdd 1 /* Parameter adds to "special" prefix */ | 42 #define MOD_PreAdd 1 /* Parameter adds to "special" prefix */ |
43 #define MOD_Op0Add 2 /* Parameter adds to opcode byte 0 */ | 43 #define MOD_Op0Add 2 /* Parameter adds to opcode byte 0 */ |
44 #define MOD_Op1Add 3 /* Parameter adds to opcode byte 1 */ | 44 #define MOD_Op1Add 3 /* Parameter adds to opcode byte 1 */ |
45 #define MOD_Op2Add 4 /* Parameter adds to opcode byte 2 */ | 45 #define MOD_Op2Add 4 /* Parameter adds to opcode byte 2 */ |
46 #define MOD_SpAdd 5 /* Parameter adds to "spare" value */ | 46 #define MOD_SpAdd 5 /* Parameter adds to "spare" value */ |
47 #define MOD_OpSizeR 6 /* Parameter replaces opersize */ | 47 #define MOD_OpSizeR 6 /* Parameter replaces opersize */ |
48 #define MOD_Imm8 7 /* Parameter is included as immediate byte */ | 48 #define MOD_Imm8 7 /* Parameter is included as immediate byte */ |
49 #define MOD_AdSizeR 8 /* Parameter replaces addrsize (jmp only) */ | 49 #define MOD_AdSizeR 8 /* Parameter replaces addrsize (jmp only) */ |
50 #define MOD_DOpS64R 9 /* Parameter replaces default 64-bit opersize */ | 50 #define MOD_DOpS64R 9 /* Parameter replaces default 64-bit opersize */ |
52 #define MOD_Op1AddSp 10 /* Parameter is added as "spare" to opcode byte 1 */ | 51 #define MOD_Op1AddSp 10 /* Parameter is added as "spare" to opcode byte 1 */ |
52 #define MOD_SetVEX 11 /* Parameter replaces internal VEX prefix value */ | 52 #define MOD_SetVEX 11 /* Parameter replaces internal VEX prefix value */ |
53 | 53 |
54 /* GAS suffix flags for instructions */ | 54 /* GAS suffix flags for instructions */ |
55 enum x86_gas_suffix_flags { | 55 enum x86_gas_suffix_flags { |
56 NONE = 0, | 56 SUF_Z = 1<<0, /* no suffix */ |
57 SUF_B = 1<<0, | 57 SUF_B = 1<<1, |
58 SUF_W = 1<<1, | 58 SUF_W = 1<<2, |
59 SUF_L = 1<<2, | 59 SUF_L = 1<<3, |
60 SUF_Q = 1<<3, | 60 SUF_Q = 1<<4, |
61 SUF_S = 1<<4, | 61 SUF_S = 1<<5, |
62 SUF_MASK = SUF_B|SUF_W|SUF_L|SUF_Q|SUF_S, | 62 SUF_MASK = SUF_Z|SUF_B|SUF_W|SUF_L|SUF_Q|SUF_S, |
63 | 63 |
64 /* Flags only used in x86_insn_info */ | 64 /* Flags only used in x86_insn_info */ |
65 GAS_ONLY = 1<<5, /* Only available in GAS mode */ | 65 GAS_ONLY = 1<<6, /* Only available in GAS mode */ |
66 GAS_ILLEGAL = 1<<6, /* Illegal in GAS mode */ | 66 GAS_ILLEGAL = 1<<7, /* Illegal in GAS mode */ |
67 GAS_NO_REV = 1<<7, /* Don't reverse operands in GAS mode */ | 67 GAS_NO_REV = 1<<8 /* Don't reverse operands in GAS mode */ |
68 | |
69 /* Flags only used in insnprefix_parse_data */ | |
70 WEAK = 1<<5 /* Relaxed operand mode for GAS */ | |
71 }; | 68 }; |
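The renumbered bits above keep GAS suffix matching a plain bitwise test: the parser records one SUF_* bit for the mnemonic's trailing size letter (or SUF_Z when there is none), and an instruction form matches when its table entry advertises that bit. A minimal sketch of that convention follows; the helper is illustrative only, since the real mapping comes from the generated insnprefix tables.

    /* Illustrative: map a GAS size suffix character to its SUF_* bit. */
    static unsigned int suffix_to_flag(char suffix)
    {
        switch (suffix) {
            case 'b': return SUF_B;
            case 'w': return SUF_W;
            case 'l': return SUF_L;
            case 'q': return SUF_Q;
            case 's': return SUF_S;
            default:  return SUF_Z;   /* no suffix present */
        }
    }

    /* A form matches when it advertises the parsed suffix bit:
     *   if (((suffix & SUF_MASK) & (gas_flags & SUF_MASK)) != 0) ...match...
     */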
72 | 69 |
73 /* Miscellaneous flag tests for instructions */ | 70 /* Miscellaneous flag tests for instructions */ |
74 enum x86_misc_flags { | 71 enum x86_misc_flags { |
75 /* These are tested against BITS==64. */ | 72 /* These are tested against BITS==64. */ |
76 ONLY_64 = 1<<0, /* Only available in 64-bit mode */ | 73 ONLY_64 = 1<<0, /* Only available in 64-bit mode */ |
77 NOT_64 = 1<<1, /* Not available (invalid) in 64-bit mode */ | 74 NOT_64 = 1<<1, /* Not available (invalid) in 64-bit mode */ |
78 /* These are tested against whether the base instruction is an AVX one. */ | 75 /* These are tested against whether the base instruction is an AVX one. */ |
79 ONLY_AVX = 1<<2, /* Only available in AVX instruction */ | 76 ONLY_AVX = 1<<2, /* Only available in AVX instruction */ |
80 NOT_AVX = 1<<3 /* Not available (invalid) in AVX instruction */ | 77 NOT_AVX = 1<<3 /* Not available (invalid) in AVX instruction */ |
(...skipping 26 matching lines...)
107 */ | 104 */ |
108 OPT_MemOffs = 21, | 105 OPT_MemOffs = 21, |
109 OPT_Imm1 = 22, /* immediate, value=1 (for special-case shift) */ | 106 OPT_Imm1 = 22, /* immediate, value=1 (for special-case shift) */ |
110 /* immediate, does not contain SEG:OFF (for jmp/call) */ | 107 /* immediate, does not contain SEG:OFF (for jmp/call) */ |
111 OPT_ImmNotSegOff = 23, | 108 OPT_ImmNotSegOff = 23, |
112 OPT_XMM0 = 24, /* XMM0 */ | 109 OPT_XMM0 = 24, /* XMM0 */ |
113 /* AX/EAX/RAX memory operand only (EA) [special case for SVM opcodes] | 110 /* AX/EAX/RAX memory operand only (EA) [special case for SVM opcodes] |
114 */ | 111 */ |
115 OPT_MemrAX = 25, | 112 OPT_MemrAX = 25, |
116 /* EAX memory operand only (EA) [special case for SVM skinit opcode] */ | 113 /* EAX memory operand only (EA) [special case for SVM skinit opcode] */ |
117 OPT_MemEAX = 26, | 114 OPT_MemEAX = 26 |
118 /* SIMDReg with value equal to operand 0 SIMDReg */ | |
119 OPT_SIMDRegMatch0 = 27 | |
120 }; | 115 }; |
121 | 116 |
122 enum x86_operand_size { | 117 enum x86_operand_size { |
123 /* any size acceptable/no size spec acceptable (dep. on strict) */ | 118 /* any size acceptable/no size spec acceptable (dep. on strict) */ |
124 OPS_Any = 0, | 119 OPS_Any = 0, |
125 /* 8/16/32/64/80/128/256 bits (from user or reg size) */ | 120 /* 8/16/32/64/80/128/256 bits (from user or reg size) */ |
126 OPS_8 = 1, | 121 OPS_8 = 1, |
127 OPS_16 = 2, | 122 OPS_16 = 2, |
128 OPS_32 = 3, | 123 OPS_32 = 3, |
129 OPS_64 = 4, | 124 OPS_64 = 4, |
(...skipping 27 matching lines...)
157 */ | 152 */ |
158 OPA_SpareEA = 7, | 153 OPA_SpareEA = 7, |
159 /* relative jump (outputs a jmp instead of normal insn) */ | 154 /* relative jump (outputs a jmp instead of normal insn) */ |
160 OPA_JmpRel = 8, | 155 OPA_JmpRel = 8, |
161 /* operand size goes into address size (jmp only) */ | 156 /* operand size goes into address size (jmp only) */ |
162 OPA_AdSizeR = 9, | 157 OPA_AdSizeR = 9, |
163 /* far jump (outputs a farjmp instead of normal insn) */ | 158 /* far jump (outputs a farjmp instead of normal insn) */ |
164 OPA_JmpFar = 10, | 159 OPA_JmpFar = 10, |
165 /* ea operand only sets address size (no actual ea field) */ | 160 /* ea operand only sets address size (no actual ea field) */ |
166 OPA_AdSizeEA = 11, | 161 OPA_AdSizeEA = 11, |
167 OPA_DREX = 12, /* operand data goes into DREX "dest" field */ | 162 OPA_VEX = 12, /* operand data goes into VEX/XOP "vvvv" field */ |
168 OPA_VEX = 13, /* operand data goes into VEX "vvvv" field */ | 163 /* operand data goes into BOTH VEX/XOP "vvvv" field and ea field */ |
169 /* operand data goes into BOTH VEX "vvvv" field and ea field */ | 164 OPA_EAVEX = 13, |
170 OPA_EAVEX = 14, | 165 /* operand data goes into BOTH VEX/XOP "vvvv" field and spare field */ |
171 /* operand data goes into BOTH VEX "vvvv" field and spare field */ | 166 OPA_SpareVEX = 14, |
172 OPA_SpareVEX = 15, | |
173 /* operand data goes into upper 4 bits of immediate byte (VEX is4 field) */ | 167 /* operand data goes into upper 4 bits of immediate byte (VEX is4 field) */ |
174 OPA_VEXImmSrc = 16, | 168 OPA_VEXImmSrc = 15, |
175 /* operand data goes into bottom 4 bits of immediate byte | 169 /* operand data goes into bottom 4 bits of immediate byte |
176 * (currently only VEX imz2 field) | 170 * (currently only VEX imz2 field) |
177 */ | 171 */ |
178 OPA_VEXImm = 17 | 172 OPA_VEXImm = 16 |
179 }; | 173 }; |
180 | 174 |
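The last two actions in the list above pack a register number into the trailing immediate byte (the VEX "is4" and "imz2" fields). A standalone sketch of that packing, assuming the register number and the user's low-nibble value are already known; it is illustrative only and not the actual yasm expression handling, which routes the value through the finalized immediate.

    /* Sketch: OPA_VEXImmSrc places the source register in the top nibble of
     * the trailing immediate byte; OPA_VEXImm keeps user data in the low
     * nibble. */
    static unsigned char pack_is4(unsigned char reg, unsigned char low_nibble)
    {
        return (unsigned char)(((reg & 0x0F) << 4) | (low_nibble & 0x0F));
    }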
181 enum x86_operand_post_action { | 175 enum x86_operand_post_action { |
182 OPAP_None = 0, | 176 OPAP_None = 0, |
183 /* sign-extended imm8 that could expand to a large imm16/32 */ | 177 /* sign-extended imm8 that could expand to a large imm16/32 */ |
184 OPAP_SImm8 = 1, | 178 OPAP_SImm8 = 1, |
185 /* could become a short opcode mov with bits=64 and a32 prefix */ | 179 /* could become a short opcode mov with bits=64 and a32 prefix */ |
186 OPAP_ShortMov = 2, | 180 OPAP_ShortMov = 2, |
187 /* forced 16-bit address size (override ignored, no prefix) */ | 181 /* forced 16-bit address size (override ignored, no prefix) */ |
188 OPAP_A16 = 3, | 182 OPAP_A16 = 3, |
(...skipping 40 matching lines...)
229 * parse-time due to possibly dependent expressions. For these, some | 223 * parse-time due to possibly dependent expressions. For these, some |
230 * additional data (stored in the second byte of the opcode with a | 224 * additional data (stored in the second byte of the opcode with a |
231 * one-byte opcode) is passed to later stages of the assembler with | 225 * one-byte opcode) is passed to later stages of the assembler with |
232 * flags set to indicate postponed actions. | 226 * flags set to indicate postponed actions. |
233 */ | 227 */ |
234 unsigned int post_action:3; | 228 unsigned int post_action:3; |
235 } x86_info_operand; | 229 } x86_info_operand; |
236 | 230 |
237 typedef struct x86_insn_info { | 231 typedef struct x86_insn_info { |
238 /* GAS suffix flags */ | 232 /* GAS suffix flags */ |
239 unsigned int gas_flags:8; /* Enabled for these GAS suffixes */ | 233 unsigned int gas_flags:9; /* Enabled for these GAS suffixes */ |
240 | 234 |
241 /* Tests against BITS==64 and AVX */ | 235 /* Tests against BITS==64, AVX, and XOP */ |
242 unsigned int misc_flags:6; | 236 unsigned int misc_flags:5; |
243 | 237 |
244 /* The CPU feature flags needed to execute this instruction. This is OR'ed | 238 /* The CPU feature flags needed to execute this instruction. This is OR'ed |
245 * with arch-specific data[2]. This combined value is compared with | 239 * with arch-specific data[2]. This combined value is compared with |
246 * cpu_enabled to see if all bits set here are set in cpu_enabled--if so, | 240 * cpu_enabled to see if all bits set here are set in cpu_enabled--if so, |
247 * the instruction is available on this CPU. | 241 * the instruction is available on this CPU. |
248 */ | 242 */ |
249 unsigned int cpu0:6; | 243 unsigned int cpu0:6; |
250 unsigned int cpu1:6; | 244 unsigned int cpu1:6; |
251 unsigned int cpu2:6; | 245 unsigned int cpu2:6; |
252 | 246 |
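The cpu0/cpu1/cpu2 check described above is an "all required bits present" test against the currently enabled CPU feature set. A simplified standalone sketch of that test; it models the feature set as a single machine word, and the helper name and single-word representation are illustrative simplifications rather than the yasm API.

    /* Simplified model of the availability test: every feature bit named by
     * the instruction form must already be enabled for the active CPU. */
    static int cpu_ok(unsigned long cpu_enabled_mask,
                      unsigned int cpu0, unsigned int cpu1, unsigned int cpu2)
    {
        unsigned long required = (1UL << cpu0) | (1UL << cpu1) | (1UL << cpu2);
        return (cpu_enabled_mask & required) == required;
    }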
(...skipping 17 matching lines...)
270 * to be *after* the F2/F3/66 "prefix"). | 264 * to be *after* the F2/F3/66 "prefix"). |
271 * (0=no special prefix) | 265 * (0=no special prefix) |
272 * 0xC0 - 0xCF indicate a VEX prefix, with the four LSBs holding "WLpp": | 266 * 0xC0 - 0xCF indicate a VEX prefix, with the four LSBs holding "WLpp": |
273 * W: VEX.W field (meaning depends on opcode) | 267 * W: VEX.W field (meaning depends on opcode) |
274 * L: 0=128-bit, 1=256-bit | 268 * L: 0=128-bit, 1=256-bit |
275 * pp: SIMD prefix designation: | 269 * pp: SIMD prefix designation: |
276 * 00: None | 270 * 00: None |
277 * 01: 66 | 271 * 01: 66 |
278 * 10: F3 | 272 * 10: F3 |
279 * 11: F2 | 273 * 11: F2 |
| 274 * 0x80 - 0x8F indicate a XOP prefix, with the four LSBs holding "WLpp": |
| 275 * same meanings as VEX prefix. |
280 */ | 276 */ |
281 unsigned char special_prefix; | 277 unsigned char special_prefix; |
282 | 278 |
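The 0xC0-0xCF and 0x80-0x8F encodings above pack an entire VEX/XOP prefix request into this one byte. A small standalone sketch of decoding that convention; the function is illustrative only, since the real code keeps the packed value and only splits it when the final prefix bytes are built later in this file.

    /* Sketch: unpack the one-byte special_prefix convention documented above.
     * Bit 3 = W, bit 2 = L, bits 1-0 = pp; 0x8x selects XOP, 0xCx selects VEX. */
    static void decode_special_prefix(unsigned char sp, int *is_xop,
                                      int *W, int *L, int *pp)
    {
        *is_xop = ((sp & 0xF0) == 0x80);
        *W  = (sp >> 3) & 1;          /* VEX.W / XOP.W */
        *L  = (sp >> 2) & 1;          /* 0 = 128-bit, 1 = 256-bit */
        *pp =  sp       & 3;          /* 00 = none, 01 = 66, 10 = F3, 11 = F2 */
    }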
283 /* The DREX base byte value (almost). The only bit kept from this | |
284 * value is the OC0 bit (0x08). The MSB (0x80) of this value indicates | |
285 * if the DREX byte needs to be present in the instruction. | |
286 */ | |
287 #define NEED_DREX_MASK 0x80 | |
288 #define DREX_OC0_MASK 0x08 | |
289 unsigned char drex_oc0; | |
290 | |
291 /* The length of the basic opcode */ | 279 /* The length of the basic opcode */ |
292 unsigned char opcode_len; | 280 unsigned char opcode_len; |
293 | 281 |
294 /* The basic 1-3 byte opcode (not including the special instruction | 282 /* The basic 1-3 byte opcode (not including the special instruction |
295 * prefix). | 283 * prefix). |
296 */ | 284 */ |
297 unsigned char opcode[3]; | 285 unsigned char opcode[3]; |
298 | 286 |
299 /* The 3-bit "spare" value (extended opcode) for the R/M byte field */ | 287 /* The 3-bit "spare" value (extended opcode) for the R/M byte field */ |
300 unsigned char spare; | 288 unsigned char spare; |
(...skipping 19 matching lines...)
320 /* Modifier data */ | 308 /* Modifier data */ |
321 unsigned char mod_data[3]; | 309 unsigned char mod_data[3]; |
322 | 310 |
323 /* Number of elements in the instruction parse group */ | 311 /* Number of elements in the instruction parse group */ |
324 unsigned int num_info:8; | 312 unsigned int num_info:8; |
325 | 313 |
326 /* BITS setting active at the time of parsing the instruction */ | 314 /* BITS setting active at the time of parsing the instruction */ |
327 unsigned int mode_bits:8; | 315 unsigned int mode_bits:8; |
328 | 316 |
329 /* Suffix flags */ | 317 /* Suffix flags */ |
330 unsigned int suffix:8; | 318 unsigned int suffix:9; |
331 | 319 |
332 /* Tests against BITS==64 and AVX */ | 320 /* Tests against BITS==64 and AVX */ |
333 unsigned int misc_flags:6; | 321 unsigned int misc_flags:5; |
334 | 322 |
335 /* Parser enabled at the time of parsing the instruction */ | 323 /* Parser enabled at the time of parsing the instruction */ |
336 unsigned int parser:2; | 324 unsigned int parser:2; |
337 | 325 |
338 /* Strict forced setting at the time of parsing the instruction */ | 326 /* Strict forced setting at the time of parsing the instruction */ |
339 unsigned int force_strict:1; | 327 unsigned int force_strict:1; |
340 | 328 |
341 /* Default rel setting at the time of parsing the instruction */ | 329 /* Default rel setting at the time of parsing the instruction */ |
342 unsigned int default_rel:1; | 330 unsigned int default_rel:1; |
343 } x86_id_insn; | 331 } x86_id_insn; |
(...skipping 43 matching lines...)
387 op->type = YASM_INSN__OPERAND_REG; | 375 op->type = YASM_INSN__OPERAND_REG; |
388 op = yasm_insn_op_next(op); | 376 op = yasm_insn_op_next(op); |
389 } | 377 } |
390 } | 378 } |
391 | 379 |
392 static void | 380 static void |
393 x86_finalize_jmpfar(yasm_bytecode *bc, yasm_bytecode *prev_bc, | 381 x86_finalize_jmpfar(yasm_bytecode *bc, yasm_bytecode *prev_bc, |
394 const x86_insn_info *info) | 382 const x86_insn_info *info) |
395 { | 383 { |
396 x86_id_insn *id_insn = (x86_id_insn *)bc->contents; | 384 x86_id_insn *id_insn = (x86_id_insn *)bc->contents; |
| 385 unsigned char *mod_data = id_insn->mod_data; |
397 unsigned int mode_bits = id_insn->mode_bits; | 386 unsigned int mode_bits = id_insn->mode_bits; |
398 x86_jmpfar *jmpfar; | 387 x86_jmpfar *jmpfar; |
399 yasm_insn_operand *op; | 388 yasm_insn_operand *op; |
| 389 unsigned int i; |
400 | 390 |
401 jmpfar = yasm_xmalloc(sizeof(x86_jmpfar)); | 391 jmpfar = yasm_xmalloc(sizeof(x86_jmpfar)); |
402 x86_finalize_common(&jmpfar->common, info, mode_bits); | 392 x86_finalize_common(&jmpfar->common, info, mode_bits); |
403 x86_finalize_opcode(&jmpfar->opcode, info); | 393 x86_finalize_opcode(&jmpfar->opcode, info); |
404 | 394 |
405 op = yasm_insn_ops_first(&id_insn->insn); | 395 op = yasm_insn_ops_first(&id_insn->insn); |
406 | 396 |
407 if (op->type == YASM_INSN__OPERAND_IMM && op->seg) { | 397 if (op->type == YASM_INSN__OPERAND_IMM && op->seg) { |
408 /* SEG:OFF */ | 398 /* SEG:OFF */ |
409 if (yasm_value_finalize_expr(&jmpfar->segment, op->seg, prev_bc, 16)) | 399 if (yasm_value_finalize_expr(&jmpfar->segment, op->seg, prev_bc, 16)) |
410 yasm_error_set(YASM_ERROR_TOO_COMPLEX, | 400 yasm_error_set(YASM_ERROR_TOO_COMPLEX, |
411 N_("jump target segment too complex")); | 401 N_("jump target segment too complex")); |
412 if (yasm_value_finalize_expr(&jmpfar->offset, op->data.val, prev_bc, | 402 if (yasm_value_finalize_expr(&jmpfar->offset, op->data.val, prev_bc, |
413 0)) | 403 0)) |
414 yasm_error_set(YASM_ERROR_TOO_COMPLEX, | 404 yasm_error_set(YASM_ERROR_TOO_COMPLEX, |
415 N_("jump target offset too complex")); | 405 N_("jump target offset too complex")); |
416 } else if (op->targetmod == X86_FAR) { | 406 } else if (op->targetmod == X86_FAR) { |
417 /* "FAR imm" target needs to become "seg imm:imm". */ | 407 /* "FAR imm" target needs to become "seg imm:imm". */ |
418 yasm_expr *e = yasm_expr_create_branch(YASM_EXPR_SEG, | 408 yasm_expr *e = yasm_expr_create_branch(YASM_EXPR_SEG, |
419 yasm_expr_copy(op->data.val), | 409 yasm_expr_copy(op->data.val), |
420 op->data.val->line); | 410 op->data.val->line); |
421 if (yasm_value_finalize_expr(&jmpfar->offset, op->data.val, prev_bc, 0) | 411 if (yasm_value_finalize_expr(&jmpfar->offset, op->data.val, prev_bc, 0) |
422 || yasm_value_finalize_expr(&jmpfar->segment, e, prev_bc, 16)) | 412 || yasm_value_finalize_expr(&jmpfar->segment, e, prev_bc, 16)) |
423 yasm_error_set(YASM_ERROR_TOO_COMPLEX, | 413 yasm_error_set(YASM_ERROR_TOO_COMPLEX, |
424 N_("jump target expression too complex")); | 414 N_("jump target expression too complex")); |
| 415 } else if (yasm_insn_op_next(op)) { |
| 416 /* Two operand form (gas) */ |
| 417 yasm_insn_operand *op2 = yasm_insn_op_next(op); |
| 418 if (yasm_value_finalize_expr(&jmpfar->segment, op->data.val, prev_bc, |
| 419 16)) |
| 420 yasm_error_set(YASM_ERROR_TOO_COMPLEX, |
| 421 N_("jump target segment too complex")); |
| 422 if (yasm_value_finalize_expr(&jmpfar->offset, op2->data.val, prev_bc, |
| 423 0)) |
| 424 yasm_error_set(YASM_ERROR_TOO_COMPLEX, |
| 425 N_("jump target offset too complex")); |
| 426 if (op2->size == OPS_BITS) |
| 427 jmpfar->common.opersize = (unsigned char)mode_bits; |
425 } else | 428 } else |
426 yasm_internal_error(N_("didn't get FAR expression in jmpfar")); | 429 yasm_internal_error(N_("didn't get FAR expression in jmpfar")); |
427 | 430 |
| 431 /* Apply modifiers */ |
| 432 for (i=0; i<NELEMS(info->modifiers); i++) { |
| 433 switch (info->modifiers[i]) { |
| 434 case MOD_Gap: |
| 435 break; |
| 436 case MOD_Op0Add: |
| 437 jmpfar->opcode.opcode[0] += mod_data[i]; |
| 438 break; |
| 439 case MOD_Op1Add: |
| 440 jmpfar->opcode.opcode[1] += mod_data[i]; |
| 441 break; |
| 442 case MOD_Op2Add: |
| 443 jmpfar->opcode.opcode[2] += mod_data[i]; |
| 444 break; |
| 445 case MOD_Op1AddSp: |
| 446 jmpfar->opcode.opcode[1] += mod_data[i]<<3; |
| 447 break; |
| 448 default: |
| 449 break; |
| 450 } |
| 451 } |
| 452 |
428 yasm_x86__bc_apply_prefixes((x86_common *)jmpfar, NULL, | 453 yasm_x86__bc_apply_prefixes((x86_common *)jmpfar, NULL, |
429 info->def_opersize_64, | 454 info->def_opersize_64, |
430 id_insn->insn.num_prefixes, | 455 id_insn->insn.num_prefixes, |
431 id_insn->insn.prefixes); | 456 id_insn->insn.prefixes); |
432 | 457 |
433 x86_id_insn_clear_operands(id_insn); | 458 x86_id_insn_clear_operands(id_insn); |
434 | 459 |
435 /* Transform the bytecode */ | 460 /* Transform the bytecode */ |
436 yasm_x86__bc_transform_jmpfar(bc, jmpfar); | 461 yasm_x86__bc_transform_jmpfar(bc, jmpfar); |
437 } | 462 } |
(...skipping 169 matching lines...)
607 if ((id_insn->misc_flags & ONLY_AVX) && (misc_flags & NOT_AVX)) | 632 if ((id_insn->misc_flags & ONLY_AVX) && (misc_flags & NOT_AVX)) |
608 continue; | 633 continue; |
609 | 634 |
610 /* Match parser mode */ | 635 /* Match parser mode */ |
611 if ((gas_flags & GAS_ONLY) && id_insn->parser != X86_PARSER_GAS) | 636 if ((gas_flags & GAS_ONLY) && id_insn->parser != X86_PARSER_GAS) |
612 continue; | 637 continue; |
613 if ((gas_flags & GAS_ILLEGAL) && id_insn->parser == X86_PARSER_GAS) | 638 if ((gas_flags & GAS_ILLEGAL) && id_insn->parser == X86_PARSER_GAS) |
614 continue; | 639 continue; |
615 | 640 |
616 /* Match suffix (if required) */ | 641 /* Match suffix (if required) */ |
617 if (suffix != 0 && suffix != WEAK | 642 if (id_insn->parser == X86_PARSER_GAS |
618 && ((suffix & SUF_MASK) & (gas_flags & SUF_MASK)) == 0) | 643 && ((suffix & SUF_MASK) & (gas_flags & SUF_MASK)) == 0) |
619 continue; | 644 continue; |
620 | 645 |
621 /* Use reversed operands in GAS mode if not otherwise specified */ | 646 /* Use reversed operands in GAS mode if not otherwise specified */ |
622 use_ops = ops; | 647 use_ops = ops; |
623 if (id_insn->parser == X86_PARSER_GAS && !(gas_flags & GAS_NO_REV)) | 648 if (id_insn->parser == X86_PARSER_GAS && !(gas_flags & GAS_NO_REV)) |
624 use_ops = rev_ops; | 649 use_ops = rev_ops; |
625 | 650 |
626 if (id_insn->insn.num_operands == 0) { | 651 if (id_insn->insn.num_operands == 0) { |
627 found = 1; /* no operands -> must have a match here. */ | 652 found = 1; /* no operands -> must have a match here. */ |
(...skipping 32 matching lines...)
660 } | 685 } |
661 break; | 686 break; |
662 case OPT_Mem: | 687 case OPT_Mem: |
663 if (op->type != YASM_INSN__OPERAND_MEMORY) | 688 if (op->type != YASM_INSN__OPERAND_MEMORY) |
664 mismatch = 1; | 689 mismatch = 1; |
665 break; | 690 break; |
666 case OPT_SIMDRM: | 691 case OPT_SIMDRM: |
667 if (op->type == YASM_INSN__OPERAND_MEMORY) | 692 if (op->type == YASM_INSN__OPERAND_MEMORY) |
668 break; | 693 break; |
669 /*@fallthrough@*/ | 694 /*@fallthrough@*/ |
670 case OPT_SIMDRegMatch0: | |
671 case OPT_SIMDReg: | 695 case OPT_SIMDReg: |
672 if (op->type != YASM_INSN__OPERAND_REG) | 696 if (op->type != YASM_INSN__OPERAND_REG) |
673 mismatch = 1; | 697 mismatch = 1; |
674 else { | 698 else { |
675 switch ((x86_expritem_reg_size)(op->data.reg&~0xFUL)) { | 699 switch ((x86_expritem_reg_size)(op->data.reg&~0xFUL)) { |
676 case X86_MMXREG: | 700 case X86_MMXREG: |
677 case X86_XMMREG: | 701 case X86_XMMREG: |
678 case X86_YMMREG: | 702 case X86_YMMREG: |
679 break; | 703 break; |
680 default: | 704 default: |
681 mismatch = 1; | 705 mismatch = 1; |
682 break; | 706 break; |
683 } | 707 } |
684 } | 708 } |
685 if (!mismatch && info_ops[i].type == OPT_SIMDRegMatch0 && | |
686 bypass != 7 && op->data.reg != use_ops[0]->data.reg) | |
687 mismatch = 1; | |
688 break; | 709 break; |
689 case OPT_SegReg: | 710 case OPT_SegReg: |
690 if (op->type != YASM_INSN__OPERAND_SEGREG) | 711 if (op->type != YASM_INSN__OPERAND_SEGREG) |
691 mismatch = 1; | 712 mismatch = 1; |
692 break; | 713 break; |
693 case OPT_CRReg: | 714 case OPT_CRReg: |
694 if (op->type != YASM_INSN__OPERAND_REG || | 715 if (op->type != YASM_INSN__OPERAND_REG || |
695 (op->data.reg & ~0xFUL) != X86_CRREG) | 716 (op->data.reg & ~0xFUL) != X86_CRREG) |
696 mismatch = 1; | 717 mismatch = 1; |
697 break; | 718 break; |
(...skipping 134 matching lines...)
832 } | 853 } |
833 default: | 854 default: |
834 yasm_internal_error(N_("invalid operand type")); | 855 yasm_internal_error(N_("invalid operand type")); |
835 } | 856 } |
836 | 857 |
837 if (mismatch) | 858 if (mismatch) |
838 break; | 859 break; |
839 | 860 |
840 /* Check operand size */ | 861 /* Check operand size */ |
841 size = size_lookup[info_ops[i].size]; | 862 size = size_lookup[info_ops[i].size]; |
842 if (suffix != 0) { | 863 if (id_insn->parser == X86_PARSER_GAS) { |
843 /* Require relaxed operands for GAS mode (don't allow | 864 /* Require relaxed operands for GAS mode (don't allow |
844 * per-operand sizing). | 865 * per-operand sizing). |
845 */ | 866 */ |
846 if (op->type == YASM_INSN__OPERAND_REG && op->size == 0) { | 867 if (op->type == YASM_INSN__OPERAND_REG && op->size == 0) { |
847 /* Register size must exactly match */ | 868 /* Register size must exactly match */ |
848 if (yasm_x86__get_reg_size(op->data.reg) != size) | 869 if (yasm_x86__get_reg_size(op->data.reg) != size) |
849 mismatch = 1; | 870 mismatch = 1; |
850 } else if ((info_ops[i].type == OPT_Imm | 871 } else if ((info_ops[i].type == OPT_Imm |
851 || info_ops[i].type == OPT_ImmNotSegOff | 872 || info_ops[i].type == OPT_ImmNotSegOff |
852 || info_ops[i].type == OPT_Imm1) | 873 || info_ops[i].type == OPT_Imm1) |
(...skipping 21 matching lines...)
874 if (op->size != size) | 895 if (op->size != size) |
875 mismatch = 1; | 896 mismatch = 1; |
876 } | 897 } |
877 } | 898 } |
878 } | 899 } |
879 | 900 |
880 if (mismatch) | 901 if (mismatch) |
881 break; | 902 break; |
882 | 903 |
883 /* Check for 64-bit effective address size in NASM mode */ | 904 /* Check for 64-bit effective address size in NASM mode */ |
884 if (suffix == 0 && op->type == YASM_INSN__OPERAND_MEMORY) { | 905 if (id_insn->parser != X86_PARSER_GAS && |
| 906 op->type == YASM_INSN__OPERAND_MEMORY) { |
885 if (info_ops[i].eas64) { | 907 if (info_ops[i].eas64) { |
886 if (op->data.ea->disp.size != 64) | 908 if (op->data.ea->disp.size != 64) |
887 mismatch = 1; | 909 mismatch = 1; |
888 } else if (op->data.ea->disp.size == 64) | 910 } else if (op->data.ea->disp.size == 64) |
889 mismatch = 1; | 911 mismatch = 1; |
890 } | 912 } |
891 | 913 |
892 if (mismatch) | 914 if (mismatch) |
893 break; | 915 break; |
894 | 916 |
(...skipping 103 matching lines...)
998 x86_id_insn *id_insn = (x86_id_insn *)bc->contents; | 1020 x86_id_insn *id_insn = (x86_id_insn *)bc->contents; |
999 x86_insn *insn; | 1021 x86_insn *insn; |
1000 const x86_insn_info *info = id_insn->group; | 1022 const x86_insn_info *info = id_insn->group; |
1001 unsigned int mode_bits = id_insn->mode_bits; | 1023 unsigned int mode_bits = id_insn->mode_bits; |
1002 unsigned char *mod_data = id_insn->mod_data; | 1024 unsigned char *mod_data = id_insn->mod_data; |
1003 yasm_insn_operand *op, *ops[5], *rev_ops[5]; | 1025 yasm_insn_operand *op, *ops[5], *rev_ops[5]; |
1004 /*@null@*/ yasm_expr *imm; | 1026 /*@null@*/ yasm_expr *imm; |
1005 unsigned char im_len; | 1027 unsigned char im_len; |
1006 unsigned char im_sign; | 1028 unsigned char im_sign; |
1007 unsigned char spare; | 1029 unsigned char spare; |
1008 unsigned char drex; | |
1009 unsigned char *pdrex; | |
1010 unsigned char vexdata, vexreg; | 1030 unsigned char vexdata, vexreg; |
1011 unsigned int i; | 1031 unsigned int i; |
1012 unsigned int size_lookup[] = {0, 8, 16, 32, 64, 80, 128, 256, 0}; | 1032 unsigned int size_lookup[] = {0, 8, 16, 32, 64, 80, 128, 256, 0}; |
1013 unsigned long do_postop = 0; | 1033 unsigned long do_postop = 0; |
1014 | 1034 |
1015 size_lookup[OPS_BITS] = mode_bits; | 1035 size_lookup[OPS_BITS] = mode_bits; |
1016 | 1036 |
1017 yasm_insn_finalize(&id_insn->insn); | 1037 yasm_insn_finalize(&id_insn->insn); |
1018 | 1038 |
1019 /* Build local array of operands from list, since we know we have a max | 1039 /* Build local array of operands from list, since we know we have a max |
(...skipping 72 matching lines...)
1092 | 1112 |
1093 /* Copy what we can from info */ | 1113 /* Copy what we can from info */ |
1094 insn = yasm_xmalloc(sizeof(x86_insn)); | 1114 insn = yasm_xmalloc(sizeof(x86_insn)); |
1095 x86_finalize_common(&insn->common, info, mode_bits); | 1115 x86_finalize_common(&insn->common, info, mode_bits); |
1096 x86_finalize_opcode(&insn->opcode, info); | 1116 x86_finalize_opcode(&insn->opcode, info); |
1097 insn->x86_ea = NULL; | 1117 insn->x86_ea = NULL; |
1098 imm = NULL; | 1118 imm = NULL; |
1099 insn->def_opersize_64 = info->def_opersize_64; | 1119 insn->def_opersize_64 = info->def_opersize_64; |
1100 insn->special_prefix = info->special_prefix; | 1120 insn->special_prefix = info->special_prefix; |
1101 spare = info->spare; | 1121 spare = info->spare; |
1102 drex = info->drex_oc0 & DREX_OC0_MASK; | |
1103 vexdata = 0; | 1122 vexdata = 0; |
1104 vexreg = 0; | 1123 vexreg = 0; |
1105 im_len = 0; | 1124 im_len = 0; |
1106 im_sign = 0; | 1125 im_sign = 0; |
1107 insn->postop = X86_POSTOP_NONE; | 1126 insn->postop = X86_POSTOP_NONE; |
1108 insn->rex = 0; | 1127 insn->rex = 0; |
1109 pdrex = (info->drex_oc0 & NEED_DREX_MASK) ? &drex : NULL; | |
1110 | 1128 |
1111 /* Move VEX data (stored in special prefix) to separate location to | 1129 /* Move VEX/XOP data (stored in special prefix) to separate location to |
1112 * allow overriding of special prefix by modifiers. | 1130 * allow overriding of special prefix by modifiers. |
1113 */ | 1131 */ |
1114 if ((insn->special_prefix & 0xF0) == 0xC0) { | 1132 if ((insn->special_prefix & 0xF0) == 0xC0 || |
| 1133 (insn->special_prefix & 0xF0) == 0x80) { |
1115 vexdata = insn->special_prefix; | 1134 vexdata = insn->special_prefix; |
1116 insn->special_prefix = 0; | 1135 insn->special_prefix = 0; |
1117 } | 1136 } |
1118 | 1137 |
1119 /* Apply modifiers */ | 1138 /* Apply modifiers */ |
1120 for (i=0; i<NELEMS(info->modifiers); i++) { | 1139 for (i=0; i<NELEMS(info->modifiers); i++) { |
1121 switch (info->modifiers[i]) { | 1140 switch (info->modifiers[i]) { |
1122 case MOD_Gap: | 1141 case MOD_Gap: |
1123 break; | 1142 break; |
1124 case MOD_PreAdd: | 1143 case MOD_PreAdd: |
(...skipping 67 matching lines...)
1192 yasm_expr_destroy(op->data.val); | 1211 yasm_expr_destroy(op->data.val); |
1193 break; | 1212 break; |
1194 } | 1213 } |
1195 break; | 1214 break; |
1196 case OPA_EA: | 1215 case OPA_EA: |
1197 switch (op->type) { | 1216 switch (op->type) { |
1198 case YASM_INSN__OPERAND_REG: | 1217 case YASM_INSN__OPERAND_REG: |
1199 insn->x86_ea = | 1218 insn->x86_ea = |
1200 yasm_x86__ea_create_reg(insn->x86_ea, | 1219 yasm_x86__ea_create_reg(insn->x86_ea, |
1201 (unsigned long)op->data.reg, &insn->rex, | 1220 (unsigned long)op->data.reg, &insn->rex, |
1202 pdrex, mode_bits); | 1221 mode_bits); |
1203 break; | 1222 break; |
1204 case YASM_INSN__OPERAND_SEGREG: | 1223 case YASM_INSN__OPERAND_SEGREG: |
1205 yasm_internal_error( | 1224 yasm_internal_error( |
1206 N_("invalid operand conversion")); | 1225 N_("invalid operand conversion")); |
1207 case YASM_INSN__OPERAND_MEMORY: | 1226 case YASM_INSN__OPERAND_MEMORY: |
1208 if (op->seg) | 1227 if (op->seg) |
1209 yasm_error_set(YASM_ERROR_VALUE, | 1228 yasm_error_set(YASM_ERROR_VALUE, |
1210 N_("invalid segment in effective address")); | 1229 N_("invalid segment in effective address")); |
1211 insn->x86_ea = (x86_effaddr *)op->data.ea; | 1230 insn->x86_ea = (x86_effaddr *)op->data.ea; |
1212 if (info_ops[i].type == OPT_MemOffs) | 1231 if (info_ops[i].type == OPT_MemOffs) |
(...skipping 16 matching lines...)
1229 op->data.val, | 1248 op->data.val, |
1230 size_lookup[info_ops[i].size]); | 1249 size_lookup[info_ops[i].size]); |
1231 break; | 1250 break; |
1232 } | 1251 } |
1233 break; | 1252 break; |
1234 case OPA_EAVEX: | 1253 case OPA_EAVEX: |
1235 if (op->type != YASM_INSN__OPERAND_REG) | 1254 if (op->type != YASM_INSN__OPERAND_REG) |
1236 yasm_internal_error(N_("invalid operand conversion")); | 1255 yasm_internal_error(N_("invalid operand conversion")); |
1237 insn->x86_ea = | 1256 insn->x86_ea = |
1238 yasm_x86__ea_create_reg(insn->x86_ea, | 1257 yasm_x86__ea_create_reg(insn->x86_ea, |
1239 (unsigned long)op->data.reg, &insn->rex, pdrex, | 1258 (unsigned long)op->data.reg, &insn->rex, mode_bits); |
1240 mode_bits); | |
1241 vexreg = op->data.reg & 0xF; | 1259 vexreg = op->data.reg & 0xF; |
1242 break; | 1260 break; |
1243 case OPA_Imm: | 1261 case OPA_Imm: |
1244 if (op->seg) | 1262 if (op->seg) |
1245 yasm_error_set(YASM_ERROR_VALUE, | 1263 yasm_error_set(YASM_ERROR_VALUE, |
1246 N_("immediate does not support segment"))
; | 1264 N_("immediate does not support segment"))
; |
1247 if (op->type == YASM_INSN__OPERAND_IMM) { | 1265 if (op->type == YASM_INSN__OPERAND_IMM) { |
1248 imm = op->data.val; | 1266 imm = op->data.val; |
1249 im_len = size_lookup[info_ops[i].size]; | 1267 im_len = size_lookup[info_ops[i].size]; |
1250 } else | 1268 } else |
1251 yasm_internal_error(N_("invalid operand conversion")); | 1269 yasm_internal_error(N_("invalid operand conversion")); |
1252 break; | 1270 break; |
1253 case OPA_SImm: | 1271 case OPA_SImm: |
1254 if (op->seg) | 1272 if (op->seg) |
1255 yasm_error_set(YASM_ERROR_VALUE, | 1273 yasm_error_set(YASM_ERROR_VALUE, |
1256 N_("immediate does not support segment"))
; | 1274 N_("immediate does not support segment"))
; |
1257 if (op->type == YASM_INSN__OPERAND_IMM) { | 1275 if (op->type == YASM_INSN__OPERAND_IMM) { |
1258 imm = op->data.val; | 1276 imm = op->data.val; |
1259 im_len = size_lookup[info_ops[i].size]; | 1277 im_len = size_lookup[info_ops[i].size]; |
1260 im_sign = 1; | 1278 im_sign = 1; |
1261 } else | 1279 } else |
1262 yasm_internal_error(N_("invalid operand conversion")); | 1280 yasm_internal_error(N_("invalid operand conversion")); |
1263 break; | 1281 break; |
1264 case OPA_Spare: | 1282 case OPA_Spare: |
1265 if (op->type == YASM_INSN__OPERAND_SEGREG) | 1283 if (op->type == YASM_INSN__OPERAND_SEGREG) |
1266 spare = (unsigned char)(op->data.reg&7); | 1284 spare = (unsigned char)(op->data.reg&7); |
1267 else if (op->type == YASM_INSN__OPERAND_REG) { | 1285 else if (op->type == YASM_INSN__OPERAND_REG) { |
1268 if (yasm_x86__set_rex_from_reg(&insn->rex, pdrex, | 1286 if (yasm_x86__set_rex_from_reg(&insn->rex, &spare, |
1269 &spare, op->data.reg, mode_bits, X86_REX_R)) | 1287 op->data.reg, mode_bits, X86_REX_R)) |
1270 return; | 1288 return; |
1271 } else | 1289 } else |
1272 yasm_internal_error(N_("invalid operand conversion")); | 1290 yasm_internal_error(N_("invalid operand conversion")); |
1273 break; | 1291 break; |
1274 case OPA_SpareVEX: | 1292 case OPA_SpareVEX: |
1275 if (op->type != YASM_INSN__OPERAND_REG) | 1293 if (op->type != YASM_INSN__OPERAND_REG) |
1276 yasm_internal_error(N_("invalid operand conversion")); | 1294 yasm_internal_error(N_("invalid operand conversion")); |
1277 if (yasm_x86__set_rex_from_reg(&insn->rex, pdrex, | 1295 if (yasm_x86__set_rex_from_reg(&insn->rex, &spare, |
1278 &spare, op->data.reg, mode_bits, X86_REX_R)) | 1296 op->data.reg, mode_bits, X86_REX_R)) |
1279 return; | 1297 return; |
1280 vexreg = op->data.reg & 0xF; | 1298 vexreg = op->data.reg & 0xF; |
1281 break; | 1299 break; |
1282 case OPA_Op0Add: | 1300 case OPA_Op0Add: |
1283 if (op->type == YASM_INSN__OPERAND_REG) { | 1301 if (op->type == YASM_INSN__OPERAND_REG) { |
1284 unsigned char opadd; | 1302 unsigned char opadd; |
1285 if (yasm_x86__set_rex_from_reg(&insn->rex, pdrex, | 1303 if (yasm_x86__set_rex_from_reg(&insn->rex, &opadd, |
1286 &opadd, op->data.reg, mode_bits, X86_REX_B)) | 1304 op->data.reg, mode_bits, X86_REX_B)) |
1287 return; | 1305 return; |
1288 insn->opcode.opcode[0] += opadd; | 1306 insn->opcode.opcode[0] += opadd; |
1289 } else | 1307 } else |
1290 yasm_internal_error(N_("invalid operand conversion")); | 1308 yasm_internal_error(N_("invalid operand conversion")); |
1291 break; | 1309 break; |
1292 case OPA_Op1Add: | 1310 case OPA_Op1Add: |
1293 if (op->type == YASM_INSN__OPERAND_REG) { | 1311 if (op->type == YASM_INSN__OPERAND_REG) { |
1294 unsigned char opadd; | 1312 unsigned char opadd; |
1295 if (yasm_x86__set_rex_from_reg(&insn->rex, pdrex, | 1313 if (yasm_x86__set_rex_from_reg(&insn->rex, &opadd, |
1296 &opadd, op->data.reg, mode_bits, X86_REX_B)) | 1314 op->data.reg, mode_bits, X86_REX_B)) |
1297 return; | 1315 return; |
1298 insn->opcode.opcode[1] += opadd; | 1316 insn->opcode.opcode[1] += opadd; |
1299 } else | 1317 } else |
1300 yasm_internal_error(N_("invalid operand conversion")); | 1318 yasm_internal_error(N_("invalid operand conversion")); |
1301 break; | 1319 break; |
1302 case OPA_SpareEA: | 1320 case OPA_SpareEA: |
1303 if (op->type == YASM_INSN__OPERAND_REG) { | 1321 if (op->type == YASM_INSN__OPERAND_REG) { |
1304 insn->x86_ea = | 1322 insn->x86_ea = |
1305 yasm_x86__ea_create_reg(insn->x86_ea, | 1323 yasm_x86__ea_create_reg(insn->x86_ea, |
1306 (unsigned long)op->data.reg, &insn->rex, | 1324 (unsigned long)op->data.reg, &insn->rex, |
1307 pdrex, mode_bits); | 1325 mode_bits); |
1308 if (!insn->x86_ea || | 1326 if (!insn->x86_ea || |
1309 yasm_x86__set_rex_from_reg(&insn->rex, pdrex, | 1327 yasm_x86__set_rex_from_reg(&insn->rex, &spare, |
1310 &spare, op->data.reg, mode_bits, X86_REX_R)) { | 1328 op->data.reg, mode_bits, X86_REX_R)) { |
1311 if (insn->x86_ea) | 1329 if (insn->x86_ea) |
1312 yasm_xfree(insn->x86_ea); | 1330 yasm_xfree(insn->x86_ea); |
1313 yasm_xfree(insn); | 1331 yasm_xfree(insn); |
1314 return; | 1332 return; |
1315 } | 1333 } |
1316 } else | 1334 } else |
1317 yasm_internal_error(N_("invalid operand conversion")); | 1335 yasm_internal_error(N_("invalid operand conversion")); |
1318 break; | 1336 break; |
1319 case OPA_AdSizeEA: { | 1337 case OPA_AdSizeEA: { |
1320 const uintptr_t *regp = NULL; | 1338 const uintptr_t *regp = NULL; |
(...skipping 12 matching lines...)
1333 else if (*regp == (X86_REG32 | 0)) | 1351 else if (*regp == (X86_REG32 | 0)) |
1334 insn->common.addrsize = 32; | 1352 insn->common.addrsize = 32; |
1335 else if (mode_bits == 64 && *regp == (X86_REG64 | 0)) | 1353 else if (mode_bits == 64 && *regp == (X86_REG64 | 0)) |
1336 insn->common.addrsize = 64; | 1354 insn->common.addrsize = 64; |
1337 else | 1355 else |
1338 yasm_error_set(YASM_ERROR_TYPE, | 1356 yasm_error_set(YASM_ERROR_TYPE, |
1339 N_("unsupported address size")); | 1357 N_("unsupported address size")); |
1340 yasm_x86__ea_destroy(op->data.ea); | 1358 yasm_x86__ea_destroy(op->data.ea); |
1341 break; | 1359 break; |
1342 } | 1360 } |
1343 case OPA_DREX: | |
1344 if (op->type == YASM_INSN__OPERAND_REG) { | |
1345 drex &= 0x0F; | |
1346 drex |= (op->data.reg << 4) & 0xF0; | |
1347 } else | |
1348 yasm_internal_error(N_("invalid operand conversion")); | |
1349 break; | |
1350 case OPA_VEX: | 1361 case OPA_VEX: |
1351 if (op->type != YASM_INSN__OPERAND_REG) | 1362 if (op->type != YASM_INSN__OPERAND_REG) |
1352 yasm_internal_error(N_("invalid operand conversion")); | 1363 yasm_internal_error(N_("invalid operand conversion")); |
1353 vexreg = op->data.reg & 0xF; | 1364 vexreg = op->data.reg & 0xF; |
1354 break; | 1365 break; |
1355 case OPA_VEXImmSrc: | 1366 case OPA_VEXImmSrc: |
1356 if (op->type != YASM_INSN__OPERAND_REG) | 1367 if (op->type != YASM_INSN__OPERAND_REG) |
1357 yasm_internal_error(N_("invalid operand conversion")); | 1368 yasm_internal_error(N_("invalid operand conversion")); |
1358 | 1369 |
1359 if (!imm) { | 1370 if (!imm) { |
(...skipping 74 matching lines...)
1434 do_postop = OPAP_SImm32Avail; | 1445 do_postop = OPAP_SImm32Avail; |
1435 break; | 1446 break; |
1436 default: | 1447 default: |
1437 yasm_internal_error( | 1448 yasm_internal_error( |
1438 N_("unknown operand postponed action")); | 1449 N_("unknown operand postponed action")); |
1439 } | 1450 } |
1440 } | 1451 } |
1441 } | 1452 } |
1442 | 1453 |
1443 if (insn->x86_ea) { | 1454 if (insn->x86_ea) { |
1444 yasm_x86__ea_init(insn->x86_ea, spare, drex, | 1455 yasm_x86__ea_init(insn->x86_ea, spare, prev_bc); |
1445 (unsigned int)(info->drex_oc0 & NEED_DREX_MASK), | |
1446 prev_bc); | |
1447 for (i=0; i<id_insn->insn.num_segregs; i++) | 1456 for (i=0; i<id_insn->insn.num_segregs; i++) |
1448 yasm_ea_set_segreg(&insn->x86_ea->ea, id_insn->insn.segregs[i]); | 1457 yasm_ea_set_segreg(&insn->x86_ea->ea, id_insn->insn.segregs[i]); |
1449 } else if (id_insn->insn.num_segregs > 0 && insn->special_prefix == 0) { | 1458 } else if (id_insn->insn.num_segregs > 0 && insn->special_prefix == 0) { |
1450 if (id_insn->insn.num_segregs > 1) | 1459 if (id_insn->insn.num_segregs > 1) |
1451 yasm_warn_set(YASM_WARN_GENERAL, | 1460 yasm_warn_set(YASM_WARN_GENERAL, |
1452 N_("multiple segment overrides, using leftmost")); | 1461 N_("multiple segment overrides, using leftmost")); |
1453 insn->special_prefix = (unsigned char) | 1462 insn->special_prefix = (unsigned char) |
1454 (id_insn->insn.segregs[id_insn->insn.num_segregs-1]>>8); | 1463 (id_insn->insn.segregs[id_insn->insn.num_segregs-1]>>8); |
1455 } else if (id_insn->insn.num_segregs > 0) | 1464 } else if (id_insn->insn.num_segregs > 0) |
1456 yasm_internal_error(N_("unhandled segment prefix")); | 1465 yasm_internal_error(N_("unhandled segment prefix")); |
(...skipping 49 matching lines...)
1506 (yasm_expr_get_intnum(&insn->imm->abs, 0) && | 1515 (yasm_expr_get_intnum(&insn->imm->abs, 0) && |
1507 yasm_intnum_check_size( | 1516 yasm_intnum_check_size( |
1508 yasm_expr_get_intnum(&insn->imm->abs, 0), 32, 0, 1))) { | 1517 yasm_expr_get_intnum(&insn->imm->abs, 0), 32, 0, 1))) { |
1509 /* Throwaway REX byte */ | 1518 /* Throwaway REX byte */ |
1510 unsigned char rex_temp = 0; | 1519 unsigned char rex_temp = 0; |
1511 | 1520 |
1512 /* Build ModRM EA - CAUTION: this depends on | 1521 /* Build ModRM EA - CAUTION: this depends on |
1513 * opcode 0 being a mov instruction! | 1522 * opcode 0 being a mov instruction! |
1514 */ | 1523 */ |
1515 insn->x86_ea = yasm_x86__ea_create_reg(insn->x86_ea, | 1524 insn->x86_ea = yasm_x86__ea_create_reg(insn->x86_ea, |
1516 (unsigned long)insn->opcode.opcode[0]-0xB8, &rex_temp, | 1525 (unsigned long)insn->opcode.opcode[0]-0xB8, &rex_temp, 64); |
1517 NULL, 64); | |
1518 | 1526 |
1519 /* Make the imm32s form permanent. */ | 1527 /* Make the imm32s form permanent. */ |
1520 insn->opcode.opcode[0] = insn->opcode.opcode[1]; | 1528 insn->opcode.opcode[0] = insn->opcode.opcode[1]; |
1521 insn->imm->size = 32; | 1529 insn->imm->size = 32; |
1522 } | 1530 } |
1523 insn->opcode.opcode[1] = 0; /* avoid possible confusion */ | 1531 insn->opcode.opcode[1] = 0; /* avoid possible confusion */ |
1524 break; | 1532 break; |
1525 default: | 1533 default: |
1526 break; | 1534 break; |
1527 } | 1535 } |
1528 | 1536 |
1529 /* Convert to VEX prefixes if requested. | 1537 /* Convert to VEX/XOP prefixes if requested. |
1530 * To save space in the insn structure, the VEX prefix is written into | 1538 * To save space in the insn structure, the VEX/XOP prefix is written into |
1531 * special_prefix and the first 2 bytes of the instruction are set to | 1539 * special_prefix and the first 2 bytes of the instruction are set to |
1532 * the second two VEX bytes. During calc_len() it may be shortened to | 1540 * the second two VEX/XOP bytes. During calc_len() it may be shortened to |
1533 * one VEX byte (this can only be done after knowledge of REX value). | 1541 * one VEX byte (this can only be done after knowledge of REX value); this |
| 1542 * further optimization is not possible for XOP. |
1534 */ | 1543 */ |
1535 if (vexdata) { | 1544 if (vexdata) { |
| 1545 int xop = ((vexdata & 0xF0) == 0x80); |
1536 unsigned char vex1 = 0xE0; /* R=X=B=1, mmmmm=0 */ | 1546 unsigned char vex1 = 0xE0; /* R=X=B=1, mmmmm=0 */ |
1537 unsigned char vex2; | 1547 unsigned char vex2; |
1538 /* Look at the first bytes of the opcode to see what leading bytes | |
1539 * to encode in the VEX mmmmm field. Leave R=X=B=1 for now. | |
1540 */ | |
1541 if (insn->opcode.opcode[0] != 0x0F) | |
1542 yasm_internal_error(N_("first opcode byte of VEX must be 0x0F")); | |
1543 | 1548 |
1544 if (insn->opcode.opcode[1] == 0x38) | 1549 if (xop) { |
1545 vex1 |= 0x02; /* implied 0x0F 0x38 */ | 1550 /* Look at the first bytes of the opcode for the XOP mmmmm field. |
1546 else if (insn->opcode.opcode[1] == 0x3A) | 1551 * Leave R=X=B=1 for now. |
1547 vex1 |= 0x03; /* implied 0x0F 0x3A */ | |
1548 else { | |
1549 /* Originally a 0F-only opcode; move opcode byte back one position | |
1550 * to make room for VEX prefix. | |
1551 */ | 1552 */ |
| 1553 if (insn->opcode.opcode[0] != 0x08 && |
| 1554 insn->opcode.opcode[0] != 0x09) |
| 1555 yasm_internal_error(N_("first opcode byte of XOP must be 0x08 or 0x09")); |
| 1556 vex1 |= insn->opcode.opcode[0]; |
| 1557 /* Move opcode byte back one byte to make room for XOP prefix. */ |
1552 insn->opcode.opcode[2] = insn->opcode.opcode[1]; | 1558 insn->opcode.opcode[2] = insn->opcode.opcode[1]; |
1553 vex1 |= 0x01; /* implied 0x0F */ | 1559 } else { |
| 1560 /* Look at the first bytes of the opcode to see what leading bytes |
| 1561 * to encode in the VEX mmmmm field. Leave R=X=B=1 for now. |
| 1562 */ |
| 1563 if (insn->opcode.opcode[0] != 0x0F) |
| 1564 yasm_internal_error(N_("first opcode byte of VEX must be 0x0F")); |
| 1565 |
| 1566 if (insn->opcode.opcode[1] == 0x38) |
| 1567 vex1 |= 0x02; /* implied 0x0F 0x38 */ |
| 1568 else if (insn->opcode.opcode[1] == 0x3A) |
| 1569 vex1 |= 0x03; /* implied 0x0F 0x3A */ |
| 1570 else { |
| 1571 /* Originally a 0F-only opcode; move opcode byte back one |
| 1572 * position to make room for VEX prefix. |
| 1573 */ |
| 1574 insn->opcode.opcode[2] = insn->opcode.opcode[1]; |
| 1575 vex1 |= 0x01; /* implied 0x0F */ |
| 1576 } |
1554 } | 1577 } |
1555 | 1578 |
1556 /* Check for update of special prefix by modifiers */ | 1579 /* Check for update of special prefix by modifiers */ |
1557 if (insn->special_prefix != 0) { | 1580 if (insn->special_prefix != 0) { |
1558 vexdata &= ~0x03; | 1581 vexdata &= ~0x03; |
1559 switch (insn->special_prefix) { | 1582 switch (insn->special_prefix) { |
1560 case 0x66: | 1583 case 0x66: |
1561 vexdata |= 0x01; | 1584 vexdata |= 0x01; |
1562 break; | 1585 break; |
1563 case 0xF3: | 1586 case 0xF3: |
1564 vexdata |= 0x02; | 1587 vexdata |= 0x02; |
1565 break; | 1588 break; |
1566 case 0xF2: | 1589 case 0xF2: |
1567 vexdata |= 0x03; | 1590 vexdata |= 0x03; |
1568 break; | 1591 break; |
1569 default: | 1592 default: |
1570 yasm_internal_error(N_("unrecognized special prefix")); | 1593 yasm_internal_error(N_("unrecognized special prefix")); |
1571 } | 1594 } |
1572 } | 1595 } |
1573 | 1596 |
1574 /* 2nd VEX byte is WvvvvLpp. | 1597 /* 2nd VEX byte is WvvvvLpp. |
1575 * W, L, pp come from vexdata | 1598 * W, L, pp come from vexdata |
1576 * vvvv comes from 1s complement of vexreg | 1599 * vvvv comes from 1s complement of vexreg |
1577 */ | 1600 */ |
1578 vex2 = (((vexdata & 0x8) << 4) | /* W */ | 1601 vex2 = (((vexdata & 0x8) << 4) | /* W */ |
1579 ((15 - (vexreg & 0xF)) << 3) | /* vvvv */ | 1602 ((15 - (vexreg & 0xF)) << 3) | /* vvvv */ |
1580 (vexdata & 0x7)); /* Lpp */ | 1603 (vexdata & 0x7)); /* Lpp */ |
1581 | 1604 |
1582 /* Save to special_prefix and opcode */ | 1605 /* Save to special_prefix and opcode */ |
1583 insn->special_prefix = 0xC4; /* VEX prefix */ | 1606 insn->special_prefix = xop ? 0x8F : 0xC4; /* VEX/XOP prefix */ |
1584 insn->opcode.opcode[0] = vex1; | 1607 insn->opcode.opcode[0] = vex1; |
1585 insn->opcode.opcode[1] = vex2; | 1608 insn->opcode.opcode[1] = vex2; |
1586 insn->opcode.len = 3; /* two prefix bytes and 1 opcode byte */ | 1609 insn->opcode.len = 3; /* two prefix bytes and 1 opcode byte */ |
1587 } | 1610 } |
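For reference, the bytes staged above follow the standard three-byte VEX/XOP layout: special_prefix holds 0xC4 or 0x8F, opcode[0] holds RXB.mmmmm, opcode[1] holds W.vvvv.L.pp, and opcode[2] the real opcode byte. Below is a sketch of how a later emission step could write them out, including the two-byte 0xC5 shortening the comment refers to; this is a simplified illustration under assumed buffer layout, not the actual calc_len()/tobytes path.

    /* Sketch: emit a staged VEX/XOP prefix plus opcode byte.  The two-byte
     * 0xC5 form is only legal for VEX with X=B=1, W=0, and an implied single
     * 0x0F escape (mmmmm=00001); XOP always keeps the three-byte form. */
    static unsigned int emit_vex_or_xop(unsigned char special_prefix,
                                        unsigned char vex1, unsigned char vex2,
                                        unsigned char opcode2,
                                        unsigned char *out)
    {
        if (special_prefix == 0xC4 &&
            (vex1 & 0x7F) == 0x61 &&      /* X=B=1, mmmmm=00001 */
            (vex2 & 0x80) == 0) {         /* W=0 */
            out[0] = 0xC5;
            out[1] = (unsigned char)((vex1 & 0x80) | (vex2 & 0x7F));
            out[2] = opcode2;
            return 3;
        }
        out[0] = special_prefix;          /* 0xC4 (VEX) or 0x8F (XOP) */
        out[1] = vex1;
        out[2] = vex2;
        out[3] = opcode2;
        return 4;
    }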
1588 | 1611 |
1589 x86_id_insn_clear_operands(id_insn); | 1612 x86_id_insn_clear_operands(id_insn); |
1590 | 1613 |
1591 /* Transform the bytecode */ | 1614 /* Transform the bytecode */ |
1592 yasm_x86__bc_transform_insn(bc, insn); | 1615 yasm_x86__bc_transform_insn(bc, insn); |
1593 } | 1616 } |
(...skipping 129 matching lines...)
1723 | 1746 |
1724 *bc = (yasm_bytecode *)NULL; | 1747 *bc = (yasm_bytecode *)NULL; |
1725 *prefix = 0; | 1748 *prefix = 0; |
1726 | 1749 |
1727 if (id_len > 16) | 1750 if (id_len > 16) |
1728 return YASM_ARCH_NOTINSNPREFIX; | 1751 return YASM_ARCH_NOTINSNPREFIX; |
1729 for (i=0; i<id_len; i++) | 1752 for (i=0; i<id_len; i++) |
1730 lcaseid[i] = tolower(id[i]); | 1753 lcaseid[i] = tolower(id[i]); |
1731 lcaseid[id_len] = '\0'; | 1754 lcaseid[id_len] = '\0'; |
1732 | 1755 |
1733 switch (arch_x86->parser) { | 1756 switch (PARSER(arch_x86)) { |
1734 case X86_PARSER_NASM: | 1757 case X86_PARSER_NASM: |
1735 pdata = insnprefix_nasm_find(lcaseid, id_len); | 1758 pdata = insnprefix_nasm_find(lcaseid, id_len); |
1736 break; | 1759 break; |
1737 case X86_PARSER_TASM: | 1760 case X86_PARSER_TASM: |
1738 pdata = insnprefix_nasm_find(lcaseid, id_len); | 1761 pdata = insnprefix_nasm_find(lcaseid, id_len); |
1739 break; | 1762 break; |
1740 case X86_PARSER_GAS: | 1763 case X86_PARSER_GAS: |
1741 pdata = insnprefix_gas_find(lcaseid, id_len); | 1764 pdata = insnprefix_gas_find(lcaseid, id_len); |
1742 break; | 1765 break; |
1743 default: | 1766 default: |
(...skipping 19 matching lines...)
1763 yasm_insn_initialize(&id_insn->insn); | 1786 yasm_insn_initialize(&id_insn->insn); |
1764 id_insn->group = not64_insn; | 1787 id_insn->group = not64_insn; |
1765 id_insn->cpu_enabled = cpu_enabled; | 1788 id_insn->cpu_enabled = cpu_enabled; |
1766 id_insn->mod_data[0] = 0; | 1789 id_insn->mod_data[0] = 0; |
1767 id_insn->mod_data[1] = 0; | 1790 id_insn->mod_data[1] = 0; |
1768 id_insn->mod_data[2] = 0; | 1791 id_insn->mod_data[2] = 0; |
1769 id_insn->num_info = NELEMS(not64_insn); | 1792 id_insn->num_info = NELEMS(not64_insn); |
1770 id_insn->mode_bits = arch_x86->mode_bits; | 1793 id_insn->mode_bits = arch_x86->mode_bits; |
1771 id_insn->suffix = 0; | 1794 id_insn->suffix = 0; |
1772 id_insn->misc_flags = 0; | 1795 id_insn->misc_flags = 0; |
1773 id_insn->parser = arch_x86->parser; | 1796 id_insn->parser = PARSER(arch_x86); |
| 1797 » |
1774 id_insn->force_strict = arch_x86->force_strict != 0; | 1798 id_insn->force_strict = arch_x86->force_strict != 0; |
1775 id_insn->default_rel = arch_x86->default_rel != 0; | 1799 id_insn->default_rel = arch_x86->default_rel != 0; |
1776 *bc = yasm_bc_create_common(&x86_id_insn_callback, id_insn, line); | 1800 *bc = yasm_bc_create_common(&x86_id_insn_callback, id_insn, line); |
1777 return YASM_ARCH_INSN; | 1801 return YASM_ARCH_INSN; |
1778 } | 1802 } |
1779 | 1803 |
1780 cpu0 = pdata->cpu0; | 1804 cpu0 = pdata->cpu0; |
1781 cpu1 = pdata->cpu1; | 1805 cpu1 = pdata->cpu1; |
1782 cpu2 = pdata->cpu2; | 1806 cpu2 = pdata->cpu2; |
1783 | 1807 |
(...skipping 10 matching lines...)
1794 yasm_insn_initialize(&id_insn->insn); | 1818 yasm_insn_initialize(&id_insn->insn); |
1795 id_insn->group = pdata->group; | 1819 id_insn->group = pdata->group; |
1796 id_insn->cpu_enabled = cpu_enabled; | 1820 id_insn->cpu_enabled = cpu_enabled; |
1797 id_insn->mod_data[0] = pdata->mod_data0; | 1821 id_insn->mod_data[0] = pdata->mod_data0; |
1798 id_insn->mod_data[1] = pdata->mod_data1; | 1822 id_insn->mod_data[1] = pdata->mod_data1; |
1799 id_insn->mod_data[2] = pdata->mod_data2; | 1823 id_insn->mod_data[2] = pdata->mod_data2; |
1800 id_insn->num_info = pdata->num_info; | 1824 id_insn->num_info = pdata->num_info; |
1801 id_insn->mode_bits = arch_x86->mode_bits; | 1825 id_insn->mode_bits = arch_x86->mode_bits; |
1802 id_insn->suffix = pdata->flags; | 1826 id_insn->suffix = pdata->flags; |
1803 id_insn->misc_flags = pdata->misc_flags; | 1827 id_insn->misc_flags = pdata->misc_flags; |
1804 id_insn->parser = arch_x86->parser; | 1828 id_insn->parser = PARSER(arch_x86); |
1805 id_insn->force_strict = arch_x86->force_strict != 0; | 1829 id_insn->force_strict = arch_x86->force_strict != 0; |
1806 id_insn->default_rel = arch_x86->default_rel != 0; | 1830 id_insn->default_rel = arch_x86->default_rel != 0; |
1807 *bc = yasm_bc_create_common(&x86_id_insn_callback, id_insn, line); | 1831 *bc = yasm_bc_create_common(&x86_id_insn_callback, id_insn, line); |
1808 return YASM_ARCH_INSN; | 1832 return YASM_ARCH_INSN; |
1809 } else { | 1833 } else { |
1810 unsigned long type = pdata->num_info<<8; | 1834 unsigned long type = pdata->num_info<<8; |
1811 unsigned long value = pdata->flags; | 1835 unsigned long value = pdata->flags; |
1812 | 1836 |
1813 if (arch_x86->mode_bits == 64 && type == X86_OPERSIZE && value == 32) { | 1837 if (arch_x86->mode_bits == 64 && type == X86_OPERSIZE && value == 32) { |
1814 yasm_error_set(YASM_ERROR_GENERAL, | 1838 yasm_error_set(YASM_ERROR_GENERAL, |
(...skipping 40 matching lines...)
1855 x86_id_insn *id_insn = yasm_xmalloc(sizeof(x86_id_insn)); | 1879 x86_id_insn *id_insn = yasm_xmalloc(sizeof(x86_id_insn)); |
1856 | 1880 |
1857 yasm_insn_initialize(&id_insn->insn); | 1881 yasm_insn_initialize(&id_insn->insn); |
1858 id_insn->group = empty_insn; | 1882 id_insn->group = empty_insn; |
1859 id_insn->cpu_enabled = arch_x86->cpu_enables[arch_x86->active_cpu]; | 1883 id_insn->cpu_enabled = arch_x86->cpu_enables[arch_x86->active_cpu]; |
1860 id_insn->mod_data[0] = 0; | 1884 id_insn->mod_data[0] = 0; |
1861 id_insn->mod_data[1] = 0; | 1885 id_insn->mod_data[1] = 0; |
1862 id_insn->mod_data[2] = 0; | 1886 id_insn->mod_data[2] = 0; |
1863 id_insn->num_info = NELEMS(empty_insn); | 1887 id_insn->num_info = NELEMS(empty_insn); |
1864 id_insn->mode_bits = arch_x86->mode_bits; | 1888 id_insn->mode_bits = arch_x86->mode_bits; |
1865 id_insn->suffix = 0; | 1889 id_insn->suffix = (PARSER(arch_x86) == X86_PARSER_GAS) ? SUF_Z : 0; |
1866 id_insn->misc_flags = 0; | 1890 id_insn->misc_flags = 0; |
1867 id_insn->parser = arch_x86->parser; | 1891 id_insn->parser = PARSER(arch_x86); |
1868 id_insn->force_strict = arch_x86->force_strict != 0; | 1892 id_insn->force_strict = arch_x86->force_strict != 0; |
1869 id_insn->default_rel = arch_x86->default_rel != 0; | 1893 id_insn->default_rel = arch_x86->default_rel != 0; |
1870 | 1894 |
1871 return yasm_bc_create_common(&x86_id_insn_callback, id_insn, line); | 1895 return yasm_bc_create_common(&x86_id_insn_callback, id_insn, line); |
1872 } | 1896 } |
1873 | 1897 |