Index: src/wasm/wasm-opcodes.h
diff --git a/src/wasm/wasm-opcodes.h b/src/wasm/wasm-opcodes.h
index 1a1e273eff3b5cadb1b5162f077be259f43c81d5..cf7c6e3467a1dfc220af5485049d4725ee8ca23e 100644
--- a/src/wasm/wasm-opcodes.h
+++ b/src/wasm/wasm-opcodes.h
@@ -148,111 +148,115 @@ std::ostream& operator<<(std::ostream& os, const FunctionSig& function);
V(I32Shl, 0x4a, i_ii) \
V(I32ShrU, 0x4b, i_ii) \
V(I32ShrS, 0x4c, i_ii) \
- V(I32Eq, 0x4d, i_ii) \
- V(I32Ne, 0x4e, i_ii) \
- V(I32LtS, 0x4f, i_ii) \
- V(I32LeS, 0x50, i_ii) \
- V(I32LtU, 0x51, i_ii) \
- V(I32LeU, 0x52, i_ii) \
- V(I32GtS, 0x53, i_ii) \
- V(I32GeS, 0x54, i_ii) \
- V(I32GtU, 0x55, i_ii) \
- V(I32GeU, 0x56, i_ii) \
- V(I32Clz, 0x57, i_i) \
- V(I32Ctz, 0x58, i_i) \
- V(I32Popcnt, 0x59, i_i) \
- V(I32Eqz, 0x5a, i_i) \
- V(I64Add, 0x5b, l_ll) \
- V(I64Sub, 0x5c, l_ll) \
- V(I64Mul, 0x5d, l_ll) \
- V(I64DivS, 0x5e, l_ll) \
- V(I64DivU, 0x5f, l_ll) \
- V(I64RemS, 0x60, l_ll) \
- V(I64RemU, 0x61, l_ll) \
- V(I64And, 0x62, l_ll) \
- V(I64Ior, 0x63, l_ll) \
- V(I64Xor, 0x64, l_ll) \
- V(I64Shl, 0x65, l_ll) \
- V(I64ShrU, 0x66, l_ll) \
- V(I64ShrS, 0x67, l_ll) \
- V(I64Eq, 0x68, i_ll) \
- V(I64Ne, 0x69, i_ll) \
- V(I64LtS, 0x6a, i_ll) \
- V(I64LeS, 0x6b, i_ll) \
- V(I64LtU, 0x6c, i_ll) \
- V(I64LeU, 0x6d, i_ll) \
- V(I64GtS, 0x6e, i_ll) \
- V(I64GeS, 0x6f, i_ll) \
- V(I64GtU, 0x70, i_ll) \
- V(I64GeU, 0x71, i_ll) \
- V(I64Clz, 0x72, l_l) \
- V(I64Ctz, 0x73, l_l) \
- V(I64Popcnt, 0x74, l_l) \
- V(F32Add, 0x75, f_ff) \
- V(F32Sub, 0x76, f_ff) \
- V(F32Mul, 0x77, f_ff) \
- V(F32Div, 0x78, f_ff) \
- V(F32Min, 0x79, f_ff) \
- V(F32Max, 0x7a, f_ff) \
- V(F32Abs, 0x7b, f_f) \
- V(F32Neg, 0x7c, f_f) \
- V(F32CopySign, 0x7d, f_ff) \
- V(F32Ceil, 0x7e, f_f) \
- V(F32Floor, 0x7f, f_f) \
- V(F32Trunc, 0x80, f_f) \
- V(F32NearestInt, 0x81, f_f) \
- V(F32Sqrt, 0x82, f_f) \
- V(F32Eq, 0x83, i_ff) \
- V(F32Ne, 0x84, i_ff) \
- V(F32Lt, 0x85, i_ff) \
- V(F32Le, 0x86, i_ff) \
- V(F32Gt, 0x87, i_ff) \
- V(F32Ge, 0x88, i_ff) \
- V(F64Add, 0x89, d_dd) \
- V(F64Sub, 0x8a, d_dd) \
- V(F64Mul, 0x8b, d_dd) \
- V(F64Div, 0x8c, d_dd) \
- V(F64Min, 0x8d, d_dd) \
- V(F64Max, 0x8e, d_dd) \
- V(F64Abs, 0x8f, d_d) \
- V(F64Neg, 0x90, d_d) \
- V(F64CopySign, 0x91, d_dd) \
- V(F64Ceil, 0x92, d_d) \
- V(F64Floor, 0x93, d_d) \
- V(F64Trunc, 0x94, d_d) \
- V(F64NearestInt, 0x95, d_d) \
- V(F64Sqrt, 0x96, d_d) \
- V(F64Eq, 0x97, i_dd) \
- V(F64Ne, 0x98, i_dd) \
- V(F64Lt, 0x99, i_dd) \
- V(F64Le, 0x9a, i_dd) \
- V(F64Gt, 0x9b, i_dd) \
- V(F64Ge, 0x9c, i_dd) \
- V(I32SConvertF32, 0x9d, i_f) \
- V(I32SConvertF64, 0x9e, i_d) \
- V(I32UConvertF32, 0x9f, i_f) \
- V(I32UConvertF64, 0xa0, i_d) \
- V(I32ConvertI64, 0xa1, i_l) \
- V(I64SConvertF32, 0xa2, l_f) \
- V(I64SConvertF64, 0xa3, l_d) \
- V(I64UConvertF32, 0xa4, l_f) \
- V(I64UConvertF64, 0xa5, l_d) \
- V(I64SConvertI32, 0xa6, l_i) \
- V(I64UConvertI32, 0xa7, l_i) \
- V(F32SConvertI32, 0xa8, f_i) \
- V(F32UConvertI32, 0xa9, f_i) \
- V(F32SConvertI64, 0xaa, f_l) \
- V(F32UConvertI64, 0xab, f_l) \
- V(F32ConvertF64, 0xac, f_d) \
- V(F32ReinterpretI32, 0xad, f_i) \
- V(F64SConvertI32, 0xae, d_i) \
- V(F64UConvertI32, 0xaf, d_i) \
- V(F64SConvertI64, 0xb0, d_l) \
- V(F64UConvertI64, 0xb1, d_l) \
- V(F64ConvertF32, 0xb2, d_f) \
- V(F64ReinterpretI64, 0xb3, d_l) \
- V(I32ReinterpretF32, 0xb4, i_f) \
- V(I64ReinterpretF64, 0xb5, l_d)
+ V(I32Ror, 0x4d, i_ii) \
titzer  2016/03/02 22:17:40
Please don't renumber here, since other tools will
+ V(I32Rol, 0x4e, i_ii) \
+ V(I32Eq, 0x4f, i_ii) \
+ V(I32Ne, 0x50, i_ii) \
+ V(I32LtS, 0x51, i_ii) \
+ V(I32LeS, 0x52, i_ii) \
+ V(I32LtU, 0x53, i_ii) \
+ V(I32LeU, 0x54, i_ii) \
+ V(I32GtS, 0x55, i_ii) \
+ V(I32GeS, 0x56, i_ii) \
+ V(I32GtU, 0x57, i_ii) \
+ V(I32GeU, 0x58, i_ii) \
+ V(I32Clz, 0x59, i_i) \
+ V(I32Ctz, 0x5a, i_i) \
+ V(I32Popcnt, 0x5b, i_i) \
+ V(I32Eqz, 0x5c, i_i) \
+ V(I64Add, 0x5d, l_ll) \
+ V(I64Sub, 0x5e, l_ll) \
+ V(I64Mul, 0x5f, l_ll) \
+ V(I64DivS, 0x60, l_ll) \
+ V(I64DivU, 0x61, l_ll) \
+ V(I64RemS, 0x62, l_ll) \
+ V(I64RemU, 0x63, l_ll) \
+ V(I64And, 0x64, l_ll) \
+ V(I64Ior, 0x65, l_ll) \
+ V(I64Xor, 0x66, l_ll) \
+ V(I64Shl, 0x67, l_ll) \
+ V(I64ShrU, 0x68, l_ll) \
+ V(I64ShrS, 0x69, l_ll) \
+ V(I64Ror, 0x6a, l_ll) \
+ V(I64Rol, 0x6b, l_ll) \
+ V(I64Eq, 0x6c, i_ll) \
+ V(I64Ne, 0x6d, i_ll) \
+ V(I64LtS, 0x6e, i_ll) \
+ V(I64LeS, 0x6f, i_ll) \
+ V(I64LtU, 0x70, i_ll) \
+ V(I64LeU, 0x71, i_ll) \
+ V(I64GtS, 0x72, i_ll) \
+ V(I64GeS, 0x73, i_ll) \
+ V(I64GtU, 0x74, i_ll) \
+ V(I64GeU, 0x75, i_ll) \
+ V(I64Clz, 0x76, l_l) \
+ V(I64Ctz, 0x77, l_l) \
+ V(I64Popcnt, 0x78, l_l) \
+ V(F32Add, 0x79, f_ff) \
+ V(F32Sub, 0x7a, f_ff) \
+ V(F32Mul, 0x7b, f_ff) \
+ V(F32Div, 0x7c, f_ff) \
+ V(F32Min, 0x7d, f_ff) \
+ V(F32Max, 0x7e, f_ff) \
+ V(F32Abs, 0x7f, f_f) \
+ V(F32Neg, 0x80, f_f) \
+ V(F32CopySign, 0x81, f_ff) \
+ V(F32Ceil, 0x82, f_f) \
+ V(F32Floor, 0x83, f_f) \
+ V(F32Trunc, 0x84, f_f) \
+ V(F32NearestInt, 0x85, f_f) \
+ V(F32Sqrt, 0x86, f_f) \
+ V(F32Eq, 0x87, i_ff) \
+ V(F32Ne, 0x88, i_ff) \
+ V(F32Lt, 0x89, i_ff) \
+ V(F32Le, 0x8a, i_ff) \
+ V(F32Gt, 0x8b, i_ff) \
+ V(F32Ge, 0x8c, i_ff) \
+ V(F64Add, 0x8d, d_dd) \
+ V(F64Sub, 0x8e, d_dd) \
+ V(F64Mul, 0x8f, d_dd) \
+ V(F64Div, 0x90, d_dd) \
+ V(F64Min, 0x91, d_dd) \
+ V(F64Max, 0x92, d_dd) \
+ V(F64Abs, 0x93, d_d) \
+ V(F64Neg, 0x94, d_d) \
+ V(F64CopySign, 0x95, d_dd) \
+ V(F64Ceil, 0x96, d_d) \
+ V(F64Floor, 0x97, d_d) \
+ V(F64Trunc, 0x98, d_d) \
+ V(F64NearestInt, 0x99, d_d) \
+ V(F64Sqrt, 0x9a, d_d) \
+ V(F64Eq, 0x9b, i_dd) \
+ V(F64Ne, 0x9c, i_dd) \
+ V(F64Lt, 0x9d, i_dd) \
+ V(F64Le, 0x9e, i_dd) \
+ V(F64Gt, 0x9f, i_dd) \
+ V(F64Ge, 0xa0, i_dd) \
+ V(I32SConvertF32, 0xa1, i_f) \
+ V(I32SConvertF64, 0xa2, i_d) \
+ V(I32UConvertF32, 0xa3, i_f) \
+ V(I32UConvertF64, 0xa4, i_d) \
+ V(I32ConvertI64, 0xa5, i_l) \
+ V(I64SConvertF32, 0xa6, l_f) \
+ V(I64SConvertF64, 0xa7, l_d) \
+ V(I64UConvertF32, 0xa8, l_f) \
+ V(I64UConvertF64, 0xa9, l_d) \
+ V(I64SConvertI32, 0xaa, l_i) \
+ V(I64UConvertI32, 0xab, l_i) \
+ V(F32SConvertI32, 0xac, f_i) \
+ V(F32UConvertI32, 0xad, f_i) \
+ V(F32SConvertI64, 0xae, f_l) \
+ V(F32UConvertI64, 0xaf, f_l) \
+ V(F32ConvertF64, 0xb0, f_d) \
+ V(F32ReinterpretI32, 0xb1, f_i) \
+ V(F64SConvertI32, 0xb2, d_i) \
+ V(F64UConvertI32, 0xb3, d_i) \
+ V(F64SConvertI64, 0xb4, d_l) \
+ V(F64UConvertI64, 0xb5, d_l) \
+ V(F64ConvertF32, 0xb6, d_f) \
+ V(F64ReinterpretI64, 0xb7, d_l) \
+ V(I32ReinterpretF32, 0xb8, i_f) \
+ V(I64ReinterpretF64, 0xb9, l_d) |
// All opcodes.
#define FOREACH_OPCODE(V) \ |
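
The patch inserts the rotate opcodes (I32Ror/I32Rol at 0x4d/0x4e and I64Ror/I64Rol at 0x6a/0x6b) into FOREACH_SIMPLE_OPCODE and shifts every later value up by two. Each V(Name, opcode, sig) entry is an X-macro row: the first column becomes an identifier, the second is the byte value used in the binary encoding, and the third names the value signature (e.g. i_ii for an i32 result from two i32 operands). titzer's comment objects to the renumbering, presumably because the shifted byte values break other tools that have the existing encodings hard-coded; assigning the new rotate entries previously unused values instead would leave those encodings stable. For context, here is a minimal sketch of how such an X-macro table is typically consumed; TABLE, DECLARE_ENUM, and OpcodeName are illustrative stand-ins, not V8's actual definitions.

#include <cstdint>
#include <cstdio>

// Minimal sketch of the X-macro pattern used by FOREACH_SIMPLE_OPCODE.
// TABLE and the consuming macros below are illustrative, not V8's code.
#define TABLE(V)        \
  V(I32Ror, 0x4d, i_ii) \
  V(I32Rol, 0x4e, i_ii) \
  V(I32Eq, 0x4f, i_ii)

// Expansion 1: an enum whose numeric values are the wire encoding.
enum WasmOpcode : uint8_t {
#define DECLARE_ENUM(name, opcode, sig) kExpr##name = opcode,
  TABLE(DECLARE_ENUM)
#undef DECLARE_ENUM
};

// Expansion 2: a name table for disassembly/tracing.
const char* OpcodeName(uint8_t opcode) {
  switch (opcode) {
#define DECLARE_CASE(name, op, sig) \
  case op:                          \
    return #name;
    TABLE(DECLARE_CASE)
#undef DECLARE_CASE
    default:
      return "<unknown>";
  }
}

int main() {
  // The second column is the byte written into a wasm module, so shifting
  // these values (as the patch does from 0x4d onward) changes the binary
  // format that external decoders must agree on.
  std::printf("%s\n", OpcodeName(kExprI32Eq));  // prints "I32Eq"
  return 0;
}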