Chromium Code Reviews

Side by Side Diff: src/trusted/validator_ragel/validator_internal.h

Issue 11000033: Move validator_x86_XX.rl out of unreviewed. (Closed) Base URL: svn://svn.chromium.org/native_client/trunk/src/native_client/
Patch Set: Created 7 years, 9 months ago
1 /* 1 /*
2 * Copyright (c) 2012 The Native Client Authors. All rights reserved. 2 * Copyright (c) 2012 The Native Client Authors. All rights reserved.
3 * Use of this source code is governed by a BSD-style license that can be 3 * Use of this source code is governed by a BSD-style license that can be
4 * found in the LICENSE file. 4 * found in the LICENSE file.
5 */ 5 */
6 6
7 /* 7 /*
8 * This file contains common parts of x86-32 and x86-64 internals (inline 8 * This file contains common parts of x86-32 and x86-64 internals (inline
9 * functions and defines). 9 * functions and defines).
10 */ 10 */
11 11
12 #ifndef NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_RAGEL_VALIDATOR_INTERNAL_H_ 12 #ifndef NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_RAGEL_VALIDATOR_INTERNAL_H_
13 #define NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_RAGEL_VALIDATOR_INTERNAL_H_ 13 #define NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_RAGEL_VALIDATOR_INTERNAL_H_
14 14
15 #include "native_client/src/shared/platform/nacl_check.h" 15 #include "native_client/src/shared/platform/nacl_check.h"
16 #include "native_client/src/shared/utils/types.h" 16 #include "native_client/src/shared/utils/types.h"
17 #include "native_client/src/trusted/validator_ragel/unreviewed/decoding.h" 17 #include "native_client/src/trusted/validator_ragel/decoding.h"
18 #include "native_client/src/trusted/validator_ragel/validator.h" 18 #include "native_client/src/trusted/validator_ragel/validator.h"
19 19
20 /* Maximum set of R-DFA allowable CPUID features. */ 20 /* Maximum set of R-DFA allowable CPUID features. */
21 extern const NaClCPUFeaturesX86 kValidatorCPUIDFeatures; 21 extern const NaClCPUFeaturesX86 kValidatorCPUIDFeatures;
22 22
23 /* Macroses to suppport CPUID handling. */ 23 /* Macros to support CPUID handling. */
24 #define SET_CPU_FEATURE(F) \ 24 #define SET_CPU_FEATURE(FEATURE) \
25 if (!(F##_Allowed)) { \ 25 if (!(FEATURE(kValidatorCPUIDFeatures.data))) { \
26 instruction_info_collected |= UNRECOGNIZED_INSTRUCTION; \ 26 instruction_info_collected |= UNRECOGNIZED_INSTRUCTION; \
27 } \ 27 } \
28 if (!(F)) { \ 28 if (!(FEATURE(cpu_features->data))) { \
29 instruction_info_collected |= CPUID_UNSUPPORTED_INSTRUCTION; \ 29 instruction_info_collected |= CPUID_UNSUPPORTED_INSTRUCTION; \
30 } 30 }
31 #define CPUFeature_3DNOW cpu_features->data[NaClCPUFeatureX86_3DNOW] 31 #define CPUFeature_3DNOW(FEATURE) FEATURE[NaClCPUFeatureX86_3DNOW]
32 /* 32 /*
33 * AMD documentation claims it's always available if CPUFeature_LM is present, 33 * AMD documentation claims it's always available if CPUFeature_LM is present,
34 * But Intel documentation does not even mention it! 34 * But Intel documentation does not even mention it!
35 * Keep it as 3DNow! instruction. 35 * Keep it as 3DNow! instruction.
36 */ 36 */
37 #define CPUFeature_3DPRFTCH CPUFeature_3DNOW || CPUFeature_PRE 37 #define CPUFeature_3DPRFTCH(FEATURE) \
38 #define CPUFeature_AES cpu_features->data[NaClCPUFeatureX86_AES] 38 (CPUFeature_3DNOW(FEATURE) || CPUFeature_PRE(FEATURE))
39 #define CPUFeature_AESAVX CPUFeature_AES && CPUFeature_AVX 39 #define CPUFeature_AES(FEATURE) FEATURE[NaClCPUFeatureX86_AES]
40 #define CPUFeature_AVX cpu_features->data[NaClCPUFeatureX86_AVX] 40 #define CPUFeature_AESAVX(FEATURE) \
41 #define CPUFeature_BMI1 cpu_features->data[NaClCPUFeatureX86_BMI1] 41 (CPUFeature_AES(FEATURE) && CPUFeature_AVX(FEATURE))
42 #define CPUFeature_CLFLUSH cpu_features->data[NaClCPUFeatureX86_CLFLUSH] 42 #define CPUFeature_AVX(FEATURE) FEATURE[NaClCPUFeatureX86_AVX]
43 #define CPUFeature_CLMUL cpu_features->data[NaClCPUFeatureX86_CLMUL] 43 #define CPUFeature_BMI1(FEATURE) FEATURE[NaClCPUFeatureX86_BMI1]
44 #define CPUFeature_CLMULAVX CPUFeature_CLMUL && CPUFeature_AVX 44 #define CPUFeature_CLFLUSH(FEATURE) FEATURE[NaClCPUFeatureX86_CLFLUSH]
45 #define CPUFeature_CMOV cpu_features->data[NaClCPUFeatureX86_CMOV] 45 #define CPUFeature_CLMUL(FEATURE) FEATURE[NaClCPUFeatureX86_CLMUL]
46 #define CPUFeature_CMOVx87 CPUFeature_CMOV && CPUFeature_x87 46 #define CPUFeature_CLMULAVX(FEATURE) \
47 #define CPUFeature_CX16 cpu_features->data[NaClCPUFeatureX86_CX16] 47 (CPUFeature_CLMUL(FEATURE) && CPUFeature_AVX(FEATURE))
48 #define CPUFeature_CX8 cpu_features->data[NaClCPUFeatureX86_CX8] 48 #define CPUFeature_CMOV(FEATURE) FEATURE[NaClCPUFeatureX86_CMOV]
49 #define CPUFeature_E3DNOW cpu_features->data[NaClCPUFeatureX86_E3DNOW] 49 #define CPUFeature_CMOVx87(FEATURE) \
50 #define CPUFeature_EMMX cpu_features->data[NaClCPUFeatureX86_EMMX] 50 (CPUFeature_CMOV(FEATURE) && CPUFeature_x87(FEATURE))
51 #define CPUFeature_EMMXSSE CPUFeature_EMMX || CPUFeature_SSE 51 #define CPUFeature_CX16(FEATURE) FEATURE[NaClCPUFeatureX86_CX16]
52 #define CPUFeature_F16C cpu_features->data[NaClCPUFeatureX86_F16C] 52 #define CPUFeature_CX8(FEATURE) FEATURE[NaClCPUFeatureX86_CX8]
53 #define CPUFeature_FMA cpu_features->data[NaClCPUFeatureX86_FMA] 53 #define CPUFeature_E3DNOW(FEATURE) FEATURE[NaClCPUFeatureX86_E3DNOW]
54 #define CPUFeature_FMA4 cpu_features->data[NaClCPUFeatureX86_FMA4] 54 #define CPUFeature_EMMX(FEATURE) FEATURE[NaClCPUFeatureX86_EMMX]
55 #define CPUFeature_FXSR cpu_features->data[NaClCPUFeatureX86_FXSR] 55 #define CPUFeature_EMMXSSE(FEATURE) \
56 #define CPUFeature_LAHF cpu_features->data[NaClCPUFeatureX86_LAHF] 56 (CPUFeature_EMMX(FEATURE) || CPUFeature_SSE(FEATURE))
57 #define CPUFeature_LM cpu_features->data[NaClCPUFeatureX86_LM] 57 #define CPUFeature_F16C(FEATURE) FEATURE[NaClCPUFeatureX86_F16C]
58 #define CPUFeature_LWP cpu_features->data[NaClCPUFeatureX86_LWP] 58 #define CPUFeature_FMA(FEATURE) FEATURE[NaClCPUFeatureX86_FMA]
59 #define CPUFeature_FMA4(FEATURE) FEATURE[NaClCPUFeatureX86_FMA4]
60 #define CPUFeature_FXSR(FEATURE) FEATURE[NaClCPUFeatureX86_FXSR]
61 #define CPUFeature_LAHF(FEATURE) FEATURE[NaClCPUFeatureX86_LAHF]
62 #define CPUFeature_LM(FEATURE) FEATURE[NaClCPUFeatureX86_LM]
63 #define CPUFeature_LWP(FEATURE) FEATURE[NaClCPUFeatureX86_LWP]
59 /* 64 /*
60 * We allow lzcnt unconditionally 65 * We allow lzcnt unconditionally
61 * See http://code.google.com/p/nativeclient/issues/detail?id=2869 66 * See http://code.google.com/p/nativeclient/issues/detail?id=2869
62 */ 67 */
63 #define CPUFeature_LZCNT TRUE 68 #define CPUFeature_LZCNT(FEATURE) TRUE
64 #define CPUFeature_MMX cpu_features->data[NaClCPUFeatureX86_MMX] 69 #define CPUFeature_MMX(FEATURE) FEATURE[NaClCPUFeatureX86_MMX]
65 #define CPUFeature_MON cpu_features->data[NaClCPUFeatureX86_MON] 70 #define CPUFeature_MON(FEATURE) FEATURE[NaClCPUFeatureX86_MON]
66 #define CPUFeature_MOVBE cpu_features->data[NaClCPUFeatureX86_MOVBE] 71 #define CPUFeature_MOVBE(FEATURE) FEATURE[NaClCPUFeatureX86_MOVBE]
67 #define CPUFeature_OSXSAVE cpu_features->data[NaClCPUFeatureX86_OSXSAVE] 72 #define CPUFeature_OSXSAVE(FEATURE) FEATURE[NaClCPUFeatureX86_OSXSAVE]
68 #define CPUFeature_POPCNT cpu_features->data[NaClCPUFeatureX86_POPCNT] 73 #define CPUFeature_POPCNT(FEATURE) FEATURE[NaClCPUFeatureX86_POPCNT]
69 #define CPUFeature_PRE cpu_features->data[NaClCPUFeatureX86_PRE] 74 #define CPUFeature_PRE(FEATURE) FEATURE[NaClCPUFeatureX86_PRE]
70 #define CPUFeature_SSE cpu_features->data[NaClCPUFeatureX86_SSE] 75 #define CPUFeature_SSE(FEATURE) FEATURE[NaClCPUFeatureX86_SSE]
71 #define CPUFeature_SSE2 cpu_features->data[NaClCPUFeatureX86_SSE2] 76 #define CPUFeature_SSE2(FEATURE) FEATURE[NaClCPUFeatureX86_SSE2]
72 #define CPUFeature_SSE3 cpu_features->data[NaClCPUFeatureX86_SSE3] 77 #define CPUFeature_SSE3(FEATURE) FEATURE[NaClCPUFeatureX86_SSE3]
73 #define CPUFeature_SSE41 cpu_features->data[NaClCPUFeatureX86_SSE41] 78 #define CPUFeature_SSE41(FEATURE) FEATURE[NaClCPUFeatureX86_SSE41]
74 #define CPUFeature_SSE42 cpu_features->data[NaClCPUFeatureX86_SSE42] 79 #define CPUFeature_SSE42(FEATURE) FEATURE[NaClCPUFeatureX86_SSE42]
75 #define CPUFeature_SSE4A cpu_features->data[NaClCPUFeatureX86_SSE4A] 80 #define CPUFeature_SSE4A(FEATURE) FEATURE[NaClCPUFeatureX86_SSE4A]
76 #define CPUFeature_SSSE3 cpu_features->data[NaClCPUFeatureX86_SSSE3] 81 #define CPUFeature_SSSE3(FEATURE) FEATURE[NaClCPUFeatureX86_SSSE3]
77 #define CPUFeature_TBM cpu_features->data[NaClCPUFeatureX86_TBM] 82 #define CPUFeature_TBM(FEATURE) FEATURE[NaClCPUFeatureX86_TBM]
78 #define CPUFeature_TSC cpu_features->data[NaClCPUFeatureX86_TSC] 83 #define CPUFeature_TSC(FEATURE) FEATURE[NaClCPUFeatureX86_TSC]
79 /* 84 /*
80 * We allow tzcnt unconditionally 85 * We allow tzcnt unconditionally
81 * See http://code.google.com/p/nativeclient/issues/detail?id=2869 86 * See http://code.google.com/p/nativeclient/issues/detail?id=2869
82 */ 87 */
83 #define CPUFeature_TZCNT TRUE 88 #define CPUFeature_TZCNT(FEATURE) TRUE
84 #define CPUFeature_x87 cpu_features->data[NaClCPUFeatureX86_x87] 89 #define CPUFeature_x87(FEATURE) FEATURE[NaClCPUFeatureX86_x87]
85 #define CPUFeature_XOP cpu_features->data[NaClCPUFeatureX86_XOP] 90 #define CPUFeature_XOP(FEATURE) FEATURE[NaClCPUFeatureX86_XOP]
86
87 #define CPUFeature_3DNOW_Allowed \
88 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_3DNOW]
89 /*
90 * AMD documentation claims it's always available if CPUFeature_LM is present,
91 * But Intel documentation does not even mention it!
92 * Keep it as 3DNow! instruction.
93 */
94 #define CPUFeature_3DPRFTCH_Allowed \
95 CPUFeature_3DNOW_Allowed || CPUFeature_PRE_Allowed
96 #define CPUFeature_AES_Allowed \
97 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_AES]
98 #define CPUFeature_AESAVX_Allowed \
99 CPUFeature_AES_Allowed && CPUFeature_AVX_Allowed
100 #define CPUFeature_AVX_Allowed \
101 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_AVX]
102 #define CPUFeature_BMI1_Allowed \
103 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_BMI1]
104 #define CPUFeature_CLFLUSH_Allowed \
105 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_CLFLUSH]
106 #define CPUFeature_CLMUL_Allowed \
107 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_CLMUL]
108 #define CPUFeature_CLMULAVX_Allowed \
109 CPUFeature_CLMUL_Allowed && CPUFeature_AVX_Allowed
110 #define CPUFeature_CMOV_Allowed \
111 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_CMOV]
112 #define CPUFeature_CMOVx87_Allowed \
113 CPUFeature_CMOV_Allowed && CPUFeature_x87_Allowed
114 #define CPUFeature_CX16_Allowed \
115 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_CX16]
116 #define CPUFeature_CX8_Allowed \
117 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_CX8]
118 #define CPUFeature_E3DNOW_Allowed \
119 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_E3DNOW]
120 #define CPUFeature_EMMX_Allowed \
121 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_EMMX]
122 #define CPUFeature_EMMXSSE_Allowed \
123 CPUFeature_EMMX_Allowed || CPUFeature_SSE_Allowed
124 #define CPUFeature_F16C_Allowed \
125 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_F16C]
126 #define CPUFeature_FMA_Allowed \
127 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_FMA]
128 #define CPUFeature_FMA4_Allowed \
129 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_FMA4]
130 #define CPUFeature_FXSR_Allowed \
131 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_FXSR]
132 #define CPUFeature_LAHF_Allowed \
133 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_LAHF]
134 #define CPUFeature_LM_Allowed \
135 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_LM]
136 #define CPUFeature_LWP_Allowed \
137 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_LWP]
138 /*
139 * We allow lzcnt unconditionally
140 * See http://code.google.com/p/nativeclient/issues/detail?id=2869
141 */
142 #define CPUFeature_LZCNT_Allowed TRUE
143 #define CPUFeature_MMX_Allowed \
144 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_MMX]
145 #define CPUFeature_MON_Allowed \
146 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_MON]
147 #define CPUFeature_MOVBE_Allowed \
148 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_MOVBE]
149 #define CPUFeature_OSXSAVE_Allowed \
150 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_OSXSAVE]
151 #define CPUFeature_POPCNT_Allowed \
152 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_POPCNT]
153 #define CPUFeature_PRE_Allowed \
154 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_PRE]
155 #define CPUFeature_SSE_Allowed \
156 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_SSE]
157 #define CPUFeature_SSE2_Allowed \
158 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_SSE2]
159 #define CPUFeature_SSE3_Allowed \
160 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_SSE3]
161 #define CPUFeature_SSE41_Allowed \
162 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_SSE41]
163 #define CPUFeature_SSE42_Allowed \
164 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_SSE42]
165 #define CPUFeature_SSE4A_Allowed \
166 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_SSE4A]
167 #define CPUFeature_SSSE3_Allowed \
168 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_SSSE3]
169 #define CPUFeature_TBM_Allowed \
170 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_TBM]
171 #define CPUFeature_TSC_Allowed \
172 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_TSC]
173 /*
174 * We allow tzcnt unconditionally
175 * See http://code.google.com/p/nativeclient/issues/detail?id=2869
176 */
177 #define CPUFeature_TZCNT_Allowed TRUE
178 #define CPUFeature_x87_Allowed \
179 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_x87]
180 #define CPUFeature_XOP_Allowed \
181 kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_XOP]
182 91
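
The key change in this block is that each CPUFeature_* macro now takes the feature-bit array as a parameter instead of hard-coding cpu_features->data, so SET_CPU_FEATURE can apply the same macro to both the validator's maximal allowed set and the CPU's actual feature set; that is what lets the duplicated *_Allowed definitions on the left be deleted. As a rough illustration of the preprocessor output (derived only from the definitions above, inside a validator action where instruction_info_collected and cpu_features are in scope), SET_CPU_FEATURE(CPUFeature_AVX) expands to approximately:

  if (!(kValidatorCPUIDFeatures.data[NaClCPUFeatureX86_AVX])) {
    instruction_info_collected |= UNRECOGNIZED_INSTRUCTION;
  }
  if (!(cpu_features->data[NaClCPUFeatureX86_AVX])) {
    instruction_info_collected |= CPUID_UNSUPPORTED_INSTRUCTION;
  }
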
183 /* Remember some information about instruction for further processing. */ 92 /* Remember some information about instruction for further processing. */
184 #define GET_REX_PREFIX() rex_prefix 93 #define GET_REX_PREFIX() rex_prefix
185 #define SET_REX_PREFIX(P) rex_prefix = (P) 94 #define SET_REX_PREFIX(PREFIX_BYTE) rex_prefix = (PREFIX_BYTE)
186 #define GET_VEX_PREFIX2() vex_prefix2 95 #define GET_VEX_PREFIX2() vex_prefix2
187 #define SET_VEX_PREFIX2(P) vex_prefix2 = (P) 96 #define SET_VEX_PREFIX2(PREFIX_BYTE) vex_prefix2 = (PREFIX_BYTE)
188 #define GET_VEX_PREFIX3() vex_prefix3 97 #define GET_VEX_PREFIX3() vex_prefix3
189 #define SET_VEX_PREFIX3(P) vex_prefix3 = (P) 98 #define SET_VEX_PREFIX3(PREFIX_BYTE) vex_prefix3 = (PREFIX_BYTE)
190 #define SET_MODRM_BASE(N) base = (N) 99 #define SET_MODRM_BASE(REG_NUMBER) base = (REG_NUMBER)
191 #define SET_MODRM_INDEX(N) index = (N) 100 #define SET_MODRM_INDEX(REG_NUMBER) index = (REG)
halyavin 2013/03/19 15:08:09 REG->REG_NUMBER
khim 2013/03/21 14:38:17 Done.
192 101
193 /* Ignore this information for now. */ 102 /* Ignore this information for now. */
194 #define SET_DATA16_PREFIX(S) 103 #define SET_DATA16_PREFIX(STATUS)
195 #define SET_REPZ_PREFIX(S) 104 #define SET_REPZ_PREFIX(STATUS)
196 #define SET_REPNZ_PREFIX(S) 105 #define SET_REPNZ_PREFIX(STATUS)
197 #define SET_MODRM_SCALE(S) 106 #define SET_MODRM_SCALE(STATUS)
198 #define SET_DISP_PTR(P) 107 #define SET_DISP_PTR(PTR)
199 #define SET_IMM_PTR(P) 108 #define SET_IMM_PTR(PTR)
200 #define SET_IMM2_PTR(P) 109 #define SET_IMM2_PTR(PTR)
201 110
202 /* 111 /*
203 * Collect information about anyfields (offsets and immediates). 112 * Collect information about anyfields (offsets and immediates).
204 * Note: we use += below instead of |=. This means two immediate fields will 113 * Note: we use += below instead of |=. This means two immediate fields will
205 * be treated as one. It's not important for safety. 114 * be treated as one. It's not important for safety.
206 */ 115 */
207 #define SET_DISP_TYPE(T) SET_DISP_TYPE_##T 116 #define SET_DISP_TYPE(TYPE) SET_DISP_TYPE_##TYPE
208 #define SET_DISP_TYPE_DISPNONE 117 #define SET_DISP_TYPE_DISPNONE
209 #define SET_DISP_TYPE_DISP8 (instruction_info_collected += DISPLACEMENT_8BIT) 118 #define SET_DISP_TYPE_DISP8 (instruction_info_collected += DISPLACEMENT_8BIT)
210 #define SET_DISP_TYPE_DISP32 (instruction_info_collected += DISPLACEMENT_32BIT) 119 #define SET_DISP_TYPE_DISP32 (instruction_info_collected += DISPLACEMENT_32BIT)
211 #define SET_IMM_TYPE(T) SET_IMM_TYPE_##T 120 #define SET_IMM_TYPE(TYPE) SET_IMM_TYPE_##TYPE
212 /* imm2 field is a flag, not accumulator, like with other immediates */ 121 /* imm2 field is a flag, not accumulator, like other immediates */
213 #define SET_IMM_TYPE_IMM2 (instruction_info_collected |= IMMEDIATE_2BIT) 122 #define SET_IMM_TYPE_IMM2 (instruction_info_collected |= IMMEDIATE_2BIT)
214 #define SET_IMM_TYPE_IMM8 (instruction_info_collected += IMMEDIATE_8BIT) 123 #define SET_IMM_TYPE_IMM8 (instruction_info_collected += IMMEDIATE_8BIT)
215 #define SET_IMM_TYPE_IMM16 (instruction_info_collected += IMMEDIATE_16BIT) 124 #define SET_IMM_TYPE_IMM16 (instruction_info_collected += IMMEDIATE_16BIT)
216 #define SET_IMM_TYPE_IMM32 (instruction_info_collected += IMMEDIATE_32BIT) 125 #define SET_IMM_TYPE_IMM32 (instruction_info_collected += IMMEDIATE_32BIT)
217 #define SET_IMM_TYPE_IMM64 (instruction_info_collected += IMMEDIATE_64BIT) 126 #define SET_IMM_TYPE_IMM64 (instruction_info_collected += IMMEDIATE_64BIT)
218 #define SET_IMM2_TYPE(T) SET_IMM2_TYPE_##T 127 #define SET_SECOND_IMM_TYPE(TYPE) SET_SECOND_IMM_TYPE_##TYPE
219 #define SET_IMM2_TYPE_IMM8 \ 128 #define SET_SECOND_IMM_TYPE_IMM8 \
220 (instruction_info_collected += SECOND_IMMEDIATE_8BIT) 129 (instruction_info_collected += SECOND_IMMEDIATE_8BIT)
221 #define SET_IMM2_TYPE_IMM16 \ 130 #define SET_SECOND_IMM_TYPE_IMM16 \
222 (instruction_info_collected += SECOND_IMMEDIATE_16BIT) 131 (instruction_info_collected += SECOND_IMMEDIATE_16BIT)
223 132
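
These operand-type macros rely on token pasting: the generated validator actions name the operand kind as a bare token and the macro picks the matching accumulator statement, with DISPNONE expanding to nothing at all. Going only by the definitions above, for example:

  SET_DISP_TYPE(DISPNONE)     /* expands to nothing */
  SET_DISP_TYPE(DISP32)       /* (instruction_info_collected += DISPLACEMENT_32BIT) */
  SET_IMM_TYPE(IMM8)          /* (instruction_info_collected += IMMEDIATE_8BIT) */
  SET_SECOND_IMM_TYPE(IMM16)  /* (instruction_info_collected += SECOND_IMMEDIATE_16BIT) */
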
224 /* Mark the destination of a jump instruction and make an early validity check: 133 /*
225 * to jump outside given code region, the target address must be aligned. 134 * Mark the destination of a jump instruction and make an early validity check:
135 * jump target outside of given code region must be aligned.
226 * 136 *
227 * Returns TRUE iff the jump passes the early validity check. 137 * Returns TRUE iff the jump passes the early validity check.
228 */ 138 */
229 static FORCEINLINE int MarkJumpTarget(size_t jump_dest, 139 static FORCEINLINE int MarkJumpTarget(size_t jump_dest,
230 bitmap_word *jump_dests, 140 bitmap_word *jump_dests,
231 size_t size) { 141 size_t size) {
232 if ((jump_dest & kBundleMask) == 0) { 142 if ((jump_dest & kBundleMask) == 0) {
233 return TRUE; 143 return TRUE;
234 } 144 }
235 if (jump_dest >= size) { 145 if (jump_dest >= size) {
(...skipping 22 matching lines...)
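
The early-out in MarkJumpTarget depends on bundle alignment: when the low bits selected by kBundleMask are all zero the destination is bundle-aligned, so even a jump out of the current code region is acceptable; otherwise the destination has to fall inside the region to pass the early check. A worked example, assuming the usual 32-byte NaCl bundles so that kBundleMask == 0x1f (the constant is defined elsewhere; its value here is an assumption):

  /* Assuming kBundleMask == 0x1f (32-byte bundles):                      */
  /*   jump_dest = 0x2740 -> 0x2740 & 0x1f == 0 -> bundle-aligned, TRUE   */
  /*   jump_dest = 0x2741 -> 0x2741 & 0x1f == 1 -> not aligned, so it     */
  /*     must lie inside the code region to pass the early check          */
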
258 /* 168 /*
259 * Mark the given addresses as invalid jump target addresses (that is: unmark 169 * Mark the given addresses as invalid jump target addresses (that is: unmark
260 * them). 170 * them).
261 */ 171 */
262 static FORCEINLINE void UnmarkValidJumpTargets(size_t address, 172 static FORCEINLINE void UnmarkValidJumpTargets(size_t address,
263 size_t bytes, 173 size_t bytes,
264 bitmap_word *valid_targets) { 174 bitmap_word *valid_targets) {
265 BitmapClearBits(valid_targets, address, bytes); 175 BitmapClearBits(valid_targets, address, bytes);
266 } 176 }
267 177
178 /*
179 * Compare valid_targets and jump_dests and call callback for any address in
180 * jump_dests which is not present in valid_targets.
181 */
268 static INLINE Bool ProcessInvalidJumpTargets( 182 static INLINE Bool ProcessInvalidJumpTargets(
269 const uint8_t *data, 183 const uint8_t codeblock[],
270 size_t size, 184 size_t size,
271 bitmap_word *valid_targets, 185 bitmap_word *valid_targets,
272 bitmap_word *jump_dests, 186 bitmap_word *jump_dests,
273 ValidationCallbackFunc user_callback, 187 ValidationCallbackFunc user_callback,
274 void *callback_data) { 188 void *callback_data) {
275 size_t elements = (size + NACL_HOST_WORDSIZE - 1) / NACL_HOST_WORDSIZE; 189 size_t elements = (size + NACL_HOST_WORDSIZE - 1) / NACL_HOST_WORDSIZE;
276 size_t i, j; 190 size_t i, j;
277 Bool result = TRUE; 191 Bool result = TRUE;
278 192
279 for (i = 0; i < elements ; i++) { 193 for (i = 0; i < elements; i++) {
280 bitmap_word jump_dest_mask = jump_dests[i]; 194 bitmap_word jump_dest_mask = jump_dests[i];
281 bitmap_word valid_target_mask = valid_targets[i]; 195 bitmap_word valid_target_mask = valid_targets[i];
282 if ((jump_dest_mask & ~valid_target_mask) != 0) { 196 if ((jump_dest_mask & ~valid_target_mask) != 0) {
283 for (j = i * NACL_HOST_WORDSIZE; j < (i + 1) * NACL_HOST_WORDSIZE; j++) 197 for (j = i * NACL_HOST_WORDSIZE; j < (i + 1) * NACL_HOST_WORDSIZE; j++)
284 if (BitmapIsBitSet(jump_dests, j) && 198 if (BitmapIsBitSet(jump_dests, j) &&
285 !BitmapIsBitSet(valid_targets, j)) { 199 !BitmapIsBitSet(valid_targets, j)) {
286 result &= user_callback(data + j, 200 result &= user_callback(codeblock + j,
287 data + j, 201 codeblock + j,
288 BAD_JUMP_TARGET, 202 BAD_JUMP_TARGET,
289 callback_data); 203 callback_data);
290 } 204 }
291 } 205 }
292 } 206 }
293 207
294 return result; 208 return result;
295 } 209 }
296 210
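
ProcessInvalidJumpTargets compares the two bitmaps a machine word at a time and only drops to per-bit checks when a word actually contains a jump destination that is not a valid instruction boundary. Below is a minimal self-contained sketch of that scan pattern, using plain 32-bit words and hypothetical Toy* names rather than the real bitmap_word/BitmapIsBitSet API:

  #include <stddef.h>
  #include <stdint.h>
  #include <stdio.h>

  #define TOY_WORDSIZE 32u

  static int ToyBitIsSet(const uint32_t *bitmap, size_t bit) {
    return (bitmap[bit / TOY_WORDSIZE] >> (bit % TOY_WORDSIZE)) & 1u;
  }

  /* Count jump destinations that are not valid targets; the real code
     instead calls user_callback with BAD_JUMP_TARGET for each such bit. */
  static size_t ToyCountBadTargets(const uint32_t *valid_targets,
                                   const uint32_t *jump_dests,
                                   size_t words) {
    size_t i, j, bad = 0;
    for (i = 0; i < words; i++) {
      if ((jump_dests[i] & ~valid_targets[i]) == 0)
        continue;  /* fast path: every destination in this word is valid */
      for (j = i * TOY_WORDSIZE; j < (i + 1) * TOY_WORDSIZE; j++)
        if (ToyBitIsSet(jump_dests, j) && !ToyBitIsSet(valid_targets, j))
          bad++;
    }
    return bad;
  }

  int main(void) {
    uint32_t valid[2] = { 0x00000011u, 0x00000001u };  /* offsets 0, 4, 32  */
    uint32_t dests[2] = { 0x00000010u, 0x00000003u };  /* offsets 4, 32, 33 */
    printf("%zu bad jump target(s)\n", ToyCountBadTargets(valid, dests, 2));
    return 0;  /* prints "1 bad jump target(s)": offset 33 is never valid */
  }
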
297 211
298 /* 212 /*
299 * Process rel8_operand. Note: rip points to the beginning of the next 213 * Process rel8_operand. Note: rip points to the beginning of the next
300 * instruction here and x86 encoding guarantees rel8 field is the last one 214 * instruction here and x86 encoding guarantees rel8 field is the last one
301 * in a current instruction. 215 * in a current instruction.
302 */ 216 */
303 static FORCEINLINE void Rel8Operand(const uint8_t *rip, 217 static FORCEINLINE void Rel8Operand(const uint8_t *rip,
304 const uint8_t* codeblock_start, 218 const uint8_t codeblock[],
305 bitmap_word *jump_dests, 219 bitmap_word *jump_dests,
306 size_t jumpdests_size, 220 size_t jumpdests_size,
307 uint32_t *instruction_info_collected) { 221 uint32_t *instruction_info_collected) {
308 int8_t offset = (uint8_t) (rip[-1]); 222 int8_t offset = rip[-1];
309 size_t jump_dest = offset + (rip - codeblock_start); 223 size_t jump_dest = offset + (rip - codeblock);
310 224
311 if (MarkJumpTarget(jump_dest, jump_dests, jumpdests_size)) 225 if (MarkJumpTarget(jump_dest, jump_dests, jumpdests_size))
312 *instruction_info_collected |= RELATIVE_8BIT; 226 *instruction_info_collected |= RELATIVE_8BIT;
313 else 227 else
314 *instruction_info_collected |= RELATIVE_8BIT | DIRECT_JUMP_OUT_OF_RANGE; 228 *instruction_info_collected |= RELATIVE_8BIT | DIRECT_JUMP_OUT_OF_RANGE;
315 } 229 }
316 230
317 /* 231 /*
318 * Process rel32_operand. Note: rip points to the beginning of the next 232 * Process rel32_operand. Note: rip points to the beginning of the next
319 * instruction here and x86 encoding guarantees rel32 field is the last one 233 * instruction here and x86 encoding guarantees rel32 field is the last one
320 * in a current instruction. 234 * in a current instruction.
321 */ 235 */
322 static FORCEINLINE void Rel32Operand(const uint8_t *rip, 236 static FORCEINLINE void Rel32Operand(const uint8_t *rip,
323 const uint8_t* codeblock_start, 237 const uint8_t codeblock[],
324 bitmap_word *jump_dests, 238 bitmap_word *jump_dests,
325 size_t jumpdests_size, 239 size_t jumpdests_size,
326 uint32_t *instruction_info_collected) { 240 uint32_t *instruction_info_collected) {
327 int32_t offset = (rip[-4] + 256U * (rip[-3] + 256U * ( 241 int32_t offset =
328 rip[-2] + 256U * ((uint32_t) rip[-1])))); 242 rip[-4] + 256U * (rip[-3] + 256U * (rip[-2] + 256U * (rip[-1])));
329 size_t jump_dest = offset + (rip - codeblock_start); 243 size_t jump_dest = offset + (rip - codeblock);
330 244
331 if (MarkJumpTarget(jump_dest, jump_dests, jumpdests_size)) 245 if (MarkJumpTarget(jump_dest, jump_dests, jumpdests_size))
332 *instruction_info_collected |= RELATIVE_32BIT; 246 *instruction_info_collected |= RELATIVE_32BIT;
333 else 247 else
334 *instruction_info_collected |= RELATIVE_32BIT | DIRECT_JUMP_OUT_OF_RANGE; 248 *instruction_info_collected |= RELATIVE_32BIT | DIRECT_JUMP_OUT_OF_RANGE;
335 } 249 }
336 250
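
The nested multiply-by-256 expression in Rel32Operand is just a little-endian decode of the four bytes preceding rip (rip[-4] is the least significant byte), in the same way Rel8Operand reads the single byte rip[-1] as a signed 8-bit offset. A standalone equivalent, with a hypothetical helper name:

  #include <stdint.h>

  /* Little-endian decode of the rel32 field that ends at rip. */
  static int32_t DecodeRel32(const uint8_t *rip) {
    uint32_t u = (uint32_t) rip[-4]         |
                 ((uint32_t) rip[-3] << 8)  |
                 ((uint32_t) rip[-2] << 16) |
                 ((uint32_t) rip[-1] << 24);
    /* Like the original expression, this relies on the unsigned value
       wrapping to a signed 32-bit displacement. */
    return (int32_t) u;
  }

  /* The jump target is then the displacement plus the offset of the next
     instruction within the code block, mirroring the computation above:
       size_t jump_dest = DecodeRel32(rip) + (rip - codeblock);           */
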
337 #endif /* NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_RAGEL_VALIDATOR_INTERNAL_H_ */ 251 #endif /* NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_RAGEL_VALIDATOR_INTERNAL_H_ */
