Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(616)

Side by Side Diff: test/Transforms/MinSFI/sandbox-memory-accesses.ll

Issue 939073008: Rebased PNaCl localmods in LLVM to 223109 (Closed)
Patch Set: undo localmod Created 5 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 ; RUN: opt %s -minsfi-sandbox-memory-accesses -S | FileCheck %s
2 ; RUN: opt %s -minsfi-ptrsize=20 -minsfi-sandbox-memory-accesses -S \
3 ; RUN: | FileCheck %s -check-prefix=CHECK-MASK
4
5 !llvm.module.flags = !{!0}
6 !0 = metadata !{i32 1, metadata !"Debug Info Version", i32 2}
7
8 target datalayout = "p:32:32:32"
9 target triple = "le32-unknown-nacl"
10
11 ; CHECK: @__sfi_memory_base = external global i64
12 ; CHECK: @__sfi_pointer_size = constant i32 32
13 ; CHECK-MASK: @__sfi_pointer_size = constant i32 20
14
; Intrinsics whose pointer (and, under masking, length) operands the pass
; must sandbox: memory transfer/set intrinsics and the NaCl atomics.
15 declare void @llvm.memcpy.p0i8.p0i8.i32(i8* nocapture, i8* nocapture readonly, i32, i32, i1)
16 declare void @llvm.memmove.p0i8.p0i8.i32(i8* nocapture, i8* nocapture readonly, i32, i32, i1)
17 declare void @llvm.memset.p0i8.i32(i8* nocapture, i8, i32, i32, i1)
18
19 declare i32 @llvm.nacl.atomic.load.i32(i32*, i32)
20 declare void @llvm.nacl.atomic.store.i32(i32, i32*, i32)
21 declare i32 @llvm.nacl.atomic.rmw.i32(i32, i32*, i32, i32)
22 declare i32 @llvm.nacl.atomic.cmpxchg.i32(i32*, i32, i32, i32, i32)
23
24 declare i64 @llvm.nacl.atomic.load.i64(i64*, i32)
25 declare void @llvm.nacl.atomic.store.i64(i64, i64*, i32)
26 declare i64 @llvm.nacl.atomic.rmw.i64(i32, i64*, i64, i32)
27 declare i64 @llvm.nacl.atomic.cmpxchg.i64(i64*, i64, i64, i32, i32)
28
29 declare void @llvm.nacl.atomic.fence(i32)
30 declare void @llvm.nacl.atomic.fence.all()
31 declare i1 @llvm.nacl.atomic.is.lock.free(i32, i8*)
; A function with no memory accesses must be left untouched: in particular,
; no load of @__sfi_memory_base may be inserted (CHECK-NOT below).
33 define i32 @test_no_sandbox(i32 %x, i32 %y) {
34 %sum = add i32 %x, %y
35 ret i32 %sum
36 }
37
38 ; CHECK-LABEL: define i32 @test_no_sandbox(i32 %x, i32 %y) {
39 ; CHECK-NOT: @__sfi_memory_base
40 ; CHECK-NEXT: %sum = add i32 %x, %y
41 ; CHECK-NEXT: ret i32 %sum
42 ; CHECK-NEXT: }
43
; A plain load: the 32-bit pointer is zero-extended to 64 bits and offset by
; the sandbox memory base (CHECK). With -minsfi-ptrsize=20 it is additionally
; masked to 20 bits first, i.e. and-ed with 2^20-1 = 1048575 (CHECK-MASK).
44 define i32 @test_load(i32* %ptr) {
45 %val = load i32* %ptr
46 ret i32 %val
47 }
48
49 ; CHECK-LABEL: define i32 @test_load(i32* %ptr) {
50 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
51 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
52 ; CHECK-NEXT: %2 = zext i32 %1 to i64
53 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
54 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
55 ; CHECK-NEXT: %val = load i32* %4
56 ; CHECK-NEXT: ret i32 %val
57 ; CHECK-NEXT: }
58
59 ; CHECK-MASK-LABEL: define i32 @test_load(i32* %ptr) {
60 ; CHECK-MASK-NEXT: %mem_base = load i64* @__sfi_memory_base
61 ; CHECK-MASK-NEXT: %1 = ptrtoint i32* %ptr to i32
62 ; CHECK-MASK-NEXT: %2 = and i32 %1, 1048575
63 ; CHECK-MASK-NEXT: %3 = zext i32 %2 to i64
64 ; CHECK-MASK-NEXT: %4 = add i64 %mem_base, %3
65 ; CHECK-MASK-NEXT: %5 = inttoptr i64 %4 to i32*
66 ; CHECK-MASK-NEXT: %val = load i32* %5
67 ; CHECK-MASK-NEXT: ret i32 %val
68 ; CHECK-MASK-NEXT: }
69
; A plain store: the pointer operand is sandboxed exactly like a load's.
70 define void @test_store(i32* %ptr) {
71 store i32 1234, i32* %ptr
72 ret void
73 }
74
75 ; CHECK-LABEL: define void @test_store(i32* %ptr) {
76 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
77 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
78 ; CHECK-NEXT: %2 = zext i32 %1 to i64
79 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
80 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
81 ; CHECK-NEXT: store i32 1234, i32* %4
82 ; CHECK-NEXT: ret void
83 ; CHECK-NEXT: }
84
85 ; CHECK-MASK-LABEL: define void @test_store(i32* %ptr) {
86 ; CHECK-MASK-NEXT: %mem_base = load i64* @__sfi_memory_base
87 ; CHECK-MASK-NEXT: %1 = ptrtoint i32* %ptr to i32
88 ; CHECK-MASK-NEXT: %2 = and i32 %1, 1048575
89 ; CHECK-MASK-NEXT: %3 = zext i32 %2 to i64
90 ; CHECK-MASK-NEXT: %4 = add i64 %mem_base, %3
91 ; CHECK-MASK-NEXT: %5 = inttoptr i64 %4 to i32*
92 ; CHECK-MASK-NEXT: store i32 1234, i32* %5
93 ; CHECK-MASK-NEXT: ret void
94 ; CHECK-MASK-NEXT: }
95
; memcpy: both the dest and src pointer operands are sandboxed; under pointer
; masking (CHECK-MASK) the length operand is masked as well, so the copy
; cannot run past the sandbox region.
96 define void @test_memcpy_32(i8* %dest, i8* %src, i32 %len) {
97 call void @llvm.memcpy.p0i8.p0i8.i32(i8* %dest, i8* %src, i32 %len, i32 4, i1 false)
98 ret void
99 }
100
101 ; CHECK-LABEL: define void @test_memcpy_32(i8* %dest, i8* %src, i32 %len) {
102 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
103 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32
104 ; CHECK-NEXT: %2 = zext i32 %1 to i64
105 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
106 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8*
107 ; CHECK-NEXT: %5 = ptrtoint i8* %src to i32
108 ; CHECK-NEXT: %6 = zext i32 %5 to i64
109 ; CHECK-NEXT: %7 = add i64 %mem_base, %6
110 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i8*
111 ; CHECK-NEXT: call void @llvm.memcpy.p0i8.p0i8.i32(i8* %4, i8* %8, i32 %len, i32 4, i1 false)
112 ; CHECK-NEXT: ret void
113 ; CHECK-NEXT: }
114
115 ; CHECK-MASK-LABEL: define void @test_memcpy_32(i8* %dest, i8* %src, i32 %len) {
116 ; CHECK-MASK: %11 = and i32 %len, 1048575
117 ; CHECK-MASK-NEXT: call void @llvm.memcpy.p0i8.p0i8.i32(i8* %5, i8* %10, i32 %11, i32 4, i1 false)
118
; memmove: sandboxed identically to memcpy (dest + src pointers; length is
; masked under CHECK-MASK).
119 define void @test_memmove_32(i8* %dest, i8* %src, i32 %len) {
120 call void @llvm.memmove.p0i8.p0i8.i32(i8* %dest, i8* %src, i32 %len, i32 4, i1 false)
121 ret void
122 }
123
124 ; CHECK-LABEL: define void @test_memmove_32(i8* %dest, i8* %src, i32 %len) {
125 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
126 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32
127 ; CHECK-NEXT: %2 = zext i32 %1 to i64
128 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
129 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8*
130 ; CHECK-NEXT: %5 = ptrtoint i8* %src to i32
131 ; CHECK-NEXT: %6 = zext i32 %5 to i64
132 ; CHECK-NEXT: %7 = add i64 %mem_base, %6
133 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i8*
134 ; CHECK-NEXT: call void @llvm.memmove.p0i8.p0i8.i32(i8* %4, i8* %8, i32 %len, i32 4, i1 false)
135 ; CHECK-NEXT: ret void
136 ; CHECK-NEXT: }
137
138 ; CHECK-MASK-LABEL: define void @test_memmove_32(i8* %dest, i8* %src, i32 %len) {
139 ; CHECK-MASK: %11 = and i32 %len, 1048575
140 ; CHECK-MASK-NEXT: call void @llvm.memmove.p0i8.p0i8.i32(i8* %5, i8* %10, i32 %11, i32 4, i1 false)
141
; memset: only the dest pointer needs sandboxing; the length operand is
; masked under CHECK-MASK.
142 define void @test_memset_32(i8* %dest, i32 %len) {
143 call void @llvm.memset.p0i8.i32(i8* %dest, i8 5, i32 %len, i32 4, i1 false)
144 ret void
145 }
146
147 ; CHECK-LABEL: define void @test_memset_32(i8* %dest, i32 %len) {
148 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
149 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32
150 ; CHECK-NEXT: %2 = zext i32 %1 to i64
151 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
152 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8*
153 ; CHECK-NEXT: call void @llvm.memset.p0i8.i32(i8* %4, i8 5, i32 %len, i32 4, i1 false)
154 ; CHECK-NEXT: ret void
155 ; CHECK-NEXT: }
156
157 ; CHECK-MASK-LABEL: define void @test_memset_32(i8* %dest, i32 %len) {
158 ; CHECK-MASK: %6 = and i32 %len, 1048575
159 ; CHECK-MASK-NEXT: call void @llvm.memset.p0i8.i32(i8* %5, i8 5, i32 %6, i32 4, i1 false)
160
; Atomic 32-bit load: the pointer operand of the NaCl intrinsic is sandboxed
; like a plain load's; the memory-order argument is passed through unchanged.
161 define i32 @test_atomic_load_32(i32* %ptr) {
162 %val = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 1)
163 ret i32 %val
164 }
165
166 ; CHECK-LABEL: define i32 @test_atomic_load_32(i32* %ptr) {
167 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
168 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
169 ; CHECK-NEXT: %2 = zext i32 %1 to i64
170 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
171 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
172 ; CHECK-NEXT: %val = call i32 @llvm.nacl.atomic.load.i32(i32* %4, i32 1)
173 ; CHECK-NEXT: ret i32 %val
174 ; CHECK-NEXT: }
175
176 ; CHECK-MASK-LABEL: define i32 @test_atomic_load_32(i32* %ptr) {
177 ; CHECK-MASK: %2 = and i32 %1, 1048575
178
; Atomic 64-bit load: same sandboxing; note the pointer is still a 32-bit
; sandbox address even though the loaded value is i64.
179 define i64 @test_atomic_load_64(i64* %ptr) {
180 %val = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 1)
181 ret i64 %val
182 }
183
184 ; CHECK-LABEL: define i64 @test_atomic_load_64(i64* %ptr) {
185 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
186 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32
187 ; CHECK-NEXT: %2 = zext i32 %1 to i64
188 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
189 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64*
190 ; CHECK-NEXT: %val = call i64 @llvm.nacl.atomic.load.i64(i64* %4, i32 1)
191 ; CHECK-NEXT: ret i64 %val
192 ; CHECK-NEXT: }
193
194 ; CHECK-MASK-LABEL: define i64 @test_atomic_load_64(i64* %ptr) {
195 ; CHECK-MASK: %2 = and i32 %1, 1048575
196
; Atomic 32-bit store: the pointer operand (2nd argument) is sandboxed.
197 define void @test_atomic_store_32(i32* %ptr) {
198 call void @llvm.nacl.atomic.store.i32(i32 1234, i32* %ptr, i32 1)
199 ret void
200 }
201
202 ; CHECK-LABEL: define void @test_atomic_store_32(i32* %ptr) {
203 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
204 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
205 ; CHECK-NEXT: %2 = zext i32 %1 to i64
206 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
207 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
208 ; CHECK-NEXT: call void @llvm.nacl.atomic.store.i32(i32 1234, i32* %4, i32 1)
209 ; CHECK-NEXT: ret void
210 ; CHECK-NEXT: }
211
212 ; CHECK-MASK-LABEL: define void @test_atomic_store_32(i32* %ptr) {
213 ; CHECK-MASK: %2 = and i32 %1, 1048575
214
; Atomic 64-bit store: same as the 32-bit variant, i64 value operand.
215 define void @test_atomic_store_64(i64* %ptr) {
216 call void @llvm.nacl.atomic.store.i64(i64 1234, i64* %ptr, i32 1)
217 ret void
218 }
219
220 ; CHECK-LABEL: define void @test_atomic_store_64(i64* %ptr) {
221 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
222 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32
223 ; CHECK-NEXT: %2 = zext i32 %1 to i64
224 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
225 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64*
226 ; CHECK-NEXT: call void @llvm.nacl.atomic.store.i64(i64 1234, i64* %4, i32 1)
227 ; CHECK-NEXT: ret void
228 ; CHECK-NEXT: }
229
230 ; CHECK-MASK-LABEL: define void @test_atomic_store_64(i64* %ptr) {
231 ; CHECK-MASK: %2 = and i32 %1, 1048575
232
; Atomic 32-bit read-modify-write: the pointer operand (2nd argument) is
; sandboxed; operation code and memory order pass through unchanged.
233 define i32 @test_atomic_rmw_32(i32* %ptr) {
234 %val = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 1234, i32 1)
235 ret i32 %val
236 }
237
238 ; CHECK-LABEL: define i32 @test_atomic_rmw_32(i32* %ptr) {
239 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
240 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
241 ; CHECK-NEXT: %2 = zext i32 %1 to i64
242 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
243 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
244 ; CHECK-NEXT: %val = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %4, i32 1234, i32 1)
245 ; CHECK-NEXT: ret i32 %val
246 ; CHECK-NEXT: }
247
248 ; CHECK-MASK-LABEL: define i32 @test_atomic_rmw_32(i32* %ptr) {
249 ; CHECK-MASK: %2 = and i32 %1, 1048575
250
; Atomic 64-bit read-modify-write: pointer operand sandboxed as above.
251 define i64 @test_atomic_rmw_64(i64* %ptr) {
252 %val = call i64 @llvm.nacl.atomic.rmw.i64(i32 1, i64* %ptr, i64 1234, i32 1)
253 ret i64 %val
254 }
255
256 ; CHECK-LABEL: define i64 @test_atomic_rmw_64(i64* %ptr) {
257 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
258 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32
259 ; CHECK-NEXT: %2 = zext i32 %1 to i64
260 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
261 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64*
262 ; CHECK-NEXT: %val = call i64 @llvm.nacl.atomic.rmw.i64(i32 1, i64* %4, i64 1234, i32 1)
263 ; CHECK-NEXT: ret i64 %val
264 ; CHECK-NEXT: }
265
266 ; CHECK-MASK-LABEL: define i64 @test_atomic_rmw_64(i64* %ptr) {
267 ; CHECK-MASK: %2 = and i32 %1, 1048575
268
; Atomic 32-bit compare-exchange: only the pointer operand is sandboxed;
; expected/desired values and both memory orders pass through unchanged.
269 define i32 @test_atomic_cmpxchg_32(i32* %ptr) {
270 %val = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 0, i32 1, i32 1, i32 1)
271 ret i32 %val
272 }
273
274 ; CHECK-LABEL: define i32 @test_atomic_cmpxchg_32(i32* %ptr) {
275 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
276 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
277 ; CHECK-NEXT: %2 = zext i32 %1 to i64
278 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
279 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
280 ; CHECK-NEXT: %val = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %4, i32 0, i32 1, i32 1, i32 1)
281 ; CHECK-NEXT: ret i32 %val
282 ; CHECK-NEXT: }
283
284 ; CHECK-MASK-LABEL: define i32 @test_atomic_cmpxchg_32(i32* %ptr) {
285 ; CHECK-MASK: %2 = and i32 %1, 1048575
286
; Atomic 64-bit compare-exchange: same sandboxing as the 32-bit variant.
287 define i64 @test_atomic_cmpxchg_64(i64* %ptr) {
288 %val = call i64 @llvm.nacl.atomic.cmpxchg.i64(i64* %ptr, i64 0, i64 1, i32 1, i32 1)
289 ret i64 %val
290 }
291
292 ; CHECK-LABEL: define i64 @test_atomic_cmpxchg_64(i64* %ptr) {
293 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
294 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32
295 ; CHECK-NEXT: %2 = zext i32 %1 to i64
296 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
297 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64*
298 ; CHECK-NEXT: %val = call i64 @llvm.nacl.atomic.cmpxchg.i64(i64* %4, i64 0, i64 1, i32 1, i32 1)
299 ; CHECK-NEXT: ret i64 %val
300 ; CHECK-NEXT: }
301
302 ; CHECK-MASK-LABEL: define i64 @test_atomic_cmpxchg_64(i64* %ptr) {
303 ; CHECK-MASK: %2 = and i32 %1, 1048575
304
; Fence has no pointer operands, so the pass must leave the call unchanged
; and insert no sandboxing instructions.
305 define void @test_atomic_fence() {
306 call void @llvm.nacl.atomic.fence(i32 1)
307 ret void
308 }
309
310 ; CHECK-LABEL: define void @test_atomic_fence() {
311 ; CHECK-NEXT: call void @llvm.nacl.atomic.fence(i32 1)
312 ; CHECK-NEXT: ret void
313 ; CHECK-NEXT: }
314
; fence.all likewise has no pointer operands and must pass through untouched.
315 define void @test_atomic_fence_all() {
316 call void @llvm.nacl.atomic.fence.all()
317 ret void
318 }
319
320 ; CHECK-LABEL: define void @test_atomic_fence_all() {
321 ; CHECK-NEXT: call void @llvm.nacl.atomic.fence.all()
322 ; CHECK-NEXT: ret void
323 ; CHECK-NEXT: }
324
; is.lock.free takes a pointer argument (2nd operand); the pass sandboxes it
; even though the intrinsic does not dereference memory itself.
325 define i1 @test_atomic_is_lock_free(i8* %ptr) {
326 %val = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %ptr)
327 ret i1 %val
328 }
329
330 ; CHECK-LABEL: define i1 @test_atomic_is_lock_free(i8* %ptr) {
331 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
332 ; CHECK-NEXT: %1 = ptrtoint i8* %ptr to i32
333 ; CHECK-NEXT: %2 = zext i32 %1 to i64
334 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
335 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8*
336 ; CHECK-NEXT: %val = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %4)
337 ; CHECK-NEXT: ret i1 %val
338 ; CHECK-NEXT: }
339
; Pointer-producing casts (inttoptr, bitcast) with no memory access are
; whitelisted: the function body must be left byte-for-byte unchanged.
340 define void @test_bitcast_whitelisted(i32 %val) {
341 %ptr = inttoptr i32 %val to i8*
342 %ptr.bc = bitcast i8* %ptr to i32*
343 ret void
344 }
345
346 ; CHECK-LABEL: define void @test_bitcast_whitelisted(i32 %val) {
347 ; CHECK-NEXT: %ptr = inttoptr i32 %val to i8*
348 ; CHECK-NEXT: %ptr.bc = bitcast i8* %ptr to i32*
349 ; CHECK-NEXT: ret void
350 ; CHECK-NEXT: }
351
352 ; -----------------------------------------------------------------------------
353 ; Test the special case which optimizes sandboxing of the output of
354 ; the ExpandGetElementPtr pass.
355
356 ; this won't get optimized because IntToPtr is not casting a result of an Add
; (the optimization pattern requires an "add i32 ..., <const>" feeding the
; inttoptr); the generic ptrtoint/zext/add/inttoptr sequence is used instead.
357 define i32 @test_no_opt__cast_not_add(i32 %ptr_int) {
358 %ptr = inttoptr i32 %ptr_int to i32*
359 %val = load i32* %ptr
360 ret i32 %val
361 }
362
363 ; CHECK-LABEL: define i32 @test_no_opt__cast_not_add(i32 %ptr_int) {
364 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
365 ; CHECK-NEXT: %ptr = inttoptr i32 %ptr_int to i32*
366 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
367 ; CHECK-NEXT: %2 = zext i32 %1 to i64
368 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
369 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
370 ; CHECK-NEXT: %val = load i32* %4
371 ; CHECK-NEXT: ret i32 %val
372 ; CHECK-NEXT: }
373
374 ; this won't get optimized because the cast is not from i32
; (the Add feeding the inttoptr is i64, not the sandbox's 32-bit pointer type)
375 define i32 @test_no_opt__cast_not_32(i64 %ptr_int1) {
376 %ptr_sum = add i64 %ptr_int1, 5
377 %ptr = inttoptr i64 %ptr_sum to i32*
378 %val = load i32* %ptr
379 ret i32 %val
380 }
381
382 ; CHECK-LABEL: define i32 @test_no_opt__cast_not_32(i64 %ptr_int1) {
383 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
384 ; CHECK-NEXT: %ptr_sum = add i64 %ptr_int1, 5
385 ; CHECK-NEXT: %ptr = inttoptr i64 %ptr_sum to i32*
386 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
387 ; CHECK-NEXT: %2 = zext i32 %1 to i64
388 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
389 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
390 ; CHECK-NEXT: %val = load i32* %4
391 ; CHECK-NEXT: ret i32 %val
392 ; CHECK-NEXT: }
393
394 ; this won't get optimized because the Add's 2nd operand is not a constant
395 define i32 @test_no_opt__add_not_constant(i32 %ptr_int1, i32 %ptr_int2) {
396 %ptr_sum = add i32 %ptr_int1, %ptr_int2
397 %ptr = inttoptr i32 %ptr_sum to i32*
398 %val = load i32* %ptr
399 ret i32 %val
400 }
401
402 ; CHECK-LABEL: define i32 @test_no_opt__add_not_constant(i32 %ptr_int1, i32 %ptr_int2) {
403 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
404 ; CHECK-NEXT: %ptr_sum = add i32 %ptr_int1, %ptr_int2
405 ; CHECK-NEXT: %ptr = inttoptr i32 %ptr_sum to i32*
406 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
407 ; CHECK-NEXT: %2 = zext i32 %1 to i64
408 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
409 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
410 ; CHECK-NEXT: %val = load i32* %4
411 ; CHECK-NEXT: ret i32 %val
412 ; CHECK-NEXT: }
413
414 ; this won't get optimized because the Add's 2nd operand is not positive
; (a negative offset could escape the sandbox region below the base)
415 define i32 @test_no_opt__add_not_positive(i32 %ptr_int) {
416 %ptr_sum = add i32 %ptr_int, -5
417 %ptr = inttoptr i32 %ptr_sum to i32*
418 %val = load i32* %ptr
419 ret i32 %val
420 }
421
422 ; CHECK-LABEL: define i32 @test_no_opt__add_not_positive(i32 %ptr_int) {
423 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
424 ; CHECK-NEXT: %ptr_sum = add i32 %ptr_int, -5
425 ; CHECK-NEXT: %ptr = inttoptr i32 %ptr_sum to i32*
426 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
427 ; CHECK-NEXT: %2 = zext i32 %1 to i64
428 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
429 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
430 ; CHECK-NEXT: %val = load i32* %4
431 ; CHECK-NEXT: ret i32 %val
432 ; CHECK-NEXT: }
433
; The "add; inttoptr" pair produced by ExpandGetElementPtr is folded into the
; sandboxing arithmetic per use; the original cast is kept while it still has
; other uses and removed once the last use has been rewritten.
434 define i32 @test_opt_dont_remove_cast_if_used(i32 %ptr_int, i32 %replace) {
435 %ptr_sum = add i32 %ptr_int, 5
436 %ptr = inttoptr i32 %ptr_sum to i32*
437 %val = load i32* %ptr ; %ptr is used later => keep cast
438 store i32 %replace, i32* %ptr ; %ptr not used any more => remove cast
439 ret i32 %val
440 }
441
442 ; CHECK-LABEL: define i32 @test_opt_dont_remove_cast_if_used(i32 %ptr_int, i32 %replace) {
443 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
444 ; CHECK-NEXT: %1 = zext i32 %ptr_int to i64
445 ; CHECK-NEXT: %2 = add i64 %mem_base, %1
446 ; CHECK-NEXT: %3 = add i64 %2, 5
447 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
448 ; CHECK-NEXT: %val = load i32* %4
449 ; CHECK-NEXT: %5 = zext i32 %ptr_int to i64
450 ; CHECK-NEXT: %6 = add i64 %mem_base, %5
451 ; CHECK-NEXT: %7 = add i64 %6, 5
452 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i32*
453 ; CHECK-NEXT: store i32 %replace, i32* %8
454 ; CHECK-NEXT: ret i32 %val
455 ; CHECK-NEXT: }
456
; The Add feeding the inttoptr must survive when its result has a non-pointer
; use (%ptr_sum is returned), even though the cast itself is folded away.
457 define i32 @test_opt_dont_remove_add_if_used(i32 %ptr_int, i32 %replace) {
458 %ptr_sum = add i32 %ptr_int, 5
459 %ptr = inttoptr i32 %ptr_sum to i32*
460 store i32 %replace, i32* %ptr
461 ret i32 %ptr_sum
462 }
463
464 ; CHECK-LABEL: define i32 @test_opt_dont_remove_add_if_used(i32 %ptr_int, i32 %replace) {
465 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
466 ; CHECK-NEXT: %ptr_sum = add i32 %ptr_int, 5
467 ; CHECK-NEXT: %1 = zext i32 %ptr_int to i64
468 ; CHECK-NEXT: %2 = add i64 %mem_base, %1
469 ; CHECK-NEXT: %3 = add i64 %2, 5
470 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
471 ; CHECK-NEXT: store i32 %replace, i32* %4
472 ; CHECK-NEXT: ret i32 %ptr_sum
473 ; CHECK-NEXT: }
474
475
476 ; ------------------------------------------------------------------------------
477 ; Check that dbg symbols are preserved
478
; The rewritten memcpy call must carry the original !dbg attachment.
479 define void @test_len_dbg(i8* %dest, i8* %src, i32 %len) {
480 call void @llvm.memcpy.p0i8.p0i8.i32(i8* %dest, i8* %src, i32 %len, i32 4, i1 false), !dbg !1
481 ret void
482 }
483
484 ; CHECK-LABEL: define void @test_len_dbg(i8* %dest, i8* %src, i32 %len) {
485 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
486 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32
487 ; CHECK-NEXT: %2 = zext i32 %1 to i64
488 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
489 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8*
490 ; CHECK-NEXT: %5 = ptrtoint i8* %src to i32
491 ; CHECK-NEXT: %6 = zext i32 %5 to i64
492 ; CHECK-NEXT: %7 = add i64 %mem_base, %6
493 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i8*
494 ; CHECK-NEXT: call void @llvm.memcpy.p0i8.p0i8.i32(i8* %4, i8* %8, i32 %len, i32 4, i1 false), !dbg !1
495 ; CHECK-NEXT: ret void
496 ; CHECK-NEXT: }
497
; The GEP-expansion optimization must migrate the !dbg attachments of the
; replaced add/inttoptr/store instructions onto the instructions it emits.
498 define void @test_opt_dbg(i32 %ptr_int, i32 %replace) {
499 %ptr_sum = add i32 %ptr_int, 5, !dbg !1
500 %ptr = inttoptr i32 %ptr_sum to i32*, !dbg !2
501 store i32 %replace, i32* %ptr, !dbg !3
502 ret void, !dbg !4
503 }
504
505 ; CHECK-LABEL: define void @test_opt_dbg(i32 %ptr_int, i32 %replace) {
506 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base
507 ; CHECK-NEXT: %1 = zext i32 %ptr_int to i64
508 ; CHECK-NEXT: %2 = add i64 %mem_base, %1
509 ; CHECK-NEXT: %3 = add i64 %2, 5, !dbg !1
510 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*, !dbg !2
511 ; CHECK-NEXT: store i32 %replace, i32* %4, !dbg !3
512 ; CHECK-NEXT: ret void, !dbg !4
513 ; CHECK-NEXT: }
514
; Minimal debug-location metadata nodes referenced by the !dbg attachments.
515 !1 = metadata !{i32 138, i32 0, metadata !1, null}
516 !2 = metadata !{i32 142, i32 0, metadata !2, null}
517 !3 = metadata !{i32 144, i32 0, metadata !3, null}
518 !4 = metadata !{i32 144, i32 0, metadata !4, null}
OLDNEW
« no previous file with comments | « test/Transforms/MinSFI/sandbox-indirect-calls-wrong-use.ll ('k') | test/Transforms/MinSFI/sandbox-memory-accesses-gep.ll » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698