Chromium Code Reviews

Side by Side Diff: test/Transforms/MinSFI/sandbox-memory-accesses.ll

Issue 1151093004: Changes from 3.7 merge to files not in upstream (Closed) Base URL: https://chromium.googlesource.com/native_client/pnacl-llvm.git@master
Patch Set: Created 5 years, 7 months ago
1 ; RUN: opt %s -minsfi-sandbox-memory-accesses -S | FileCheck %s 1 ; RUN: opt %s -minsfi-sandbox-memory-accesses -S | FileCheck %s
2 ; RUN: opt %s -minsfi-ptrsize=20 -minsfi-sandbox-memory-accesses -S \ 2 ; RUN: opt %s -minsfi-ptrsize=20 -minsfi-sandbox-memory-accesses -S \
3 ; RUN: | FileCheck %s -check-prefix=CHECK-MASK 3 ; RUN: | FileCheck %s -check-prefix=CHECK-MASK
4 4
5 !llvm.module.flags = !{!0} 5 !llvm.module.flags = !{!0}
6 !0 = metadata !{i32 1, metadata !"Debug Info Version", i32 2} 6 !0 = !{i32 1, !"Debug Info Version", i32 3}
7 7
8 target datalayout = "p:32:32:32" 8 target datalayout = "p:32:32:32"
9 target triple = "le32-unknown-nacl" 9 target triple = "le32-unknown-nacl"
10 10
11 ; CHECK: @__sfi_memory_base = external global i64 11 ; CHECK: @__sfi_memory_base = external global i64
12 ; CHECK: @__sfi_pointer_size = constant i32 32 12 ; CHECK: @__sfi_pointer_size = constant i32 32
13 ; CHECK-MASK: @__sfi_pointer_size = constant i32 20 13 ; CHECK-MASK: @__sfi_pointer_size = constant i32 20
14 14
15 declare void @llvm.memcpy.p0i8.p0i8.i32(i8* nocapture, i8* nocapture readonly, i32, i32, i1) 15 declare void @llvm.memcpy.p0i8.p0i8.i32(i8* nocapture, i8* nocapture readonly, i32, i32, i1)
16 declare void @llvm.memmove.p0i8.p0i8.i32(i8* nocapture, i8* nocapture readonly, i32, i32, i1) 16 declare void @llvm.memmove.p0i8.p0i8.i32(i8* nocapture, i8* nocapture readonly, i32, i32, i1)
(...skipping 18 matching lines...)
35 ret i32 %sum 35 ret i32 %sum
36 } 36 }
37 37
38 ; CHECK-LABEL: define i32 @test_no_sandbox(i32 %x, i32 %y) { 38 ; CHECK-LABEL: define i32 @test_no_sandbox(i32 %x, i32 %y) {
39 ; CHECK-NOT: @__sfi_memory_base 39 ; CHECK-NOT: @__sfi_memory_base
40 ; CHECK-NEXT: %sum = add i32 %x, %y 40 ; CHECK-NEXT: %sum = add i32 %x, %y
41 ; CHECK-NEXT: ret i32 %sum 41 ; CHECK-NEXT: ret i32 %sum
42 ; CHECK-NEXT: } 42 ; CHECK-NEXT: }
43 43
44 define i32 @test_load(i32* %ptr) { 44 define i32 @test_load(i32* %ptr) {
45 %val = load i32* %ptr 45 %val = load i32, i32* %ptr
46 ret i32 %val 46 ret i32 %val
47 } 47 }
48 48
49 ; CHECK-LABEL: define i32 @test_load(i32* %ptr) { 49 ; CHECK-LABEL: define i32 @test_load(i32* %ptr) {
50 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 50 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
51 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32 51 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
52 ; CHECK-NEXT: %2 = zext i32 %1 to i64 52 ; CHECK-NEXT: %2 = zext i32 %1 to i64
53 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 53 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
54 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 54 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
55 ; CHECK-NEXT: %val = load i32* %4 55 ; CHECK-NEXT: %val = load i32, i32* %4
56 ; CHECK-NEXT: ret i32 %val 56 ; CHECK-NEXT: ret i32 %val
57 ; CHECK-NEXT: } 57 ; CHECK-NEXT: }
58 58
59 ; CHECK-MASK-LABEL: define i32 @test_load(i32* %ptr) { 59 ; CHECK-MASK-LABEL: define i32 @test_load(i32* %ptr) {
60 ; CHECK-MASK-NEXT: %mem_base = load i64* @__sfi_memory_base 60 ; CHECK-MASK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
61 ; CHECK-MASK-NEXT: %1 = ptrtoint i32* %ptr to i32 61 ; CHECK-MASK-NEXT: %1 = ptrtoint i32* %ptr to i32
62 ; CHECK-MASK-NEXT: %2 = and i32 %1, 1048575 62 ; CHECK-MASK-NEXT: %2 = and i32 %1, 1048575
63 ; CHECK-MASK-NEXT: %3 = zext i32 %2 to i64 63 ; CHECK-MASK-NEXT: %3 = zext i32 %2 to i64
64 ; CHECK-MASK-NEXT: %4 = add i64 %mem_base, %3 64 ; CHECK-MASK-NEXT: %4 = add i64 %mem_base, %3
65 ; CHECK-MASK-NEXT: %5 = inttoptr i64 %4 to i32* 65 ; CHECK-MASK-NEXT: %5 = inttoptr i64 %4 to i32*
66 ; CHECK-MASK-NEXT: %val = load i32* %5 66 ; CHECK-MASK-NEXT: %val = load i32, i32* %5
67 ; CHECK-MASK-NEXT: ret i32 %val 67 ; CHECK-MASK-NEXT: ret i32 %val
68 ; CHECK-MASK-NEXT: } 68 ; CHECK-MASK-NEXT: }
69 69
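The CHECK lines above spell out the sandboxing sequence the pass emits for an ordinary load: the untrusted 32-bit pointer is zero-extended, optionally masked down to the sandbox size, and added to the 64-bit __sfi_memory_base. A rough C sketch of that arithmetic (the function names here are illustrative, not part of the pass or the MinSFI runtime):

    #include <stdint.h>

    /* Plain variant (CHECK): zext the 32-bit pointer and rebase it. */
    uint64_t sandboxed_addr(uint64_t mem_base, uint32_t ptr) {
      return mem_base + (uint64_t)ptr;              /* zext + add */
    }

    /* Masked variant (CHECK-MASK, -minsfi-ptrsize=20): clamp the pointer first.
       1048575 == (1 << 20) - 1 == 0xFFFFF. */
    uint64_t sandboxed_addr_masked(uint64_t mem_base, uint32_t ptr) {
      return mem_base + (uint64_t)(ptr & 1048575u); /* and + zext + add */
    }

The same sequence is expected in front of every store, memory intrinsic and NaCl atomic below; only the pointee type of the final inttoptr changes.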
70 define void @test_store(i32* %ptr) { 70 define void @test_store(i32* %ptr) {
71 store i32 1234, i32* %ptr 71 store i32 1234, i32* %ptr
72 ret void 72 ret void
73 } 73 }
74 74
75 ; CHECK-LABEL: define void @test_store(i32* %ptr) { 75 ; CHECK-LABEL: define void @test_store(i32* %ptr) {
76 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 76 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
77 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32 77 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
78 ; CHECK-NEXT: %2 = zext i32 %1 to i64 78 ; CHECK-NEXT: %2 = zext i32 %1 to i64
79 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 79 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
80 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 80 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
81 ; CHECK-NEXT: store i32 1234, i32* %4 81 ; CHECK-NEXT: store i32 1234, i32* %4
82 ; CHECK-NEXT: ret void 82 ; CHECK-NEXT: ret void
83 ; CHECK-NEXT: } 83 ; CHECK-NEXT: }
84 84
85 ; CHECK-MASK-LABEL: define void @test_store(i32* %ptr) { 85 ; CHECK-MASK-LABEL: define void @test_store(i32* %ptr) {
86 ; CHECK-MASK-NEXT: %mem_base = load i64* @__sfi_memory_base 86 ; CHECK-MASK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
87 ; CHECK-MASK-NEXT: %1 = ptrtoint i32* %ptr to i32 87 ; CHECK-MASK-NEXT: %1 = ptrtoint i32* %ptr to i32
88 ; CHECK-MASK-NEXT: %2 = and i32 %1, 1048575 88 ; CHECK-MASK-NEXT: %2 = and i32 %1, 1048575
89 ; CHECK-MASK-NEXT: %3 = zext i32 %2 to i64 89 ; CHECK-MASK-NEXT: %3 = zext i32 %2 to i64
90 ; CHECK-MASK-NEXT: %4 = add i64 %mem_base, %3 90 ; CHECK-MASK-NEXT: %4 = add i64 %mem_base, %3
91 ; CHECK-MASK-NEXT: %5 = inttoptr i64 %4 to i32* 91 ; CHECK-MASK-NEXT: %5 = inttoptr i64 %4 to i32*
92 ; CHECK-MASK-NEXT: store i32 1234, i32* %5 92 ; CHECK-MASK-NEXT: store i32 1234, i32* %5
93 ; CHECK-MASK-NEXT: ret void 93 ; CHECK-MASK-NEXT: ret void
94 ; CHECK-MASK-NEXT: } 94 ; CHECK-MASK-NEXT: }
95 95
96 define void @test_memcpy_32(i8* %dest, i8* %src, i32 %len) { 96 define void @test_memcpy_32(i8* %dest, i8* %src, i32 %len) {
97 call void @llvm.memcpy.p0i8.p0i8.i32(i8* %dest, i8* %src, i32 %len, i32 4, i1 false) 97 call void @llvm.memcpy.p0i8.p0i8.i32(i8* %dest, i8* %src, i32 %len, i32 4, i1 false)
98 ret void 98 ret void
99 } 99 }
100 100
101 ; CHECK-LABEL: define void @test_memcpy_32(i8* %dest, i8* %src, i32 %len) { 101 ; CHECK-LABEL: define void @test_memcpy_32(i8* %dest, i8* %src, i32 %len) {
102 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 102 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
103 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32 103 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32
104 ; CHECK-NEXT: %2 = zext i32 %1 to i64 104 ; CHECK-NEXT: %2 = zext i32 %1 to i64
105 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 105 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
106 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8* 106 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8*
107 ; CHECK-NEXT: %5 = ptrtoint i8* %src to i32 107 ; CHECK-NEXT: %5 = ptrtoint i8* %src to i32
108 ; CHECK-NEXT: %6 = zext i32 %5 to i64 108 ; CHECK-NEXT: %6 = zext i32 %5 to i64
109 ; CHECK-NEXT: %7 = add i64 %mem_base, %6 109 ; CHECK-NEXT: %7 = add i64 %mem_base, %6
110 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i8* 110 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i8*
111 ; CHECK-NEXT: call void @llvm.memcpy.p0i8.p0i8.i32(i8* %4, i8* %8, i32 %len, i32 4, i1 false) 111 ; CHECK-NEXT: call void @llvm.memcpy.p0i8.p0i8.i32(i8* %4, i8* %8, i32 %len, i32 4, i1 false)
112 ; CHECK-NEXT: ret void 112 ; CHECK-NEXT: ret void
113 ; CHECK-NEXT: } 113 ; CHECK-NEXT: }
114 114
115 ; CHECK-MASK-LABEL: define void @test_memcpy_32(i8* %dest, i8* %src, i32 %len) { 115 ; CHECK-MASK-LABEL: define void @test_memcpy_32(i8* %dest, i8* %src, i32 %len) {
116 ; CHECK-MASK: %11 = and i32 %len, 1048575 116 ; CHECK-MASK: %11 = and i32 %len, 1048575
117 ; CHECK-MASK-NEXT: call void @llvm.memcpy.p0i8.p0i8.i32(i8* %5, i8* %10, i32 %11, i32 4, i1 false) 117 ; CHECK-MASK-NEXT: call void @llvm.memcpy.p0i8.p0i8.i32(i8* %5, i8* %10, i32 %11, i32 4, i1 false)
118 118
119 define void @test_memmove_32(i8* %dest, i8* %src, i32 %len) { 119 define void @test_memmove_32(i8* %dest, i8* %src, i32 %len) {
120 call void @llvm.memmove.p0i8.p0i8.i32(i8* %dest, i8* %src, i32 %len, i32 4, i1 false) 120 call void @llvm.memmove.p0i8.p0i8.i32(i8* %dest, i8* %src, i32 %len, i32 4, i1 false)
121 ret void 121 ret void
122 } 122 }
123 123
124 ; CHECK-LABEL: define void @test_memmove_32(i8* %dest, i8* %src, i32 %len) { 124 ; CHECK-LABEL: define void @test_memmove_32(i8* %dest, i8* %src, i32 %len) {
125 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 125 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
126 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32 126 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32
127 ; CHECK-NEXT: %2 = zext i32 %1 to i64 127 ; CHECK-NEXT: %2 = zext i32 %1 to i64
128 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 128 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
129 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8* 129 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8*
130 ; CHECK-NEXT: %5 = ptrtoint i8* %src to i32 130 ; CHECK-NEXT: %5 = ptrtoint i8* %src to i32
131 ; CHECK-NEXT: %6 = zext i32 %5 to i64 131 ; CHECK-NEXT: %6 = zext i32 %5 to i64
132 ; CHECK-NEXT: %7 = add i64 %mem_base, %6 132 ; CHECK-NEXT: %7 = add i64 %mem_base, %6
133 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i8* 133 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i8*
134 ; CHECK-NEXT: call void @llvm.memmove.p0i8.p0i8.i32(i8* %4, i8* %8, i32 %len, i32 4, i1 false) 134 ; CHECK-NEXT: call void @llvm.memmove.p0i8.p0i8.i32(i8* %4, i8* %8, i32 %len, i32 4, i1 false)
135 ; CHECK-NEXT: ret void 135 ; CHECK-NEXT: ret void
136 ; CHECK-NEXT: } 136 ; CHECK-NEXT: }
137 137
138 ; CHECK-MASK-LABEL: define void @test_memmove_32(i8* %dest, i8* %src, i32 %len) { 138 ; CHECK-MASK-LABEL: define void @test_memmove_32(i8* %dest, i8* %src, i32 %len) {
139 ; CHECK-MASK: %11 = and i32 %len, 1048575 139 ; CHECK-MASK: %11 = and i32 %len, 1048575
140 ; CHECK-MASK-NEXT: call void @llvm.memmove.p0i8.p0i8.i32(i8* %5, i8* %10, i32 %11, i32 4, i1 false) 140 ; CHECK-MASK-NEXT: call void @llvm.memmove.p0i8.p0i8.i32(i8* %5, i8* %10, i32 %11, i32 4, i1 false)
141 141
142 define void @test_memset_32(i8* %dest, i32 %len) { 142 define void @test_memset_32(i8* %dest, i32 %len) {
143 call void @llvm.memset.p0i8.i32(i8* %dest, i8 5, i32 %len, i32 4, i1 false) 143 call void @llvm.memset.p0i8.i32(i8* %dest, i8 5, i32 %len, i32 4, i1 false)
144 ret void 144 ret void
145 } 145 }
146 146
147 ; CHECK-LABEL: define void @test_memset_32(i8* %dest, i32 %len) { 147 ; CHECK-LABEL: define void @test_memset_32(i8* %dest, i32 %len) {
148 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 148 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
149 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32 149 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32
150 ; CHECK-NEXT: %2 = zext i32 %1 to i64 150 ; CHECK-NEXT: %2 = zext i32 %1 to i64
151 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 151 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
152 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8* 152 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8*
153 ; CHECK-NEXT: call void @llvm.memset.p0i8.i32(i8* %4, i8 5, i32 %len, i32 4, i1 false) 153 ; CHECK-NEXT: call void @llvm.memset.p0i8.i32(i8* %4, i8 5, i32 %len, i32 4, i1 false)
154 ; CHECK-NEXT: ret void 154 ; CHECK-NEXT: ret void
155 ; CHECK-NEXT: } 155 ; CHECK-NEXT: }
156 156
157 ; CHECK-MASK-LABEL: define void @test_memset_32(i8* %dest, i32 %len) { 157 ; CHECK-MASK-LABEL: define void @test_memset_32(i8* %dest, i32 %len) {
158 ; CHECK-MASK: %6 = and i32 %len, 1048575 158 ; CHECK-MASK: %6 = and i32 %len, 1048575
159 ; CHECK-MASK-NEXT: call void @llvm.memset.p0i8.i32(i8* %5, i8 5, i32 %6, i32 4, i1 false) 159 ; CHECK-MASK-NEXT: call void @llvm.memset.p0i8.i32(i8* %5, i8 5, i32 %6, i32 4, i1 false)
160 160
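For the memory intrinsics, the rebasing is applied to both the destination and the source pointer, and under -minsfi-ptrsize=20 the length operand is masked as well (the "%11 = and i32 %len, 1048575" lines), so a single call cannot run past the end of the sandbox. A hedged C sketch of what the masked memcpy case amounts to (sandboxed_memcpy and the explicit mem_base parameter are illustrative, not the pass's actual interface):

    #include <stdint.h>
    #include <string.h>

    void sandboxed_memcpy(uint64_t mem_base, uint32_t dest, uint32_t src, uint32_t len) {
      /* dest, src and len are each masked to 20 bits before the real memcpy runs */
      memcpy((void *)(uintptr_t)(mem_base + (uint64_t)(dest & 0xFFFFFu)),
             (const void *)(uintptr_t)(mem_base + (uint64_t)(src & 0xFFFFFu)),
             len & 0xFFFFFu);
    }

In the unmasked CHECK variant the length is passed through unchanged; only the two pointers are rebased.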
161 define i32 @test_atomic_load_32(i32* %ptr) { 161 define i32 @test_atomic_load_32(i32* %ptr) {
162 %val = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 1) 162 %val = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 1)
163 ret i32 %val 163 ret i32 %val
164 } 164 }
165 165
166 ; CHECK-LABEL: define i32 @test_atomic_load_32(i32* %ptr) { 166 ; CHECK-LABEL: define i32 @test_atomic_load_32(i32* %ptr) {
167 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 167 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
168 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32 168 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
169 ; CHECK-NEXT: %2 = zext i32 %1 to i64 169 ; CHECK-NEXT: %2 = zext i32 %1 to i64
170 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 170 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
171 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 171 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
172 ; CHECK-NEXT: %val = call i32 @llvm.nacl.atomic.load.i32(i32* %4, i32 1) 172 ; CHECK-NEXT: %val = call i32 @llvm.nacl.atomic.load.i32(i32* %4, i32 1)
173 ; CHECK-NEXT: ret i32 %val 173 ; CHECK-NEXT: ret i32 %val
174 ; CHECK-NEXT: } 174 ; CHECK-NEXT: }
175 175
176 ; CHECK-MASK-LABEL: define i32 @test_atomic_load_32(i32* %ptr) { 176 ; CHECK-MASK-LABEL: define i32 @test_atomic_load_32(i32* %ptr) {
177 ; CHECK-MASK: %2 = and i32 %1, 1048575 177 ; CHECK-MASK: %2 = and i32 %1, 1048575
178 178
179 define i64 @test_atomic_load_64(i64* %ptr) { 179 define i64 @test_atomic_load_64(i64* %ptr) {
180 %val = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 1) 180 %val = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 1)
181 ret i64 %val 181 ret i64 %val
182 } 182 }
183 183
184 ; CHECK-LABEL: define i64 @test_atomic_load_64(i64* %ptr) { 184 ; CHECK-LABEL: define i64 @test_atomic_load_64(i64* %ptr) {
185 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 185 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
186 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32 186 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32
187 ; CHECK-NEXT: %2 = zext i32 %1 to i64 187 ; CHECK-NEXT: %2 = zext i32 %1 to i64
188 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 188 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
189 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64* 189 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64*
190 ; CHECK-NEXT: %val = call i64 @llvm.nacl.atomic.load.i64(i64* %4, i32 1) 190 ; CHECK-NEXT: %val = call i64 @llvm.nacl.atomic.load.i64(i64* %4, i32 1)
191 ; CHECK-NEXT: ret i64 %val 191 ; CHECK-NEXT: ret i64 %val
192 ; CHECK-NEXT: } 192 ; CHECK-NEXT: }
193 193
194 ; CHECK-MASK-LABEL: define i64 @test_atomic_load_64(i64* %ptr) { 194 ; CHECK-MASK-LABEL: define i64 @test_atomic_load_64(i64* %ptr) {
195 ; CHECK-MASK: %2 = and i32 %1, 1048575 195 ; CHECK-MASK: %2 = and i32 %1, 1048575
196 196
197 define void @test_atomic_store_32(i32* %ptr) { 197 define void @test_atomic_store_32(i32* %ptr) {
198 call void @llvm.nacl.atomic.store.i32(i32 1234, i32* %ptr, i32 1) 198 call void @llvm.nacl.atomic.store.i32(i32 1234, i32* %ptr, i32 1)
199 ret void 199 ret void
200 } 200 }
201 201
202 ; CHECK-LABEL: define void @test_atomic_store_32(i32* %ptr) { 202 ; CHECK-LABEL: define void @test_atomic_store_32(i32* %ptr) {
203 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 203 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
204 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32 204 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
205 ; CHECK-NEXT: %2 = zext i32 %1 to i64 205 ; CHECK-NEXT: %2 = zext i32 %1 to i64
206 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 206 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
207 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 207 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
208 ; CHECK-NEXT: call void @llvm.nacl.atomic.store.i32(i32 1234, i32* %4, i32 1) 208 ; CHECK-NEXT: call void @llvm.nacl.atomic.store.i32(i32 1234, i32* %4, i32 1)
209 ; CHECK-NEXT: ret void 209 ; CHECK-NEXT: ret void
210 ; CHECK-NEXT: } 210 ; CHECK-NEXT: }
211 211
212 ; CHECK-MASK-LABEL: define void @test_atomic_store_32(i32* %ptr) { 212 ; CHECK-MASK-LABEL: define void @test_atomic_store_32(i32* %ptr) {
213 ; CHECK-MASK: %2 = and i32 %1, 1048575 213 ; CHECK-MASK: %2 = and i32 %1, 1048575
214 214
215 define void @test_atomic_store_64(i64* %ptr) { 215 define void @test_atomic_store_64(i64* %ptr) {
216 call void @llvm.nacl.atomic.store.i64(i64 1234, i64* %ptr, i32 1) 216 call void @llvm.nacl.atomic.store.i64(i64 1234, i64* %ptr, i32 1)
217 ret void 217 ret void
218 } 218 }
219 219
220 ; CHECK-LABEL: define void @test_atomic_store_64(i64* %ptr) { 220 ; CHECK-LABEL: define void @test_atomic_store_64(i64* %ptr) {
221 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 221 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
222 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32 222 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32
223 ; CHECK-NEXT: %2 = zext i32 %1 to i64 223 ; CHECK-NEXT: %2 = zext i32 %1 to i64
224 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 224 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
225 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64* 225 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64*
226 ; CHECK-NEXT: call void @llvm.nacl.atomic.store.i64(i64 1234, i64* %4, i32 1) 226 ; CHECK-NEXT: call void @llvm.nacl.atomic.store.i64(i64 1234, i64* %4, i32 1)
227 ; CHECK-NEXT: ret void 227 ; CHECK-NEXT: ret void
228 ; CHECK-NEXT: } 228 ; CHECK-NEXT: }
229 229
230 ; CHECK-MASK-LABEL: define void @test_atomic_store_64(i64* %ptr) { 230 ; CHECK-MASK-LABEL: define void @test_atomic_store_64(i64* %ptr) {
231 ; CHECK-MASK: %2 = and i32 %1, 1048575 231 ; CHECK-MASK: %2 = and i32 %1, 1048575
232 232
233 define i32 @test_atomic_rmw_32(i32* %ptr) { 233 define i32 @test_atomic_rmw_32(i32* %ptr) {
234 %val = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 1234, i32 1) 234 %val = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 1234, i32 1)
235 ret i32 %val 235 ret i32 %val
236 } 236 }
237 237
238 ; CHECK-LABEL: define i32 @test_atomic_rmw_32(i32* %ptr) { 238 ; CHECK-LABEL: define i32 @test_atomic_rmw_32(i32* %ptr) {
239 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 239 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
240 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32 240 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
241 ; CHECK-NEXT: %2 = zext i32 %1 to i64 241 ; CHECK-NEXT: %2 = zext i32 %1 to i64
242 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 242 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
243 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 243 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
244 ; CHECK-NEXT: %val = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %4, i32 1234, i32 1) 244 ; CHECK-NEXT: %val = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %4, i32 1234, i32 1)
245 ; CHECK-NEXT: ret i32 %val 245 ; CHECK-NEXT: ret i32 %val
246 ; CHECK-NEXT: } 246 ; CHECK-NEXT: }
247 247
248 ; CHECK-MASK-LABEL: define i32 @test_atomic_rmw_32(i32* %ptr) { 248 ; CHECK-MASK-LABEL: define i32 @test_atomic_rmw_32(i32* %ptr) {
249 ; CHECK-MASK: %2 = and i32 %1, 1048575 249 ; CHECK-MASK: %2 = and i32 %1, 1048575
250 250
251 define i64 @test_atomic_rmw_64(i64* %ptr) { 251 define i64 @test_atomic_rmw_64(i64* %ptr) {
252 %val = call i64 @llvm.nacl.atomic.rmw.i64(i32 1, i64* %ptr, i64 1234, i32 1) 252 %val = call i64 @llvm.nacl.atomic.rmw.i64(i32 1, i64* %ptr, i64 1234, i32 1)
253 ret i64 %val 253 ret i64 %val
254 } 254 }
255 255
256 ; CHECK-LABEL: define i64 @test_atomic_rmw_64(i64* %ptr) { 256 ; CHECK-LABEL: define i64 @test_atomic_rmw_64(i64* %ptr) {
257 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 257 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
258 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32 258 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32
259 ; CHECK-NEXT: %2 = zext i32 %1 to i64 259 ; CHECK-NEXT: %2 = zext i32 %1 to i64
260 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 260 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
261 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64* 261 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64*
262 ; CHECK-NEXT: %val = call i64 @llvm.nacl.atomic.rmw.i64(i32 1, i64* %4, i64 1234, i32 1) 262 ; CHECK-NEXT: %val = call i64 @llvm.nacl.atomic.rmw.i64(i32 1, i64* %4, i64 1234, i32 1)
263 ; CHECK-NEXT: ret i64 %val 263 ; CHECK-NEXT: ret i64 %val
264 ; CHECK-NEXT: } 264 ; CHECK-NEXT: }
265 265
266 ; CHECK-MASK-LABEL: define i64 @test_atomic_rmw_64(i64* %ptr) { 266 ; CHECK-MASK-LABEL: define i64 @test_atomic_rmw_64(i64* %ptr) {
267 ; CHECK-MASK: %2 = and i32 %1, 1048575 267 ; CHECK-MASK: %2 = and i32 %1, 1048575
268 268
269 define i32 @test_atomic_cmpxchg_32(i32* %ptr) { 269 define i32 @test_atomic_cmpxchg_32(i32* %ptr) {
270 %val = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 0, i32 1, i32 1, i32 1) 270 %val = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 0, i32 1, i32 1, i32 1)
271 ret i32 %val 271 ret i32 %val
272 } 272 }
273 273
274 ; CHECK-LABEL: define i32 @test_atomic_cmpxchg_32(i32* %ptr) { 274 ; CHECK-LABEL: define i32 @test_atomic_cmpxchg_32(i32* %ptr) {
275 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 275 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
276 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32 276 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
277 ; CHECK-NEXT: %2 = zext i32 %1 to i64 277 ; CHECK-NEXT: %2 = zext i32 %1 to i64
278 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 278 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
279 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 279 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
280 ; CHECK-NEXT: %val = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %4, i32 0, i32 1, i32 1, i32 1) 280 ; CHECK-NEXT: %val = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %4, i32 0, i32 1, i32 1, i32 1)
281 ; CHECK-NEXT: ret i32 %val 281 ; CHECK-NEXT: ret i32 %val
282 ; CHECK-NEXT: } 282 ; CHECK-NEXT: }
283 283
284 ; CHECK-MASK-LABEL: define i32 @test_atomic_cmpxchg_32(i32* %ptr) { 284 ; CHECK-MASK-LABEL: define i32 @test_atomic_cmpxchg_32(i32* %ptr) {
285 ; CHECK-MASK: %2 = and i32 %1, 1048575 285 ; CHECK-MASK: %2 = and i32 %1, 1048575
286 286
287 define i64 @test_atomic_cmpxchg_64(i64* %ptr) { 287 define i64 @test_atomic_cmpxchg_64(i64* %ptr) {
288 %val = call i64 @llvm.nacl.atomic.cmpxchg.i64(i64* %ptr, i64 0, i64 1, i32 1, i32 1) 288 %val = call i64 @llvm.nacl.atomic.cmpxchg.i64(i64* %ptr, i64 0, i64 1, i32 1, i32 1)
289 ret i64 %val 289 ret i64 %val
290 } 290 }
291 291
292 ; CHECK-LABEL: define i64 @test_atomic_cmpxchg_64(i64* %ptr) { 292 ; CHECK-LABEL: define i64 @test_atomic_cmpxchg_64(i64* %ptr) {
293 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 293 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
294 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32 294 ; CHECK-NEXT: %1 = ptrtoint i64* %ptr to i32
295 ; CHECK-NEXT: %2 = zext i32 %1 to i64 295 ; CHECK-NEXT: %2 = zext i32 %1 to i64
296 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 296 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
297 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64* 297 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i64*
298 ; CHECK-NEXT: %val = call i64 @llvm.nacl.atomic.cmpxchg.i64(i64* %4, i64 0, i64 1, i32 1, i32 1) 298 ; CHECK-NEXT: %val = call i64 @llvm.nacl.atomic.cmpxchg.i64(i64* %4, i64 0, i64 1, i32 1, i32 1)
299 ; CHECK-NEXT: ret i64 %val 299 ; CHECK-NEXT: ret i64 %val
300 ; CHECK-NEXT: } 300 ; CHECK-NEXT: }
301 301
302 ; CHECK-MASK-LABEL: define i64 @test_atomic_cmpxchg_64(i64* %ptr) { 302 ; CHECK-MASK-LABEL: define i64 @test_atomic_cmpxchg_64(i64* %ptr) {
303 ; CHECK-MASK: %2 = and i32 %1, 1048575 303 ; CHECK-MASK: %2 = and i32 %1, 1048575
(...skipping 17 matching lines...)
321 ; CHECK-NEXT: call void @llvm.nacl.atomic.fence.all() 321 ; CHECK-NEXT: call void @llvm.nacl.atomic.fence.all()
322 ; CHECK-NEXT: ret void 322 ; CHECK-NEXT: ret void
323 ; CHECK-NEXT: } 323 ; CHECK-NEXT: }
324 324
325 define i1 @test_atomic_is_lock_free(i8* %ptr) { 325 define i1 @test_atomic_is_lock_free(i8* %ptr) {
326 %val = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %ptr) 326 %val = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %ptr)
327 ret i1 %val 327 ret i1 %val
328 } 328 }
329 329
330 ; CHECK-LABEL: define i1 @test_atomic_is_lock_free(i8* %ptr) { 330 ; CHECK-LABEL: define i1 @test_atomic_is_lock_free(i8* %ptr) {
331 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 331 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
332 ; CHECK-NEXT: %1 = ptrtoint i8* %ptr to i32 332 ; CHECK-NEXT: %1 = ptrtoint i8* %ptr to i32
333 ; CHECK-NEXT: %2 = zext i32 %1 to i64 333 ; CHECK-NEXT: %2 = zext i32 %1 to i64
334 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 334 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
335 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8* 335 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8*
336 ; CHECK-NEXT: %val = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %4) 336 ; CHECK-NEXT: %val = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %4)
337 ; CHECK-NEXT: ret i1 %val 337 ; CHECK-NEXT: ret i1 %val
338 ; CHECK-NEXT: } 338 ; CHECK-NEXT: }
339 339
340 define void @test_bitcast_whitelisted(i32 %val) { 340 define void @test_bitcast_whitelisted(i32 %val) {
341 %ptr = inttoptr i32 %val to i8* 341 %ptr = inttoptr i32 %val to i8*
342 %ptr.bc = bitcast i8* %ptr to i32* 342 %ptr.bc = bitcast i8* %ptr to i32*
343 ret void 343 ret void
344 } 344 }
345 345
346 ; CHECK-LABEL: define void @test_bitcast_whitelisted(i32 %val) { 346 ; CHECK-LABEL: define void @test_bitcast_whitelisted(i32 %val) {
347 ; CHECK-NEXT: %ptr = inttoptr i32 %val to i8* 347 ; CHECK-NEXT: %ptr = inttoptr i32 %val to i8*
348 ; CHECK-NEXT: %ptr.bc = bitcast i8* %ptr to i32* 348 ; CHECK-NEXT: %ptr.bc = bitcast i8* %ptr to i32*
349 ; CHECK-NEXT: ret void 349 ; CHECK-NEXT: ret void
350 ; CHECK-NEXT: } 350 ; CHECK-NEXT: }
351 351
352 ; ----------------------------------------------------------------------------- 352 ; -----------------------------------------------------------------------------
353 ; Test the special case which optimizes sandboxing of the output of 353 ; Test the special case which optimizes sandboxing of the output of
354 ; the ExpandGetElementPtr pass. 354 ; the ExpandGetElementPtr pass.
355 355
356 ; this won't get optimized because IntToPtr is not casting a result of an Add 356 ; this won't get optimized because IntToPtr is not casting a result of an Add
357 define i32 @test_no_opt__cast_not_add(i32 %ptr_int) { 357 define i32 @test_no_opt__cast_not_add(i32 %ptr_int) {
358 %ptr = inttoptr i32 %ptr_int to i32* 358 %ptr = inttoptr i32 %ptr_int to i32*
359 %val = load i32* %ptr 359 %val = load i32, i32* %ptr
360 ret i32 %val 360 ret i32 %val
361 } 361 }
362 362
363 ; CHECK-LABEL: define i32 @test_no_opt__cast_not_add(i32 %ptr_int) { 363 ; CHECK-LABEL: define i32 @test_no_opt__cast_not_add(i32 %ptr_int) {
364 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 364 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
365 ; CHECK-NEXT: %ptr = inttoptr i32 %ptr_int to i32* 365 ; CHECK-NEXT: %ptr = inttoptr i32 %ptr_int to i32*
366 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32 366 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
367 ; CHECK-NEXT: %2 = zext i32 %1 to i64 367 ; CHECK-NEXT: %2 = zext i32 %1 to i64
368 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 368 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
369 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 369 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
370 ; CHECK-NEXT: %val = load i32* %4 370 ; CHECK-NEXT: %val = load i32, i32* %4
371 ; CHECK-NEXT: ret i32 %val 371 ; CHECK-NEXT: ret i32 %val
372 ; CHECK-NEXT: } 372 ; CHECK-NEXT: }
373 373
374 ; this won't get optimized because the cast is not from i32 374 ; this won't get optimized because the cast is not from i32
375 define i32 @test_no_opt__cast_not_32(i64 %ptr_int1) { 375 define i32 @test_no_opt__cast_not_32(i64 %ptr_int1) {
376 %ptr_sum = add i64 %ptr_int1, 5 376 %ptr_sum = add i64 %ptr_int1, 5
377 %ptr = inttoptr i64 %ptr_sum to i32* 377 %ptr = inttoptr i64 %ptr_sum to i32*
378 %val = load i32* %ptr 378 %val = load i32, i32* %ptr
379 ret i32 %val 379 ret i32 %val
380 } 380 }
381 381
382 ; CHECK-LABEL: define i32 @test_no_opt__cast_not_32(i64 %ptr_int1) { 382 ; CHECK-LABEL: define i32 @test_no_opt__cast_not_32(i64 %ptr_int1) {
383 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 383 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
384 ; CHECK-NEXT: %ptr_sum = add i64 %ptr_int1, 5 384 ; CHECK-NEXT: %ptr_sum = add i64 %ptr_int1, 5
385 ; CHECK-NEXT: %ptr = inttoptr i64 %ptr_sum to i32* 385 ; CHECK-NEXT: %ptr = inttoptr i64 %ptr_sum to i32*
386 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32 386 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
387 ; CHECK-NEXT: %2 = zext i32 %1 to i64 387 ; CHECK-NEXT: %2 = zext i32 %1 to i64
388 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 388 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
389 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 389 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
390 ; CHECK-NEXT: %val = load i32* %4 390 ; CHECK-NEXT: %val = load i32, i32* %4
391 ; CHECK-NEXT: ret i32 %val 391 ; CHECK-NEXT: ret i32 %val
392 ; CHECK-NEXT: } 392 ; CHECK-NEXT: }
393 393
394 ; this won't get optimized because the Add's 2nd operand is not a constant 394 ; this won't get optimized because the Add's 2nd operand is not a constant
395 define i32 @test_no_opt__add_not_constant(i32 %ptr_int1, i32 %ptr_int2) { 395 define i32 @test_no_opt__add_not_constant(i32 %ptr_int1, i32 %ptr_int2) {
396 %ptr_sum = add i32 %ptr_int1, %ptr_int2 396 %ptr_sum = add i32 %ptr_int1, %ptr_int2
397 %ptr = inttoptr i32 %ptr_sum to i32* 397 %ptr = inttoptr i32 %ptr_sum to i32*
398 %val = load i32* %ptr 398 %val = load i32, i32* %ptr
399 ret i32 %val 399 ret i32 %val
400 } 400 }
401 401
402 ; CHECK-LABEL: define i32 @test_no_opt__add_not_constant(i32 %ptr_int1, i32 %ptr_int2) { 402 ; CHECK-LABEL: define i32 @test_no_opt__add_not_constant(i32 %ptr_int1, i32 %ptr_int2) {
403 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 403 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
404 ; CHECK-NEXT: %ptr_sum = add i32 %ptr_int1, %ptr_int2 404 ; CHECK-NEXT: %ptr_sum = add i32 %ptr_int1, %ptr_int2
405 ; CHECK-NEXT: %ptr = inttoptr i32 %ptr_sum to i32* 405 ; CHECK-NEXT: %ptr = inttoptr i32 %ptr_sum to i32*
406 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32 406 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
407 ; CHECK-NEXT: %2 = zext i32 %1 to i64 407 ; CHECK-NEXT: %2 = zext i32 %1 to i64
408 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 408 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
409 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 409 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
410 ; CHECK-NEXT: %val = load i32* %4 410 ; CHECK-NEXT: %val = load i32, i32* %4
411 ; CHECK-NEXT: ret i32 %val 411 ; CHECK-NEXT: ret i32 %val
412 ; CHECK-NEXT: } 412 ; CHECK-NEXT: }
413 413
414 ; this won't get optimized because the Add's 2nd operand is not positive 414 ; this won't get optimized because the Add's 2nd operand is not positive
415 define i32 @test_no_opt__add_not_positive(i32 %ptr_int) { 415 define i32 @test_no_opt__add_not_positive(i32 %ptr_int) {
416 %ptr_sum = add i32 %ptr_int, -5 416 %ptr_sum = add i32 %ptr_int, -5
417 %ptr = inttoptr i32 %ptr_sum to i32* 417 %ptr = inttoptr i32 %ptr_sum to i32*
418 %val = load i32* %ptr 418 %val = load i32, i32* %ptr
419 ret i32 %val 419 ret i32 %val
420 } 420 }
421 421
422 ; CHECK-LABEL: define i32 @test_no_opt__add_not_positive(i32 %ptr_int) { 422 ; CHECK-LABEL: define i32 @test_no_opt__add_not_positive(i32 %ptr_int) {
423 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 423 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
424 ; CHECK-NEXT: %ptr_sum = add i32 %ptr_int, -5 424 ; CHECK-NEXT: %ptr_sum = add i32 %ptr_int, -5
425 ; CHECK-NEXT: %ptr = inttoptr i32 %ptr_sum to i32* 425 ; CHECK-NEXT: %ptr = inttoptr i32 %ptr_sum to i32*
426 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32 426 ; CHECK-NEXT: %1 = ptrtoint i32* %ptr to i32
427 ; CHECK-NEXT: %2 = zext i32 %1 to i64 427 ; CHECK-NEXT: %2 = zext i32 %1 to i64
428 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 428 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
429 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 429 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
430 ; CHECK-NEXT: %val = load i32* %4 430 ; CHECK-NEXT: %val = load i32, i32* %4
431 ; CHECK-NEXT: ret i32 %val 431 ; CHECK-NEXT: ret i32 %val
432 ; CHECK-NEXT: } 432 ; CHECK-NEXT: }
433 433
434 define i32 @test_opt_dont_remove_cast_if_used(i32 %ptr_int, i32 %replace) { 434 define i32 @test_opt_dont_remove_cast_if_used(i32 %ptr_int, i32 %replace) {
435 %ptr_sum = add i32 %ptr_int, 5 435 %ptr_sum = add i32 %ptr_int, 5
436 %ptr = inttoptr i32 %ptr_sum to i32* 436 %ptr = inttoptr i32 %ptr_sum to i32*
437 %val = load i32* %ptr ; %ptr is used later => keep cast 437 %val = load i32, i32* %ptr ; %ptr is used later => keep cast
438 store i32 %replace, i32* %ptr ; %ptr not used any more => remove cast 438 store i32 %replace, i32* %ptr ; %ptr not used any more => remove cast
439 ret i32 %val 439 ret i32 %val
440 } 440 }
441 441
442 ; CHECK-LABEL: define i32 @test_opt_dont_remove_cast_if_used(i32 %ptr_int, i32 %replace) { 442 ; CHECK-LABEL: define i32 @test_opt_dont_remove_cast_if_used(i32 %ptr_int, i32 %replace) {
443 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 443 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
444 ; CHECK-NEXT: %1 = zext i32 %ptr_int to i64 444 ; CHECK-NEXT: %1 = zext i32 %ptr_int to i64
445 ; CHECK-NEXT: %2 = add i64 %mem_base, %1 445 ; CHECK-NEXT: %2 = add i64 %mem_base, %1
446 ; CHECK-NEXT: %3 = add i64 %2, 5 446 ; CHECK-NEXT: %3 = add i64 %2, 5
447 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 447 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
448 ; CHECK-NEXT: %val = load i32* %4 448 ; CHECK-NEXT: %val = load i32, i32* %4
449 ; CHECK-NEXT: %5 = zext i32 %ptr_int to i64 449 ; CHECK-NEXT: %5 = zext i32 %ptr_int to i64
450 ; CHECK-NEXT: %6 = add i64 %mem_base, %5 450 ; CHECK-NEXT: %6 = add i64 %mem_base, %5
451 ; CHECK-NEXT: %7 = add i64 %6, 5 451 ; CHECK-NEXT: %7 = add i64 %6, 5
452 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i32* 452 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i32*
453 ; CHECK-NEXT: store i32 %replace, i32* %8 453 ; CHECK-NEXT: store i32 %replace, i32* %8
454 ; CHECK-NEXT: ret i32 %val 454 ; CHECK-NEXT: ret i32 %val
455 ; CHECK-NEXT: } 455 ; CHECK-NEXT: }
456 456
457 define i32 @test_opt_dont_remove_add_if_used(i32 %ptr_int, i32 %replace) { 457 define i32 @test_opt_dont_remove_add_if_used(i32 %ptr_int, i32 %replace) {
458 %ptr_sum = add i32 %ptr_int, 5 458 %ptr_sum = add i32 %ptr_int, 5
459 %ptr = inttoptr i32 %ptr_sum to i32* 459 %ptr = inttoptr i32 %ptr_sum to i32*
460 store i32 %replace, i32* %ptr 460 store i32 %replace, i32* %ptr
461 ret i32 %ptr_sum 461 ret i32 %ptr_sum
462 } 462 }
463 463
464 ; CHECK-LABEL: define i32 @test_opt_dont_remove_add_if_used(i32 %ptr_int, i32 %replace) { 464 ; CHECK-LABEL: define i32 @test_opt_dont_remove_add_if_used(i32 %ptr_int, i32 %replace) {
465 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 465 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
466 ; CHECK-NEXT: %ptr_sum = add i32 %ptr_int, 5 466 ; CHECK-NEXT: %ptr_sum = add i32 %ptr_int, 5
467 ; CHECK-NEXT: %1 = zext i32 %ptr_int to i64 467 ; CHECK-NEXT: %1 = zext i32 %ptr_int to i64
468 ; CHECK-NEXT: %2 = add i64 %mem_base, %1 468 ; CHECK-NEXT: %2 = add i64 %mem_base, %1
469 ; CHECK-NEXT: %3 = add i64 %2, 5 469 ; CHECK-NEXT: %3 = add i64 %2, 5
470 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32* 470 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*
471 ; CHECK-NEXT: store i32 %replace, i32* %4 471 ; CHECK-NEXT: store i32 %replace, i32* %4
472 ; CHECK-NEXT: ret i32 %ptr_sum 472 ; CHECK-NEXT: ret i32 %ptr_sum
473 ; CHECK-NEXT: } 473 ; CHECK-NEXT: }
474 474
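The tests in this block cover the special case for ExpandGetElementPtr output: when a pointer comes from an inttoptr of an i32 add with a positive constant, the pass folds that constant into the 64-bit address instead of sandboxing the already-offset 32-bit sum (the test_no_opt__* functions above keep the generic sequence; the test_opt_* ones show the folded form). A rough C sketch of the two shapes, with illustrative names and the constant 5 taken from the tests:

    #include <stdint.h>

    /* Generic shape: the 32-bit sum is computed (and wraps) first, then sandboxed. */
    uint64_t addr_generic(uint64_t mem_base, uint32_t base, uint32_t off) {
      return mem_base + (uint64_t)(uint32_t)(base + off);
    }

    /* Folded shape (constant, positive offset): rebase the base pointer,
       then add the offset in 64 bits, as in the test_opt_* CHECK lines. */
    uint64_t addr_gep_folded(uint64_t mem_base, uint32_t base) {
      return mem_base + (uint64_t)base + 5;
    }

The fold is only safe when the offset is a known non-negative constant, which is why the non-constant and negative-offset cases above fall back to the generic sequence.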
475 475
476 ; ------------------------------------------------------------------------------ 476 ; ------------------------------------------------------------------------------
477 ; Check that dbg symbols are preserved 477 ; Check that dbg symbols are preserved
478 478
479 define void @test_len_dbg(i8* %dest, i8* %src, i32 %len) { 479 define void @test_len_dbg(i8* %dest, i8* %src, i32 %len) {
480 call void @llvm.memcpy.p0i8.p0i8.i32(i8* %dest, i8* %src, i32 %len, i32 4, i1 false), !dbg !1 480 call void @llvm.memcpy.p0i8.p0i8.i32(i8* %dest, i8* %src, i32 %len, i32 4, i1 false), !dbg !1
481 ret void 481 ret void
482 } 482 }
483 483
484 ; CHECK-LABEL: define void @test_len_dbg(i8* %dest, i8* %src, i32 %len) { 484 ; CHECK-LABEL: define void @test_len_dbg(i8* %dest, i8* %src, i32 %len) {
485 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 485 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
486 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32 486 ; CHECK-NEXT: %1 = ptrtoint i8* %dest to i32
487 ; CHECK-NEXT: %2 = zext i32 %1 to i64 487 ; CHECK-NEXT: %2 = zext i32 %1 to i64
488 ; CHECK-NEXT: %3 = add i64 %mem_base, %2 488 ; CHECK-NEXT: %3 = add i64 %mem_base, %2
489 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8* 489 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i8*
490 ; CHECK-NEXT: %5 = ptrtoint i8* %src to i32 490 ; CHECK-NEXT: %5 = ptrtoint i8* %src to i32
491 ; CHECK-NEXT: %6 = zext i32 %5 to i64 491 ; CHECK-NEXT: %6 = zext i32 %5 to i64
492 ; CHECK-NEXT: %7 = add i64 %mem_base, %6 492 ; CHECK-NEXT: %7 = add i64 %mem_base, %6
493 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i8* 493 ; CHECK-NEXT: %8 = inttoptr i64 %7 to i8*
494 ; CHECK-NEXT: call void @llvm.memcpy.p0i8.p0i8.i32(i8* %4, i8* %8, i32 %len, i32 4, i1 false), !dbg !1 494 ; CHECK-NEXT: call void @llvm.memcpy.p0i8.p0i8.i32(i8* %4, i8* %8, i32 %len, i32 4, i1 false), !dbg !1
495 ; CHECK-NEXT: ret void 495 ; CHECK-NEXT: ret void
496 ; CHECK-NEXT: } 496 ; CHECK-NEXT: }
497 497
498 define void @test_opt_dbg(i32 %ptr_int, i32 %replace) { 498 define void @test_opt_dbg(i32 %ptr_int, i32 %replace) {
499 %ptr_sum = add i32 %ptr_int, 5, !dbg !1 499 %ptr_sum = add i32 %ptr_int, 5, !dbg !1
500 %ptr = inttoptr i32 %ptr_sum to i32*, !dbg !2 500 %ptr = inttoptr i32 %ptr_sum to i32*, !dbg !3
501 store i32 %replace, i32* %ptr, !dbg !3 501 store i32 %replace, i32* %ptr, !dbg !4
502 ret void, !dbg !4 502 ret void, !dbg !5
503 } 503 }
504 504
505 ; CHECK-LABEL: define void @test_opt_dbg(i32 %ptr_int, i32 %replace) { 505 ; CHECK-LABEL: define void @test_opt_dbg(i32 %ptr_int, i32 %replace) {
506 ; CHECK-NEXT: %mem_base = load i64* @__sfi_memory_base 506 ; CHECK-NEXT: %mem_base = load i64, i64* @__sfi_memory_base
507 ; CHECK-NEXT: %1 = zext i32 %ptr_int to i64 507 ; CHECK-NEXT: %1 = zext i32 %ptr_int to i64
508 ; CHECK-NEXT: %2 = add i64 %mem_base, %1 508 ; CHECK-NEXT: %2 = add i64 %mem_base, %1
509 ; CHECK-NEXT: %3 = add i64 %2, 5, !dbg !1 509 ; CHECK-NEXT: %3 = add i64 %2, 5, !dbg !1
510 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*, !dbg !2 510 ; CHECK-NEXT: %4 = inttoptr i64 %3 to i32*, !dbg !3
511 ; CHECK-NEXT: store i32 %replace, i32* %4, !dbg !3 511 ; CHECK-NEXT: store i32 %replace, i32* %4, !dbg !4
512 ; CHECK-NEXT: ret void, !dbg !4 512 ; CHECK-NEXT: ret void, !dbg !5
513 ; CHECK-NEXT: } 513 ; CHECK-NEXT: }
514 514
515 !1 = metadata !{i32 138, i32 0, metadata !1, null} 515
516 !2 = metadata !{i32 142, i32 0, metadata !2, null} 516 !1 = !MDLocation(line: 1, column: 13, scope: !2)
517 !3 = metadata !{i32 144, i32 0, metadata !3, null} 517 !2 = !MDSubprogram(name: "foo")
518 !4 = metadata !{i32 144, i32 0, metadata !4, null} 518 !3 = !MDLocation(line: 2, column: 10, scope: !2)
519 !4 = !MDLocation(line: 2, column: 3, scope: !2)
520 !5 = !MDLocation(line: 23, column: 3, scope: !2)