; RUN: llc < %s -march=amdgcn -mcpu=SI -verify-machineinstrs | FileCheck --check-prefix=SI --check-prefix=FUNC %s
; --- atomicrmw add: 8 addressing-mode variants (offset / addr64 / returned value) ---

; FUNC-LABEL: {{^}}atomic_add_i32_offset:
; SI: buffer_atomic_add v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16{{$}}
define void @atomic_add_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_add_i32_ret_offset:
; SI: buffer_atomic_add [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16 glc {{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_add_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_add_i32_addr64_offset:
; SI: buffer_atomic_add v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16{{$}}
define void @atomic_add_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_add_i32_ret_addr64_offset:
; SI: buffer_atomic_add [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_add_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_add_i32:
; SI: buffer_atomic_add v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0{{$}}
define void @atomic_add_i32(i32 addrspace(1)* %out, i32 %in) {
entry:
  %0 = atomicrmw volatile add i32 addrspace(1)* %out, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_add_i32_ret:
; SI: buffer_atomic_add [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 glc
; SI: buffer_store_dword [[RET]]
define void @atomic_add_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %0 = atomicrmw volatile add i32 addrspace(1)* %out, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_add_i32_addr64:
; SI: buffer_atomic_add v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64{{$}}
define void @atomic_add_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile add i32 addrspace(1)* %ptr, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_add_i32_ret_addr64:
; SI: buffer_atomic_add [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_add_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile add i32 addrspace(1)* %ptr, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}
; --- atomicrmw and: 8 addressing-mode variants (offset / addr64 / returned value) ---

; FUNC-LABEL: {{^}}atomic_and_i32_offset:
; SI: buffer_atomic_and v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16{{$}}
define void @atomic_and_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_and_i32_ret_offset:
; SI: buffer_atomic_and [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16 glc {{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_and_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_and_i32_addr64_offset:
; SI: buffer_atomic_and v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16{{$}}
define void @atomic_and_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_and_i32_ret_addr64_offset:
; SI: buffer_atomic_and [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_and_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_and_i32:
; SI: buffer_atomic_and v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0{{$}}
define void @atomic_and_i32(i32 addrspace(1)* %out, i32 %in) {
entry:
  %0 = atomicrmw volatile and i32 addrspace(1)* %out, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_and_i32_ret:
; SI: buffer_atomic_and [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 glc
; SI: buffer_store_dword [[RET]]
define void @atomic_and_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %0 = atomicrmw volatile and i32 addrspace(1)* %out, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_and_i32_addr64:
; SI: buffer_atomic_and v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64{{$}}
define void @atomic_and_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile and i32 addrspace(1)* %ptr, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_and_i32_ret_addr64:
; SI: buffer_atomic_and [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_and_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile and i32 addrspace(1)* %ptr, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}
; --- atomicrmw sub: 8 addressing-mode variants (offset / addr64 / returned value) ---

; FUNC-LABEL: {{^}}atomic_sub_i32_offset:
; SI: buffer_atomic_sub v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16{{$}}
define void @atomic_sub_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_sub_i32_ret_offset:
; SI: buffer_atomic_sub [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16 glc {{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_sub_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_sub_i32_addr64_offset:
; SI: buffer_atomic_sub v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16{{$}}
define void @atomic_sub_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_sub_i32_ret_addr64_offset:
; SI: buffer_atomic_sub [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_sub_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_sub_i32:
; SI: buffer_atomic_sub v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0{{$}}
define void @atomic_sub_i32(i32 addrspace(1)* %out, i32 %in) {
entry:
  %0 = atomicrmw volatile sub i32 addrspace(1)* %out, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_sub_i32_ret:
; SI: buffer_atomic_sub [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 glc
; SI: buffer_store_dword [[RET]]
define void @atomic_sub_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %0 = atomicrmw volatile sub i32 addrspace(1)* %out, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_sub_i32_addr64:
; SI: buffer_atomic_sub v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64{{$}}
define void @atomic_sub_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile sub i32 addrspace(1)* %ptr, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_sub_i32_ret_addr64:
; SI: buffer_atomic_sub [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_sub_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile sub i32 addrspace(1)* %ptr, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}
; --- atomicrmw max (signed -> buffer_atomic_smax): 8 addressing-mode variants ---

; FUNC-LABEL: {{^}}atomic_max_i32_offset:
; SI: buffer_atomic_smax v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16{{$}}
define void @atomic_max_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_max_i32_ret_offset:
; SI: buffer_atomic_smax [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16 glc {{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_max_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_max_i32_addr64_offset:
; SI: buffer_atomic_smax v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16{{$}}
define void @atomic_max_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_max_i32_ret_addr64_offset:
; SI: buffer_atomic_smax [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_max_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_max_i32:
; SI: buffer_atomic_smax v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0{{$}}
define void @atomic_max_i32(i32 addrspace(1)* %out, i32 %in) {
entry:
  %0 = atomicrmw volatile max i32 addrspace(1)* %out, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_max_i32_ret:
; SI: buffer_atomic_smax [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 glc
; SI: buffer_store_dword [[RET]]
define void @atomic_max_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %0 = atomicrmw volatile max i32 addrspace(1)* %out, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_max_i32_addr64:
; SI: buffer_atomic_smax v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64{{$}}
define void @atomic_max_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile max i32 addrspace(1)* %ptr, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_max_i32_ret_addr64:
; SI: buffer_atomic_smax [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_max_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile max i32 addrspace(1)* %ptr, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}
; --- atomicrmw umax (unsigned -> buffer_atomic_umax): 8 addressing-mode variants ---

; FUNC-LABEL: {{^}}atomic_umax_i32_offset:
; SI: buffer_atomic_umax v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16{{$}}
define void @atomic_umax_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_umax_i32_ret_offset:
; SI: buffer_atomic_umax [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16 glc {{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_umax_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_umax_i32_addr64_offset:
; SI: buffer_atomic_umax v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16{{$}}
define void @atomic_umax_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_umax_i32_ret_addr64_offset:
; SI: buffer_atomic_umax [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_umax_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_umax_i32:
; SI: buffer_atomic_umax v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0{{$}}
define void @atomic_umax_i32(i32 addrspace(1)* %out, i32 %in) {
entry:
  %0 = atomicrmw volatile umax i32 addrspace(1)* %out, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_umax_i32_ret:
; SI: buffer_atomic_umax [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 glc
; SI: buffer_store_dword [[RET]]
define void @atomic_umax_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %0 = atomicrmw volatile umax i32 addrspace(1)* %out, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_umax_i32_addr64:
; SI: buffer_atomic_umax v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64{{$}}
define void @atomic_umax_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile umax i32 addrspace(1)* %ptr, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_umax_i32_ret_addr64:
; SI: buffer_atomic_umax [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_umax_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile umax i32 addrspace(1)* %ptr, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}
; --- atomicrmw min (signed -> buffer_atomic_smin): 8 addressing-mode variants ---

; FUNC-LABEL: {{^}}atomic_min_i32_offset:
; SI: buffer_atomic_smin v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16{{$}}
define void @atomic_min_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_min_i32_ret_offset:
; SI: buffer_atomic_smin [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16 glc {{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_min_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_min_i32_addr64_offset:
; SI: buffer_atomic_smin v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16{{$}}
define void @atomic_min_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_min_i32_ret_addr64_offset:
; SI: buffer_atomic_smin [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_min_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_min_i32:
; SI: buffer_atomic_smin v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0{{$}}
define void @atomic_min_i32(i32 addrspace(1)* %out, i32 %in) {
entry:
  %0 = atomicrmw volatile min i32 addrspace(1)* %out, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_min_i32_ret:
; SI: buffer_atomic_smin [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 glc
; SI: buffer_store_dword [[RET]]
define void @atomic_min_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %0 = atomicrmw volatile min i32 addrspace(1)* %out, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_min_i32_addr64:
; SI: buffer_atomic_smin v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64{{$}}
define void @atomic_min_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile min i32 addrspace(1)* %ptr, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_min_i32_ret_addr64:
; SI: buffer_atomic_smin [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_min_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile min i32 addrspace(1)* %ptr, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}
; --- atomicrmw umin (unsigned -> buffer_atomic_umin): 8 addressing-mode variants ---

; FUNC-LABEL: {{^}}atomic_umin_i32_offset:
; SI: buffer_atomic_umin v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16{{$}}
define void @atomic_umin_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_umin_i32_ret_offset:
; SI: buffer_atomic_umin [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16 glc {{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_umin_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_umin_i32_addr64_offset:
; SI: buffer_atomic_umin v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16{{$}}
define void @atomic_umin_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_umin_i32_ret_addr64_offset:
; SI: buffer_atomic_umin [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_umin_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_umin_i32:
; SI: buffer_atomic_umin v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0{{$}}
define void @atomic_umin_i32(i32 addrspace(1)* %out, i32 %in) {
entry:
  %0 = atomicrmw volatile umin i32 addrspace(1)* %out, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_umin_i32_ret:
; SI: buffer_atomic_umin [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 glc
; SI: buffer_store_dword [[RET]]
define void @atomic_umin_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %0 = atomicrmw volatile umin i32 addrspace(1)* %out, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_umin_i32_addr64:
; SI: buffer_atomic_umin v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64{{$}}
define void @atomic_umin_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile umin i32 addrspace(1)* %ptr, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_umin_i32_ret_addr64:
; SI: buffer_atomic_umin [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_umin_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile umin i32 addrspace(1)* %ptr, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}
; --- atomicrmw or: 8 addressing-mode variants (offset / addr64 / returned value) ---

; FUNC-LABEL: {{^}}atomic_or_i32_offset:
; SI: buffer_atomic_or v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16{{$}}
define void @atomic_or_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_or_i32_ret_offset:
; SI: buffer_atomic_or [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16 glc {{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_or_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_or_i32_addr64_offset:
; SI: buffer_atomic_or v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16{{$}}
define void @atomic_or_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_or_i32_ret_addr64_offset:
; SI: buffer_atomic_or [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_or_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_or_i32:
; SI: buffer_atomic_or v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0{{$}}
define void @atomic_or_i32(i32 addrspace(1)* %out, i32 %in) {
entry:
  %0 = atomicrmw volatile or i32 addrspace(1)* %out, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_or_i32_ret:
; SI: buffer_atomic_or [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 glc
; SI: buffer_store_dword [[RET]]
define void @atomic_or_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %0 = atomicrmw volatile or i32 addrspace(1)* %out, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_or_i32_addr64:
; SI: buffer_atomic_or v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64{{$}}
define void @atomic_or_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile or i32 addrspace(1)* %ptr, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_or_i32_ret_addr64:
; SI: buffer_atomic_or [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_or_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile or i32 addrspace(1)* %ptr, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}
; --- atomicrmw xchg (-> buffer_atomic_swap): 8 addressing-mode variants ---

; FUNC-LABEL: {{^}}atomic_xchg_i32_offset:
; SI: buffer_atomic_swap v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16{{$}}
define void @atomic_xchg_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_xchg_i32_ret_offset:
; SI: buffer_atomic_swap [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16 glc {{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_xchg_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_xchg_i32_addr64_offset:
; SI: buffer_atomic_swap v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16{{$}}
define void @atomic_xchg_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_xchg_i32_ret_addr64_offset:
; SI: buffer_atomic_swap [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_xchg_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_xchg_i32:
; SI: buffer_atomic_swap v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0{{$}}
define void @atomic_xchg_i32(i32 addrspace(1)* %out, i32 %in) {
entry:
  %0 = atomicrmw volatile xchg i32 addrspace(1)* %out, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_xchg_i32_ret:
; SI: buffer_atomic_swap [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 glc
; SI: buffer_store_dword [[RET]]
define void @atomic_xchg_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %0 = atomicrmw volatile xchg i32 addrspace(1)* %out, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_xchg_i32_addr64:
; SI: buffer_atomic_swap v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64{{$}}
define void @atomic_xchg_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile xchg i32 addrspace(1)* %ptr, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_xchg_i32_ret_addr64:
; SI: buffer_atomic_swap [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_xchg_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile xchg i32 addrspace(1)* %ptr, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}
; --- atomicrmw xor: 8 addressing-mode variants (offset / addr64 / returned value) ---

; FUNC-LABEL: {{^}}atomic_xor_i32_offset:
; SI: buffer_atomic_xor v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16{{$}}
define void @atomic_xor_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_xor_i32_ret_offset:
; SI: buffer_atomic_xor [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 offset:16 glc {{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_xor_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_xor_i32_addr64_offset:
; SI: buffer_atomic_xor v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16{{$}}
define void @atomic_xor_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_xor_i32_ret_addr64_offset:
; SI: buffer_atomic_xor [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 offset:16 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_xor_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
  %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_xor_i32:
; SI: buffer_atomic_xor v{{[0-9]+}}, s[{{[0-9]+}}:{{[0-9]+}}], 0{{$}}
define void @atomic_xor_i32(i32 addrspace(1)* %out, i32 %in) {
entry:
  %0 = atomicrmw volatile xor i32 addrspace(1)* %out, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_xor_i32_ret:
; SI: buffer_atomic_xor [[RET:v[0-9]+]], s[{{[0-9]+}}:{{[0-9]+}}], 0 glc
; SI: buffer_store_dword [[RET]]
define void @atomic_xor_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  %0 = atomicrmw volatile xor i32 addrspace(1)* %out, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}

; FUNC-LABEL: {{^}}atomic_xor_i32_addr64:
; SI: buffer_atomic_xor v{{[0-9]+}}, v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64{{$}}
define void @atomic_xor_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile xor i32 addrspace(1)* %ptr, i32 %in seq_cst
  ret void
}

; FUNC-LABEL: {{^}}atomic_xor_i32_ret_addr64:
; SI: buffer_atomic_xor [[RET:v[0-9]+]], v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], 0 addr64 glc{{$}}
; SI: buffer_store_dword [[RET]]
define void @atomic_xor_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
entry:
  %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
  %0 = atomicrmw volatile xor i32 addrspace(1)* %ptr, i32 %in seq_cst
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}