; RUN: llc < %s -verify-machineinstrs -mtriple=aarch64-none-linux-gnu -mattr=+neon | FileCheck %s
; Vector bitwise AND selects the NEON "and" instruction (8b for 64-bit, 16b for 128-bit).
define <8 x i8> @and8xi8(<8 x i8> %a, <8 x i8> %b) {
; CHECK-LABEL: and8xi8:
; CHECK: and {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = and <8 x i8> %a, %b
  ret <8 x i8> %tmp1
}

define <16 x i8> @and16xi8(<16 x i8> %a, <16 x i8> %b) {
; CHECK-LABEL: and16xi8:
; CHECK: and {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = and <16 x i8> %a, %b
  ret <16 x i8> %tmp1
}
; Vector bitwise OR selects the NEON "orr" instruction.
define <8 x i8> @orr8xi8(<8 x i8> %a, <8 x i8> %b) {
; CHECK-LABEL: orr8xi8:
; CHECK: orr {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = or <8 x i8> %a, %b
  ret <8 x i8> %tmp1
}

define <16 x i8> @orr16xi8(<16 x i8> %a, <16 x i8> %b) {
; CHECK-LABEL: orr16xi8:
; CHECK: orr {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = or <16 x i8> %a, %b
  ret <16 x i8> %tmp1
}
; Vector bitwise XOR selects the NEON "eor" instruction.
define <8 x i8> @xor8xi8(<8 x i8> %a, <8 x i8> %b) {
; CHECK-LABEL: xor8xi8:
; CHECK: eor {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <8 x i8> %a, %b
  ret <8 x i8> %tmp1
}

define <16 x i8> @xor16xi8(<16 x i8> %a, <16 x i8> %b) {
; CHECK-LABEL: xor16xi8:
; CHECK: eor {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <16 x i8> %a, %b
  ret <16 x i8> %tmp1
}
; (a & -1) | (b & 0) with constant masks should still select "bsl"
; (bitwise select) rather than being folded away before isel.
define <8 x i8> @bsl8xi8_const(<8 x i8> %a, <8 x i8> %b) {
; CHECK-LABEL: bsl8xi8_const:
; CHECK: bsl {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = and <8 x i8> %a, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %tmp2 = and <8 x i8> %b, <i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0>
  %tmp3 = or <8 x i8> %tmp1, %tmp2
  ret <8 x i8> %tmp3
}

define <16 x i8> @bsl16xi8_const(<16 x i8> %a, <16 x i8> %b) {
; CHECK-LABEL: bsl16xi8_const:
; CHECK: bsl {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = and <16 x i8> %a, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %tmp2 = and <16 x i8> %b, <i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0>
  %tmp3 = or <16 x i8> %tmp1, %tmp2
  ret <16 x i8> %tmp3
}
; a | ~b should fuse into the NEON "orn" instruction.
define <8 x i8> @orn8xi8(<8 x i8> %a, <8 x i8> %b) {
; CHECK-LABEL: orn8xi8:
; CHECK: orn {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <8 x i8> %b, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %tmp2 = or <8 x i8> %a, %tmp1
  ret <8 x i8> %tmp2
}

define <16 x i8> @orn16xi8(<16 x i8> %a, <16 x i8> %b) {
; CHECK-LABEL: orn16xi8:
; CHECK: orn {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <16 x i8> %b, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %tmp2 = or <16 x i8> %a, %tmp1
  ret <16 x i8> %tmp2
}
; a & ~b should fuse into the NEON "bic" (bit clear) instruction.
define <8 x i8> @bic8xi8(<8 x i8> %a, <8 x i8> %b) {
; CHECK-LABEL: bic8xi8:
; CHECK: bic {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <8 x i8> %b, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %tmp2 = and <8 x i8> %a, %tmp1
  ret <8 x i8> %tmp2
}

define <16 x i8> @bic16xi8(<16 x i8> %a, <16 x i8> %b) {
; CHECK-LABEL: bic16xi8:
; CHECK: bic {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <16 x i8> %b, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %tmp2 = and <16 x i8> %a, %tmp1
  ret <16 x i8> %tmp2
}
; OR with a byte splatted into one byte lane of each .2s element selects
; the shifted-immediate form of ORR.
define <2 x i32> @orrimm2s_lsl0(<2 x i32> %a) {
; CHECK-LABEL: orrimm2s_lsl0:
; CHECK: orr {{v[0-9]+}}.2s, #0xff
  %tmp1 = or <2 x i32> %a, <i32 255, i32 255>
  ret <2 x i32> %tmp1
}

define <2 x i32> @orrimm2s_lsl8(<2 x i32> %a) {
; CHECK-LABEL: orrimm2s_lsl8:
; CHECK: orr {{v[0-9]+}}.2s, #0xff, lsl #8
  %tmp1 = or <2 x i32> %a, <i32 65280, i32 65280>
  ret <2 x i32> %tmp1
}

define <2 x i32> @orrimm2s_lsl16(<2 x i32> %a) {
; CHECK-LABEL: orrimm2s_lsl16:
; CHECK: orr {{v[0-9]+}}.2s, #0xff, lsl #16
  %tmp1 = or <2 x i32> %a, <i32 16711680, i32 16711680>
  ret <2 x i32> %tmp1
}

define <2 x i32> @orrimm2s_lsl24(<2 x i32> %a) {
; CHECK-LABEL: orrimm2s_lsl24:
; CHECK: orr {{v[0-9]+}}.2s, #0xff, lsl #24
  %tmp1 = or <2 x i32> %a, <i32 4278190080, i32 4278190080>
  ret <2 x i32> %tmp1
}
; Same shifted-immediate ORR selection for the 128-bit .4s form.
define <4 x i32> @orrimm4s_lsl0(<4 x i32> %a) {
; CHECK-LABEL: orrimm4s_lsl0:
; CHECK: orr {{v[0-9]+}}.4s, #0xff
  %tmp1 = or <4 x i32> %a, <i32 255, i32 255, i32 255, i32 255>
  ret <4 x i32> %tmp1
}

define <4 x i32> @orrimm4s_lsl8(<4 x i32> %a) {
; CHECK-LABEL: orrimm4s_lsl8:
; CHECK: orr {{v[0-9]+}}.4s, #0xff, lsl #8
  %tmp1 = or <4 x i32> %a, <i32 65280, i32 65280, i32 65280, i32 65280>
  ret <4 x i32> %tmp1
}

define <4 x i32> @orrimm4s_lsl16(<4 x i32> %a) {
; CHECK-LABEL: orrimm4s_lsl16:
; CHECK: orr {{v[0-9]+}}.4s, #0xff, lsl #16
  %tmp1 = or <4 x i32> %a, <i32 16711680, i32 16711680, i32 16711680, i32 16711680>
  ret <4 x i32> %tmp1
}

define <4 x i32> @orrimm4s_lsl24(<4 x i32> %a) {
; CHECK-LABEL: orrimm4s_lsl24:
; CHECK: orr {{v[0-9]+}}.4s, #0xff, lsl #24
  %tmp1 = or <4 x i32> %a, <i32 4278190080, i32 4278190080, i32 4278190080, i32 4278190080>
  ret <4 x i32> %tmp1
}
; Shifted-immediate ORR for the 16-bit element (.4h / .8h) forms.
define <4 x i16> @orrimm4h_lsl0(<4 x i16> %a) {
; CHECK-LABEL: orrimm4h_lsl0:
; CHECK: orr {{v[0-9]+}}.4h, #0xff
  %tmp1 = or <4 x i16> %a, <i16 255, i16 255, i16 255, i16 255>
  ret <4 x i16> %tmp1
}

define <4 x i16> @orrimm4h_lsl8(<4 x i16> %a) {
; CHECK-LABEL: orrimm4h_lsl8:
; CHECK: orr {{v[0-9]+}}.4h, #0xff, lsl #8
  %tmp1 = or <4 x i16> %a, <i16 65280, i16 65280, i16 65280, i16 65280>
  ret <4 x i16> %tmp1
}

define <8 x i16> @orrimm8h_lsl0(<8 x i16> %a) {
; CHECK-LABEL: orrimm8h_lsl0:
; CHECK: orr {{v[0-9]+}}.8h, #0xff
  %tmp1 = or <8 x i16> %a, <i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255>
  ret <8 x i16> %tmp1
}

define <8 x i16> @orrimm8h_lsl8(<8 x i16> %a) {
; CHECK-LABEL: orrimm8h_lsl8:
; CHECK: orr {{v[0-9]+}}.8h, #0xff, lsl #8
  %tmp1 = or <8 x i16> %a, <i16 65280, i16 65280, i16 65280, i16 65280, i16 65280, i16 65280, i16 65280, i16 65280>
  ret <8 x i16> %tmp1
}
; AND with ~(0x10 << shift) splatted per .2s element selects the
; shifted-immediate form of BIC. The oversized 64-bit literals in the
; original relied on implicit truncation to i32; they are written here
; as in-range i32 values with the same low 32 bits.
; (Function name "lsl124" kept as-is: it is referenced by this name and
;  renaming would change the test's exported symbols.)
define <2 x i32> @bicimm2s_lsl0(<2 x i32> %a) {
; CHECK-LABEL: bicimm2s_lsl0:
; CHECK: bic {{v[0-9]+}}.2s, #0x10
  %tmp1 = and <2 x i32> %a, <i32 4294967279, i32 4294967279>
  ret <2 x i32> %tmp1
}

define <2 x i32> @bicimm2s_lsl8(<2 x i32> %a) {
; CHECK-LABEL: bicimm2s_lsl8:
; CHECK: bic {{v[0-9]+}}.2s, #0x10, lsl #8
  %tmp1 = and <2 x i32> %a, <i32 4294962175, i32 4294962175>   ; 0xffffefff
  ret <2 x i32> %tmp1
}

define <2 x i32> @bicimm2s_lsl16(<2 x i32> %a) {
; CHECK-LABEL: bicimm2s_lsl16:
; CHECK: bic {{v[0-9]+}}.2s, #0x10, lsl #16
  %tmp1 = and <2 x i32> %a, <i32 4293918719, i32 4293918719>   ; 0xffefffff
  ret <2 x i32> %tmp1
}

define <2 x i32> @bicimm2s_lsl124(<2 x i32> %a) {
; CHECK-LABEL: bicimm2s_lsl124:
; CHECK: bic {{v[0-9]+}}.2s, #0x10, lsl #24
  %tmp1 = and <2 x i32> %a, <i32 4026531839, i32 4026531839>   ; 0xefffffff
  ret <2 x i32> %tmp1
}
; Shifted-immediate BIC for the 128-bit .4s form; constants normalized
; to in-range i32 values (same low 32 bits as the original literals).
define <4 x i32> @bicimm4s_lsl0(<4 x i32> %a) {
; CHECK-LABEL: bicimm4s_lsl0:
; CHECK: bic {{v[0-9]+}}.4s, #0x10
  %tmp1 = and <4 x i32> %a, <i32 4294967279, i32 4294967279, i32 4294967279, i32 4294967279>
  ret <4 x i32> %tmp1
}

define <4 x i32> @bicimm4s_lsl8(<4 x i32> %a) {
; CHECK-LABEL: bicimm4s_lsl8:
; CHECK: bic {{v[0-9]+}}.4s, #0x10, lsl #8
  %tmp1 = and <4 x i32> %a, <i32 4294962175, i32 4294962175, i32 4294962175, i32 4294962175>   ; 0xffffefff
  ret <4 x i32> %tmp1
}

define <4 x i32> @bicimm4s_lsl16(<4 x i32> %a) {
; CHECK-LABEL: bicimm4s_lsl16:
; CHECK: bic {{v[0-9]+}}.4s, #0x10, lsl #16
  %tmp1 = and <4 x i32> %a, <i32 4293918719, i32 4293918719, i32 4293918719, i32 4293918719>   ; 0xffefffff
  ret <4 x i32> %tmp1
}

define <4 x i32> @bicimm4s_lsl124(<4 x i32> %a) {
; CHECK-LABEL: bicimm4s_lsl124:
; CHECK: bic {{v[0-9]+}}.4s, #0x10, lsl #24
  %tmp1 = and <4 x i32> %a, <i32 4026531839, i32 4026531839, i32 4026531839, i32 4026531839>   ; 0xefffffff
  ret <4 x i32> %tmp1
}
; Shifted-immediate BIC for .4h. Constants normalized to in-range i16
; values (same low 16 bits as the original oversized literals).
define <4 x i16> @bicimm4h_lsl0_a(<4 x i16> %a) {
; CHECK-LABEL: bicimm4h_lsl0_a:
; CHECK: bic {{v[0-9]+}}.4h, #0x10
  %tmp1 = and <4 x i16> %a, <i16 65519, i16 65519, i16 65519, i16 65519>   ; 0xffef
  ret <4 x i16> %tmp1
}

define <4 x i16> @bicimm4h_lsl0_b(<4 x i16> %a) {
; CHECK-LABEL: bicimm4h_lsl0_b:
; CHECK: bic {{v[0-9]+}}.4h, #0x0
  %tmp1 = and <4 x i16> %a, <i16 65280, i16 65280, i16 65280, i16 65280>
  ret <4 x i16> %tmp1
}

define <4 x i16> @bicimm4h_lsl8_a(<4 x i16> %a) {
; CHECK-LABEL: bicimm4h_lsl8_a:
; CHECK: bic {{v[0-9]+}}.4h, #0x10, lsl #8
  %tmp1 = and <4 x i16> %a, <i16 61439, i16 61439, i16 61439, i16 61439>   ; 0xefff
  ret <4 x i16> %tmp1
}

define <4 x i16> @bicimm4h_lsl8_b(<4 x i16> %a) {
; CHECK-LABEL: bicimm4h_lsl8_b:
; CHECK: bic {{v[0-9]+}}.4h, #0x0, lsl #8
  %tmp1 = and <4 x i16> %a, <i16 255, i16 255, i16 255, i16 255>
  ret <4 x i16> %tmp1
}
; Shifted-immediate BIC for the 128-bit .8h form; constants normalized
; to in-range i16 values (same low 16 bits as the original literals).
define <8 x i16> @bicimm8h_lsl0_a(<8 x i16> %a) {
; CHECK-LABEL: bicimm8h_lsl0_a:
; CHECK: bic {{v[0-9]+}}.8h, #0x10
  %tmp1 = and <8 x i16> %a, <i16 65519, i16 65519, i16 65519, i16 65519,
                             i16 65519, i16 65519, i16 65519, i16 65519>   ; 0xffef
  ret <8 x i16> %tmp1
}

define <8 x i16> @bicimm8h_lsl0_b(<8 x i16> %a) {
; CHECK-LABEL: bicimm8h_lsl0_b:
; CHECK: bic {{v[0-9]+}}.8h, #0x0
  %tmp1 = and <8 x i16> %a, <i16 65280, i16 65280, i16 65280, i16 65280, i16 65280, i16 65280, i16 65280, i16 65280>
  ret <8 x i16> %tmp1
}

define <8 x i16> @bicimm8h_lsl8_a(<8 x i16> %a) {
; CHECK-LABEL: bicimm8h_lsl8_a:
; CHECK: bic {{v[0-9]+}}.8h, #0x10, lsl #8
  %tmp1 = and <8 x i16> %a, <i16 61439, i16 61439, i16 61439, i16 61439,
                             i16 61439, i16 61439, i16 61439, i16 61439>   ; 0xefff
  ret <8 x i16> %tmp1
}

define <8 x i16> @bicimm8h_lsl8_b(<8 x i16> %a) {
; CHECK-LABEL: bicimm8h_lsl8_b:
; CHECK: bic {{v[0-9]+}}.8h, #0x0, lsl #8
  %tmp1 = and <8 x i16> %a, <i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255, i16 255>
  ret <8 x i16> %tmp1
}
; AND on other 64-bit element layouts still uses the byte-wise .8b form.
define <2 x i32> @and2xi32(<2 x i32> %a, <2 x i32> %b) {
; CHECK-LABEL: and2xi32:
; CHECK: and {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = and <2 x i32> %a, %b
  ret <2 x i32> %tmp1
}

define <4 x i16> @and4xi16(<4 x i16> %a, <4 x i16> %b) {
; CHECK-LABEL: and4xi16:
; CHECK: and {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = and <4 x i16> %a, %b
  ret <4 x i16> %tmp1
}

define <1 x i64> @and1xi64(<1 x i64> %a, <1 x i64> %b) {
; CHECK-LABEL: and1xi64:
; CHECK: and {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = and <1 x i64> %a, %b
  ret <1 x i64> %tmp1
}
; AND on 128-bit layouts uses the byte-wise .16b form.
define <4 x i32> @and4xi32(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: and4xi32:
; CHECK: and {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = and <4 x i32> %a, %b
  ret <4 x i32> %tmp1
}

define <8 x i16> @and8xi16(<8 x i16> %a, <8 x i16> %b) {
; CHECK-LABEL: and8xi16:
; CHECK: and {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = and <8 x i16> %a, %b
  ret <8 x i16> %tmp1
}

define <2 x i64> @and2xi64(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: and2xi64:
; CHECK: and {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = and <2 x i64> %a, %b
  ret <2 x i64> %tmp1
}
; OR on other 64-bit element layouts uses the byte-wise .8b form.
define <2 x i32> @orr2xi32(<2 x i32> %a, <2 x i32> %b) {
; CHECK-LABEL: orr2xi32:
; CHECK: orr {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = or <2 x i32> %a, %b
  ret <2 x i32> %tmp1
}

define <4 x i16> @orr4xi16(<4 x i16> %a, <4 x i16> %b) {
; CHECK-LABEL: orr4xi16:
; CHECK: orr {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = or <4 x i16> %a, %b
  ret <4 x i16> %tmp1
}

define <1 x i64> @orr1xi64(<1 x i64> %a, <1 x i64> %b) {
; CHECK-LABEL: orr1xi64:
; CHECK: orr {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = or <1 x i64> %a, %b
  ret <1 x i64> %tmp1
}
; OR on 128-bit layouts uses the byte-wise .16b form.
define <4 x i32> @orr4xi32(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: orr4xi32:
; CHECK: orr {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = or <4 x i32> %a, %b
  ret <4 x i32> %tmp1
}

define <8 x i16> @orr8xi16(<8 x i16> %a, <8 x i16> %b) {
; CHECK-LABEL: orr8xi16:
; CHECK: orr {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = or <8 x i16> %a, %b
  ret <8 x i16> %tmp1
}

define <2 x i64> @orr2xi64(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: orr2xi64:
; CHECK: orr {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = or <2 x i64> %a, %b
  ret <2 x i64> %tmp1
}
; XOR on other 64-bit element layouts uses the byte-wise .8b form.
define <2 x i32> @eor2xi32(<2 x i32> %a, <2 x i32> %b) {
; CHECK-LABEL: eor2xi32:
; CHECK: eor {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <2 x i32> %a, %b
  ret <2 x i32> %tmp1
}

define <4 x i16> @eor4xi16(<4 x i16> %a, <4 x i16> %b) {
; CHECK-LABEL: eor4xi16:
; CHECK: eor {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <4 x i16> %a, %b
  ret <4 x i16> %tmp1
}

define <1 x i64> @eor1xi64(<1 x i64> %a, <1 x i64> %b) {
; CHECK-LABEL: eor1xi64:
; CHECK: eor {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <1 x i64> %a, %b
  ret <1 x i64> %tmp1
}
; XOR on 128-bit layouts uses the byte-wise .16b form.
define <4 x i32> @eor4xi32(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: eor4xi32:
; CHECK: eor {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <4 x i32> %a, %b
  ret <4 x i32> %tmp1
}

define <8 x i16> @eor8xi16(<8 x i16> %a, <8 x i16> %b) {
; CHECK-LABEL: eor8xi16:
; CHECK: eor {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <8 x i16> %a, %b
  ret <8 x i16> %tmp1
}

define <2 x i64> @eor2xi64(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: eor2xi64:
; CHECK: eor {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <2 x i64> %a, %b
  ret <2 x i64> %tmp1
}
; a & ~b fuses into BIC for the remaining 64-bit element layouts.
; (Original had "i16-1" with no space; normalized to "i16 -1".)
define <2 x i32> @bic2xi32(<2 x i32> %a, <2 x i32> %b) {
; CHECK-LABEL: bic2xi32:
; CHECK: bic {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <2 x i32> %b, <i32 -1, i32 -1>
  %tmp2 = and <2 x i32> %a, %tmp1
  ret <2 x i32> %tmp2
}

define <4 x i16> @bic4xi16(<4 x i16> %a, <4 x i16> %b) {
; CHECK-LABEL: bic4xi16:
; CHECK: bic {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <4 x i16> %b, <i16 -1, i16 -1, i16 -1, i16 -1>
  %tmp2 = and <4 x i16> %a, %tmp1
  ret <4 x i16> %tmp2
}

define <1 x i64> @bic1xi64(<1 x i64> %a, <1 x i64> %b) {
; CHECK-LABEL: bic1xi64:
; CHECK: bic {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <1 x i64> %b, <i64 -1>
  %tmp2 = and <1 x i64> %a, %tmp1
  ret <1 x i64> %tmp2
}
; a & ~b fuses into BIC for the 128-bit layouts.
define <4 x i32> @bic4xi32(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: bic4xi32:
; CHECK: bic {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <4 x i32> %b, <i32 -1, i32 -1, i32 -1, i32 -1>
  %tmp2 = and <4 x i32> %a, %tmp1
  ret <4 x i32> %tmp2
}

define <8 x i16> @bic8xi16(<8 x i16> %a, <8 x i16> %b) {
; CHECK-LABEL: bic8xi16:
; CHECK: bic {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <8 x i16> %b, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %tmp2 = and <8 x i16> %a, %tmp1
  ret <8 x i16> %tmp2
}

define <2 x i64> @bic2xi64(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: bic2xi64:
; CHECK: bic {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <2 x i64> %b, <i64 -1, i64 -1>
  %tmp2 = and <2 x i64> %a, %tmp1
  ret <2 x i64> %tmp2
}
; a | ~b fuses into ORN for the remaining 64-bit element layouts.
define <2 x i32> @orn2xi32(<2 x i32> %a, <2 x i32> %b) {
; CHECK-LABEL: orn2xi32:
; CHECK: orn {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <2 x i32> %b, <i32 -1, i32 -1>
  %tmp2 = or <2 x i32> %a, %tmp1
  ret <2 x i32> %tmp2
}

define <4 x i16> @orn4xi16(<4 x i16> %a, <4 x i16> %b) {
; CHECK-LABEL: orn4xi16:
; CHECK: orn {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <4 x i16> %b, <i16 -1, i16 -1, i16 -1, i16 -1>
  %tmp2 = or <4 x i16> %a, %tmp1
  ret <4 x i16> %tmp2
}

define <1 x i64> @orn1xi64(<1 x i64> %a, <1 x i64> %b) {
; CHECK-LABEL: orn1xi64:
; CHECK: orn {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = xor <1 x i64> %b, <i64 -1>
  %tmp2 = or <1 x i64> %a, %tmp1
  ret <1 x i64> %tmp2
}
; a | ~b fuses into ORN for the 128-bit layouts.
define <4 x i32> @orn4xi32(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: orn4xi32:
; CHECK: orn {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <4 x i32> %b, <i32 -1, i32 -1, i32 -1, i32 -1>
  %tmp2 = or <4 x i32> %a, %tmp1
  ret <4 x i32> %tmp2
}

define <8 x i16> @orn8xi16(<8 x i16> %a, <8 x i16> %b) {
; CHECK-LABEL: orn8xi16:
; CHECK: orn {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <8 x i16> %b, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %tmp2 = or <8 x i16> %a, %tmp1
  ret <8 x i16> %tmp2
}

define <2 x i64> @orn2xi64(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: orn2xi64:
; CHECK: orn {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = xor <2 x i64> %b, <i64 -1, i64 -1>
  %tmp2 = or <2 x i64> %a, %tmp1
  ret <2 x i64> %tmp2
}
; Constant-mask select patterns on 64-bit layouts should select "bsl".
define <2 x i32> @bsl2xi32_const(<2 x i32> %a, <2 x i32> %b) {
; CHECK-LABEL: bsl2xi32_const:
; CHECK: bsl {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = and <2 x i32> %a, <i32 -1, i32 -1>
  %tmp2 = and <2 x i32> %b, <i32 0, i32 0>
  %tmp3 = or <2 x i32> %tmp1, %tmp2
  ret <2 x i32> %tmp3
}

define <4 x i16> @bsl4xi16_const(<4 x i16> %a, <4 x i16> %b) {
; CHECK-LABEL: bsl4xi16_const:
; CHECK: bsl {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = and <4 x i16> %a, <i16 -1, i16 -1, i16 -1, i16 -1>
  %tmp2 = and <4 x i16> %b, <i16 0, i16 0, i16 0, i16 0>
  %tmp3 = or <4 x i16> %tmp1, %tmp2
  ret <4 x i16> %tmp3
}

define <1 x i64> @bsl1xi64_const(<1 x i64> %a, <1 x i64> %b) {
; CHECK-LABEL: bsl1xi64_const:
; CHECK: bsl {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %tmp1 = and <1 x i64> %a, <i64 -1>
  %tmp2 = and <1 x i64> %b, <i64 0>
  %tmp3 = or <1 x i64> %tmp1, %tmp2
  ret <1 x i64> %tmp3
}
; Constant-mask select patterns on 128-bit layouts should select "bsl".
define <4 x i32> @bsl4xi32_const(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: bsl4xi32_const:
; CHECK: bsl {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = and <4 x i32> %a, <i32 -1, i32 -1, i32 -1, i32 -1>
  %tmp2 = and <4 x i32> %b, <i32 0, i32 0, i32 0, i32 0>
  %tmp3 = or <4 x i32> %tmp1, %tmp2
  ret <4 x i32> %tmp3
}

define <8 x i16> @bsl8xi16_const(<8 x i16> %a, <8 x i16> %b) {
; CHECK-LABEL: bsl8xi16_const:
; CHECK: bsl {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = and <8 x i16> %a, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %tmp2 = and <8 x i16> %b, <i16 0, i16 0, i16 0, i16 0, i16 0, i16 0, i16 0, i16 0>
  %tmp3 = or <8 x i16> %tmp1, %tmp2
  ret <8 x i16> %tmp3
}

define <2 x i64> @bsl2xi64_const(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: bsl2xi64_const:
; CHECK: bsl {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %tmp1 = and <2 x i64> %a, <i64 -1, i64 -1>
  %tmp2 = and <2 x i64> %b, <i64 0, i64 0>
  %tmp3 = or <2 x i64> %tmp1, %tmp2
  ret <2 x i64> %tmp3
}
; The canonical three-operand select pattern (v1 & v2) | (~v1 & v3)
; should fuse into a single "bsl" for all 64-bit layouts.
define <8 x i8> @bsl8xi8(<8 x i8> %v1, <8 x i8> %v2, <8 x i8> %v3) {
; CHECK-LABEL: bsl8xi8:
; CHECK: bsl {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %1 = and <8 x i8> %v1, %v2
  %2 = xor <8 x i8> %v1, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %3 = and <8 x i8> %2, %v3
  %4 = or <8 x i8> %1, %3
  ret <8 x i8> %4
}

define <4 x i16> @bsl4xi16(<4 x i16> %v1, <4 x i16> %v2, <4 x i16> %v3) {
; CHECK-LABEL: bsl4xi16:
; CHECK: bsl {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %1 = and <4 x i16> %v1, %v2
  %2 = xor <4 x i16> %v1, <i16 -1, i16 -1, i16 -1, i16 -1>
  %3 = and <4 x i16> %2, %v3
  %4 = or <4 x i16> %1, %3
  ret <4 x i16> %4
}

define <2 x i32> @bsl2xi32(<2 x i32> %v1, <2 x i32> %v2, <2 x i32> %v3) {
; CHECK-LABEL: bsl2xi32:
; CHECK: bsl {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %1 = and <2 x i32> %v1, %v2
  %2 = xor <2 x i32> %v1, <i32 -1, i32 -1>
  %3 = and <2 x i32> %2, %v3
  %4 = or <2 x i32> %1, %3
  ret <2 x i32> %4
}

define <1 x i64> @bsl1xi64(<1 x i64> %v1, <1 x i64> %v2, <1 x i64> %v3) {
; CHECK-LABEL: bsl1xi64:
; CHECK: bsl {{v[0-9]+}}.8b, {{v[0-9]+}}.8b, {{v[0-9]+}}.8b
  %1 = and <1 x i64> %v1, %v2
  %2 = xor <1 x i64> %v1, <i64 -1>
  %3 = and <1 x i64> %2, %v3
  %4 = or <1 x i64> %1, %3
  ret <1 x i64> %4
}
; Same three-operand select pattern on 128-bit layouts fuses into "bsl".
define <16 x i8> @bsl16xi8(<16 x i8> %v1, <16 x i8> %v2, <16 x i8> %v3) {
; CHECK-LABEL: bsl16xi8:
; CHECK: bsl {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %1 = and <16 x i8> %v1, %v2
  %2 = xor <16 x i8> %v1, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %3 = and <16 x i8> %2, %v3
  %4 = or <16 x i8> %1, %3
  ret <16 x i8> %4
}

define <8 x i16> @bsl8xi16(<8 x i16> %v1, <8 x i16> %v2, <8 x i16> %v3) {
; CHECK-LABEL: bsl8xi16:
; CHECK: bsl {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %1 = and <8 x i16> %v1, %v2
  %2 = xor <8 x i16> %v1, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %3 = and <8 x i16> %2, %v3
  %4 = or <8 x i16> %1, %3
  ret <8 x i16> %4
}

define <4 x i32> @bsl4xi32(<4 x i32> %v1, <4 x i32> %v2, <4 x i32> %v3) {
; CHECK-LABEL: bsl4xi32:
; CHECK: bsl {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %1 = and <4 x i32> %v1, %v2
  %2 = xor <4 x i32> %v1, <i32 -1, i32 -1, i32 -1, i32 -1>
  %3 = and <4 x i32> %2, %v3
  %4 = or <4 x i32> %1, %3
  ret <4 x i32> %4
}

define <2 x i64> @bsl2xi64(<2 x i64> %v1, <2 x i64> %v2, <2 x i64> %v3) {
; CHECK-LABEL: bsl2xi64:
; CHECK: bsl {{v[0-9]+}}.16b, {{v[0-9]+}}.16b, {{v[0-9]+}}.16b
  %1 = and <2 x i64> %v1, %v2
  %2 = xor <2 x i64> %v1, <i64 -1, i64 -1>
  %3 = and <2 x i64> %2, %v3
  %4 = or <2 x i64> %1, %3
  ret <2 x i64> %4
}
; A byte-vector OR whose mask is really a per-halfword pattern should be
; re-interpreted and selected as the .4h/.8h immediate ORR form.
define <8 x i8> @orrimm8b_as_orrimm4h_lsl0(<8 x i8> %a) {
; CHECK-LABEL: orrimm8b_as_orrimm4h_lsl0:
; CHECK: orr {{v[0-9]+}}.4h, #0xff
  %val = or <8 x i8> %a, <i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0>
  ret <8 x i8> %val
}

define <8 x i8> @orrimm8b_as_orimm4h_lsl8(<8 x i8> %a) {
; CHECK-LABEL: orrimm8b_as_orimm4h_lsl8:
; CHECK: orr {{v[0-9]+}}.4h, #0xff, lsl #8
  %val = or <8 x i8> %a, <i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255>
  ret <8 x i8> %val
}

define <16 x i8> @orimm16b_as_orrimm8h_lsl0(<16 x i8> %a) {
; CHECK-LABEL: orimm16b_as_orrimm8h_lsl0:
; CHECK: orr {{v[0-9]+}}.8h, #0xff
  %val = or <16 x i8> %a, <i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0>
  ret <16 x i8> %val
}

define <16 x i8> @orimm16b_as_orrimm8h_lsl8(<16 x i8> %a) {
; CHECK-LABEL: orimm16b_as_orrimm8h_lsl8:
; CHECK: orr {{v[0-9]+}}.8h, #0xff, lsl #8
  %val = or <16 x i8> %a, <i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255, i8 0, i8 255>
  ret <16 x i8> %val
}