1 ; RUN: llc -mtriple=aarch64-none-linux-gnu -mattr=+neon < %s | FileCheck %s
3 ;; Scalar Integer Compare
define i64 @test_vceqd(i64 %a, i64 %b) {
; CHECK: test_vceqd
; CHECK: cmeq {{d[0-9]+}}, {{d[0-9]}}, {{d[0-9]}}
entry:
  %vceq.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vceq1.i = insertelement <1 x i64> undef, i64 %b, i32 0
  %vceq2.i = call <1 x i64> @llvm.aarch64.neon.vceq.v1i64.v1i64.v1i64(<1 x i64> %vceq.i, <1 x i64> %vceq1.i)
  %0 = extractelement <1 x i64> %vceq2.i, i32 0
  ret i64 %0
}
define i64 @test_vceqzd(i64 %a) {
; CHECK: test_vceqzd
; CHECK: cmeq {{d[0-9]}}, {{d[0-9]}}, #0x0
entry:
  %vceqz.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vceqz1.i = call <1 x i64> @llvm.aarch64.neon.vceq.v1i64.v1i64.v1i64(<1 x i64> %vceqz.i, <1 x i64> zeroinitializer)
  %0 = extractelement <1 x i64> %vceqz1.i, i32 0
  ret i64 %0
}
define i64 @test_vcged(i64 %a, i64 %b) {
; CHECK: test_vcged
; CHECK: cmge {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
entry:
  %vcge.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vcge1.i = insertelement <1 x i64> undef, i64 %b, i32 0
  %vcge2.i = call <1 x i64> @llvm.aarch64.neon.vcge.v1i64.v1i64.v1i64(<1 x i64> %vcge.i, <1 x i64> %vcge1.i)
  %0 = extractelement <1 x i64> %vcge2.i, i32 0
  ret i64 %0
}
define i64 @test_vcgezd(i64 %a) {
; CHECK: test_vcgezd
; CHECK: cmge {{d[0-9]}}, {{d[0-9]}}, #0x0
entry:
  %vcgez.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vcgez1.i = call <1 x i64> @llvm.aarch64.neon.vcge.v1i64.v1i64.v1i64(<1 x i64> %vcgez.i, <1 x i64> zeroinitializer)
  %0 = extractelement <1 x i64> %vcgez1.i, i32 0
  ret i64 %0
}
define i64 @test_vcgtd(i64 %a, i64 %b) {
; CHECK: test_vcgtd
; CHECK: cmgt {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
entry:
  %vcgt.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vcgt1.i = insertelement <1 x i64> undef, i64 %b, i32 0
  %vcgt2.i = call <1 x i64> @llvm.aarch64.neon.vcgt.v1i64.v1i64.v1i64(<1 x i64> %vcgt.i, <1 x i64> %vcgt1.i)
  %0 = extractelement <1 x i64> %vcgt2.i, i32 0
  ret i64 %0
}
define i64 @test_vcgtzd(i64 %a) {
; CHECK: test_vcgtzd
; CHECK: cmgt {{d[0-9]}}, {{d[0-9]}}, #0x0
entry:
  %vcgtz.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vcgtz1.i = call <1 x i64> @llvm.aarch64.neon.vcgt.v1i64.v1i64.v1i64(<1 x i64> %vcgtz.i, <1 x i64> zeroinitializer)
  %0 = extractelement <1 x i64> %vcgtz1.i, i32 0
  ret i64 %0
}
; a <= b is implemented as b > a, so operands are swapped before calling vcgt.
define i64 @test_vcled(i64 %a, i64 %b) {
; CHECK: test_vcled
; CHECK: cmgt {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
entry:
  %vcgt.i = insertelement <1 x i64> undef, i64 %b, i32 0
  %vcgt1.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vcgt2.i = call <1 x i64> @llvm.aarch64.neon.vcgt.v1i64.v1i64.v1i64(<1 x i64> %vcgt.i, <1 x i64> %vcgt1.i)
  %0 = extractelement <1 x i64> %vcgt2.i, i32 0
  ret i64 %0
}
define i64 @test_vclezd(i64 %a) {
; CHECK: test_vclezd
; CHECK: cmle {{d[0-9]}}, {{d[0-9]}}, #0x0
entry:
  %vclez.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vclez1.i = call <1 x i64> @llvm.aarch64.neon.vclez.v1i64.v1i64.v1i64(<1 x i64> %vclez.i, <1 x i64> zeroinitializer)
  %0 = extractelement <1 x i64> %vclez1.i, i32 0
  ret i64 %0
}
; a < b is implemented as b >= a, so operands are swapped before calling vcge.
define i64 @test_vcltd(i64 %a, i64 %b) {
; CHECK: test_vcltd
; CHECK: cmge {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
entry:
  %vcge.i = insertelement <1 x i64> undef, i64 %b, i32 0
  %vcge1.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vcge2.i = call <1 x i64> @llvm.aarch64.neon.vcge.v1i64.v1i64.v1i64(<1 x i64> %vcge.i, <1 x i64> %vcge1.i)
  %0 = extractelement <1 x i64> %vcge2.i, i32 0
  ret i64 %0
}
define i64 @test_vcltzd(i64 %a) {
; CHECK: test_vcltzd
; CHECK: cmlt {{d[0-9]}}, {{d[0-9]}}, #0x0
entry:
  %vcltz.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vcltz1.i = call <1 x i64> @llvm.aarch64.neon.vcltz.v1i64.v1i64.v1i64(<1 x i64> %vcltz.i, <1 x i64> zeroinitializer)
  %0 = extractelement <1 x i64> %vcltz1.i, i32 0
  ret i64 %0
}
define i64 @test_vtstd(i64 %a, i64 %b) {
; CHECK: test_vtstd
; CHECK: cmtst {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
entry:
  %vtst.i = insertelement <1 x i64> undef, i64 %a, i32 0
  %vtst1.i = insertelement <1 x i64> undef, i64 %b, i32 0
  %vtst2.i = call <1 x i64> @llvm.aarch64.neon.vtstd.v1i64.v1i64.v1i64(<1 x i64> %vtst.i, <1 x i64> %vtst1.i)
  %0 = extractelement <1 x i64> %vtst2.i, i32 0
  ret i64 %0
}
define <1 x i64> @test_vcage_f64(<1 x double> %a, <1 x double> %b) #0 {
; CHECK: test_vcage_f64
; CHECK: facge {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %vcage2.i = tail call <1 x i64> @llvm.arm.neon.vacge.v1i64.v1f64(<1 x double> %a, <1 x double> %b) #2
  ret <1 x i64> %vcage2.i
}
define <1 x i64> @test_vcagt_f64(<1 x double> %a, <1 x double> %b) #0 {
; CHECK: test_vcagt_f64
; CHECK: facgt {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %vcagt2.i = tail call <1 x i64> @llvm.arm.neon.vacgt.v1i64.v1f64(<1 x double> %a, <1 x double> %b) #2
  ret <1 x i64> %vcagt2.i
}
; |a| <= |b| is implemented as |b| >= |a|: operands swapped into vacge.
define <1 x i64> @test_vcale_f64(<1 x double> %a, <1 x double> %b) #0 {
; CHECK: test_vcale_f64
; CHECK: facge {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %vcage2.i = tail call <1 x i64> @llvm.arm.neon.vacge.v1i64.v1f64(<1 x double> %b, <1 x double> %a) #2
  ret <1 x i64> %vcage2.i
}
; |a| < |b| is implemented as |b| > |a|: operands swapped into vacgt.
define <1 x i64> @test_vcalt_f64(<1 x double> %a, <1 x double> %b) #0 {
; CHECK: test_vcalt_f64
; CHECK: facgt {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %vcagt2.i = tail call <1 x i64> @llvm.arm.neon.vacgt.v1i64.v1f64(<1 x double> %b, <1 x double> %a) #2
  ret <1 x i64> %vcagt2.i
}
define <1 x i64> @test_vceq_s64(<1 x i64> %a, <1 x i64> %b) #0 {
; CHECK: test_vceq_s64
; CHECK: cmeq {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = icmp eq <1 x i64> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vceq_u64(<1 x i64> %a, <1 x i64> %b) #0 {
; CHECK: test_vceq_u64
; CHECK: cmeq {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = icmp eq <1 x i64> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vceq_f64(<1 x double> %a, <1 x double> %b) #0 {
; CHECK: test_vceq_f64
; CHECK: fcmeq {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = fcmp oeq <1 x double> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vcge_s64(<1 x i64> %a, <1 x i64> %b) #0 {
; CHECK: test_vcge_s64
; CHECK: cmge {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = icmp sge <1 x i64> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vcge_u64(<1 x i64> %a, <1 x i64> %b) #0 {
; CHECK: test_vcge_u64
; CHECK: cmhs {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = icmp uge <1 x i64> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vcge_f64(<1 x double> %a, <1 x double> %b) #0 {
; CHECK: test_vcge_f64
; CHECK: fcmge {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = fcmp oge <1 x double> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vcle_s64(<1 x i64> %a, <1 x i64> %b) #0 {
; CHECK: test_vcle_s64
; CHECK: cmge {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = icmp sle <1 x i64> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vcle_u64(<1 x i64> %a, <1 x i64> %b) #0 {
; CHECK: test_vcle_u64
; CHECK: cmhs {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = icmp ule <1 x i64> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vcle_f64(<1 x double> %a, <1 x double> %b) #0 {
; CHECK: test_vcle_f64
; CHECK: fcmge {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = fcmp ole <1 x double> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vcgt_s64(<1 x i64> %a, <1 x i64> %b) #0 {
; CHECK: test_vcgt_s64
; CHECK: cmgt {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = icmp sgt <1 x i64> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vcgt_u64(<1 x i64> %a, <1 x i64> %b) #0 {
; CHECK: test_vcgt_u64
; CHECK: cmhi {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = icmp ugt <1 x i64> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vcgt_f64(<1 x double> %a, <1 x double> %b) #0 {
; CHECK: test_vcgt_f64
; CHECK: fcmgt {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = fcmp ogt <1 x double> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vclt_s64(<1 x i64> %a, <1 x i64> %b) #0 {
; CHECK: test_vclt_s64
; CHECK: cmgt {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = icmp slt <1 x i64> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vclt_u64(<1 x i64> %a, <1 x i64> %b) #0 {
; CHECK: test_vclt_u64
; CHECK: cmhi {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = icmp ult <1 x i64> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vclt_f64(<1 x double> %a, <1 x double> %b) #0 {
; CHECK: test_vclt_f64
; CHECK: fcmgt {{d[0-9]}}, {{d[0-9]}}, {{d[0-9]}}
  %cmp.i = fcmp olt <1 x double> %a, %b
  %sext.i = sext <1 x i1> %cmp.i to <1 x i64>
  ret <1 x i64> %sext.i
}
define <1 x i64> @test_vceqz_s64(<1 x i64> %a) #0 {
; CHECK: test_vceqz_s64
; CHECK: cmeq {{d[0-9]}}, {{d[0-9]}}, #0x0
  %1 = icmp eq <1 x i64> %a, zeroinitializer
  %vceqz.i = sext <1 x i1> %1 to <1 x i64>
  ret <1 x i64> %vceqz.i
}
define <1 x i64> @test_vceqz_u64(<1 x i64> %a) #0 {
; CHECK: test_vceqz_u64
; CHECK: cmeq {{d[0-9]}}, {{d[0-9]}}, #0x0
  %1 = icmp eq <1 x i64> %a, zeroinitializer
  %vceqz.i = sext <1 x i1> %1 to <1 x i64>
  ret <1 x i64> %vceqz.i
}
define <1 x i64> @test_vceqz_p64(<1 x i64> %a) #0 {
; CHECK: test_vceqz_p64
; CHECK: cmeq {{d[0-9]}}, {{d[0-9]}}, #0x0
  %1 = icmp eq <1 x i64> %a, zeroinitializer
  %vceqz.i = sext <1 x i1> %1 to <1 x i64>
  ret <1 x i64> %vceqz.i
}
define <2 x i64> @test_vceqzq_p64(<2 x i64> %a) #0 {
; CHECK: test_vceqzq_p64
; CHECK: cmeq {{v[0-9]}}.2d, {{v[0-9]}}.2d, #0
  %1 = icmp eq <2 x i64> %a, zeroinitializer
  %vceqz.i = sext <2 x i1> %1 to <2 x i64>
  ret <2 x i64> %vceqz.i
}
define <1 x i64> @test_vcgez_s64(<1 x i64> %a) #0 {
; CHECK: test_vcgez_s64
; CHECK: cmge {{d[0-9]}}, {{d[0-9]}}, #0x0
  %1 = icmp sge <1 x i64> %a, zeroinitializer
  %vcgez.i = sext <1 x i1> %1 to <1 x i64>
  ret <1 x i64> %vcgez.i
}
define <1 x i64> @test_vclez_s64(<1 x i64> %a) #0 {
; CHECK: test_vclez_s64
; CHECK: cmle {{d[0-9]}}, {{d[0-9]}}, #0x0
  %1 = icmp sle <1 x i64> %a, zeroinitializer
  %vclez.i = sext <1 x i1> %1 to <1 x i64>
  ret <1 x i64> %vclez.i
}
define <1 x i64> @test_vcgtz_s64(<1 x i64> %a) #0 {
; CHECK: test_vcgtz_s64
; CHECK: cmgt {{d[0-9]}}, {{d[0-9]}}, #0x0
  %1 = icmp sgt <1 x i64> %a, zeroinitializer
  %vcgtz.i = sext <1 x i1> %1 to <1 x i64>
  ret <1 x i64> %vcgtz.i
}
define <1 x i64> @test_vcltz_s64(<1 x i64> %a) #0 {
; CHECK: test_vcltz_s64
; CHECK: cmlt {{d[0-9]}}, {{d[0-9]}}, #0
  %1 = icmp slt <1 x i64> %a, zeroinitializer
  %vcltz.i = sext <1 x i1> %1 to <1 x i64>
  ret <1 x i64> %vcltz.i
}
334 declare <1 x i64> @llvm.arm.neon.vacgt.v1i64.v1f64(<1 x double>, <1 x double>)
335 declare <1 x i64> @llvm.arm.neon.vacge.v1i64.v1f64(<1 x double>, <1 x double>)
336 declare <1 x i64> @llvm.aarch64.neon.vtstd.v1i64.v1i64.v1i64(<1 x i64>, <1 x i64>)
337 declare <1 x i64> @llvm.aarch64.neon.vcltz.v1i64.v1i64.v1i64(<1 x i64>, <1 x i64>)
338 declare <1 x i64> @llvm.aarch64.neon.vchs.v1i64.v1i64.v1i64(<1 x i64>, <1 x i64>)
339 declare <1 x i64> @llvm.aarch64.neon.vcge.v1i64.v1i64.v1i64(<1 x i64>, <1 x i64>)
340 declare <1 x i64> @llvm.aarch64.neon.vclez.v1i64.v1i64.v1i64(<1 x i64>, <1 x i64>)
341 declare <1 x i64> @llvm.aarch64.neon.vchi.v1i64.v1i64.v1i64(<1 x i64>, <1 x i64>)
342 declare <1 x i64> @llvm.aarch64.neon.vcgt.v1i64.v1i64.v1i64(<1 x i64>, <1 x i64>)
343 declare <1 x i64> @llvm.aarch64.neon.vceq.v1i64.v1i64.v1i64(<1 x i64>, <1 x i64>)