1 ; RUN: llvm-upgrade < %s | llvm-as | llvm-dis > %t1.ll
2 ; RUN: llvm-as %t1.ll -o - | llvm-dis > %t2.ll
3 ; RUN: diff %t1.ll %t2.ll
; Declarations of the LLVM intrinsics under test, in pre-2.0 syntax
; (%-prefixed globals; bool/sbyte/ubyte/ushort/uint/ulong type names).
; Unordered FP compare: one overload per floating-point width.
declare bool %llvm.isunordered.f32(float,float)
declare bool %llvm.isunordered.f64(double,double)
; Prefetch hint; takes an address plus two uint arguments
; (presumably the rw specifier and locality hint — confirm against
; the LLVM release this test targets).
declare void %llvm.prefetch(sbyte*, uint, uint)
; Population count (number of set bits); result type matches the operand type.
declare ubyte %llvm.ctpop.i8(ubyte)
declare ushort %llvm.ctpop.i16(ushort)
declare uint %llvm.ctpop.i32(uint)
declare ulong %llvm.ctpop.i64(ulong)
; Count trailing zeros; one overload per integer width.
declare ubyte %llvm.cttz.i8(ubyte)
declare ushort %llvm.cttz.i16(ushort)
declare uint %llvm.cttz.i32(uint)
declare ulong %llvm.cttz.i64(ulong)
; Count leading zeros; one overload per integer width.
declare ubyte %llvm.ctlz.i8(ubyte)
declare ushort %llvm.ctlz.i16(ushort)
declare uint %llvm.ctlz.i32(uint)
declare ulong %llvm.ctlz.i64(ulong)
; Square root; one overload per floating-point width.
declare float %llvm.sqrt.f32(float)
declare double %llvm.sqrt.f64(double)
31 ; Test llvm intrinsics
; NOTE(review): these calls are the body of a test function whose header,
; terminator, and closing brace are not visible in this chunk (the original
; file's intervening lines were elided); do not treat this run of calls as
; free-standing top-level code.
; All results are intentionally discarded — the test only checks that each
; intrinsic call survives the upgrade/assemble/disassemble round trip.
call bool %llvm.isunordered.f32(float 1.0, float 2.0)
call bool %llvm.isunordered.f64(double 3.0, double 4.0)
; Prefetch of a null pointer with uint arguments 1 and 3.
call void %llvm.prefetch(sbyte* null, uint 1, uint 3)
call float %llvm.sqrt.f32(float 5.0)
call double %llvm.sqrt.f64(double 6.0)
; ctpop at each integer width, distinct constants so the calls stay unique.
call ubyte %llvm.ctpop.i8(ubyte 10)
call ushort %llvm.ctpop.i16(ushort 11)
call uint %llvm.ctpop.i32(uint 12)
call ulong %llvm.ctpop.i64(ulong 13)
; ctlz at each integer width.
call ubyte %llvm.ctlz.i8(ubyte 14)
call ushort %llvm.ctlz.i16(ushort 15)
call uint %llvm.ctlz.i32(uint 16)
call ulong %llvm.ctlz.i64(ulong 17)
; cttz at each integer width.
call ubyte %llvm.cttz.i8(ubyte 18)
call ushort %llvm.cttz.i16(ushort 19)
call uint %llvm.cttz.i32(uint 20)
call ulong %llvm.cttz.i64(ulong 21)
59 ; FIXME: test ALL the intrinsics in this file.