// Cacheability support ops
let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
def int_x86_sse_prefetch : GCCBuiltin<"__builtin_ia32_prefetch">,
-            Intrinsic<[llvm_ptr_ty, llvm_int_ty], [IntrWriteMem]>;
+            Intrinsic<[llvm_void_ty, llvm_ptr_ty,
+                       llvm_int_ty], [IntrWriteMem]>;
def int_x86_sse_movnt_ps : GCCBuiltin<"__builtin_ia32_movntps">,
            Intrinsic<[llvm_void_ty, llvm_ptr_ty,
                       llvm_v4f32_ty], [IntrWriteMem]>;
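For reference, a call to the prefetch intrinsic with the corrected void return type would look roughly as follows from IR. This is only a sketch: the textual syntax shown is the later .ll form, the function @warm and its pointer argument are hypothetical, and the name llvm.x86.sse.prefetch is inferred from the usual int_x86_sse_prefetch naming convention noted above.

; sketch only; hint value 1 corresponds to the PREFETCHT0 pattern below
declare void @llvm.x86.sse.prefetch(i8*, i32)

define void @warm(i8* %p) {
  call void @llvm.x86.sse.prefetch(i8* %p, i32 1)
  ret void
}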
// Prefetching loads
-def PREFETCHT0 : I<0x18, MRM1m, (ops i8mem:$src),
-                   "prefetcht0 $src", []>, TB,
-                 Requires<[HasSSE1]>;
-def PREFETCHT1 : I<0x18, MRM2m, (ops i8mem:$src),
-                   "prefetcht0 $src", []>, TB,
-                 Requires<[HasSSE1]>;
-def PREFETCHT2 : I<0x18, MRM3m, (ops i8mem:$src),
-                   "prefetcht0 $src", []>, TB,
-                 Requires<[HasSSE1]>;
-def PREFETCHTNTA : I<0x18, MRM0m, (ops i8mem:$src),
-                     "prefetcht0 $src", []>, TB,
-                   Requires<[HasSSE1]>;
+def PREFETCHT0 : PSI<0x18, MRM1m, (ops i8mem:$src),
+                     "prefetcht0 $src",
+                     [(int_x86_sse_prefetch addr:$src, 1)]>;
+def PREFETCHT1 : PSI<0x18, MRM2m, (ops i8mem:$src),
+                     "prefetcht1 $src",
+                     [(int_x86_sse_prefetch addr:$src, 2)]>;
+def PREFETCHT2 : PSI<0x18, MRM3m, (ops i8mem:$src),
+                     "prefetcht2 $src",
+                     [(int_x86_sse_prefetch addr:$src, 3)]>;
+def PREFETCHTNTA : PSI<0x18, MRM0m, (ops i8mem:$src),
+                       "prefetchtnta $src",
+                       [(int_x86_sse_prefetch addr:$src, 0)]>;
// Non-temporal stores
def MOVNTPSmr : PSI<0x2B, MRMDestMem, (ops i128mem:$dst, VR128:$src),
                    "movntps {$src, $dst|$dst, $src}",
                    [(int_x86_sse_movnt_ps addr:$dst, VR128:$src)]>;
// Store fence
def SFENCE : I<0xAE, MRM7m, (ops),
- "sfence", []>, TB, Requires<[HasSSE1]>;
+ "sfence", [(int_x86_sse_sfence)]>, TB, Requires<[HasSSE1]>;
// MXCSR register
def LDMXCSR : I<0xAE, MRM2m, (ops i32mem:$src),
                "ldmxcsr $src", []>, TB, Requires<[HasSSE1]>;