X86AddressMode &AM,
MachineMemOperand *MMO, bool Aligned) {
bool HasSSE2 = Subtarget->hasSSE2();
+ bool HasSSE4A = Subtarget->hasSSE4A();
bool HasAVX = Subtarget->hasAVX();
bool IsNonTemporal = MMO && MMO->isNonTemporal();
Opc = (IsNonTemporal && HasSSE2) ? X86::MOVNTI_64mr : X86::MOV64mr;
break;
case MVT::f32:
- Opc = X86ScalarSSEf32 ?
- (HasAVX ? X86::VMOVSSmr : X86::MOVSSmr) : X86::ST_Fp32m;
+ if (X86ScalarSSEf32) {
+ if (IsNonTemporal && HasSSE4A)
+ Opc = X86::MOVNTSS;
+ else
+ Opc = HasAVX ? X86::VMOVSSmr : X86::MOVSSmr;
+ } else
+ Opc = X86::ST_Fp32m;
break;
case MVT::f64:
- Opc = X86ScalarSSEf64 ?
- (HasAVX ? X86::VMOVSDmr : X86::MOVSDmr) : X86::ST_Fp64m;
+ // Use SSE4A's scalar non-temporal store when available; otherwise fall
+ // back to the regular (V)MOVSD store, or x87 ST_Fp64m without SSE2.
+ if (X86ScalarSSEf64) {
+ if (IsNonTemporal && HasSSE4A)
+ Opc = X86::MOVNTSD;
+ else
+ Opc = HasAVX ? X86::VMOVSDmr : X86::MOVSDmr;
+ } else
+ Opc = X86::ST_Fp64m;
break;
case MVT::v4f32:
if (Aligned) {
-; RUN: llc -mtriple=x86_64-unknown-unknown -mattr=+sse2 -fast-isel -O0 < %s | FileCheck %s --check-prefix=ALL --check-prefix=SSE2
+; RUN: llc -mtriple=x86_64-unknown-unknown -mattr=+sse2 -fast-isel -O0 < %s | FileCheck %s --check-prefix=ALL --check-prefix=SSE --check-prefix=SSE2
+; RUN: llc -mtriple=x86_64-unknown-unknown -mattr=+sse4a -fast-isel -O0 < %s | FileCheck %s --check-prefix=ALL --check-prefix=SSE --check-prefix=SSE4A
; RUN: llc -mtriple=x86_64-unknown-unknown -mattr=+avx -fast-isel -O0 < %s | FileCheck %s --check-prefix=ALL --check-prefix=AVX
define void @test_nti32(i32* nocapture %ptr, i32 %X) {
ret void
}
-define void @test_nt4xfloat(<4 x float>* nocapture %ptr, <4 x float> %X) {
-; SSE2-LABEL: test_nt4xfloat:
+define void @test_ntfloat(float* nocapture %ptr, float %X) {
+; SSE2-LABEL: test_ntfloat:
+; SSE2: # BB#0: # %entry
+; SSE2-NEXT: movss %xmm0, (%rdi)
+; SSE2-NEXT: retq
+;
+; SSE4A-LABEL: test_ntfloat:
+; SSE4A: # BB#0: # %entry
+; SSE4A-NEXT: movntss %xmm0, (%rdi)
+; SSE4A-NEXT: retq
+;
+; AVX-LABEL: test_ntfloat:
+; AVX: # BB#0: # %entry
+; AVX-NEXT: vmovss %xmm0, (%rdi)
+; AVX-NEXT: retq
+entry:
+ store float %X, float* %ptr, align 4, !nontemporal !1
+ ret void
+}
+
+define void @test_ntdouble(double* nocapture %ptr, double %X) {
+; SSE2-LABEL: test_ntdouble:
; SSE2: # BB#0: # %entry
-; SSE2-NEXT: movntps %xmm0, (%rdi)
+; SSE2-NEXT: movsd %xmm0, (%rdi)
; SSE2-NEXT: retq
;
+; SSE4A-LABEL: test_ntdouble:
+; SSE4A: # BB#0: # %entry
+; SSE4A-NEXT: movntsd %xmm0, (%rdi)
+; SSE4A-NEXT: retq
+;
+; AVX-LABEL: test_ntdouble:
+; AVX: # BB#0: # %entry
+; AVX-NEXT: vmovsd %xmm0, (%rdi)
+; AVX-NEXT: retq
+entry:
+ store double %X, double* %ptr, align 8, !nontemporal !1
+ ret void
+}
+
+define void @test_nt4xfloat(<4 x float>* nocapture %ptr, <4 x float> %X) {
+; SSE-LABEL: test_nt4xfloat:
+; SSE: # BB#0: # %entry
+; SSE-NEXT: movntps %xmm0, (%rdi)
+; SSE-NEXT: retq
+;
; AVX-LABEL: test_nt4xfloat:
; AVX: # BB#0: # %entry
; AVX-NEXT: vmovntps %xmm0, (%rdi)
}
define void @test_nt2xdouble(<2 x double>* nocapture %ptr, <2 x double> %X) {
-; SSE2-LABEL: test_nt2xdouble:
-; SSE2: # BB#0: # %entry
-; SSE2-NEXT: movntpd %xmm0, (%rdi)
-; SSE2-NEXT: retq
+; SSE-LABEL: test_nt2xdouble:
+; SSE: # BB#0: # %entry
+; SSE-NEXT: movntpd %xmm0, (%rdi)
+; SSE-NEXT: retq
;
; AVX-LABEL: test_nt2xdouble:
; AVX: # BB#0: # %entry
}
define void @test_nt2xi64(<2 x i64>* nocapture %ptr, <2 x i64> %X) {
-; SSE2-LABEL: test_nt2xi64:
-; SSE2: # BB#0: # %entry
-; SSE2-NEXT: movntdq %xmm0, (%rdi)
-; SSE2-NEXT: retq
+; SSE-LABEL: test_nt2xi64:
+; SSE: # BB#0: # %entry
+; SSE-NEXT: movntdq %xmm0, (%rdi)
+; SSE-NEXT: retq
;
; AVX-LABEL: test_nt2xi64:
; AVX: # BB#0: # %entry