/*
 * Copyright (C) 2012 Intel Corporation
 * Author: Jim Kukunas <james.t.kukunas@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; version 2
 */
#if (defined(__i386__) || defined(__x86_64__)) && !defined(__arch_um__)

#include <linux/raid/pq.h>
#include "x86.h"
static int raid6_has_avx2(void)
{
	return boot_cpu_has(X86_FEATURE_AVX2) &&
		boot_cpu_has(X86_FEATURE_AVX);
}
static void raid6_2data_recov_avx2(int disks, size_t bytes, int faila,
		int failb, void **ptrs)
{
	u8 *p, *q, *dp, *dq;
	const u8 *pbmul;	/* P multiplier table for B data */
	const u8 *qmul;		/* Q multiplier table (for both) */
	const u8 x0f = 0x0f;

	p = (u8 *)ptrs[disks-2];
	q = (u8 *)ptrs[disks-1];
	/* Compute syndrome with zero for the missing data pages
	   Use the dead data pages as temporary storage for
	   delta p and delta q */
	dp = (u8 *)ptrs[faila];
	ptrs[faila] = (void *)raid6_empty_zero_page;
	ptrs[disks-2] = dp;
	dq = (u8 *)ptrs[failb];
	ptrs[failb] = (void *)raid6_empty_zero_page;
	ptrs[disks-1] = dq;
	raid6_call.gen_syndrome(disks, bytes, ptrs);
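	/*
	 * With the failed blocks replaced by zeroes, gen_syndrome() leaves
	 * the partial syndromes in dp and dq, so that
	 *   p ^ dp = Da ^ Db   and   q ^ dq = g^a*Da ^ g^b*Db
	 * in GF(2^8), where a = faila and b = failb.
	 */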
	/* Restore pointer table */
	ptrs[faila]   = dp;
	ptrs[failb]   = dq;
	ptrs[disks-2] = p;
	ptrs[disks-1] = q;
	/* Now, pick the proper data tables */
	pbmul = raid6_vgfmul[raid6_gfexi[failb-faila]];
	qmul  = raid6_vgfmul[raid6_gfinv[raid6_gfexp[faila] ^
		raid6_gfexp[failb]]];
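	/*
	 * Solving those two equations gives
	 *   Db = pbmul[p ^ dp] ^ qmul[q ^ dq]
	 *   Da = Db ^ (p ^ dp)
	 * where pbmul multiplies by 1/(g^(b-a) ^ 1) and qmul by
	 * 1/(g^a ^ g^b).  raid6_vgfmul stores each constant multiplier as a
	 * pair of 16-byte nibble lookup tables suitable for vpshufb.
	 */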
	asm volatile("vpbroadcastb %0, %%ymm7" : : "m" (x0f));	/* ymm7 = 0x0f nibble mask */
	asm volatile("vmovdqa %0, %%ymm1" : : "m" (q[0]));
	asm volatile("vmovdqa %0, %%ymm9" : : "m" (q[32]));
	asm volatile("vmovdqa %0, %%ymm0" : : "m" (p[0]));
	asm volatile("vmovdqa %0, %%ymm8" : : "m" (p[32]));
	asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (dq[0]));
	asm volatile("vpxor %0, %%ymm9, %%ymm9" : : "m" (dq[32]));
	asm volatile("vpxor %0, %%ymm0, %%ymm0" : : "m" (dp[0]));
	asm volatile("vpxor %0, %%ymm8, %%ymm8" : : "m" (dp[32]));
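	/* 1/9 = dq ^ q; 0/8 = dp ^ p (bytes 0..31 / 32..63) */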
	asm volatile("vbroadcasti128 %0, %%ymm4" : : "m" (qmul[0]));	/* qmul low-nibble table */
	asm volatile("vbroadcasti128 %0, %%ymm5" : : "m" (qmul[16]));	/* qmul high-nibble table */

	asm volatile("vpsraw $4, %ymm1, %ymm3");
	asm volatile("vpsraw $4, %ymm9, %ymm12");
	asm volatile("vpand %ymm7, %ymm1, %ymm1");
	asm volatile("vpand %ymm7, %ymm9, %ymm9");
	asm volatile("vpand %ymm7, %ymm3, %ymm3");
	asm volatile("vpand %ymm7, %ymm12, %ymm12");
	asm volatile("vpshufb %ymm9, %ymm4, %ymm14");
	asm volatile("vpshufb %ymm1, %ymm4, %ymm4");
	asm volatile("vpshufb %ymm12, %ymm5, %ymm15");
	asm volatile("vpshufb %ymm3, %ymm5, %ymm5");
	asm volatile("vpxor %ymm14, %ymm15, %ymm15");
	asm volatile("vpxor %ymm4, %ymm5, %ymm5");
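	/*
	 * 5  = qmul[q ^ dq]
	 * 15 = qmul[q[32] ^ dq[32]]
	 */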
	asm volatile("vbroadcasti128 %0, %%ymm4" : : "m" (pbmul[0]));	/* pbmul low-nibble table */
	asm volatile("vbroadcasti128 %0, %%ymm1" : : "m" (pbmul[16]));	/* pbmul high-nibble table */
	asm volatile("vpsraw $4, %ymm0, %ymm2");
	asm volatile("vpsraw $4, %ymm8, %ymm6");
	asm volatile("vpand %ymm7, %ymm0, %ymm3");
	asm volatile("vpand %ymm7, %ymm8, %ymm14");
	asm volatile("vpand %ymm7, %ymm2, %ymm2");
	asm volatile("vpand %ymm7, %ymm6, %ymm6");
	asm volatile("vpshufb %ymm14, %ymm4, %ymm12");
	asm volatile("vpshufb %ymm3, %ymm4, %ymm4");
	asm volatile("vpshufb %ymm6, %ymm1, %ymm13");
	asm volatile("vpshufb %ymm2, %ymm1, %ymm1");
	asm volatile("vpxor %ymm4, %ymm1, %ymm1");
	asm volatile("vpxor %ymm12, %ymm13, %ymm13");
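	/*
	 * 1  = pbmul[p ^ dp]
	 * 13 = pbmul[p[32] ^ dp[32]]
	 */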
	asm volatile("vpxor %ymm5, %ymm1, %ymm1");
	asm volatile("vpxor %ymm15, %ymm13, %ymm13");

	/*
	 * 1  = db = DQ
	 * 13 = db[32] = DQ[32]
	 */
	asm volatile("vmovdqa %%ymm1, %0" : "=m" (dq[0]));
	asm volatile("vmovdqa %%ymm13,%0" : "=m" (dq[32]));
	asm volatile("vpxor %ymm1, %ymm0, %ymm0");
	asm volatile("vpxor %ymm13, %ymm8, %ymm8");
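	/*
	 * 0 = da = DP
	 * 8 = da[32] = DP[32]
	 */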
	asm volatile("vmovdqa %%ymm0, %0" : "=m" (dp[0]));
	asm volatile("vmovdqa %%ymm8, %0" : "=m" (dp[32]));
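	/* Same computation, one 32-byte vector per pass with fewer ymm registers */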
	asm volatile("vmovdqa %0, %%ymm1" : : "m" (*q));
	asm volatile("vmovdqa %0, %%ymm0" : : "m" (*p));
	asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (*dq));
	asm volatile("vpxor %0, %%ymm0, %%ymm0" : : "m" (*dp));

	/* 1 = dq ^ q; 0 = dp ^ p */

	asm volatile("vbroadcasti128 %0, %%ymm4" : : "m" (qmul[0]));
	asm volatile("vbroadcasti128 %0, %%ymm5" : : "m" (qmul[16]));

	asm volatile("vpsraw $4, %ymm1, %ymm3");
	asm volatile("vpand %ymm7, %ymm1, %ymm1");
	asm volatile("vpand %ymm7, %ymm3, %ymm3");
	asm volatile("vpshufb %ymm1, %ymm4, %ymm4");
	asm volatile("vpshufb %ymm3, %ymm5, %ymm5");
	asm volatile("vpxor %ymm4, %ymm5, %ymm5");
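	/* 5 = qmul[q ^ dq] */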
	asm volatile("vbroadcasti128 %0, %%ymm4" : : "m" (pbmul[0]));
	asm volatile("vbroadcasti128 %0, %%ymm1" : : "m" (pbmul[16]));

	asm volatile("vpsraw $4, %ymm0, %ymm2");
	asm volatile("vpand %ymm7, %ymm0, %ymm3");
	asm volatile("vpand %ymm7, %ymm2, %ymm2");
	asm volatile("vpshufb %ymm3, %ymm4, %ymm4");
	asm volatile("vpshufb %ymm2, %ymm1, %ymm1");
	asm volatile("vpxor %ymm4, %ymm1, %ymm1");
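	/* 1 = pbmul[p ^ dp] */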
	asm volatile("vpxor %ymm5, %ymm1, %ymm1");
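	/* 1 = db = DQ */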
	asm volatile("vmovdqa %%ymm1, %0" : "=m" (dq[0]));

	asm volatile("vpxor %ymm1, %ymm0, %ymm0");	/* 0 = da = DP */
	asm volatile("vmovdqa %%ymm0, %0" : "=m" (dp[0]));
static void raid6_datap_recov_avx2(int disks, size_t bytes, int faila,
		void **ptrs)
{
	u8 *p, *q, *dq;
	const u8 *qmul;		/* Q multiplier table */
	const u8 x0f = 0x0f;

	p = (u8 *)ptrs[disks-2];
	q = (u8 *)ptrs[disks-1];
	/* Compute syndrome with zero for the missing data page
	   Use the dead data page as temporary storage for delta q */
	dq = (u8 *)ptrs[faila];
	ptrs[faila] = (void *)raid6_empty_zero_page;
	ptrs[disks-1] = dq;
	raid6_call.gen_syndrome(disks, bytes, ptrs);
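	/*
	 * p now holds the parity of the surviving data blocks (P ^ Da) and
	 * dq holds the Q syndrome computed with the missing block zeroed.
	 */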
	/* Restore pointer table */
	ptrs[faila]   = dq;
	ptrs[disks-1] = q;
	/* Now, pick the proper data tables */
	qmul  = raid6_vgfmul[raid6_gfinv[raid6_gfexp[faila]]];
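	/*
	 * q ^ dq = g^faila * Da, and qmul multiplies by g^(-faila), so
	 * Da = qmul[q ^ dq]; P is then repaired as p ^ Da.
	 */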
	asm volatile("vpbroadcastb %0, %%ymm7" : : "m" (x0f));	/* ymm7 = 0x0f nibble mask */
	asm volatile("vmovdqa %0, %%ymm3" : : "m" (dq[0]));
	asm volatile("vmovdqa %0, %%ymm8" : : "m" (dq[32]));
	asm volatile("vpxor %0, %%ymm3, %%ymm3" : : "m" (q[0]));
	asm volatile("vpxor %0, %%ymm8, %%ymm8" : : "m" (q[32]));
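	/* 3 = q ^ dq; 8 = q[32] ^ dq[32] */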
	asm volatile("vbroadcasti128 %0, %%ymm0" : : "m" (qmul[0]));	/* qmul low-nibble table */
	asm volatile("vmovapd %ymm0, %ymm13");
	asm volatile("vbroadcasti128 %0, %%ymm1" : : "m" (qmul[16]));	/* qmul high-nibble table */
	asm volatile("vmovapd %ymm1, %ymm14");
	asm volatile("vpsraw $4, %ymm3, %ymm6");
	asm volatile("vpsraw $4, %ymm8, %ymm12");
	asm volatile("vpand %ymm7, %ymm3, %ymm3");
	asm volatile("vpand %ymm7, %ymm8, %ymm8");
	asm volatile("vpand %ymm7, %ymm6, %ymm6");
	asm volatile("vpand %ymm7, %ymm12, %ymm12");
	asm volatile("vpshufb %ymm3, %ymm0, %ymm0");
	asm volatile("vpshufb %ymm8, %ymm13, %ymm13");
	asm volatile("vpshufb %ymm6, %ymm1, %ymm1");
	asm volatile("vpshufb %ymm12, %ymm14, %ymm14");
	asm volatile("vpxor %ymm0, %ymm1, %ymm1");
	asm volatile("vpxor %ymm13, %ymm14, %ymm14");

	/*
	 * 1  = qmul[q[0] ^ dq[0]]
	 * 14 = qmul[q[32] ^ dq[32]]
	 */
	asm volatile("vmovdqa %0, %%ymm2" : : "m" (p[0]));
	asm volatile("vmovdqa %0, %%ymm12" : : "m" (p[32]));
	asm volatile("vpxor %ymm1, %ymm2, %ymm2");
	asm volatile("vpxor %ymm14, %ymm12, %ymm12");

	/*
	 * 2  = p[0] ^ qmul[q[0] ^ dq[0]]
	 * 12 = p[32] ^ qmul[q[32] ^ dq[32]]
	 */

	asm volatile("vmovdqa %%ymm1, %0" : "=m" (dq[0]));
	asm volatile("vmovdqa %%ymm14, %0" : "=m" (dq[32]));
	asm volatile("vmovdqa %%ymm2, %0" : "=m" (p[0]));
	asm volatile("vmovdqa %%ymm12,%0" : "=m" (p[32]));
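	/* Same computation, one 32-byte vector per pass */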
	asm volatile("vmovdqa %0, %%ymm3" : : "m" (dq[0]));
	asm volatile("vpxor %0, %%ymm3, %%ymm3" : : "m" (q[0]));

	asm volatile("vbroadcasti128 %0, %%ymm0" : : "m" (qmul[0]));
	asm volatile("vbroadcasti128 %0, %%ymm1" : : "m" (qmul[16]));

	asm volatile("vpsraw $4, %ymm3, %ymm6");
	asm volatile("vpand %ymm7, %ymm3, %ymm3");
	asm volatile("vpand %ymm7, %ymm6, %ymm6");
	asm volatile("vpshufb %ymm3, %ymm0, %ymm0");
	asm volatile("vpshufb %ymm6, %ymm1, %ymm1");
	asm volatile("vpxor %ymm0, %ymm1, %ymm1");

	/* 1 = qmul[q ^ dq] */
	asm volatile("vmovdqa %0, %%ymm2" : : "m" (p[0]));
	asm volatile("vpxor %ymm1, %ymm2, %ymm2");

	/* 2 = p ^ qmul[q ^ dq] */

	asm volatile("vmovdqa %%ymm1, %0" : "=m" (dq[0]));
	asm volatile("vmovdqa %%ymm2, %0" : "=m" (p[0]));
const struct raid6_recov_calls raid6_recov_avx2 = {
	.data2 = raid6_2data_recov_avx2,
	.datap = raid6_datap_recov_avx2,
	.valid = raid6_has_avx2,
};
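/*
 * The raid6 core picks among the registered raid6_recov_calls
 * implementations at runtime, so these routines are only used when
 * ->valid() reports AVX2 support.
 */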
#else
#warning "your version of binutils lacks AVX2 support"
#endif