/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
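
/*
 * Illustrative sketch (not a literal excerpt): using the desc_constr.h
 * helpers seen throughout this file, a per-packet job descriptor that
 * reuses a pre-built shared descriptor is assembled roughly as:
 *
 *	len = desc_len(sh_desc);
 *	init_job_desc_shared(desc, sh_desc_dma, len,
 *			     HDR_SHARE_DEFER | HDR_REVERSE);
 *	append_seq_in_ptr(desc, src_dma, in_len, in_options);
 *	append_seq_out_ptr(desc, dst_dma, out_len, out_options);
 *
 * This mirrors what init_aead_job() further below actually does.
 */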
#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)
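/*
 * Worked out: AES_MAX_KEY_SIZE (32) + CTR_RFC3686_NONCE_SIZE (4) +
 * SHA512_DIGEST_SIZE * 2 (128) gives a 164-byte ctx->key buffer, enough
 * for the largest padded split key plus the largest encryption key.
 */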
/* max IV is max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
#define CAAM_MAX_IV_LENGTH		16
#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)
/* length of descriptors text */
#define DESC_AEAD_BASE			(4 * CAAM_CMD_SZ)
#define DESC_AEAD_ENC_LEN		(DESC_AEAD_BASE + 11 * CAAM_CMD_SZ)
#define DESC_AEAD_DEC_LEN		(DESC_AEAD_BASE + 15 * CAAM_CMD_SZ)
#define DESC_AEAD_GIVENC_LEN		(DESC_AEAD_ENC_LEN + 9 * CAAM_CMD_SZ)

/* Note: Nonce is counted in enckeylen */
#define DESC_AEAD_CTR_RFC3686_LEN	(4 * CAAM_CMD_SZ)

#define DESC_AEAD_NULL_BASE		(3 * CAAM_CMD_SZ)
#define DESC_AEAD_NULL_ENC_LEN		(DESC_AEAD_NULL_BASE + 11 * CAAM_CMD_SZ)
#define DESC_AEAD_NULL_DEC_LEN		(DESC_AEAD_NULL_BASE + 13 * CAAM_CMD_SZ)

#define DESC_GCM_BASE			(3 * CAAM_CMD_SZ)
#define DESC_GCM_ENC_LEN		(DESC_GCM_BASE + 16 * CAAM_CMD_SZ)
#define DESC_GCM_DEC_LEN		(DESC_GCM_BASE + 12 * CAAM_CMD_SZ)

#define DESC_RFC4106_BASE		(3 * CAAM_CMD_SZ)
#define DESC_RFC4106_ENC_LEN		(DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
#define DESC_RFC4106_DEC_LEN		(DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)

#define DESC_RFC4543_BASE		(3 * CAAM_CMD_SZ)
#define DESC_RFC4543_ENC_LEN		(DESC_RFC4543_BASE + 11 * CAAM_CMD_SZ)
#define DESC_RFC4543_DEC_LEN		(DESC_RFC4543_BASE + 12 * CAAM_CMD_SZ)

#define DESC_ABLKCIPHER_BASE		(3 * CAAM_CMD_SZ)
#define DESC_ABLKCIPHER_ENC_LEN		(DESC_ABLKCIPHER_BASE + \
					 20 * CAAM_CMD_SZ)
#define DESC_ABLKCIPHER_DEC_LEN		(DESC_ABLKCIPHER_BASE + \
					 15 * CAAM_CMD_SZ)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
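/*
 * Budget arithmetic: the descriptor buffer holds 64 words of CAAM_CMD_SZ
 * (4) bytes each, i.e. CAAM_DESC_BYTES_MAX = 256 bytes. Whatever the job
 * descriptor's I/O commands consume (DESC_JOB_IO_LEN) is subtracted, and
 * the remainder bounds the shared descriptors stored in caam_ctx below.
 */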
#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif

static struct list_head alg_list;
struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	int alg_op;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};
/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT);
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}
/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}

/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
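
/*
 * Note on the pattern above: the MATH commands seed the variable sequence
 * length registers (VARSEQINLEN/VARSEQOUTLEN), and the FIFO LOAD/STORE
 * commands flagged with KEY_VLF/FIFOLDST_VLF then consume exactly that
 * many bytes, so one shared descriptor serves any payload size.
 */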
/*
 * per-session context
 */
struct caam_ctx {
	struct device *jrdev;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	u32 class1_alg_type;
	u32 class2_alg_type;
	u32 alg_op;
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t key_dma;
	unsigned int enckeylen;
	unsigned int split_key_len;
	unsigned int split_key_pad_len;
	unsigned int authsize;
};
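
/*
 * Layout of ctx->key as consumed by append_key_aead() below (authenc case):
 *
 *	+---------------------------+-----------+------------+
 *	| MDHA split key, padded to | enc key   | nonce      |
 *	| split_key_pad_len         | enckeylen | (rfc3686)  |
 *	+---------------------------+-----------+------------+
 *
 * ctx->key_dma maps this same buffer for the non-inline key commands.
 */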
static void append_key_aead(u32 *desc, struct caam_ctx *ctx,
			    int keys_fit_inline, bool is_rfc3686)
{
	u32 *nonce;
	unsigned int enckeylen = ctx->enckeylen;

	/*
	 * RFC3686 specific:
	 *	| ctx->key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (keys_fit_inline) {
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
		append_key_as_imm(desc, (void *)ctx->key +
				  ctx->split_key_pad_len, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	} else {
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
		append_key(desc, ctx->key_dma + ctx->split_key_pad_len,
			   enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len +
			       enckeylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
}
static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
				  int keys_fit_inline, bool is_rfc3686)
{
	u32 *key_jump_cmd;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	append_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	set_jump_tgt_here(desc, key_jump_cmd);
}
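
/*
 * The JUMP_COND_SHRD test above implements the "Skip if already shared"
 * idiom used by every shared descriptor in this file: when the DECO has
 * inherited the descriptor (and thus the keys) from a previous job of the
 * same session, the key-loading commands are jumped over.
 */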
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *jump_cmd, *read_move_cmd, *write_move_cmd;
	u32 *desc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_AEAD_NULL_ENC_LEN + AEAD_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));
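
	/*
	 * Roughly what the self-patch does: read_move_cmd and
	 * write_move_cmd round-trip the target command words through the
	 * MATH3 register so that a length computed at run time ends up
	 * inside the final MOVE command; set_move_tgt_here() further down
	 * points both MOVEs at the spot to patch.
	 */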
	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_NULL_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	/* aead_decrypt shared descriptor */
	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline;
	u32 geniv, moveiv;
	u32 ctx1_iv_off = 0;
	u32 *desc;
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->enckeylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_ENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_DEC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	if (alg->caam.geniv)
		append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
	else
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (alg->caam.geniv) {
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctr_mode)
		append_operation(desc, ctx->class1_alg_type |
				 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, ctx->class1_alg_type);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_GIVENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	if (is_rfc3686)
		goto copy_iv;

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV to class 1 context */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* ivsize + cryptlen = seqoutlen - authsize */
	append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Not need to reload iv */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead givenc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

skip_givenc:
	return 0;
}
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *zero_payload_jump_cmd,
	    *zero_assoc_jump_cmd1, *zero_assoc_jump_cmd2;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_GCM_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD | JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* jump the zero-payload commands */
	append_jump(desc, JUMP_TEST_ALL | 2);
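
	/*
	 * The "| 2" above is the jump's local offset field: an
	 * unconditional relative jump, in descriptor words, hopping over
	 * the zero-payload commands that follow.
	 */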
	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	/* write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_GCM_DEC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD |
				   JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_RFC4106_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4106 enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_RFC4106_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4106 dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd;
	u32 *read_move_cmd, *write_move_cmd;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_RFC4543_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4543 enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_RFC4543_DEC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4543 dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in,
			      u32 authkeylen)
{
	return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len,
			     ctx->split_key_pad_len, key_in, authkeylen,
			     ctx->alg_op);
}
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	/* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */
	static const u8 mdpadlen[] = { 16, 20, 32, 32, 64, 64 };
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	/* Pick class 2 key length from algorithm submask */
	ctx->split_key_len = mdpadlen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >>
				      OP_ALG_ALGSEL_SHIFT] * 2;
	ctx->split_key_pad_len = ALIGN(ctx->split_key_len, 16);
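
	/*
	 * Example: for SHA-256 (mdpadlen entry 32) the split key is
	 * 2 * 32 = 64 bytes, which is already a multiple of 16, so
	 * split_key_pad_len is 64 as well.
	 */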
	if (ctx->split_key_pad_len + keys.enckeylen > CAAM_MAX_KEY_SIZE)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	printk(KERN_ERR "split_key_len %d split_key_pad_len %d\n",
	       ctx->split_key_len, ctx->split_key_pad_len);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_aead_key(ctx, keys.authkey, keys.authkeylen);
	if (ret)
		goto badkey;

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->split_key_pad_len, keys.enckey, keys.enckeylen);

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len +
				      keys.enckeylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->split_key_pad_len + keys.enckeylen, 1);
#endif

	ctx->enckeylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len +
				 keys.enckeylen, DMA_TO_DEVICE);
	}

	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	ret = gcm_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->enckeylen = keylen - 4;

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4106_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->enckeylen = keylen - 4;

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4543_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct ablkcipher_tfm *crt = &ablkcipher->base.crt_ablkcipher;
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;
	u32 *key_jump_cmd;
	u32 *desc;
	u32 *nonce;
	u32 geniv;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)(key + keylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load iv */
	append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)(key + keylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* load IV */
	append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctr_mode)
		append_operation(desc, ctx->class1_alg_type |
				 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, ctx->class1_alg_type);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)(key + keylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (crt->ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX |
		    (crt->ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, crt->ivsize,
			 LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, (u32)1, LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
						 desc_bytes(desc),
						 DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return ret;
}
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *key_jump_cmd, *desc;
	__be64 sector_size = cpu_to_be64(512);

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
	append_data(desc, (void *)&sector_size, 8);

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
	append_data(desc, (void *)&sector_size, 8);

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_dec_op1(desc, ctx->class1_alg_type);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dma_unmap_single(jrdev, ctx->sh_desc_enc_dma,
				 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif

	return 0;
}
/*
 * aead_edesc - s/w-extended aead descriptor
 * @assoc_nents: number of segments in associated data (SPI+Seq) scatterlist
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int assoc_nents;
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		dma_unmap_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents ? : 1, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents ? : 1, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = (struct ablkcipher_edesc *)((char *)desc -
		 offsetof(struct ablkcipher_edesc, hw_desc));

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	print_hex_dump(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = (struct ablkcipher_edesc *)((char *)desc -
		 offsetof(struct ablkcipher_edesc, hw_desc));
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	print_hex_dump(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->dst_nents) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}
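/*
 * On encrypt the output sequence is authsize bytes longer than the
 * input (the engine appends the ICV); on decrypt it is authsize bytes
 * shorter (the engine consumes and checks the ICV). init_gcm_job()
 * below builds on init_aead_job() and additionally feeds the engine
 * the 12-byte GCM IV, preceded by the 4-byte salt for non-generic
 * (RFC4106-style) transforms, as immediate FIFO data.
 */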
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->enckeylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
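/*
 * For ablkcipher the IV is prepended to the input sequence, so when a
 * sec4 S/G table is used entry 0 holds the IV and the source segments
 * follow it; an in-place (req->src == req->dst) output then starts at
 * entry 1, i.e. at sec4_sg_dma + sizeof(struct sec4_sg_entry).
 */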
/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	print_hex_dump(KERN_ERR, "src @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->src_nents ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (!edesc->src_nents && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (!edesc->dst_nents) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	print_hex_dump(KERN_ERR, "src @" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->src_nents ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (!edesc->src_nents) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}
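/*
 * An extended descriptor is carved out of a single kzalloc():
 *
 *   -------------------------------------------------------
 *   | struct *_edesc | hw job descriptor | sec4 S/G table |
 *   -------------------------------------------------------
 *                      (desc_bytes)        (sec4_sg_bytes)
 *
 * so one kfree() in the completion callback releases everything.
 */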
/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
		       CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0;
	struct aead_edesc *edesc;
	int sgc;
	bool all_contig = true;
	int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_count(req->src, req->assoclen + req->cryptlen);
		dst_nents = sg_count(req->dst,
				     req->assoclen + req->cryptlen +
					(encrypt ? authsize : (-authsize)));
	} else {
		src_nents = sg_count(req->src,
				     req->assoclen + req->cryptlen +
					(encrypt ? authsize : 0));
	}

	/* Check if data are contiguous. */
	all_contig = !src_nents;
	if (!all_contig) {
		src_nents = src_nents ? : 1;
		sec4_sg_len = src_nents;
	}

	sec4_sg_len += dst_nents;

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	if (likely(req->src == req->dst)) {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
				     DMA_TO_DEVICE);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = all_contig;

	sec4_sg_index = 0;
	if (!all_contig) {
		sg_to_sec4_sg_last(req->src, src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += src_nents;
	}
	if (dst_nents) {
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
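/*
 * Request entry points. Each one allocates and maps an extended
 * descriptor, builds the job descriptor in place and enqueues it on a
 * job ring. On success the request completes asynchronously through
 * the *_done() callbacks above, so -EINPROGRESS is returned; on
 * enqueue failure everything is unmapped and freed here.
 */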
static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}
static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}
static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       req->assoclen + req->cryptlen, 1);
#endif

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0, sec4_sg_bytes;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool iv_contig = false;
	int sgc;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int sec4_sg_index;

	src_nents = sg_count(req->src, req->nbytes);

	if (req->dst != req->src)
		dst_nents = sg_count(req->dst, req->nbytes);

	if (likely(req->src == req->dst)) {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		return ERR_PTR(-ENOMEM);
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src))
		iv_contig = true;
	else
		src_nents = src_nents ? : 1;
	sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
			sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	sec4_sg_index = 0;
	if (!iv_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, src_nents,
				   edesc->sec4_sg + 1, 0);
		sec4_sg_index += 1 + src_nents;
	}

	if (dst_nents) {
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = iv_contig;
	return edesc;
}
static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_enc,
			    ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_dec,
			    ctx->sh_desc_dec_dma, edesc, req, iv_contig);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0, sec4_sg_bytes;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool iv_contig = false;
	int sgc;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int sec4_sg_index;

	src_nents = sg_count(req->src, req->nbytes);

	if (unlikely(req->dst != req->src))
		dst_nents = sg_count(req->dst, req->nbytes);

	if (likely(req->src == req->dst)) {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		return ERR_PTR(-ENOMEM);
	}

	if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst))
		iv_contig = true;
	else
		dst_nents = dst_nents ? : 1;
	sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
			sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	sec4_sg_index = 0;
	if (src_nents) {
		sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0);
		sec4_sg_index += src_nents;
	}

	if (!iv_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
				   iv_dma, ivsize, 0);
		sec4_sg_index += 1;
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = iv_contig;
	return edesc;
}
static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
					   CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
				edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
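/*
 * Algorithm templates. driver_algs[] describes the (giv)ablkcipher
 * transforms and is expanded into crypto_alg instances at module init;
 * driver_aeads[] carries ready-made aead_alg instances. The class1
 * (cipher) and class2 (integrity) alg_type fields record which CAAM
 * accelerator operations the shared descriptors must program.
 */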
#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher
struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	u32 type;
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;
	u32 class2_alg_type;
	u32 alg_op;
};

static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
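/*
 * driver_aeads[] entries follow a fixed pattern: .aead carries the
 * crypto API contract (names, block/IV/ICV sizes, setkey/encrypt/
 * decrypt entry points) while .caam records the class 1 (cipher) and
 * class 2 (integrity) operations for the shared descriptors, plus the
 * geniv/rfc3686 quirks consulted by init_authenc_job() above.
 */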
static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = gcm_encrypt,
			.decrypt = gcm_decrypt,
			.ivsize = 12,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
		},
	},
3181 .cra_name = "authenc(hmac(md5),cbc(aes))",
3182 .cra_driver_name = "authenc-hmac-md5-"
3184 .cra_blocksize = AES_BLOCK_SIZE,
3186 .setkey = aead_setkey,
3187 .setauthsize = aead_setauthsize,
3188 .encrypt = aead_encrypt,
3189 .decrypt = aead_decrypt,
3190 .ivsize = AES_BLOCK_SIZE,
3191 .maxauthsize = MD5_DIGEST_SIZE,
3194 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3195 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3196 OP_ALG_AAI_HMAC_PRECOMP,
3197 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3203 .cra_name = "echainiv(authenc(hmac(md5),"
3205 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3207 .cra_blocksize = AES_BLOCK_SIZE,
3209 .setkey = aead_setkey,
3210 .setauthsize = aead_setauthsize,
3211 .encrypt = aead_encrypt,
3212 .decrypt = aead_decrypt,
3213 .ivsize = AES_BLOCK_SIZE,
3214 .maxauthsize = MD5_DIGEST_SIZE,
3217 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3218 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3219 OP_ALG_AAI_HMAC_PRECOMP,
3220 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3227 .cra_name = "authenc(hmac(sha1),cbc(aes))",
3228 .cra_driver_name = "authenc-hmac-sha1-"
3230 .cra_blocksize = AES_BLOCK_SIZE,
3232 .setkey = aead_setkey,
3233 .setauthsize = aead_setauthsize,
3234 .encrypt = aead_encrypt,
3235 .decrypt = aead_decrypt,
3236 .ivsize = AES_BLOCK_SIZE,
3237 .maxauthsize = SHA1_DIGEST_SIZE,
3240 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3241 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3242 OP_ALG_AAI_HMAC_PRECOMP,
3243 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3249 .cra_name = "echainiv(authenc(hmac(sha1),"
3251 .cra_driver_name = "echainiv-authenc-"
3252 "hmac-sha1-cbc-aes-caam",
3253 .cra_blocksize = AES_BLOCK_SIZE,
3255 .setkey = aead_setkey,
3256 .setauthsize = aead_setauthsize,
3257 .encrypt = aead_encrypt,
3258 .decrypt = aead_decrypt,
3259 .ivsize = AES_BLOCK_SIZE,
3260 .maxauthsize = SHA1_DIGEST_SIZE,
3263 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3264 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3265 OP_ALG_AAI_HMAC_PRECOMP,
3266 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3273 .cra_name = "authenc(hmac(sha224),cbc(aes))",
3274 .cra_driver_name = "authenc-hmac-sha224-"
3276 .cra_blocksize = AES_BLOCK_SIZE,
3278 .setkey = aead_setkey,
3279 .setauthsize = aead_setauthsize,
3280 .encrypt = aead_encrypt,
3281 .decrypt = aead_decrypt,
3282 .ivsize = AES_BLOCK_SIZE,
3283 .maxauthsize = SHA224_DIGEST_SIZE,
3286 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3287 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3288 OP_ALG_AAI_HMAC_PRECOMP,
3289 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3295 .cra_name = "echainiv(authenc(hmac(sha224),"
3297 .cra_driver_name = "echainiv-authenc-"
3298 "hmac-sha224-cbc-aes-caam",
3299 .cra_blocksize = AES_BLOCK_SIZE,
3301 .setkey = aead_setkey,
3302 .setauthsize = aead_setauthsize,
3303 .encrypt = aead_encrypt,
3304 .decrypt = aead_decrypt,
3305 .ivsize = AES_BLOCK_SIZE,
3306 .maxauthsize = SHA224_DIGEST_SIZE,
3309 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3310 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3311 OP_ALG_AAI_HMAC_PRECOMP,
3312 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3319 .cra_name = "authenc(hmac(sha256),cbc(aes))",
3320 .cra_driver_name = "authenc-hmac-sha256-"
3322 .cra_blocksize = AES_BLOCK_SIZE,
3324 .setkey = aead_setkey,
3325 .setauthsize = aead_setauthsize,
3326 .encrypt = aead_encrypt,
3327 .decrypt = aead_decrypt,
3328 .ivsize = AES_BLOCK_SIZE,
3329 .maxauthsize = SHA256_DIGEST_SIZE,
3332 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3333 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3334 OP_ALG_AAI_HMAC_PRECOMP,
3335 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3341 .cra_name = "echainiv(authenc(hmac(sha256),"
3343 .cra_driver_name = "echainiv-authenc-"
3344 "hmac-sha256-cbc-aes-caam",
3345 .cra_blocksize = AES_BLOCK_SIZE,
3347 .setkey = aead_setkey,
3348 .setauthsize = aead_setauthsize,
3349 .encrypt = aead_encrypt,
3350 .decrypt = aead_decrypt,
3351 .ivsize = AES_BLOCK_SIZE,
3352 .maxauthsize = SHA256_DIGEST_SIZE,
3355 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3356 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3357 OP_ALG_AAI_HMAC_PRECOMP,
3358 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3365 .cra_name = "authenc(hmac(sha384),cbc(aes))",
3366 .cra_driver_name = "authenc-hmac-sha384-"
3368 .cra_blocksize = AES_BLOCK_SIZE,
3370 .setkey = aead_setkey,
3371 .setauthsize = aead_setauthsize,
3372 .encrypt = aead_encrypt,
3373 .decrypt = aead_decrypt,
3374 .ivsize = AES_BLOCK_SIZE,
3375 .maxauthsize = SHA384_DIGEST_SIZE,
3378 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3379 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3380 OP_ALG_AAI_HMAC_PRECOMP,
3381 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3387 .cra_name = "echainiv(authenc(hmac(sha384),"
3389 .cra_driver_name = "echainiv-authenc-"
3390 "hmac-sha384-cbc-aes-caam",
3391 .cra_blocksize = AES_BLOCK_SIZE,
3393 .setkey = aead_setkey,
3394 .setauthsize = aead_setauthsize,
3395 .encrypt = aead_encrypt,
3396 .decrypt = aead_decrypt,
3397 .ivsize = AES_BLOCK_SIZE,
3398 .maxauthsize = SHA384_DIGEST_SIZE,
3401 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3402 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3403 OP_ALG_AAI_HMAC_PRECOMP,
3404 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3411 .cra_name = "authenc(hmac(sha512),cbc(aes))",
3412 .cra_driver_name = "authenc-hmac-sha512-"
3414 .cra_blocksize = AES_BLOCK_SIZE,
3416 .setkey = aead_setkey,
3417 .setauthsize = aead_setauthsize,
3418 .encrypt = aead_encrypt,
3419 .decrypt = aead_decrypt,
3420 .ivsize = AES_BLOCK_SIZE,
3421 .maxauthsize = SHA512_DIGEST_SIZE,
3424 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3425 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3426 OP_ALG_AAI_HMAC_PRECOMP,
3427 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3433 .cra_name = "echainiv(authenc(hmac(sha512),"
3435 .cra_driver_name = "echainiv-authenc-"
3436 "hmac-sha512-cbc-aes-caam",
3437 .cra_blocksize = AES_BLOCK_SIZE,
3439 .setkey = aead_setkey,
3440 .setauthsize = aead_setauthsize,
3441 .encrypt = aead_encrypt,
3442 .decrypt = aead_decrypt,
3443 .ivsize = AES_BLOCK_SIZE,
3444 .maxauthsize = SHA512_DIGEST_SIZE,
3447 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3448 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3449 OP_ALG_AAI_HMAC_PRECOMP,
3450 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3457 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
3458 .cra_driver_name = "authenc-hmac-md5-"
3459 "cbc-des3_ede-caam",
3460 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3462 .setkey = aead_setkey,
3463 .setauthsize = aead_setauthsize,
3464 .encrypt = aead_encrypt,
3465 .decrypt = aead_decrypt,
3466 .ivsize = DES3_EDE_BLOCK_SIZE,
3467 .maxauthsize = MD5_DIGEST_SIZE,
3470 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3471 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3472 OP_ALG_AAI_HMAC_PRECOMP,
3473 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3479 .cra_name = "echainiv(authenc(hmac(md5),"
3481 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3482 "cbc-des3_ede-caam",
3483 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3485 .setkey = aead_setkey,
3486 .setauthsize = aead_setauthsize,
3487 .encrypt = aead_encrypt,
3488 .decrypt = aead_decrypt,
3489 .ivsize = DES3_EDE_BLOCK_SIZE,
3490 .maxauthsize = MD5_DIGEST_SIZE,
3493 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3494 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3495 OP_ALG_AAI_HMAC_PRECOMP,
3496 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3503 .cra_name = "authenc(hmac(sha1),"
3505 .cra_driver_name = "authenc-hmac-sha1-"
3506 "cbc-des3_ede-caam",
3507 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3509 .setkey = aead_setkey,
3510 .setauthsize = aead_setauthsize,
3511 .encrypt = aead_encrypt,
3512 .decrypt = aead_decrypt,
3513 .ivsize = DES3_EDE_BLOCK_SIZE,
3514 .maxauthsize = SHA1_DIGEST_SIZE,
3517 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3518 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3519 OP_ALG_AAI_HMAC_PRECOMP,
3520 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3526 .cra_name = "echainiv(authenc(hmac(sha1),"
3528 .cra_driver_name = "echainiv-authenc-"
3530 "cbc-des3_ede-caam",
3531 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3533 .setkey = aead_setkey,
3534 .setauthsize = aead_setauthsize,
3535 .encrypt = aead_encrypt,
3536 .decrypt = aead_decrypt,
3537 .ivsize = DES3_EDE_BLOCK_SIZE,
3538 .maxauthsize = SHA1_DIGEST_SIZE,
3541 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3542 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3543 OP_ALG_AAI_HMAC_PRECOMP,
3544 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3551 .cra_name = "authenc(hmac(sha224),"
3553 .cra_driver_name = "authenc-hmac-sha224-"
3554 "cbc-des3_ede-caam",
3555 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3557 .setkey = aead_setkey,
3558 .setauthsize = aead_setauthsize,
3559 .encrypt = aead_encrypt,
3560 .decrypt = aead_decrypt,
3561 .ivsize = DES3_EDE_BLOCK_SIZE,
3562 .maxauthsize = SHA224_DIGEST_SIZE,
3565 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3566 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3567 OP_ALG_AAI_HMAC_PRECOMP,
3568 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3574 .cra_name = "echainiv(authenc(hmac(sha224),"
3576 .cra_driver_name = "echainiv-authenc-"
3578 "cbc-des3_ede-caam",
3579 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3581 .setkey = aead_setkey,
3582 .setauthsize = aead_setauthsize,
3583 .encrypt = aead_encrypt,
3584 .decrypt = aead_decrypt,
3585 .ivsize = DES3_EDE_BLOCK_SIZE,
3586 .maxauthsize = SHA224_DIGEST_SIZE,
3589 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3590 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3591 OP_ALG_AAI_HMAC_PRECOMP,
3592 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3599 .cra_name = "authenc(hmac(sha256),"
3601 .cra_driver_name = "authenc-hmac-sha256-"
3602 "cbc-des3_ede-caam",
3603 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3605 .setkey = aead_setkey,
3606 .setauthsize = aead_setauthsize,
3607 .encrypt = aead_encrypt,
3608 .decrypt = aead_decrypt,
3609 .ivsize = DES3_EDE_BLOCK_SIZE,
3610 .maxauthsize = SHA256_DIGEST_SIZE,
3613 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3614 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3615 OP_ALG_AAI_HMAC_PRECOMP,
3616 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3622 .cra_name = "echainiv(authenc(hmac(sha256),"
3624 .cra_driver_name = "echainiv-authenc-"
3626 "cbc-des3_ede-caam",
3627 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3629 .setkey = aead_setkey,
3630 .setauthsize = aead_setauthsize,
3631 .encrypt = aead_encrypt,
3632 .decrypt = aead_decrypt,
3633 .ivsize = DES3_EDE_BLOCK_SIZE,
3634 .maxauthsize = SHA256_DIGEST_SIZE,
3637 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3638 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3639 OP_ALG_AAI_HMAC_PRECOMP,
3640 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3647 .cra_name = "authenc(hmac(sha384),"
3649 .cra_driver_name = "authenc-hmac-sha384-"
3650 "cbc-des3_ede-caam",
3651 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3653 .setkey = aead_setkey,
3654 .setauthsize = aead_setauthsize,
3655 .encrypt = aead_encrypt,
3656 .decrypt = aead_decrypt,
3657 .ivsize = DES3_EDE_BLOCK_SIZE,
3658 .maxauthsize = SHA384_DIGEST_SIZE,
3661 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3662 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3663 OP_ALG_AAI_HMAC_PRECOMP,
3664 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3670 .cra_name = "echainiv(authenc(hmac(sha384),"
3672 .cra_driver_name = "echainiv-authenc-"
3674 "cbc-des3_ede-caam",
3675 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3677 .setkey = aead_setkey,
3678 .setauthsize = aead_setauthsize,
3679 .encrypt = aead_encrypt,
3680 .decrypt = aead_decrypt,
3681 .ivsize = DES3_EDE_BLOCK_SIZE,
3682 .maxauthsize = SHA384_DIGEST_SIZE,
3685 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3686 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3687 OP_ALG_AAI_HMAC_PRECOMP,
3688 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3695 .cra_name = "authenc(hmac(sha512),"
3697 .cra_driver_name = "authenc-hmac-sha512-"
3698 "cbc-des3_ede-caam",
3699 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3701 .setkey = aead_setkey,
3702 .setauthsize = aead_setauthsize,
3703 .encrypt = aead_encrypt,
3704 .decrypt = aead_decrypt,
3705 .ivsize = DES3_EDE_BLOCK_SIZE,
3706 .maxauthsize = SHA512_DIGEST_SIZE,
3709 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3710 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3711 OP_ALG_AAI_HMAC_PRECOMP,
3712 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3718 .cra_name = "echainiv(authenc(hmac(sha512),"
3720 .cra_driver_name = "echainiv-authenc-"
3722 "cbc-des3_ede-caam",
3723 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3725 .setkey = aead_setkey,
3726 .setauthsize = aead_setauthsize,
3727 .encrypt = aead_encrypt,
3728 .decrypt = aead_decrypt,
3729 .ivsize = DES3_EDE_BLOCK_SIZE,
3730 .maxauthsize = SHA512_DIGEST_SIZE,
3733 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3734 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3735 OP_ALG_AAI_HMAC_PRECOMP,
3736 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3743 .cra_name = "authenc(hmac(md5),cbc(des))",
3744 .cra_driver_name = "authenc-hmac-md5-"
3746 .cra_blocksize = DES_BLOCK_SIZE,
3748 .setkey = aead_setkey,
3749 .setauthsize = aead_setauthsize,
3750 .encrypt = aead_encrypt,
3751 .decrypt = aead_decrypt,
3752 .ivsize = DES_BLOCK_SIZE,
3753 .maxauthsize = MD5_DIGEST_SIZE,
3756 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3757 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3758 OP_ALG_AAI_HMAC_PRECOMP,
3759 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3765 .cra_name = "echainiv(authenc(hmac(md5),"
3767 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3769 .cra_blocksize = DES_BLOCK_SIZE,
3771 .setkey = aead_setkey,
3772 .setauthsize = aead_setauthsize,
3773 .encrypt = aead_encrypt,
3774 .decrypt = aead_decrypt,
3775 .ivsize = DES_BLOCK_SIZE,
3776 .maxauthsize = MD5_DIGEST_SIZE,
3779 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3780 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3781 OP_ALG_AAI_HMAC_PRECOMP,
3782 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3789 .cra_name = "authenc(hmac(sha1),cbc(des))",
3790 .cra_driver_name = "authenc-hmac-sha1-"
3792 .cra_blocksize = DES_BLOCK_SIZE,
3794 .setkey = aead_setkey,
3795 .setauthsize = aead_setauthsize,
3796 .encrypt = aead_encrypt,
3797 .decrypt = aead_decrypt,
3798 .ivsize = DES_BLOCK_SIZE,
3799 .maxauthsize = SHA1_DIGEST_SIZE,
3802 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3803 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3804 OP_ALG_AAI_HMAC_PRECOMP,
3805 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3811 .cra_name = "echainiv(authenc(hmac(sha1),"
3813 .cra_driver_name = "echainiv-authenc-"
3814 "hmac-sha1-cbc-des-caam",
3815 .cra_blocksize = DES_BLOCK_SIZE,
3817 .setkey = aead_setkey,
3818 .setauthsize = aead_setauthsize,
3819 .encrypt = aead_encrypt,
3820 .decrypt = aead_decrypt,
3821 .ivsize = DES_BLOCK_SIZE,
3822 .maxauthsize = SHA1_DIGEST_SIZE,
3825 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3826 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3827 OP_ALG_AAI_HMAC_PRECOMP,
3828 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3835 .cra_name = "authenc(hmac(sha224),cbc(des))",
3836 .cra_driver_name = "authenc-hmac-sha224-"
3838 .cra_blocksize = DES_BLOCK_SIZE,
3840 .setkey = aead_setkey,
3841 .setauthsize = aead_setauthsize,
3842 .encrypt = aead_encrypt,
3843 .decrypt = aead_decrypt,
3844 .ivsize = DES_BLOCK_SIZE,
3845 .maxauthsize = SHA224_DIGEST_SIZE,
3848 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3849 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3850 OP_ALG_AAI_HMAC_PRECOMP,
3851 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3857 .cra_name = "echainiv(authenc(hmac(sha224),"
3859 .cra_driver_name = "echainiv-authenc-"
3860 "hmac-sha224-cbc-des-caam",
3861 .cra_blocksize = DES_BLOCK_SIZE,
3863 .setkey = aead_setkey,
3864 .setauthsize = aead_setauthsize,
3865 .encrypt = aead_encrypt,
3866 .decrypt = aead_decrypt,
3867 .ivsize = DES_BLOCK_SIZE,
3868 .maxauthsize = SHA224_DIGEST_SIZE,
3871 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3872 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3873 OP_ALG_AAI_HMAC_PRECOMP,
3874 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3881 .cra_name = "authenc(hmac(sha256),cbc(des))",
3882 .cra_driver_name = "authenc-hmac-sha256-"
3884 .cra_blocksize = DES_BLOCK_SIZE,
3886 .setkey = aead_setkey,
3887 .setauthsize = aead_setauthsize,
3888 .encrypt = aead_encrypt,
3889 .decrypt = aead_decrypt,
3890 .ivsize = DES_BLOCK_SIZE,
3891 .maxauthsize = SHA256_DIGEST_SIZE,
3894 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3895 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3896 OP_ALG_AAI_HMAC_PRECOMP,
3897 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3903 .cra_name = "echainiv(authenc(hmac(sha256),"
3905 .cra_driver_name = "echainiv-authenc-"
3906 "hmac-sha256-cbc-des-caam",
3907 .cra_blocksize = DES_BLOCK_SIZE,
3909 .setkey = aead_setkey,
3910 .setauthsize = aead_setauthsize,
3911 .encrypt = aead_encrypt,
3912 .decrypt = aead_decrypt,
3913 .ivsize = DES_BLOCK_SIZE,
3914 .maxauthsize = SHA256_DIGEST_SIZE,
3917 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3918 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3919 OP_ALG_AAI_HMAC_PRECOMP,
3920 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3927 .cra_name = "authenc(hmac(sha384),cbc(des))",
3928 .cra_driver_name = "authenc-hmac-sha384-"
3930 .cra_blocksize = DES_BLOCK_SIZE,
3932 .setkey = aead_setkey,
3933 .setauthsize = aead_setauthsize,
3934 .encrypt = aead_encrypt,
3935 .decrypt = aead_decrypt,
3936 .ivsize = DES_BLOCK_SIZE,
3937 .maxauthsize = SHA384_DIGEST_SIZE,
3940 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3941 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3942 OP_ALG_AAI_HMAC_PRECOMP,
3943 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3949 .cra_name = "echainiv(authenc(hmac(sha384),"
3951 .cra_driver_name = "echainiv-authenc-"
3952 "hmac-sha384-cbc-des-caam",
3953 .cra_blocksize = DES_BLOCK_SIZE,
3955 .setkey = aead_setkey,
3956 .setauthsize = aead_setauthsize,
3957 .encrypt = aead_encrypt,
3958 .decrypt = aead_decrypt,
3959 .ivsize = DES_BLOCK_SIZE,
3960 .maxauthsize = SHA384_DIGEST_SIZE,
3963 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3964 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3965 OP_ALG_AAI_HMAC_PRECOMP,
3966 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3973 .cra_name = "authenc(hmac(sha512),cbc(des))",
3974 .cra_driver_name = "authenc-hmac-sha512-"
3976 .cra_blocksize = DES_BLOCK_SIZE,
3978 .setkey = aead_setkey,
3979 .setauthsize = aead_setauthsize,
3980 .encrypt = aead_encrypt,
3981 .decrypt = aead_decrypt,
3982 .ivsize = DES_BLOCK_SIZE,
3983 .maxauthsize = SHA512_DIGEST_SIZE,
3986 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3987 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3988 OP_ALG_AAI_HMAC_PRECOMP,
3989 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3995 .cra_name = "echainiv(authenc(hmac(sha512),"
3997 .cra_driver_name = "echainiv-authenc-"
3998 "hmac-sha512-cbc-des-caam",
3999 .cra_blocksize = DES_BLOCK_SIZE,
4001 .setkey = aead_setkey,
4002 .setauthsize = aead_setauthsize,
4003 .encrypt = aead_encrypt,
4004 .decrypt = aead_decrypt,
4005 .ivsize = DES_BLOCK_SIZE,
4006 .maxauthsize = SHA512_DIGEST_SIZE,
4009 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4010 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4011 OP_ALG_AAI_HMAC_PRECOMP,
4012 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4019 .cra_name = "authenc(hmac(md5),"
4020 "rfc3686(ctr(aes)))",
4021 .cra_driver_name = "authenc-hmac-md5-"
4022 "rfc3686-ctr-aes-caam",
4025 .setkey = aead_setkey,
4026 .setauthsize = aead_setauthsize,
4027 .encrypt = aead_encrypt,
4028 .decrypt = aead_decrypt,
4029 .ivsize = CTR_RFC3686_IV_SIZE,
4030 .maxauthsize = MD5_DIGEST_SIZE,
4033 .class1_alg_type = OP_ALG_ALGSEL_AES |
4034 OP_ALG_AAI_CTR_MOD128,
4035 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4036 OP_ALG_AAI_HMAC_PRECOMP,
4037 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4044 .cra_name = "seqiv(authenc("
4045 "hmac(md5),rfc3686(ctr(aes))))",
4046 .cra_driver_name = "seqiv-authenc-hmac-md5-"
4047 "rfc3686-ctr-aes-caam",
4050 .setkey = aead_setkey,
4051 .setauthsize = aead_setauthsize,
4052 .encrypt = aead_encrypt,
4053 .decrypt = aead_decrypt,
4054 .ivsize = CTR_RFC3686_IV_SIZE,
4055 .maxauthsize = MD5_DIGEST_SIZE,
4058 .class1_alg_type = OP_ALG_ALGSEL_AES |
4059 OP_ALG_AAI_CTR_MOD128,
4060 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4061 OP_ALG_AAI_HMAC_PRECOMP,
4062 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4070 .cra_name = "authenc(hmac(sha1),"
4071 "rfc3686(ctr(aes)))",
4072 .cra_driver_name = "authenc-hmac-sha1-"
4073 "rfc3686-ctr-aes-caam",
4076 .setkey = aead_setkey,
4077 .setauthsize = aead_setauthsize,
4078 .encrypt = aead_encrypt,
4079 .decrypt = aead_decrypt,
4080 .ivsize = CTR_RFC3686_IV_SIZE,
4081 .maxauthsize = SHA1_DIGEST_SIZE,
4084 .class1_alg_type = OP_ALG_ALGSEL_AES |
4085 OP_ALG_AAI_CTR_MOD128,
4086 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4087 OP_ALG_AAI_HMAC_PRECOMP,
4088 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4095 .cra_name = "seqiv(authenc("
4096 "hmac(sha1),rfc3686(ctr(aes))))",
4097 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
4098 "rfc3686-ctr-aes-caam",
4101 .setkey = aead_setkey,
4102 .setauthsize = aead_setauthsize,
4103 .encrypt = aead_encrypt,
4104 .decrypt = aead_decrypt,
4105 .ivsize = CTR_RFC3686_IV_SIZE,
4106 .maxauthsize = SHA1_DIGEST_SIZE,
4109 .class1_alg_type = OP_ALG_ALGSEL_AES |
4110 OP_ALG_AAI_CTR_MOD128,
4111 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4112 OP_ALG_AAI_HMAC_PRECOMP,
4113 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4121 .cra_name = "authenc(hmac(sha224),"
4122 "rfc3686(ctr(aes)))",
4123 .cra_driver_name = "authenc-hmac-sha224-"
4124 "rfc3686-ctr-aes-caam",
4127 .setkey = aead_setkey,
4128 .setauthsize = aead_setauthsize,
4129 .encrypt = aead_encrypt,
4130 .decrypt = aead_decrypt,
4131 .ivsize = CTR_RFC3686_IV_SIZE,
4132 .maxauthsize = SHA224_DIGEST_SIZE,
4135 .class1_alg_type = OP_ALG_ALGSEL_AES |
4136 OP_ALG_AAI_CTR_MOD128,
4137 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4138 OP_ALG_AAI_HMAC_PRECOMP,
4139 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4146 .cra_name = "seqiv(authenc("
4147 "hmac(sha224),rfc3686(ctr(aes))))",
4148 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
4149 "rfc3686-ctr-aes-caam",
4152 .setkey = aead_setkey,
4153 .setauthsize = aead_setauthsize,
4154 .encrypt = aead_encrypt,
4155 .decrypt = aead_decrypt,
4156 .ivsize = CTR_RFC3686_IV_SIZE,
4157 .maxauthsize = SHA224_DIGEST_SIZE,
4160 .class1_alg_type = OP_ALG_ALGSEL_AES |
4161 OP_ALG_AAI_CTR_MOD128,
4162 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4163 OP_ALG_AAI_HMAC_PRECOMP,
4164 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4172 .cra_name = "authenc(hmac(sha256),"
4173 "rfc3686(ctr(aes)))",
4174 .cra_driver_name = "authenc-hmac-sha256-"
4175 "rfc3686-ctr-aes-caam",
4178 .setkey = aead_setkey,
4179 .setauthsize = aead_setauthsize,
4180 .encrypt = aead_encrypt,
4181 .decrypt = aead_decrypt,
4182 .ivsize = CTR_RFC3686_IV_SIZE,
4183 .maxauthsize = SHA256_DIGEST_SIZE,
4186 .class1_alg_type = OP_ALG_ALGSEL_AES |
4187 OP_ALG_AAI_CTR_MOD128,
4188 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4189 OP_ALG_AAI_HMAC_PRECOMP,
4190 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4197 .cra_name = "seqiv(authenc(hmac(sha256),"
4198 "rfc3686(ctr(aes))))",
4199 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
4200 "rfc3686-ctr-aes-caam",
4203 .setkey = aead_setkey,
4204 .setauthsize = aead_setauthsize,
4205 .encrypt = aead_encrypt,
4206 .decrypt = aead_decrypt,
4207 .ivsize = CTR_RFC3686_IV_SIZE,
4208 .maxauthsize = SHA256_DIGEST_SIZE,
4211 .class1_alg_type = OP_ALG_ALGSEL_AES |
4212 OP_ALG_AAI_CTR_MOD128,
4213 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4214 OP_ALG_AAI_HMAC_PRECOMP,
4215 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4223 .cra_name = "authenc(hmac(sha384),"
4224 "rfc3686(ctr(aes)))",
4225 .cra_driver_name = "authenc-hmac-sha384-"
4226 "rfc3686-ctr-aes-caam",
4229 .setkey = aead_setkey,
4230 .setauthsize = aead_setauthsize,
4231 .encrypt = aead_encrypt,
4232 .decrypt = aead_decrypt,
4233 .ivsize = CTR_RFC3686_IV_SIZE,
4234 .maxauthsize = SHA384_DIGEST_SIZE,
4237 .class1_alg_type = OP_ALG_ALGSEL_AES |
4238 OP_ALG_AAI_CTR_MOD128,
4239 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4240 OP_ALG_AAI_HMAC_PRECOMP,
4241 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4248 .cra_name = "seqiv(authenc(hmac(sha384),"
4249 "rfc3686(ctr(aes))))",
4250 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
4251 "rfc3686-ctr-aes-caam",
4254 .setkey = aead_setkey,
4255 .setauthsize = aead_setauthsize,
4256 .encrypt = aead_encrypt,
4257 .decrypt = aead_decrypt,
4258 .ivsize = CTR_RFC3686_IV_SIZE,
4259 .maxauthsize = SHA384_DIGEST_SIZE,
4262 .class1_alg_type = OP_ALG_ALGSEL_AES |
4263 OP_ALG_AAI_CTR_MOD128,
4264 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4265 OP_ALG_AAI_HMAC_PRECOMP,
4266 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4274 .cra_name = "authenc(hmac(sha512),"
4275 "rfc3686(ctr(aes)))",
4276 .cra_driver_name = "authenc-hmac-sha512-"
4277 "rfc3686-ctr-aes-caam",
4280 .setkey = aead_setkey,
4281 .setauthsize = aead_setauthsize,
4282 .encrypt = aead_encrypt,
4283 .decrypt = aead_decrypt,
4284 .ivsize = CTR_RFC3686_IV_SIZE,
4285 .maxauthsize = SHA512_DIGEST_SIZE,
4288 .class1_alg_type = OP_ALG_ALGSEL_AES |
4289 OP_ALG_AAI_CTR_MOD128,
4290 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4291 OP_ALG_AAI_HMAC_PRECOMP,
4292 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4299 .cra_name = "seqiv(authenc(hmac(sha512),"
4300 "rfc3686(ctr(aes))))",
4301 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
4302 "rfc3686-ctr-aes-caam",
4305 .setkey = aead_setkey,
4306 .setauthsize = aead_setauthsize,
4307 .encrypt = aead_encrypt,
4308 .decrypt = aead_decrypt,
4309 .ivsize = CTR_RFC3686_IV_SIZE,
4310 .maxauthsize = SHA512_DIGEST_SIZE,
4313 .class1_alg_type = OP_ALG_ALGSEL_AES |
4314 OP_ALG_AAI_CTR_MOD128,
4315 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4316 OP_ALG_AAI_HMAC_PRECOMP,
4317 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
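/*
 * Per-transform plumbing. caam_init_common() attaches a context to a
 * job ring and caches the descriptor header templates; the matching
 * caam_exit_common() undoes the DMA mappings created at setkey time
 * and releases the job ring.
 */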
struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;
	struct caam_alg_entry caam;
};

static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
{
	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	/* copy descriptor header template value */
	ctx->class1_alg_type = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->class2_alg_type = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
	ctx->alg_op = OP_TYPE_CLASS2_ALG | caam->alg_op;

	return 0;
}

static int caam_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct caam_crypto_alg *caam_alg =
		 container_of(alg, struct caam_crypto_alg, crypto_alg);
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		 container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}
static void caam_exit_common(struct caam_ctx *ctx)
{
	if (ctx->sh_desc_enc_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
		dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
				 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
	if (ctx->sh_desc_dec_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma))
		dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma,
				 desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE);
	if (ctx->sh_desc_givenc_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma))
		dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma,
				 desc_bytes(ctx->sh_desc_givenc),
				 DMA_TO_DEVICE);
	if (ctx->key_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->key_dma))
		dma_unmap_single(ctx->jrdev, ctx->key_dma,
				 ctx->enckeylen + ctx->split_key_pad_len,
				 DMA_TO_DEVICE);

	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
static void __exit caam_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	if (!alg_list.next)
		return;

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;
	t_alg->caam.alg_op = template->alg_op;

	return t_alg;
}
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
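
/*
 * Module init: locate the CAAM controller via the device tree, read the
 * CHA (crypto hardware accelerator) version and instantiation registers,
 * and register only those algorithms the detected blocks can run.
 */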
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;
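
	/*
	 * For example, on an LP256 MDHA md_limit becomes
	 * SHA256_DIGEST_SIZE (32 bytes), so the AEAD loop below skips the
	 * hmac(sha384)/hmac(sha512) variants, whose maxauthsize of 48 or
	 * 64 bytes exceeds that limit.
	 */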
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
			    OP_ALG_AAI_XTS)
				continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n", alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}
	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
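
/*
 * Illustrative usage (not part of this driver): once registration
 * succeeds, a kernel user can request one of these transforms by name,
 * e.g.:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 * Because cra_priority is CAAM_CRA_PRIORITY (3000), the CAAM
 * implementation is preferred over the generic software one when both
 * are present.
 */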
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");