/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
                           unsigned int keylen)
{
        return -ENOSYS;
}

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

        absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
        buffer = kmalloc(absize, GFP_KERNEL);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        kzfree(buffer);
        return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)key & alignmask)
                return shash_setkey_unaligned(tfm, key, keylen);

        return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
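
/*
 * Example (illustrative sketch, not part of this file): keying a
 * synchronous hash such as HMAC before use.  The algorithm name and
 * key handling below are assumptions for demonstration only.
 *
 *      struct crypto_shash *tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *      u8 key[32];
 *      int err;
 *
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      get_random_bytes(key, sizeof(key));
 *      err = crypto_shash_setkey(tfm, key, sizeof(key));
 *      if (err)
 *              crypto_free_shash(tfm);
 */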

static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
{
        typedef u8 __attribute__ ((aligned)) u8_aligned;
        return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
                __attribute__ ((aligned));
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        if (unaligned_len > len)
                unaligned_len = len;

        memcpy(buf, data, unaligned_len);
        err = shash->update(desc, buf, unaligned_len);
        memset(buf, 0, unaligned_len);

        return err ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
                        unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)data & alignmask)
                return shash_update_unaligned(desc, data, len);

        return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
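
/*
 * Note on alignment: crypto_shash_update() and the other exported
 * wrappers only take the slow path when the caller's buffer violates
 * the algorithm's alignment mask.  The misaligned prefix is bounced
 * through an aligned on-stack buffer and the aligned remainder is
 * passed straight through, so callers need not align data themselves.
 */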

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        u8 ubuf[shash_align_buffer_size(ds, alignmask)]
                __attribute__ ((aligned));
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        err = shash->final(desc, buf);
        if (err)
                goto out;

        memcpy(out, buf, ds);

out:
        memset(buf, 0, ds);
        return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)out & alignmask)
                return shash_final_unaligned(desc, out);

        return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
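
/*
 * Example (illustrative sketch, not part of this file): the full
 * init/update/final cycle.  The descriptor must be followed by
 * crypto_shash_descsize() bytes of context; the algorithm name and
 * buffers are assumptions for demonstration only.
 *
 *      struct crypto_shash *tfm = crypto_alloc_shash("sha1", 0, 0);
 *      struct shash_desc *desc;
 *      u8 out[SHA1_DIGEST_SIZE];
 *      int err;
 *
 *      desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *                     GFP_KERNEL);
 *      desc->tfm = tfm;
 *      desc->flags = 0;
 *
 *      err = crypto_shash_init(desc) ?:
 *            crypto_shash_update(desc, buf1, len1) ?:
 *            crypto_shash_update(desc, buf2, len2) ?:
 *            crypto_shash_final(desc, out);
 */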

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_finup_unaligned(desc, data, len, out);

        return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len, u8 *out)
{
        return crypto_shash_init(desc) ?:
               crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_digest_unaligned(desc, data, len, out);

        return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
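
/*
 * Example (illustrative sketch): crypto_shash_digest() is the one-shot
 * form, equivalent to crypto_shash_init() followed by
 * crypto_shash_finup() over a single linear buffer:
 *
 *      err = crypto_shash_digest(desc, data, len, out);
 */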

static int shash_default_export(struct shash_desc *desc, void *out)
{
        memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
        return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
        memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
        return 0;
}
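
/*
 * Example (illustrative sketch): export/import make partial hash state
 * portable between descriptors, e.g. to hash a common prefix once and
 * then fork the computation.  Both descriptors must use the same
 * algorithm; STATE_SIZE is a hypothetical placeholder for the value of
 * crypto_shash_statesize(tfm).
 *
 *      u8 state[STATE_SIZE];
 *
 *      err = crypto_shash_export(desc, state) ?:
 *            crypto_shash_import(desc2, state);
 */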

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

        return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
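
/*
 * Note: shash_ahash_update() bridges the scatterlist-based ahash API to
 * the linear shash API.  crypto_hash_walk maps each scatterlist segment
 * in turn and the mapped data is fed to crypto_shash_update().
 */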

static int shash_async_update(struct ahash_request *req)
{
        return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
        return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        nbytes = crypto_hash_walk_first(req, &walk);
        if (!nbytes)
                return crypto_shash_final(desc, req->result);

        do {
                nbytes = crypto_hash_walk_last(&walk) ?
                         crypto_shash_finup(desc, walk.data, nbytes,
                                            req->result) :
                         crypto_shash_update(desc, walk.data, nbytes);
                nbytes = crypto_hash_walk_done(&walk, nbytes);
        } while (nbytes > 0);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);
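
/*
 * Note: shash_ahash_finup() performs the same walk but finishes with
 * crypto_shash_finup() on the last segment, so a single-segment request
 * completes in one pass; an empty request degenerates to
 * crypto_shash_final().
 */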

static int shash_async_finup(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
        struct scatterlist *sg = req->src;
        unsigned int offset = sg->offset;
        unsigned int nbytes = req->nbytes;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                void *data;

                data = kmap_atomic(sg_page(sg));
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                kunmap_atomic(data);
                crypto_yield(desc->flags);
        } else
                err = crypto_shash_init(desc) ?:
                      shash_ahash_finup(req, desc);

        return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);
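
/*
 * Note: shash_ahash_digest() has a fast path for requests that fit
 * entirely within the first scatterlist entry without crossing a page
 * boundary: the page is mapped atomically and digested linearly.
 * Anything larger falls back to init plus a full walk via
 * shash_ahash_finup().
 */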

static int shash_async_digest(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
        return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->finup = shash_async_finup;
        crt->digest = shash_async_digest;

        if (alg->setkey)
                crt->setkey = shash_async_setkey;
        if (alg->export)
                crt->export = shash_async_export;
        if (alg->import)
                crt->import = shash_async_import;

        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}
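
/*
 * Example (illustrative sketch): thanks to the glue above, a request
 * for an asynchronous hash may transparently return a wrapped shash
 * implementation:
 *
 *      struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *
 * The ahash request context then holds the shash_desc, sized via
 * ->reqsize as set in crypto_init_shash_ops_async().
 */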

static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
                               unsigned int keylen)
{
        struct shash_desc **descp = crypto_hash_ctx(tfm);
        struct shash_desc *desc = *descp;

        return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
        struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
        struct shash_desc *desc = *descp;

        desc->flags = hdesc->flags;

        return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
                               unsigned int len)
{
        struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
        struct shash_desc *desc = *descp;
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
             nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
        struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

        return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
                               unsigned int nbytes, u8 *out)
{
        unsigned int offset = sg->offset;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
                struct shash_desc *desc = *descp;
                void *data;

                desc->flags = hdesc->flags;

                data = kmap_atomic(sg_page(sg));
                err = crypto_shash_digest(desc, data + offset, nbytes, out);
                kunmap_atomic(data);
                crypto_yield(desc->flags);
                goto out;
        }

        err = shash_compat_init(hdesc);
        if (err)
                goto out;

        err = shash_compat_update(hdesc, sg, nbytes);
        if (err)
                goto out;

        err = shash_compat_final(hdesc, out);

out:
        return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
        struct shash_desc **descp = crypto_tfm_ctx(tfm);
        struct shash_desc *desc = *descp;

        crypto_free_shash(desc->tfm);
        kzfree(desc);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
        struct hash_tfm *crt = &tfm->crt_hash;
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct shash_desc **descp = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;
        struct shash_desc *desc;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
                       GFP_KERNEL);
        if (!desc) {
                crypto_free_shash(shash);
                return -ENOMEM;
        }

        *descp = desc;
        desc->tfm = shash;
        tfm->exit = crypto_exit_shash_ops_compat;

        crt->init = shash_compat_init;
        crt->update = shash_compat_update;
        crt->final  = shash_compat_final;
        crt->digest = shash_compat_digest;
        crt->setkey = shash_compat_setkey;

        crt->digestsize = alg->digestsize;

        return 0;
}

static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        switch (mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_HASH_MASK:
                return crypto_init_shash_ops_compat(tfm);
        }

        return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
                                         u32 mask)
{
        switch (mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_HASH_MASK:
                return sizeof(struct shash_desc *);
        }

        return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);

        hash->descsize = crypto_shash_alg(hash)->descsize;
        return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_hash rhash;
        struct shash_alg *salg = __crypto_shash_alg(alg);

        strncpy(rhash.type, "shash", sizeof(rhash.type));

        rhash.blocksize = alg->cra_blocksize;
        rhash.digestsize = salg->digestsize;

        if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
                    sizeof(struct crypto_report_hash), &rhash))
                goto nla_put_failure;
        return 0;

nla_put_failure:
        return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
        .ctxsize = crypto_shash_ctxsize,
        .extsize = crypto_alg_extsize,
        .init = crypto_init_shash_ops,
        .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .report = crypto_shash_report,
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
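
/*
 * Example (illustrative sketch): allocating and releasing a transform;
 * the algorithm name is an assumption for demonstration only.
 *
 *      struct crypto_shash *tfm;
 *
 *      tfm = crypto_alloc_shash("sha256", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      ...
 *      crypto_free_shash(tfm);
 */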

static int shash_prepare_alg(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (alg->digestsize > PAGE_SIZE / 8 ||
            alg->descsize > PAGE_SIZE / 8 ||
            alg->statesize > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        if (!alg->finup)
                alg->finup = shash_finup_unaligned;
        if (!alg->digest)
                alg->digest = shash_digest_unaligned;
        if (!alg->export) {
                alg->export = shash_default_export;
                alg->import = shash_default_import;
                alg->statesize = alg->descsize;
        }
        if (!alg->setkey)
                alg->setkey = shash_no_setkey;

        return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;
        int err;

        err = shash_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
        return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
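
/*
 * Example (illustrative sketch): a minimal algorithm driver supplies
 * only init, update, final, digestsize and descsize;
 * shash_prepare_alg() fills in default finup, digest, export, import
 * and setkey implementations.  All names below are hypothetical.
 *
 *      static struct shash_alg example_alg = {
 *              .digestsize = 20,
 *              .init       = example_init,
 *              .update     = example_update,
 *              .final      = example_final,
 *              .descsize   = sizeof(struct example_ctx),
 *              .base       = {
 *                      .cra_name        = "example",
 *                      .cra_driver_name = "example-generic",
 *                      .cra_blocksize   = 64,
 *                      .cra_module      = THIS_MODULE,
 *              },
 *      };
 *
 * registered with crypto_register_shash(&example_alg) in module init
 * and removed with crypto_unregister_shash(&example_alg) on exit.
 */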

int crypto_register_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_shash(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_shash(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = count - 1; i >= 0; --i) {
                ret = crypto_unregister_shash(&algs[i]);
                if (ret)
                        pr_err("Failed to unregister %s %s: %d\n",
                               algs[i].base.cra_driver_name,
                               algs[i].base.cra_name, ret);
        }

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
                            struct shash_instance *inst)
{
        int err;

        err = shash_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
                            struct shash_alg *alg,
                            struct crypto_instance *inst)
{
        return crypto_init_spawn2(&spawn->base, &alg->base, inst,
                                  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
        return IS_ERR(alg) ? ERR_CAST(alg) :
               container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");