[CRYPTO] hash: Add asynchronous hash support
author     Loc Ho <lho@amcc.com>
           Wed, 14 May 2008 12:41:47 +0000 (20:41 +0800)
committer  Herbert Xu <herbert@gondor.apana.org.au>
           Thu, 10 Jul 2008 12:35:13 +0000 (20:35 +0800)
This patch adds asynchronous hash and digest support.

Signed-off-by: Loc Ho <lho@amcc.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
crypto/Makefile
crypto/ahash.c [new file with mode: 0644]
crypto/api.c
crypto/digest.c
crypto/hash.c
crypto/internal.h
include/crypto/algapi.h
include/linux/crypto.h
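
A usage sketch (not part of the patch itself): hashing a single buffer through
the new asynchronous interface, using only the helpers this patch adds to
include/linux/crypto.h. The completion plumbing around struct completion and
all example_* names are assumptions about how a caller would wait, not
anything defined here.

/*
 * Sketch only: drive the new ahash interface for one buffer.  Everything
 * prefixed example_ is hypothetical; the crypto_*/ahash_* calls are the
 * ones introduced by this patch.
 */
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/completion.h>
#include <linux/err.h>

struct example_wait {
	struct completion done;
	int err;
};

static void example_hash_done(struct crypto_async_request *req, int err)
{
	struct example_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;			/* backlog notification only */
	wait->err = err;
	complete(&wait->done);
}

static int example_hash_buffer(const char *algname, void *buf,
			       unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	struct example_wait wait;
	int err;

	init_completion(&wait.done);

	tfm = crypto_alloc_ahash(algname, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   example_hash_done, &wait);
	ahash_request_set_crypt(req, &sg, out, len);

	err = crypto_ahash_digest(req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		wait_for_completion(&wait.done);
		err = wait.err;
	}

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}

When the provider is one of the existing synchronous hash/digest algorithms,
the wrappers added below in crypto/digest.c and crypto/hash.c complete the
operation inline and crypto_ahash_digest() returns 0 directly, so the wait
path above is only exercised by genuinely asynchronous drivers.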

diff --git a/crypto/Makefile b/crypto/Makefile
index 807656b64e028da4f71ead5ae62ab8170c5c6ef9..d4f3ed857df01bc376aee37a54bb7d2186c529a3 100644
@@ -19,6 +19,7 @@ obj-$(CONFIG_CRYPTO_BLKCIPHER) += crypto_blkcipher.o
 obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
 
 crypto_hash-objs := hash.o
+crypto_hash-objs += ahash.o
 obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o
 
 obj-$(CONFIG_CRYPTO_MANAGER) += cryptomgr.o
diff --git a/crypto/ahash.c b/crypto/ahash.c
new file mode 100644 (file)
index 0000000..a83e035
--- /dev/null
+++ b/crypto/ahash.c
@@ -0,0 +1,106 @@
+/*
+ * Asynchronous Cryptographic Hash operations.
+ *
+ * This is the asynchronous version of hash.c with notification of
+ * completion via a callback.
+ *
+ * Copyright (c) 2008 Loc Ho <lho@amcc.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/algapi.h>
+#include <linux/err.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/sched.h>
+#include <linux/slab.h>
+#include <linux/seq_file.h>
+
+#include "internal.h"
+
+static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
+                               unsigned int keylen)
+{
+       struct ahash_alg *ahash = crypto_ahash_alg(tfm);
+       unsigned long alignmask = crypto_ahash_alignmask(tfm);
+       int ret;
+       u8 *buffer, *alignbuffer;
+       unsigned long absize;
+
+       absize = keylen + alignmask;
+       buffer = kmalloc(absize, GFP_ATOMIC);
+       if (!buffer)
+               return -ENOMEM;
+
+       alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+       memcpy(alignbuffer, key, keylen);
+       ret = ahash->setkey(tfm, alignbuffer, keylen);
+       memset(alignbuffer, 0, keylen);
+       kfree(buffer);
+       return ret;
+}
+
+static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
+                       unsigned int keylen)
+{
+       struct ahash_alg *ahash = crypto_ahash_alg(tfm);
+       unsigned long alignmask = crypto_ahash_alignmask(tfm);
+
+       if ((unsigned long)key & alignmask)
+               return ahash_setkey_unaligned(tfm, key, keylen);
+
+       return ahash->setkey(tfm, key, keylen);
+}
+
+static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type,
+                                       u32 mask)
+{
+       return alg->cra_ctxsize;
+}
+
+static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+{
+       struct ahash_alg *alg = &tfm->__crt_alg->cra_ahash;
+       struct ahash_tfm *crt   = &tfm->crt_ahash;
+
+       if (alg->digestsize > crypto_tfm_alg_blocksize(tfm))
+               return -EINVAL;
+
+       crt->init = alg->init;
+       crt->update = alg->update;
+       crt->final  = alg->final;
+       crt->digest = alg->digest;
+       crt->setkey = ahash_setkey;
+       crt->base   = __crypto_ahash_cast(tfm);
+       crt->digestsize = alg->digestsize;
+
+       return 0;
+}
+
+static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
+       __attribute__ ((unused));
+static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
+{
+       seq_printf(m, "type         : ahash\n");
+       seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
+                                            "yes" : "no");
+       seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
+       seq_printf(m, "digestsize   : %u\n", alg->cra_hash.digestsize);
+}
+
+const struct crypto_type crypto_ahash_type = {
+       .ctxsize = crypto_ahash_ctxsize,
+       .init = crypto_init_ahash_ops,
+#ifdef CONFIG_PROC_FS
+       .show = crypto_ahash_show,
+#endif
+};
+EXPORT_SYMBOL_GPL(crypto_ahash_type);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
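
On the provider side, a hedged sketch of how a driver would hook into the
crypto_ahash_type just defined: it sets CRYPTO_ALG_TYPE_AHASH, points cra_type
at crypto_ahash_type, and fills the ahash union member added to struct
crypto_alg in the include/linux/crypto.h hunk further down. All example_*
names, the SHA-1 sizes, and the stub handler bodies are illustrative only.

/*
 * Sketch only: registering a (hypothetical) asynchronous SHA-1 offload.
 * A real driver would program hardware in the example_* handlers and call
 * the request's completion callback when the operation finishes.
 */
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <crypto/algapi.h>

struct example_sha1_ctx {
	u32 state[5];
};

static int example_sha1_init(struct ahash_request *req)   { return 0; }
static int example_sha1_update(struct ahash_request *req) { return -EINPROGRESS; }
static int example_sha1_final(struct ahash_request *req)  { return -EINPROGRESS; }
static int example_sha1_digest(struct ahash_request *req) { return -EINPROGRESS; }

static struct crypto_alg example_sha1_alg = {
	.cra_name		= "sha1",
	.cra_driver_name	= "sha1-example",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 64,	/* SHA-1 block size */
	.cra_ctxsize		= sizeof(struct example_sha1_ctx),
	.cra_type		= &crypto_ahash_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(example_sha1_alg.cra_list),
	.cra_u			= { .ahash = {
		.digestsize	= 20,	/* SHA-1 digest size */
		.init		= example_sha1_init,
		.update		= example_sha1_update,
		.final		= example_sha1_final,
		.digest		= example_sha1_digest,
	} },
};

static int __init example_mod_init(void)
{
	return crypto_register_alg(&example_sha1_alg);
}

static void __exit example_mod_exit(void)
{
	crypto_unregister_alg(&example_sha1_alg);
}

module_init(example_mod_init);
module_exit(example_mod_exit);
MODULE_LICENSE("GPL");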
diff --git a/crypto/api.c b/crypto/api.c
index 0a0f41ef255ff4b48e69cebdf26db12157a72c52..d06e33270abea8fae3c5ae97354774bca63bf9b3 100644
@@ -235,8 +235,12 @@ static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
                return crypto_init_cipher_ops(tfm);
                
        case CRYPTO_ALG_TYPE_DIGEST:
-               return crypto_init_digest_ops(tfm);
-               
+               if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) !=
+                   CRYPTO_ALG_TYPE_HASH_MASK)
+                       return crypto_init_digest_ops_async(tfm);
+               else
+                       return crypto_init_digest_ops(tfm);
+
        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);
        
diff --git a/crypto/digest.c b/crypto/digest.c
index b526cc348b79313e21bea12808976f392b5505a8..025c9aea24ed814b87ddbd1fa445bfd28fcf6f33 100644
@@ -157,3 +157,84 @@ int crypto_init_digest_ops(struct crypto_tfm *tfm)
 void crypto_exit_digest_ops(struct crypto_tfm *tfm)
 {
 }
+
+static int digest_async_nosetkey(struct crypto_ahash *tfm_async, const u8 *key,
+                       unsigned int keylen)
+{
+       crypto_ahash_clear_flags(tfm_async, CRYPTO_TFM_RES_MASK);
+       return -ENOSYS;
+}
+
+static int digest_async_setkey(struct crypto_ahash *tfm_async, const u8 *key,
+                       unsigned int keylen)
+{
+       struct crypto_tfm    *tfm        = crypto_ahash_tfm(tfm_async);
+       struct digest_alg    *dalg       = &tfm->__crt_alg->cra_digest;
+
+       crypto_ahash_clear_flags(tfm_async, CRYPTO_TFM_RES_MASK);
+       return dalg->dia_setkey(tfm, key, keylen);
+}
+
+static int digest_async_init(struct ahash_request *req)
+{
+       struct crypto_tfm *tfm  = req->base.tfm;
+       struct digest_alg *dalg = &tfm->__crt_alg->cra_digest;
+
+       dalg->dia_init(tfm);
+       return 0;
+}
+
+static int digest_async_update(struct ahash_request *req)
+{
+       struct crypto_tfm *tfm = req->base.tfm;
+       struct hash_desc  desc = {
+               .tfm   = __crypto_hash_cast(tfm),
+               .flags = req->base.flags,
+       };
+
+       update(&desc, req->src, req->nbytes);
+       return 0;
+}
+
+static int digest_async_final(struct ahash_request *req)
+{
+       struct crypto_tfm *tfm  = req->base.tfm;
+       struct hash_desc  desc = {
+               .tfm   = __crypto_hash_cast(tfm),
+               .flags = req->base.flags,
+       };
+
+       final(&desc, req->result);
+       return 0;
+}
+
+static int digest_async_digest(struct ahash_request *req)
+{
+       struct crypto_tfm *tfm  = req->base.tfm;
+       struct hash_desc  desc = {
+               .tfm   = __crypto_hash_cast(tfm),
+               .flags = req->base.flags,
+       };
+
+       return digest(&desc, req->src, req->nbytes, req->result);
+}
+
+int crypto_init_digest_ops_async(struct crypto_tfm *tfm)
+{
+       struct ahash_tfm  *crt  = &tfm->crt_ahash;
+       struct digest_alg *dalg = &tfm->__crt_alg->cra_digest;
+
+       if (dalg->dia_digestsize > crypto_tfm_alg_blocksize(tfm))
+               return -EINVAL;
+
+       crt->init       = digest_async_init;
+       crt->update     = digest_async_update;
+       crt->final      = digest_async_final;
+       crt->digest     = digest_async_digest;
+       crt->setkey     = dalg->dia_setkey ? digest_async_setkey :
+                                               digest_async_nosetkey;
+       crt->digestsize = dalg->dia_digestsize;
+       crt->base       = __crypto_ahash_cast(tfm);
+
+       return 0;
+}
diff --git a/crypto/hash.c b/crypto/hash.c
index 7dcff671c19b6d91a6127467b57c4db91a23fff4..f9400a014e74b3bbb3147b046aae36dad6abfad4 100644
@@ -59,24 +59,108 @@ static int hash_setkey(struct crypto_hash *crt, const u8 *key,
        return alg->setkey(crt, key, keylen);
 }
 
-static int crypto_init_hash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+static int hash_async_setkey(struct crypto_ahash *tfm_async, const u8 *key,
+                       unsigned int keylen)
+{
+       struct crypto_tfm  *tfm      = crypto_ahash_tfm(tfm_async);
+       struct crypto_hash *tfm_hash = __crypto_hash_cast(tfm);
+       struct hash_alg    *alg      = &tfm->__crt_alg->cra_hash;
+
+       return alg->setkey(tfm_hash, key, keylen);
+}
+
+static int hash_async_init(struct ahash_request *req)
+{
+       struct crypto_tfm *tfm = req->base.tfm;
+       struct hash_alg   *alg = &tfm->__crt_alg->cra_hash;
+       struct hash_desc  desc = {
+               .tfm = __crypto_hash_cast(tfm),
+               .flags = req->base.flags,
+       };
+
+       return alg->init(&desc);
+}
+
+static int hash_async_update(struct ahash_request *req)
+{
+       struct crypto_tfm *tfm = req->base.tfm;
+       struct hash_alg   *alg = &tfm->__crt_alg->cra_hash;
+       struct hash_desc  desc = {
+               .tfm = __crypto_hash_cast(tfm),
+               .flags = req->base.flags,
+       };
+
+       return alg->update(&desc, req->src, req->nbytes);
+}
+
+static int hash_async_final(struct ahash_request *req)
+{
+       struct crypto_tfm *tfm = req->base.tfm;
+       struct hash_alg   *alg = &tfm->__crt_alg->cra_hash;
+       struct hash_desc  desc = {
+               .tfm = __crypto_hash_cast(tfm),
+               .flags = req->base.flags,
+       };
+
+       return alg->final(&desc, req->result);
+}
+
+static int hash_async_digest(struct ahash_request *req)
+{
+       struct crypto_tfm *tfm = req->base.tfm;
+       struct hash_alg   *alg = &tfm->__crt_alg->cra_hash;
+       struct hash_desc  desc = {
+               .tfm = __crypto_hash_cast(tfm),
+               .flags = req->base.flags,
+       };
+
+       return alg->digest(&desc, req->src, req->nbytes, req->result);
+}
+
+static int crypto_init_hash_ops_async(struct crypto_tfm *tfm)
+{
+       struct ahash_tfm *crt = &tfm->crt_ahash;
+       struct hash_alg  *alg = &tfm->__crt_alg->cra_hash;
+
+       crt->init       = hash_async_init;
+       crt->update     = hash_async_update;
+       crt->final      = hash_async_final;
+       crt->digest     = hash_async_digest;
+       crt->setkey     = hash_async_setkey;
+       crt->digestsize = alg->digestsize;
+       crt->base       = __crypto_ahash_cast(tfm);
+
+       return 0;
+}
+
+static int crypto_init_hash_ops_sync(struct crypto_tfm *tfm)
 {
        struct hash_tfm *crt = &tfm->crt_hash;
        struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
 
-       if (alg->digestsize > crypto_tfm_alg_blocksize(tfm))
-               return -EINVAL;
-
-       crt->init = alg->init;
-       crt->update = alg->update;
-       crt->final = alg->final;
-       crt->digest = alg->digest;
-       crt->setkey = hash_setkey;
+       crt->init       = alg->init;
+       crt->update     = alg->update;
+       crt->final      = alg->final;
+       crt->digest     = alg->digest;
+       crt->setkey     = hash_setkey;
        crt->digestsize = alg->digestsize;
 
        return 0;
 }
 
+static int crypto_init_hash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+{
+       struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
+
+       if (alg->digestsize > crypto_tfm_alg_blocksize(tfm))
+               return -EINVAL;
+
+       if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) != CRYPTO_ALG_TYPE_HASH_MASK)
+               return crypto_init_hash_ops_async(tfm);
+       else
+               return crypto_init_hash_ops_sync(tfm);
+}
+
 static void crypto_hash_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
 static void crypto_hash_show(struct seq_file *m, struct crypto_alg *alg)
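
Both the crypto/api.c change above (for plain digests) and crypto_init_hash_ops()
here key off the same mask test. The bit layout reshuffled in the
include/linux/crypto.h hunk below makes it work: crypto_alloc_ahash() requests
mask CRYPTO_ALG_TYPE_AHASH_MASK (0x0c), loose enough to match DIGEST (0x08),
HASH (0x09) and AHASH (0x0a) algorithms, and since 0x0c & 0x0e != 0x0e the
async wrappers get installed; the existing crypto_alloc_hash() path keeps the
full 0x0e mask and therefore the synchronous ops. A throwaway user-space check
of that arithmetic, with the constants copied from the hunk below:

/* Stand-alone illustration (not kernel code) of the mask test used in
 * crypto_init_ops() and crypto_init_hash_ops(); constants copied from the
 * include/linux/crypto.h hunk in this patch. */
#include <assert.h>
#include <stdio.h>

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e	/* crypto_alloc_hash() */
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000c	/* crypto_alloc_ahash() */

int main(void)
{
	/* ahash caller: 0x0c & 0x0e == 0x0c != 0x0e -> async ops installed */
	assert((CRYPTO_ALG_TYPE_AHASH_MASK & CRYPTO_ALG_TYPE_HASH_MASK) !=
	       CRYPTO_ALG_TYPE_HASH_MASK);

	/* hash caller: 0x0e & 0x0e == 0x0e -> synchronous ops kept */
	assert((CRYPTO_ALG_TYPE_HASH_MASK & CRYPTO_ALG_TYPE_HASH_MASK) ==
	       CRYPTO_ALG_TYPE_HASH_MASK);

	puts("mask dispatch behaves as described");
	return 0;
}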
diff --git a/crypto/internal.h b/crypto/internal.h
index 32f4c214560315ccc284346499f6b9428bae1d66..683fcb2d91f41968e946792e82d6eae3e917913c 100644
@@ -86,6 +86,7 @@ struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask);
 struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask);
 
 int crypto_init_digest_ops(struct crypto_tfm *tfm);
+int crypto_init_digest_ops_async(struct crypto_tfm *tfm);
 int crypto_init_cipher_ops(struct crypto_tfm *tfm);
 int crypto_init_compress_ops(struct crypto_tfm *tfm);
 
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index 60d06e784be3aa54464c0263e4611c087cc0a17c..fef272a8ceeb9f4fa00995c90d851092d3c60372 100644
@@ -98,6 +98,7 @@ extern const struct crypto_type crypto_ablkcipher_type;
 extern const struct crypto_type crypto_aead_type;
 extern const struct crypto_type crypto_blkcipher_type;
 extern const struct crypto_type crypto_hash_type;
+extern const struct crypto_type crypto_ahash_type;
 
 void crypto_mod_put(struct crypto_alg *alg);
 
@@ -314,5 +315,40 @@ static inline int crypto_requires_sync(u32 type, u32 mask)
        return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
 }
 
+static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm)
+{
+       return crypto_tfm_ctx(&tfm->base);
+}
+
+static inline struct ahash_alg *crypto_ahash_alg(
+       struct crypto_ahash *tfm)
+{
+       return &crypto_ahash_tfm(tfm)->__crt_alg->cra_ahash;
+}
+
+static inline int ahash_enqueue_request(struct crypto_queue *queue,
+                                            struct ahash_request *request)
+{
+       return crypto_enqueue_request(queue, &request->base);
+}
+
+static inline struct ahash_request *ahash_dequeue_request(
+       struct crypto_queue *queue)
+{
+       return ahash_request_cast(crypto_dequeue_request(queue));
+}
+
+static inline void *ahash_request_ctx(struct ahash_request *req)
+{
+       return req->__ctx;
+}
+
+static inline int ahash_tfm_in_queue(struct crypto_queue *queue,
+                                         struct crypto_ahash *tfm)
+{
+       return crypto_tfm_in_queue(queue, crypto_ahash_tfm(tfm));
+}
+
+
 #endif /* _CRYPTO_ALGAPI_H */
 
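
The enqueue/dequeue helpers added above mirror the existing ablkcipher ones
and are what queue-based offload drivers are expected to build on. A hedged
sketch of that pattern, using the struct crypto_queue machinery that already
exists in crypto/algapi.h (all example_* names are hypothetical):

/*
 * Sketch only: backlogging ahash requests on a crypto_queue and draining
 * them from a worker or interrupt handler.  A real driver initialises
 * example_queue with crypto_init_queue() at probe time.
 */
#include <linux/spinlock.h>
#include <crypto/algapi.h>

static struct crypto_queue example_queue;
static DEFINE_SPINLOCK(example_lock);

static int example_ahash_digest(struct ahash_request *req)
{
	unsigned long flags;
	int err;

	spin_lock_irqsave(&example_lock, flags);
	err = ahash_enqueue_request(&example_queue, req);
	spin_unlock_irqrestore(&example_lock, flags);

	/* usually -EINPROGRESS, or -EBUSY when the request was backlogged */
	return err;
}

static void example_drain_one(void)
{
	struct ahash_request *req;
	unsigned long flags;

	spin_lock_irqsave(&example_lock, flags);
	req = ahash_dequeue_request(&example_queue);
	spin_unlock_irqrestore(&example_lock, flags);

	if (!req)
		return;

	/* ... program the hardware here; on completion, notify the caller: */
	req->base.complete(&req->base, 0);
}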
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index 425824bd49f308f64074943fd257bd69f512edce..b6efe569128d8cf5a786100346a65789ad8fdd08 100644
  */
 #define CRYPTO_ALG_TYPE_MASK           0x0000000f
 #define CRYPTO_ALG_TYPE_CIPHER         0x00000001
-#define CRYPTO_ALG_TYPE_DIGEST         0x00000002
-#define CRYPTO_ALG_TYPE_HASH           0x00000003
+#define CRYPTO_ALG_TYPE_COMPRESS       0x00000002
+#define CRYPTO_ALG_TYPE_AEAD           0x00000003
 #define CRYPTO_ALG_TYPE_BLKCIPHER      0x00000004
 #define CRYPTO_ALG_TYPE_ABLKCIPHER     0x00000005
 #define CRYPTO_ALG_TYPE_GIVCIPHER      0x00000006
-#define CRYPTO_ALG_TYPE_COMPRESS       0x00000008
-#define CRYPTO_ALG_TYPE_AEAD           0x00000009
+#define CRYPTO_ALG_TYPE_DIGEST         0x00000008
+#define CRYPTO_ALG_TYPE_HASH           0x00000009
+#define CRYPTO_ALG_TYPE_AHASH          0x0000000a
 
 #define CRYPTO_ALG_TYPE_HASH_MASK      0x0000000e
+#define CRYPTO_ALG_TYPE_AHASH_MASK     0x0000000c
 #define CRYPTO_ALG_TYPE_BLKCIPHER_MASK 0x0000000c
 
 #define CRYPTO_ALG_LARVAL              0x00000010
@@ -102,6 +104,7 @@ struct crypto_async_request;
 struct crypto_aead;
 struct crypto_blkcipher;
 struct crypto_hash;
+struct crypto_ahash;
 struct crypto_tfm;
 struct crypto_type;
 struct aead_givcrypt_request;
@@ -131,6 +134,18 @@ struct ablkcipher_request {
        void *__ctx[] CRYPTO_MINALIGN_ATTR;
 };
 
+struct ahash_request {
+       struct crypto_async_request base;
+
+       void *info;
+
+       unsigned int nbytes;
+       struct scatterlist *src;
+       u8                 *result;
+
+       void *__ctx[] CRYPTO_MINALIGN_ATTR;
+};
+
 /**
  *     struct aead_request - AEAD request
  *     @base: Common attributes for async crypto requests
@@ -195,6 +210,17 @@ struct ablkcipher_alg {
        unsigned int ivsize;
 };
 
+struct ahash_alg {
+       int (*init)(struct ahash_request *req);
+       int (*update)(struct ahash_request *req);
+       int (*final)(struct ahash_request *req);
+       int (*digest)(struct ahash_request *req);
+       int (*setkey)(struct crypto_ahash *tfm, const u8 *key,
+                       unsigned int keylen);
+
+       unsigned int digestsize;
+};
+
 struct aead_alg {
        int (*setkey)(struct crypto_aead *tfm, const u8 *key,
                      unsigned int keylen);
@@ -272,6 +298,7 @@ struct compress_alg {
 #define cra_cipher     cra_u.cipher
 #define cra_digest     cra_u.digest
 #define cra_hash       cra_u.hash
+#define cra_ahash      cra_u.ahash
 #define cra_compress   cra_u.compress
 
 struct crypto_alg {
@@ -298,6 +325,7 @@ struct crypto_alg {
                struct cipher_alg cipher;
                struct digest_alg digest;
                struct hash_alg hash;
+               struct ahash_alg ahash;
                struct compress_alg compress;
        } cra_u;
 
@@ -383,6 +411,19 @@ struct hash_tfm {
        unsigned int digestsize;
 };
 
+struct ahash_tfm {
+       int (*init)(struct ahash_request *req);
+       int (*update)(struct ahash_request *req);
+       int (*final)(struct ahash_request *req);
+       int (*digest)(struct ahash_request *req);
+       int (*setkey)(struct crypto_ahash *tfm, const u8 *key,
+                       unsigned int keylen);
+
+       unsigned int digestsize;
+       struct crypto_ahash *base;
+       unsigned int reqsize;
+};
+
 struct compress_tfm {
        int (*cot_compress)(struct crypto_tfm *tfm,
                            const u8 *src, unsigned int slen,
@@ -397,6 +438,7 @@ struct compress_tfm {
 #define crt_blkcipher  crt_u.blkcipher
 #define crt_cipher     crt_u.cipher
 #define crt_hash       crt_u.hash
+#define crt_ahash      crt_u.ahash
 #define crt_compress   crt_u.compress
 
 struct crypto_tfm {
@@ -409,6 +451,7 @@ struct crypto_tfm {
                struct blkcipher_tfm blkcipher;
                struct cipher_tfm cipher;
                struct hash_tfm hash;
+               struct ahash_tfm ahash;
                struct compress_tfm compress;
        } crt_u;
        
@@ -441,6 +484,10 @@ struct crypto_hash {
        struct crypto_tfm base;
 };
 
+struct crypto_ahash {
+       struct crypto_tfm base;
+};
+
 enum {
        CRYPTOA_UNSPEC,
        CRYPTOA_ALG,
@@ -1264,5 +1311,137 @@ static inline int crypto_comp_decompress(struct crypto_comp *tfm,
                                                    src, slen, dst, dlen);
 }
 
+static inline struct crypto_ahash *__crypto_ahash_cast(struct crypto_tfm *tfm)
+{
+       return (struct crypto_ahash *)tfm;
+}
+
+static inline struct crypto_ahash *crypto_alloc_ahash(const char *alg_name,
+                                                     u32 type, u32 mask)
+{
+       type &= ~CRYPTO_ALG_TYPE_MASK;
+       mask &= ~CRYPTO_ALG_TYPE_MASK;
+       type |= CRYPTO_ALG_TYPE_AHASH;
+       mask |= CRYPTO_ALG_TYPE_AHASH_MASK;
+
+       return __crypto_ahash_cast(crypto_alloc_base(alg_name, type, mask));
+}
+
+static inline struct crypto_tfm *crypto_ahash_tfm(struct crypto_ahash *tfm)
+{
+       return &tfm->base;
+}
+
+static inline void crypto_free_ahash(struct crypto_ahash *tfm)
+{
+       crypto_free_tfm(crypto_ahash_tfm(tfm));
+}
+
+static inline unsigned int crypto_ahash_alignmask(
+       struct crypto_ahash *tfm)
+{
+       return crypto_tfm_alg_alignmask(crypto_ahash_tfm(tfm));
+}
+
+static inline struct ahash_tfm *crypto_ahash_crt(struct crypto_ahash *tfm)
+{
+       return &crypto_ahash_tfm(tfm)->crt_ahash;
+}
+
+static inline unsigned int crypto_ahash_digestsize(struct crypto_ahash *tfm)
+{
+       return crypto_ahash_crt(tfm)->digestsize;
+}
+
+static inline u32 crypto_ahash_get_flags(struct crypto_ahash *tfm)
+{
+       return crypto_tfm_get_flags(crypto_ahash_tfm(tfm));
+}
+
+static inline void crypto_ahash_set_flags(struct crypto_ahash *tfm, u32 flags)
+{
+       crypto_tfm_set_flags(crypto_ahash_tfm(tfm), flags);
+}
+
+static inline void crypto_ahash_clear_flags(struct crypto_ahash *tfm, u32 flags)
+{
+       crypto_tfm_clear_flags(crypto_ahash_tfm(tfm), flags);
+}
+
+static inline struct crypto_ahash *crypto_ahash_reqtfm(
+       struct ahash_request *req)
+{
+       return __crypto_ahash_cast(req->base.tfm);
+}
+
+static inline unsigned int crypto_ahash_reqsize(struct crypto_ahash *tfm)
+{
+       return crypto_ahash_crt(tfm)->reqsize;
+}
+
+static inline int crypto_ahash_setkey(struct crypto_ahash *tfm,
+                                     const u8 *key, unsigned int keylen)
+{
+       struct ahash_tfm *crt = crypto_ahash_crt(tfm);
+
+       return crt->setkey(crt->base, key, keylen);
+}
+
+static inline int crypto_ahash_digest(struct ahash_request *req)
+{
+       struct ahash_tfm *crt = crypto_ahash_crt(crypto_ahash_reqtfm(req));
+       return crt->digest(req);
+}
+
+static inline void ahash_request_set_tfm(struct ahash_request *req,
+                                        struct crypto_ahash *tfm)
+{
+       req->base.tfm = crypto_ahash_tfm(crypto_ahash_crt(tfm)->base);
+}
+
+static inline struct ahash_request *ahash_request_alloc(
+       struct crypto_ahash *tfm, gfp_t gfp)
+{
+       struct ahash_request *req;
+
+       req = kmalloc(sizeof(struct ahash_request) +
+                     crypto_ahash_reqsize(tfm), gfp);
+
+       if (likely(req))
+               ahash_request_set_tfm(req, tfm);
+
+       return req;
+}
+
+static inline void ahash_request_free(struct ahash_request *req)
+{
+       kfree(req);
+}
+
+static inline struct ahash_request *ahash_request_cast(
+       struct crypto_async_request *req)
+{
+       return container_of(req, struct ahash_request, base);
+}
+
+static inline void ahash_request_set_callback(struct ahash_request *req,
+                                             u32 flags,
+                                             crypto_completion_t complete,
+                                             void *data)
+{
+       req->base.complete = complete;
+       req->base.data = data;
+       req->base.flags = flags;
+}
+
+static inline void ahash_request_set_crypt(struct ahash_request *req,
+                                          struct scatterlist *src, u8 *result,
+                                          unsigned int nbytes)
+{
+       req->src = src;
+       req->nbytes = nbytes;
+       req->result = result;
+}
+
 #endif /* _LINUX_CRYPTO_H */