/*
 * Cryptographic API.
 *
 * Support for VIA PadLock hardware crypto engine.
 *
 * Copyright (c) 2006 Michal Ludvig <michal@logix.cz>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/module.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include "padlock.h"	/* PFX, PADLOCK_ALIGNMENT, PADLOCK_CRA_PRIORITY */
#define SHA1_DEFAULT_FALLBACK	"sha1-generic"
#define SHA1_DIGEST_SIZE	20
#define SHA1_HMAC_BLOCK_SIZE	64

#define SHA256_DEFAULT_FALLBACK	"sha256-generic"
#define SHA256_DIGEST_SIZE	32
#define SHA256_HMAC_BLOCK_SIZE	64
static char *sha1_fallback = SHA1_DEFAULT_FALLBACK;
static char *sha256_fallback = SHA256_DEFAULT_FALLBACK;

module_param(sha1_fallback, charp, 0644);
module_param(sha256_fallback, charp, 0644);

MODULE_PARM_DESC(sha1_fallback, "Fallback driver for SHA1. Default is "
		 SHA1_DEFAULT_FALLBACK);
MODULE_PARM_DESC(sha256_fallback, "Fallback driver for SHA256. Default is "
		 SHA256_DEFAULT_FALLBACK);
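/*
 * Per-transform state: the one-page collection buffer, how much of it is
 * filled, whether we have switched over to the software fallback, and the
 * PadLock routine (SHA1 or SHA256) used to produce the final digest.
 */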
struct padlock_sha_ctx {
	char		*data;
	size_t		used;
	int		bypass;
	void (*f_sha_padlock)(const char *in, char *out, int count);
	struct crypto_tfm *fallback_tfm;
};

static inline struct padlock_sha_ctx *ctx(struct crypto_tfm *tfm)
{
	return (struct padlock_sha_ctx *)(crypto_tfm_ctx(tfm));
}

/* We'll need aligned address on the stack */
#define NEAREST_ALIGNED(ptr) \
	((void *)ALIGN((size_t)(ptr), PADLOCK_ALIGNMENT))
static struct crypto_alg sha1_alg, sha256_alg;
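/*
 * The PadLock engine is handed the complete message in one go from
 * padlock_sha_final(), so update() only collects data into the one-page
 * buffer.  Once a message outgrows that buffer, we "bypass" to the
 * software fallback transform and feed it everything collected so far.
 */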
static void padlock_sha_bypass(struct crypto_tfm *tfm)
{
	if (ctx(tfm)->bypass)
		return;

	BUG_ON(!ctx(tfm)->fallback_tfm);

	crypto_digest_init(ctx(tfm)->fallback_tfm);
	if (ctx(tfm)->data && ctx(tfm)->used) {
		struct scatterlist sg;

		sg_set_buf(&sg, ctx(tfm)->data, ctx(tfm)->used);
		crypto_digest_update(ctx(tfm)->fallback_tfm, &sg, 1);
	}

	ctx(tfm)->used = 0;
	ctx(tfm)->bypass = 1;
}
static void padlock_sha_init(struct crypto_tfm *tfm)
{
	ctx(tfm)->used = 0;
	ctx(tfm)->bypass = 0;
}
static void padlock_sha_update(struct crypto_tfm *tfm,
			const uint8_t *data, unsigned int length)
{
	/* Our buffer is always one page. */
	if (unlikely(!ctx(tfm)->bypass &&
			(ctx(tfm)->used + length > PAGE_SIZE)))
		padlock_sha_bypass(tfm);

	if (unlikely(ctx(tfm)->bypass)) {
		struct scatterlist sg;
		BUG_ON(!ctx(tfm)->fallback_tfm);
		sg_set_buf(&sg, (uint8_t *)data, length);
		crypto_digest_update(ctx(tfm)->fallback_tfm, &sg, 1);
		return;
	}

	memcpy(ctx(tfm)->data + ctx(tfm)->used, data, length);
	ctx(tfm)->used += length;
}
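/*
 * Copy the digest out of the hardware result buffer, converting each
 * 32-bit word from the CPU's little-endian order into the big-endian
 * byte order that SHA1/SHA256 digests are defined in.
 */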
static inline void padlock_output_block(uint32_t *src,
			uint32_t *dst, size_t count)
{
	while (count--)
		*dst++ = swab32(*src++);
}
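/*
 * The values stored into the result buffer below are the standard SHA1
 * initial hash values (FIPS 180-2).  "rep xsha1" takes the input in ESI,
 * the aligned state/result buffer in EDI and the byte count in ECX; EAX
 * is cleared.  padlock_do_sha256() works the same way with the SHA256
 * initial values and "rep xsha256".
 */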
static void padlock_do_sha1(const char *in, char *out, int count)
{
	/* We can't store directly to *out as it may be unaligned. */
	/* BTW Don't reduce the buffer size below 128 Bytes!
	 *     PadLock microcode needs it that big. */
	char buf[128+16];
	char *result = NEAREST_ALIGNED(buf);

	((uint32_t *)result)[0] = 0x67452301;
	((uint32_t *)result)[1] = 0xEFCDAB89;
	((uint32_t *)result)[2] = 0x98BADCFE;
	((uint32_t *)result)[3] = 0x10325476;
	((uint32_t *)result)[4] = 0xC3D2E1F0;

	asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
		      : "+S"(in), "+D"(result)
		      : "c"(count), "a"(0));

	padlock_output_block((uint32_t *)result, (uint32_t *)out, 5);
}
static void padlock_do_sha256(const char *in, char *out, int count)
{
	/* We can't store directly to *out as it may be unaligned. */
	/* BTW Don't reduce the buffer size below 128 Bytes!
	 *     PadLock microcode needs it that big. */
	char buf[128+16];
	char *result = NEAREST_ALIGNED(buf);

	((uint32_t *)result)[0] = 0x6A09E667;
	((uint32_t *)result)[1] = 0xBB67AE85;
	((uint32_t *)result)[2] = 0x3C6EF372;
	((uint32_t *)result)[3] = 0xA54FF53A;
	((uint32_t *)result)[4] = 0x510E527F;
	((uint32_t *)result)[5] = 0x9B05688C;
	((uint32_t *)result)[6] = 0x1F83D9AB;
	((uint32_t *)result)[7] = 0x5BE0CD19;

	asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
		      : "+S"(in), "+D"(result)
		      : "c"(count), "a"(0));

	padlock_output_block((uint32_t *)result, (uint32_t *)out, 8);
}
static void padlock_sha_final(struct crypto_tfm *tfm, uint8_t *out)
{
	if (unlikely(ctx(tfm)->bypass)) {
		BUG_ON(!ctx(tfm)->fallback_tfm);
		crypto_digest_final(ctx(tfm)->fallback_tfm, out);
		ctx(tfm)->bypass = 0;
		return;
	}

	/* Pass the input buffer to PadLock microcode... */
	ctx(tfm)->f_sha_padlock(ctx(tfm)->data, out, ctx(tfm)->used);

	ctx(tfm)->used = 0;
}
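/*
 * Per-tfm setup: allocate the one-page collection buffer and the software
 * fallback transform named by the corresponding module parameter.  Both
 * are released again in padlock_cra_exit().
 */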
static int padlock_cra_init(struct crypto_tfm *tfm, const char *fallback_driver_name)
{
	/* For now we'll allocate one page. This
	 * could eventually be configurable one day. */
	ctx(tfm)->data = (char *)__get_free_page(GFP_KERNEL);
	if (!ctx(tfm)->data)
		return -ENOMEM;

	/* Allocate a fallback and abort if it failed. */
	ctx(tfm)->fallback_tfm = crypto_alloc_tfm(fallback_driver_name, 0);
	if (!ctx(tfm)->fallback_tfm) {
		printk(KERN_WARNING PFX "Fallback driver '%s' could not be loaded!\n",
		       fallback_driver_name);
		free_page((unsigned long)(ctx(tfm)->data));
		return -ENOENT;
	}

	return 0;
}
static int padlock_sha1_cra_init(struct crypto_tfm *tfm)
{
	ctx(tfm)->f_sha_padlock = padlock_do_sha1;

	return padlock_cra_init(tfm, sha1_fallback);
}
static int padlock_sha256_cra_init(struct crypto_tfm *tfm)
{
	ctx(tfm)->f_sha_padlock = padlock_do_sha256;

	return padlock_cra_init(tfm, sha256_fallback);
}
static void padlock_cra_exit(struct crypto_tfm *tfm)
{
	if (ctx(tfm)->data) {
		free_page((unsigned long)(ctx(tfm)->data));
		ctx(tfm)->data = NULL;
	}

	BUG_ON(!ctx(tfm)->fallback_tfm);
	crypto_free_tfm(ctx(tfm)->fallback_tfm);
	ctx(tfm)->fallback_tfm = NULL;
}
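/*
 * Both algorithms register under the generic names "sha1"/"sha256" with
 * PADLOCK_CRA_PRIORITY, so the crypto API prefers them over the plain C
 * implementations whenever this driver is loaded.
 */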
static struct crypto_alg sha1_alg = {
	.cra_name		= "sha1",
	.cra_driver_name	= "sha1-padlock",
	.cra_priority		= PADLOCK_CRA_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_DIGEST,
	.cra_blocksize		= SHA1_HMAC_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct padlock_sha_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(sha1_alg.cra_list),
	.cra_init		= padlock_sha1_cra_init,
	.cra_exit		= padlock_cra_exit,
	.cra_u			= {
		.digest = {
			.dia_digestsize	= SHA1_DIGEST_SIZE,
			.dia_init	= padlock_sha_init,
			.dia_update	= padlock_sha_update,
			.dia_final	= padlock_sha_final,
		}
	}
};
static struct crypto_alg sha256_alg = {
	.cra_name		= "sha256",
	.cra_driver_name	= "sha256-padlock",
	.cra_priority		= PADLOCK_CRA_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_DIGEST,
	.cra_blocksize		= SHA256_HMAC_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct padlock_sha_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(sha256_alg.cra_list),
	.cra_init		= padlock_sha256_cra_init,
	.cra_exit		= padlock_cra_exit,
	.cra_u			= {
		.digest = {
			.dia_digestsize	= SHA256_DIGEST_SIZE,
			.dia_init	= padlock_sha_init,
			.dia_update	= padlock_sha_update,
			.dia_final	= padlock_sha_final,
		}
	}
};
static void __init padlock_sha_check_fallbacks(void)
{
	static struct crypto_tfm *tfm_sha1, *tfm_sha256;

	/* We'll try to allocate one TFM for each fallback
	 * to test that the modules are available. */
	tfm_sha1 = crypto_alloc_tfm(sha1_fallback, 0);
	if (!tfm_sha1)
		printk(KERN_WARNING PFX "Couldn't load fallback module for '%s'. Tried '%s'.\n",
		       sha1_alg.cra_name, sha1_fallback);
	else {
		printk(KERN_NOTICE PFX "Fallback for '%s' is driver '%s' (prio=%d)\n", sha1_alg.cra_name,
		       crypto_tfm_alg_driver_name(tfm_sha1), crypto_tfm_alg_priority(tfm_sha1));
		crypto_free_tfm(tfm_sha1);
	}

	tfm_sha256 = crypto_alloc_tfm(sha256_fallback, 0);
	if (!tfm_sha256)
		printk(KERN_WARNING PFX "Couldn't load fallback module for '%s'. Tried '%s'.\n",
		       sha256_alg.cra_name, sha256_fallback);
	else {
		printk(KERN_NOTICE PFX "Fallback for '%s' is driver '%s' (prio=%d)\n", sha256_alg.cra_name,
		       crypto_tfm_alg_driver_name(tfm_sha256), crypto_tfm_alg_priority(tfm_sha256));
		crypto_free_tfm(tfm_sha256);
	}
}
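/*
 * cpu_has_phe reports that the CPU advertises the PadLock Hash Engine;
 * cpu_has_phe_enabled reports that the engine has actually been enabled.
 * Both must be set before the algorithms are registered.
 */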
static int __init padlock_init(void)
{
	int rc = -ENODEV;

	if (!cpu_has_phe) {
		printk(KERN_ERR PFX "VIA PadLock Hash Engine not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_phe_enabled) {
		printk(KERN_ERR PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
		return -ENODEV;
	}

	padlock_sha_check_fallbacks();

	rc = crypto_register_alg(&sha1_alg);
	if (rc)
		goto out;

	rc = crypto_register_alg(&sha256_alg);
	if (rc)
		goto out_unreg1;

	printk(KERN_NOTICE PFX "Using VIA PadLock ACE for SHA1/SHA256 algorithms.\n");

	return 0;

out_unreg1:
	crypto_unregister_alg(&sha1_alg);
out:
	printk(KERN_ERR PFX "VIA PadLock SHA1/SHA256 initialization failed.\n");
	return rc;
}
static void __exit padlock_fini(void)
{
	crypto_unregister_alg(&sha1_alg);
	crypto_unregister_alg(&sha256_alg);
}
module_init(padlock_init);
module_exit(padlock_fini);

MODULE_DESCRIPTION("VIA PadLock SHA1/SHA256 algorithms support.");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");

MODULE_ALIAS("sha1-padlock");
MODULE_ALIAS("sha256-padlock");