Commit ca7c39385ce1a7b44894a4b225a4608624e90730
Committed by
Herbert Xu
1 parent
fe3c5206ad
Exists in
master
and in
20 other branches
[CRYPTO] api: Handle unaligned keys in setkey
setkey() in {cipher,blkcipher,ablkcipher,hash}.c does not respect the alignment requested by the algorithm. This patch fixes it. The extra memory is allocated by kmalloc() with the GFP_ATOMIC flag.

Signed-off-by: Sebastian Siewior <linux-crypto@ml.breakpoint.cc>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Showing 4 changed files with 117 additions and 4 deletions Side-by-side Diff
crypto/ablkcipher.c
... | ... | @@ -19,15 +19,40 @@ |
19 | 19 | #include <linux/module.h> |
20 | 20 | #include <linux/seq_file.h> |
21 | 21 | |
22 | +static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key, unsigned int keylen) | |
23 | +{ | |
24 | + struct ablkcipher_alg *cipher = crypto_ablkcipher_alg(tfm); | |
25 | + unsigned long alignmask = crypto_ablkcipher_alignmask(tfm); | |
26 | + int ret; | |
27 | + u8 *buffer, *alignbuffer; | |
28 | + unsigned long absize; | |
29 | + | |
30 | + absize = keylen + alignmask; | |
31 | + buffer = kmalloc(absize, GFP_ATOMIC); | |
32 | + if (!buffer) | |
33 | + return -ENOMEM; | |
34 | + | |
35 | + alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); | |
36 | + memcpy(alignbuffer, key, keylen); | |
37 | + ret = cipher->setkey(tfm, alignbuffer, keylen); | |
38 | + memset(alignbuffer, 0, absize); | |
39 | + kfree(buffer); | |
40 | + return ret; | |
41 | +} | |
42 | + | |
22 | 43 | static int setkey(struct crypto_ablkcipher *tfm, const u8 *key, |
23 | 44 | unsigned int keylen) |
24 | 45 | { |
25 | 46 | struct ablkcipher_alg *cipher = crypto_ablkcipher_alg(tfm); |
47 | + unsigned long alignmask = crypto_ablkcipher_alignmask(tfm); | |
26 | 48 | |
27 | 49 | if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) { |
28 | 50 | crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); |
29 | 51 | return -EINVAL; |
30 | 52 | } |
53 | + | |
54 | + if ((unsigned long)key & alignmask) | |
55 | + return setkey_unaligned(tfm, key, keylen); | |
31 | 56 | |
32 | 57 | return cipher->setkey(tfm, key, keylen); |
33 | 58 | } |
crypto/blkcipher.c
... | ... | @@ -336,15 +336,40 @@ |
336 | 336 | return blkcipher_walk_next(desc, walk); |
337 | 337 | } |
338 | 338 | |
339 | +static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) | |
340 | +{ | |
341 | + struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher; | |
342 | + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); | |
343 | + int ret; | |
344 | + u8 *buffer, *alignbuffer; | |
345 | + unsigned long absize; | |
346 | + | |
347 | + absize = keylen + alignmask; | |
348 | + buffer = kmalloc(absize, GFP_ATOMIC); | |
349 | + if (!buffer) | |
350 | + return -ENOMEM; | |
351 | + | |
352 | + alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); | |
353 | + memcpy(alignbuffer, key, keylen); | |
354 | + ret = cipher->setkey(tfm, alignbuffer, keylen); | |
355 | + memset(alignbuffer, 0, absize); | |
356 | + kfree(buffer); | |
357 | + return ret; | |
358 | +} | |
359 | + | |
339 | 360 | static int setkey(struct crypto_tfm *tfm, const u8 *key, |
340 | 361 | unsigned int keylen) |
341 | 362 | { |
342 | 363 | struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher; |
364 | + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); | |
343 | 365 | |
344 | 366 | if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) { |
345 | 367 | tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; |
346 | 368 | return -EINVAL; |
347 | 369 | } |
370 | + | |
371 | + if ((unsigned long)key & alignmask) | |
372 | + return setkey_unaligned(tfm, key, keylen); | |
348 | 373 | |
349 | 374 | return cipher->setkey(tfm, key, keylen); |
350 | 375 | } |
crypto/cipher.c
... | ... | @@ -20,16 +20,43 @@ |
20 | 20 | #include <linux/string.h> |
21 | 21 | #include "internal.h" |
22 | 22 | |
23 | +static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) | |
24 | +{ | |
25 | + struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher; | |
26 | + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); | |
27 | + int ret; | |
28 | + u8 *buffer, *alignbuffer; | |
29 | + unsigned long absize; | |
30 | + | |
31 | + absize = keylen + alignmask; | |
32 | + buffer = kmalloc(absize, GFP_ATOMIC); | |
33 | + if (!buffer) | |
34 | + return -ENOMEM; | |
35 | + | |
36 | + alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); | |
37 | + memcpy(alignbuffer, key, keylen); | |
38 | + ret = cia->cia_setkey(tfm, alignbuffer, keylen); | |
39 | + memset(alignbuffer, 0, absize); | |
40 | + kfree(buffer); | |
41 | + return ret; | |
42 | + | |
43 | +} | |
44 | + | |
23 | 45 | static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) |
24 | 46 | { |
25 | 47 | struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher; |
26 | - | |
48 | + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); | |
49 | + | |
27 | 50 | tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK; |
28 | 51 | if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) { |
29 | 52 | tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; |
30 | 53 | return -EINVAL; |
31 | - } else | |
32 | - return cia->cia_setkey(tfm, key, keylen); | |
54 | + } | |
55 | + | |
56 | + if ((unsigned long)key & alignmask) | |
57 | + return setkey_unaligned(tfm, key, keylen); | |
58 | + | |
59 | + return cia->cia_setkey(tfm, key, keylen); | |
33 | 60 | } |
34 | 61 | |
35 | 62 | static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *, |
crypto/hash.c
... | ... | @@ -22,6 +22,42 @@ |
22 | 22 | return alg->cra_ctxsize; |
23 | 23 | } |
24 | 24 | |
25 | +static int hash_setkey_unaligned(struct crypto_hash *crt, const u8 *key, | |
26 | + unsigned int keylen) | |
27 | +{ | |
28 | + struct crypto_tfm *tfm = crypto_hash_tfm(crt); | |
29 | + struct hash_alg *alg = &tfm->__crt_alg->cra_hash; | |
30 | + unsigned long alignmask = crypto_hash_alignmask(crt); | |
31 | + int ret; | |
32 | + u8 *buffer, *alignbuffer; | |
33 | + unsigned long absize; | |
34 | + | |
35 | + absize = keylen + alignmask; | |
36 | + buffer = kmalloc(absize, GFP_ATOMIC); | |
37 | + if (!buffer) | |
38 | + return -ENOMEM; | |
39 | + | |
40 | + alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); | |
41 | + memcpy(alignbuffer, key, keylen); | |
42 | + ret = alg->setkey(crt, alignbuffer, keylen); | |
43 | + memset(alignbuffer, 0, absize); | |
44 | + kfree(buffer); | |
45 | + return ret; | |
46 | +} | |
47 | + | |
48 | +static int hash_setkey(struct crypto_hash *crt, const u8 *key, | |
49 | + unsigned int keylen) | |
50 | +{ | |
51 | + struct crypto_tfm *tfm = crypto_hash_tfm(crt); | |
52 | + struct hash_alg *alg = &tfm->__crt_alg->cra_hash; | |
53 | + unsigned long alignmask = crypto_hash_alignmask(crt); | |
54 | + | |
55 | + if ((unsigned long)key & alignmask) | |
56 | + return hash_setkey_unaligned(crt, key, keylen); | |
57 | + | |
58 | + return alg->setkey(crt, key, keylen); | |
59 | +} | |
60 | + | |
25 | 61 | static int crypto_init_hash_ops(struct crypto_tfm *tfm, u32 type, u32 mask) |
26 | 62 | { |
27 | 63 | struct hash_tfm *crt = &tfm->crt_hash; |
... | ... | @@ -34,7 +70,7 @@ |
34 | 70 | crt->update = alg->update; |
35 | 71 | crt->final = alg->final; |
36 | 72 | crt->digest = alg->digest; |
37 | - crt->setkey = alg->setkey; | |
73 | + crt->setkey = hash_setkey; | |
38 | 74 | crt->digestsize = alg->digestsize; |
39 | 75 | |
40 | 76 | return 0; |