Commit 5f7082ed4f482f05db01d84dbf58190492ebf0ad

Authored by Herbert Xu
1 parent 67cd080c50

crypto: hash - Export shash through hash

This patch allows shash algorithms to be used through the old hash
interface.  This is a transitional measure so we can convert the
underlying algorithms to shash before converting the users across.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
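For orientation, here is a minimal sketch (not part of the patch) of a legacy user of the old hash interface; the function name and the choice of "sha1" are illustrative only. After this patch, the same call sequence can be served by an shash implementation once the underlying algorithm has been converted, with no change on the caller's side.

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int legacy_hash_example(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_hash *tfm;
	struct hash_desc desc;
	struct scatterlist sg;
	int err;

	/* Old-style allocation; masking out ASYNC keeps the tfm synchronous. */
	tfm = crypto_alloc_hash("sha1", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	desc.tfm = tfm;
	desc.flags = 0;

	/* One-shot digest over a single scatterlist entry. */
	sg_init_one(&sg, data, len);
	err = crypto_hash_digest(&desc, &sg, len, out);

	crypto_free_hash(tfm);
	return err;
}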

Showing 7 changed files with 144 additions and 6 deletions

crypto/ahash.c
... ... @@ -112,6 +112,22 @@
112 112 }
113 113 EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
114 114  
  115 +int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
  116 +				  struct crypto_hash_walk *walk,
  117 +				  struct scatterlist *sg, unsigned int len)
  118 +{
  119 +	walk->total = len;
  120 +
  121 +	if (!walk->total)
  122 +		return 0;
  123 +
  124 +	walk->alignmask = crypto_hash_alignmask(hdesc->tfm);
  125 +	walk->sg = sg;
  126 +	walk->flags = hdesc->flags;
  127 +
  128 +	return hash_walk_new_entry(walk);
  129 +}
  130 +
115 131 static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
116 132 				  unsigned int keylen)
117 133 {
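The new compat entry point is consumed exactly like crypto_hash_walk_first(), just starting from a hash_desc rather than an ahash_request. A hedged sketch of the usual walk loop follows (the same pattern appears in shash_compat_update() further down in this patch; hash_chunks() and process_chunk() are hypothetical names, not part of the patch):

static int hash_chunks(struct hash_desc *hdesc, struct scatterlist *sg,
		       unsigned int len)
{
	struct crypto_hash_walk walk;
	int nbytes;

	/* Walk the scatterlist one mapped chunk at a time. */
	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = process_chunk(walk.data, nbytes);	/* hypothetical helper */

	return nbytes;
}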
crypto/authenc.c
... ... @@ -11,6 +11,7 @@
11 11 */
12 12  
13 13 #include <crypto/aead.h>
  14 +#include <crypto/internal/hash.h>
14 15 #include <crypto/internal/skcipher.h>
15 16 #include <crypto/authenc.h>
16 17 #include <crypto/scatterwalk.h>
... ... @@ -431,6 +432,8 @@
431 432 	inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
432 433 	inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ?
433 434 					 auth->cra_hash.digestsize :
  435 +					 auth->cra_type ?
  436 +					 __crypto_shash_alg(auth)->digestsize :
434 437 					 auth->cra_digest.dia_digestsize;
435 438 
436 439 	inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx);
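The widened conditional now distinguishes three kinds of digest provider. Spelled out as a hypothetical helper (an illustration, not code from the patch; the same selection is repeated in crypto/hmac.c below):

static unsigned int authenc_digestsize(struct crypto_alg *auth)
{
	if (auth->cra_type == &crypto_hash_type)	/* old crypto_hash algorithm */
		return auth->cra_hash.digestsize;
	if (auth->cra_type)				/* any other cra_type here is an shash */
		return __crypto_shash_alg(auth)->digestsize;
	return auth->cra_digest.dia_digestsize;		/* legacy digest, cra_type == NULL */
}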
crypto/hmac.c
... ... @@ -16,7 +16,7 @@
16 16 *
17 17 */
18 18  
19   -#include <crypto/algapi.h>
  19 +#include <crypto/internal/hash.h>
20 20 #include <crypto/scatterwalk.h>
21 21 #include <linux/err.h>
22 22 #include <linux/init.h>
... ... @@ -238,9 +238,11 @@
238 238 		return ERR_CAST(alg);
239 239 
240 240 	inst = ERR_PTR(-EINVAL);
241   -	ds = (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
242   -	     CRYPTO_ALG_TYPE_HASH ? alg->cra_hash.digestsize :
243   -	     alg->cra_digest.dia_digestsize;
  241 +	ds = alg->cra_type == &crypto_hash_type ?
  242 +	     alg->cra_hash.digestsize :
  243 +	     alg->cra_type ?
  244 +	     __crypto_shash_alg(alg)->digestsize :
  245 +	     alg->cra_digest.dia_digestsize;
244 246 	if (ds > alg->cra_blocksize)
245 247 		goto out_put_alg;
246 248 
crypto/shash.c
... ... @@ -301,9 +301,114 @@
301 301 	return 0;
302 302 }
303 303 
  304 +static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
  305 +			       unsigned int keylen)
  306 +{
  307 +	struct shash_desc *desc = crypto_hash_ctx(tfm);
  308 +
  309 +	return crypto_shash_setkey(desc->tfm, key, keylen);
  310 +}
  311 +
  312 +static int shash_compat_init(struct hash_desc *hdesc)
  313 +{
  314 +	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
  315 +
  316 +	desc->flags = hdesc->flags;
  317 +
  318 +	return crypto_shash_init(desc);
  319 +}
  320 +
  321 +static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
  322 +			       unsigned int len)
  323 +{
  324 +	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
  325 +	struct crypto_hash_walk walk;
  326 +	int nbytes;
  327 +
  328 +	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
  329 +	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
  330 +		nbytes = crypto_shash_update(desc, walk.data, nbytes);
  331 +
  332 +	return nbytes;
  333 +}
  334 +
  335 +static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
  336 +{
  337 +	return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
  338 +}
  339 +
  340 +static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
  341 +			       unsigned int nbytes, u8 *out)
  342 +{
  343 +	unsigned int offset = sg->offset;
  344 +	int err;
  345 +
  346 +	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
  347 +		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
  348 +		void *data;
  349 +
  350 +		desc->flags = hdesc->flags;
  351 +
  352 +		data = crypto_kmap(sg_page(sg), 0);
  353 +		err = crypto_shash_digest(desc, data + offset, nbytes, out);
  354 +		crypto_kunmap(data, 0);
  355 +		crypto_yield(desc->flags);
  356 +		goto out;
  357 +	}
  358 +
  359 +	err = shash_compat_init(hdesc);
  360 +	if (err)
  361 +		goto out;
  362 +
  363 +	err = shash_compat_update(hdesc, sg, nbytes);
  364 +	if (err)
  365 +		goto out;
  366 +
  367 +	err = shash_compat_final(hdesc, out);
  368 +
  369 +out:
  370 +	return err;
  371 +}
  372 +
  373 +static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
  374 +{
  375 +	struct shash_desc *desc = crypto_tfm_ctx(tfm);
  376 +
  377 +	crypto_free_shash(desc->tfm);
  378 +}
  379 +
  380 +static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
  381 +{
  382 +	struct hash_tfm *crt = &tfm->crt_hash;
  383 +	struct crypto_alg *calg = tfm->__crt_alg;
  384 +	struct shash_alg *alg = __crypto_shash_alg(calg);
  385 +	struct shash_desc *desc = crypto_tfm_ctx(tfm);
  386 +	struct crypto_shash *shash;
  387 +
  388 +	shash = __crypto_shash_cast(crypto_create_tfm(
  389 +		calg, &crypto_shash_type));
  390 +	if (IS_ERR(shash))
  391 +		return PTR_ERR(shash);
  392 +
  393 +	desc->tfm = shash;
  394 +	tfm->exit = crypto_exit_shash_ops_compat;
  395 +
  396 +	crt->init = shash_compat_init;
  397 +	crt->update = shash_compat_update;
  398 +	crt->final = shash_compat_final;
  399 +	crt->digest = shash_compat_digest;
  400 +	crt->setkey = shash_compat_setkey;
  401 +
  402 +	crt->digestsize = alg->digestsize;
  403 +
  404 +	return 0;
  405 +}
  406 +
304 407 static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
305 408 {
306 409 	switch (mask & CRYPTO_ALG_TYPE_MASK) {
  410 +	case CRYPTO_ALG_TYPE_HASH_MASK:
  411 +		return crypto_init_shash_ops_compat(tfm);
307 412 	case CRYPTO_ALG_TYPE_AHASH_MASK:
308 413 		return crypto_init_shash_ops_async(tfm);
309 414 	}
310 415  
... ... @@ -314,7 +419,11 @@
314 419 static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
315 420 					 u32 mask)
316 421 {
  422 +	struct shash_alg *salg = __crypto_shash_alg(alg);
  423 +
317 424 	switch (mask & CRYPTO_ALG_TYPE_MASK) {
  425 +	case CRYPTO_ALG_TYPE_HASH_MASK:
  426 +		return sizeof(struct shash_desc) + salg->descsize;
318 427 	case CRYPTO_ALG_TYPE_AHASH_MASK:
319 428 		return sizeof(struct crypto_shash *);
320 429 }
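Taken together, the two hunks above give the compat crypto_hash a tfm context that is just a shash_desc followed by descsize bytes of algorithm state, with desc->tfm pointing at the crypto_shash created in crypto_init_shash_ops_compat(). Conceptually (an illustrative diagram, not code from the patch):

/*
 * Compat tfm context, size = sizeof(struct shash_desc) + descsize:
 *
 *   +---------------------------+--------------------------------+
 *   | struct shash_desc         | descsize bytes of shash state  |
 *   | (.tfm -> crypto_shash)    | (reached via desc->__ctx)      |
 *   +---------------------------+--------------------------------+
 */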
include/crypto/algapi.h
... ... @@ -248,6 +248,11 @@
248 248 	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
249 249 }
250 250 
  251 +static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
  252 +{
  253 +	return crypto_tfm_ctx(&tfm->base);
  254 +}
  255 +
251 256 static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
252 257 {
253 258 	return crypto_tfm_ctx_aligned(&tfm->base);
include/crypto/internal/hash.h
... ... @@ -39,6 +39,9 @@
39 39 int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err);
40 40 int crypto_hash_walk_first(struct ahash_request *req,
41 41 			   struct crypto_hash_walk *walk);
  42 +int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
  43 +				  struct crypto_hash_walk *walk,
  44 +				  struct scatterlist *sg, unsigned int len);
42 45  
43 46 int crypto_register_shash(struct shash_alg *alg);
44 47 int crypto_unregister_shash(struct shash_alg *alg);
include/linux/crypto.h
... ... @@ -36,9 +36,9 @@
36 36 #define CRYPTO_ALG_TYPE_ABLKCIPHER 0x00000005
37 37 #define CRYPTO_ALG_TYPE_GIVCIPHER 0x00000006
38 38 #define CRYPTO_ALG_TYPE_DIGEST 0x00000008
39   -#define CRYPTO_ALG_TYPE_HASH 0x00000009
  39 +#define CRYPTO_ALG_TYPE_HASH 0x00000008
  40 +#define CRYPTO_ALG_TYPE_SHASH 0x00000009
40 41 #define CRYPTO_ALG_TYPE_AHASH 0x0000000a
41   -#define CRYPTO_ALG_TYPE_SHASH 0x0000000b
42 42 #define CRYPTO_ALG_TYPE_RNG 0x0000000c
43 43  
44 44 #define CRYPTO_ALG_TYPE_HASH_MASK 0x0000000e
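The renumbering is what lets the old allocation path find shash algorithms at all: crypto_alloc_hash() requests type CRYPTO_ALG_TYPE_HASH with mask CRYPTO_ALG_TYPE_HASH_MASK, and the core lookup roughly rejects an algorithm when ((cra_flags ^ type) & mask) is non-zero. Checking the new values (a worked illustration, not part of the patch):

	DIGEST 0x08:  (0x08 ^ 0x08) & 0x0e == 0x00   -> matches, as before
	HASH   0x08:  (0x08 ^ 0x08) & 0x0e == 0x00   -> matches, as before
	SHASH  0x09:  (0x09 ^ 0x08) & 0x0e == 0x00   -> now matches too
	AHASH  0x0a:  (0x0a ^ 0x08) & 0x0e == 0x02   -> still excluded

With the old value of CRYPTO_ALG_TYPE_SHASH (0x0b), the shash check would have been (0x0b ^ 0x09) & 0x0e == 0x02, leaving shash algorithms invisible to crypto_hash users.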