crypto/shash.c
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;
	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
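
/*
 * Illustrative usage sketch, not part of the original file: a caller
 * computing a one-shot digest through the synchronous hash API exported
 * above.  The algorithm name "sha256", the out[] size and the data/len
 * variables are assumptions for the example; the descriptor must carry
 * crypto_shash_descsize() bytes of request state after struct shash_desc.
 *
 *	struct crypto_shash *tfm;
 *	struct shash_desc *desc;
 *	u8 out[32];
 *	int err;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *		       GFP_KERNEL);
 *	if (!desc) {
 *		crypto_free_shash(tfm);
 *		return -ENOMEM;
 *	}
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *
 *	err = crypto_shash_digest(desc, data, len, out);
 *
 *	kzfree(desc);
 *	crypto_free_shash(tfm);
 *	return err;
 */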

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		void *data;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;

	if (alg->setkey)
		crt->setkey = shash_async_setkey;
	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
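
/*
 * Illustrative usage sketch, not part of the original file: because
 * crypto_init_shash_ops_async() above wraps every shash algorithm in the
 * ahash interface, the same implementation can also be driven through the
 * asynchronous API.  The name "sha256" and the buf/len/result variables
 * are assumptions for the example.
 *
 *	struct crypto_ahash *tfm;
 *	struct ahash_request *req;
 *	struct scatterlist sg;
 *	int err;
 *
 *	tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		crypto_free_ahash(tfm);
 *		return -ENOMEM;
 *	}
 *
 *	sg_init_one(&sg, buf, len);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);
 *	ahash_request_set_crypt(req, &sg, result, len);
 *
 *	err = crypto_ahash_digest(req);
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 *	return err;
 */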

static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc **descp = crypto_hash_ctx(tfm);
	struct shash_desc *desc = *descp;

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

	return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
		struct shash_desc *desc = *descp;
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct shash_desc *desc = *descp;

	crypto_free_shash(desc->tfm);
	kzfree(desc);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;
	struct shash_desc *desc;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
		       GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(shash);
		return -ENOMEM;
	}

	*descp = desc;
	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}
2ca33da1d
|
495 |
static unsigned int crypto_shash_extsize(struct crypto_alg *alg) |
7b5a080b3
|
496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 |
{ return alg->cra_ctxsize; } static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg) __attribute__ ((unused)); static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg) { struct shash_alg *salg = __crypto_shash_alg(alg); seq_printf(m, "type : shash "); seq_printf(m, "blocksize : %u ", alg->cra_blocksize); seq_printf(m, "digestsize : %u ", salg->digestsize); |
7b5a080b3
|
512 513 514 |
} static const struct crypto_type crypto_shash_type = { |
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
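
/*
 * Illustrative registration sketch, not part of the original file: a
 * minimal driver filling in struct shash_alg and registering it with
 * crypto_register_shash().  The "foo" names and sizes are assumptions;
 * shash_prepare_alg() above supplies defaults for finup, digest,
 * export/import and setkey when those callbacks are left NULL.
 *
 *	static struct shash_alg foo_alg = {
 *		.digestsize	= FOO_DIGEST_SIZE,
 *		.descsize	= sizeof(struct foo_desc_ctx),
 *		.init		= foo_init,
 *		.update		= foo_update,
 *		.final		= foo_final,
 *		.base		= {
 *			.cra_name	 = "foo",
 *			.cra_driver_name = "foo-generic",
 *			.cra_blocksize	 = FOO_BLOCK_SIZE,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	static int __init foo_mod_init(void)
 *	{
 *		return crypto_register_shash(&foo_alg);
 *	}
 *
 *	static void __exit foo_mod_exit(void)
 *	{
 *		crypto_unregister_shash(&foo_alg);
 *	}
 */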

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");