Blame view
crypto/cryptd.c
28.6 KB
2874c5fd2 treewide: Replace... |
1 |
// SPDX-License-Identifier: GPL-2.0-or-later |
124b53d02 [CRYPTO] cryptd: ... |
2 3 4 5 6 |
/* * Software async crypto daemon. * * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> * |
298c926c6 crypto: cryptd - ... |
7 8 9 10 11 12 |
* Added AEAD support to cryptd. * Authors: Tadeusz Struk (tadeusz.struk@intel.com) * Adrian Hoban <adrian.hoban@intel.com> * Gabriele Paoloni <gabriele.paoloni@intel.com> * Aidan O'Mahony (aidan.o.mahony@intel.com) * Copyright (c) 2010, Intel Corporation. |
124b53d02 [CRYPTO] cryptd: ... |
13 |
*/ |
18e33e6d5 crypto: hash - Mo... |
14 |
#include <crypto/internal/hash.h> |
298c926c6 crypto: cryptd - ... |
15 |
#include <crypto/internal/aead.h> |
4e0958d19 crypto: cryptd - ... |
16 |
#include <crypto/internal/skcipher.h> |
1cac2cbc7 crypto: cryptd - ... |
17 |
#include <crypto/cryptd.h> |
43b970fa8 crypto: cryptd - ... |
18 |
#include <linux/refcount.h> |
124b53d02 [CRYPTO] cryptd: ... |
19 20 21 |
#include <linux/err.h> #include <linux/init.h> #include <linux/kernel.h> |
124b53d02 [CRYPTO] cryptd: ... |
22 23 |
#include <linux/list.h> #include <linux/module.h> |
124b53d02 [CRYPTO] cryptd: ... |
24 25 26 |
#include <linux/scatterlist.h> #include <linux/sched.h> #include <linux/slab.h> |
3e56e1686 crypto: cryptd - ... |
27 |
#include <linux/workqueue.h> |
124b53d02 [CRYPTO] cryptd: ... |
28 |
|
eaf356e4b crypto: cryptd - ... |
29 |
static unsigned int cryptd_max_cpu_qlen = 1000; |
c3a536056 crypto: cryptd - ... |
30 31 |
module_param(cryptd_max_cpu_qlen, uint, 0); MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth"); |
124b53d02 [CRYPTO] cryptd: ... |
32 |
|
3e56e1686 crypto: cryptd - ... |
33 |
static struct workqueue_struct *cryptd_wq; |
254eff771 crypto: cryptd - ... |
34 |
struct cryptd_cpu_queue { |
124b53d02 [CRYPTO] cryptd: ... |
35 |
struct crypto_queue queue; |
254eff771 crypto: cryptd - ... |
36 37 38 39 |
struct work_struct work; }; struct cryptd_queue { |
a29d8b8e2 percpu: add __per... |
40 |
struct cryptd_cpu_queue __percpu *cpu_queue; |
124b53d02 [CRYPTO] cryptd: ... |
41 42 43 44 |
}; struct cryptd_instance_ctx { struct crypto_spawn spawn; |
254eff771 crypto: cryptd - ... |
45 |
struct cryptd_queue *queue; |
124b53d02 [CRYPTO] cryptd: ... |
46 |
}; |
4e0958d19 crypto: cryptd - ... |
47 48 49 50 |
struct skcipherd_instance_ctx { struct crypto_skcipher_spawn spawn; struct cryptd_queue *queue; }; |
46309d893 crypto: cryptd - ... |
51 52 53 54 |
struct hashd_instance_ctx { struct crypto_shash_spawn spawn; struct cryptd_queue *queue; }; |
298c926c6 crypto: cryptd - ... |
55 56 57 58 |
struct aead_instance_ctx { struct crypto_aead_spawn aead_spawn; struct cryptd_queue *queue; }; |
4e0958d19 crypto: cryptd - ... |
59 |
struct cryptd_skcipher_ctx { |
43b970fa8 crypto: cryptd - ... |
60 |
refcount_t refcnt; |
36b3875a9 crypto: cryptd - ... |
61 |
struct crypto_sync_skcipher *child; |
4e0958d19 crypto: cryptd - ... |
62 63 64 65 66 |
}; struct cryptd_skcipher_request_ctx { crypto_completion_t complete; }; |
b8a28251c [CRYPTO] cryptd: ... |
67 |
struct cryptd_hash_ctx { |
43b970fa8 crypto: cryptd - ... |
68 |
refcount_t refcnt; |
46309d893 crypto: cryptd - ... |
69 |
struct crypto_shash *child; |
b8a28251c [CRYPTO] cryptd: ... |
70 71 72 73 |
}; struct cryptd_hash_request_ctx { crypto_completion_t complete; |
46309d893 crypto: cryptd - ... |
74 |
struct shash_desc desc; |
b8a28251c [CRYPTO] cryptd: ... |
75 |
}; |
124b53d02 [CRYPTO] cryptd: ... |
76 |
|
298c926c6 crypto: cryptd - ... |
77 |
struct cryptd_aead_ctx { |
43b970fa8 crypto: cryptd - ... |
78 |
refcount_t refcnt; |
298c926c6 crypto: cryptd - ... |
79 80 81 82 83 84 |
struct crypto_aead *child; }; struct cryptd_aead_request_ctx { crypto_completion_t complete; }; |
254eff771 crypto: cryptd - ... |
85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 |
static void cryptd_queue_worker(struct work_struct *work); static int cryptd_init_queue(struct cryptd_queue *queue, unsigned int max_cpu_qlen) { int cpu; struct cryptd_cpu_queue *cpu_queue; queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue); if (!queue->cpu_queue) return -ENOMEM; for_each_possible_cpu(cpu) { cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu); crypto_init_queue(&cpu_queue->queue, max_cpu_qlen); INIT_WORK(&cpu_queue->work, cryptd_queue_worker); } |
c3a536056 crypto: cryptd - ... |
101 102 |
pr_info("cryptd: max_cpu_qlen set to %d ", max_cpu_qlen); |
254eff771 crypto: cryptd - ... |
103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 |
return 0; } static void cryptd_fini_queue(struct cryptd_queue *queue) { int cpu; struct cryptd_cpu_queue *cpu_queue; for_each_possible_cpu(cpu) { cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu); BUG_ON(cpu_queue->queue.qlen); } free_percpu(queue->cpu_queue); } static int cryptd_enqueue_request(struct cryptd_queue *queue, struct crypto_async_request *request) { int cpu, err; struct cryptd_cpu_queue *cpu_queue; |
43b970fa8 crypto: cryptd - ... |
123 |
refcount_t *refcnt; |
254eff771 crypto: cryptd - ... |
124 125 |
cpu = get_cpu(); |
0b44f4861 this_cpu: Use thi... |
126 |
cpu_queue = this_cpu_ptr(queue->cpu_queue); |
254eff771 crypto: cryptd - ... |
127 |
err = crypto_enqueue_request(&cpu_queue->queue, request); |
81760ea6a crypto: cryptd - ... |
128 129 |
refcnt = crypto_tfm_ctx(request->tfm); |
81760ea6a crypto: cryptd - ... |
130 |
|
6b80ea389 crypto: change tr... |
131 |
if (err == -ENOSPC) |
81760ea6a crypto: cryptd - ... |
132 |
goto out_put_cpu; |
3e56e1686 crypto: cryptd - ... |
133 |
queue_work_on(cpu, cryptd_wq, &cpu_queue->work); |
81760ea6a crypto: cryptd - ... |
134 |
|
43b970fa8 crypto: cryptd - ... |
135 |
if (!refcount_read(refcnt)) |
81760ea6a crypto: cryptd - ... |
136 |
goto out_put_cpu; |
43b970fa8 crypto: cryptd - ... |
137 |
refcount_inc(refcnt); |
81760ea6a crypto: cryptd - ... |
138 139 |
out_put_cpu: |
254eff771 crypto: cryptd - ... |
140 141 142 143 144 145 146 147 148 149 150 151 152 153 |
put_cpu(); return err; } /* Called in workqueue context, do one real cryption work (via * req->complete) and reschedule itself if there are more work to * do. */ static void cryptd_queue_worker(struct work_struct *work) { struct cryptd_cpu_queue *cpu_queue; struct crypto_async_request *req, *backlog; cpu_queue = container_of(work, struct cryptd_cpu_queue, work); |
9efade1b3 crypto: cryptd - ... |
154 155 156 157 158 159 160 |
/* * Only handle one request at a time to avoid hogging crypto workqueue. * preempt_disable/enable is used to prevent being preempted by * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent * cryptd_enqueue_request() being accessed from software interrupts. */ local_bh_disable(); |
254eff771 crypto: cryptd - ... |
161 162 163 164 |
preempt_disable(); backlog = crypto_get_backlog(&cpu_queue->queue); req = crypto_dequeue_request(&cpu_queue->queue); preempt_enable(); |
9efade1b3 crypto: cryptd - ... |
165 |
local_bh_enable(); |
254eff771 crypto: cryptd - ... |
166 167 168 169 170 171 172 173 174 |
if (!req) return; if (backlog) backlog->complete(backlog, -EINPROGRESS); req->complete(req, 0); if (cpu_queue->queue.qlen) |
3e56e1686 crypto: cryptd - ... |
175 |
queue_work(cryptd_wq, &cpu_queue->work); |
254eff771 crypto: cryptd - ... |
176 177 178 |
} static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm) |
124b53d02 [CRYPTO] cryptd: ... |
179 180 181 |
{ struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst); |
254eff771 crypto: cryptd - ... |
182 |
return ictx->queue; |
124b53d02 [CRYPTO] cryptd: ... |
183 |
} |
7bcb2c99f crypto: algapi - ... |
184 185 |
static void cryptd_type_and_mask(struct crypto_attr_type *algt, u32 *type, u32 *mask) |
466a7b9e3 crypto: cryptd - ... |
186 |
{ |
7bcb2c99f crypto: algapi - ... |
187 188 189 190 191 192 |
/* * cryptd is allowed to wrap internal algorithms, but in that case the * resulting cryptd instance will be marked as internal as well. */ *type = algt->type & CRYPTO_ALG_INTERNAL; *mask = algt->mask & CRYPTO_ALG_INTERNAL; |
466a7b9e3 crypto: cryptd - ... |
193 |
|
7bcb2c99f crypto: algapi - ... |
194 195 |
/* No point in cryptd wrapping an algorithm that's already async. */ *mask |= CRYPTO_ALG_ASYNC; |
f6da32059 crypto: cryptd - ... |
196 |
|
7bcb2c99f crypto: algapi - ... |
197 |
*mask |= crypto_algt_inherited_mask(algt); |
466a7b9e3 crypto: cryptd - ... |
198 |
} |
9b8c456e0 crypto: cryptd - ... |
199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 |
static int cryptd_init_instance(struct crypto_instance *inst, struct crypto_alg *alg) { if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) return -ENAMETOOLONG; memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); inst->alg.cra_priority = alg->cra_priority + 50; inst->alg.cra_blocksize = alg->cra_blocksize; inst->alg.cra_alignmask = alg->cra_alignmask; return 0; } |
4e0958d19 crypto: cryptd - ... |
215 216 217 218 |
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent, const u8 *key, unsigned int keylen) { struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent); |
36b3875a9 crypto: cryptd - ... |
219 |
struct crypto_sync_skcipher *child = ctx->child; |
4e0958d19 crypto: cryptd - ... |
220 |
|
36b3875a9 crypto: cryptd - ... |
221 222 223 |
crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); crypto_sync_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) & |
4e0958d19 crypto: cryptd - ... |
224 |
CRYPTO_TFM_REQ_MASK); |
af5034e8e crypto: remove pr... |
225 |
return crypto_sync_skcipher_setkey(child, key, keylen); |
4e0958d19 crypto: cryptd - ... |
226 227 228 229 230 231 232 |
} static void cryptd_skcipher_complete(struct skcipher_request *req, int err) { struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm); struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req); |
43b970fa8 crypto: cryptd - ... |
233 |
int refcnt = refcount_read(&ctx->refcnt); |
4e0958d19 crypto: cryptd - ... |
234 235 236 237 |
local_bh_disable(); rctx->complete(&req->base, err); local_bh_enable(); |
43b970fa8 crypto: cryptd - ... |
238 |
if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt)) |
4e0958d19 crypto: cryptd - ... |
239 240 241 242 243 244 245 246 247 248 |
crypto_free_skcipher(tfm); } static void cryptd_skcipher_encrypt(struct crypto_async_request *base, int err) { struct skcipher_request *req = skcipher_request_cast(base); struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req); struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm); |
36b3875a9 crypto: cryptd - ... |
249 250 |
struct crypto_sync_skcipher *child = ctx->child; SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child); |
4e0958d19 crypto: cryptd - ... |
251 252 253 |
if (unlikely(err == -EINPROGRESS)) goto out; |
36b3875a9 crypto: cryptd - ... |
254 |
skcipher_request_set_sync_tfm(subreq, child); |
4e0958d19 crypto: cryptd - ... |
255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 |
skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL); skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, req->iv); err = crypto_skcipher_encrypt(subreq); skcipher_request_zero(subreq); req->base.complete = rctx->complete; out: cryptd_skcipher_complete(req, err); } static void cryptd_skcipher_decrypt(struct crypto_async_request *base, int err) { struct skcipher_request *req = skcipher_request_cast(base); struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req); struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm); |
36b3875a9 crypto: cryptd - ... |
276 277 |
struct crypto_sync_skcipher *child = ctx->child; SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child); |
4e0958d19 crypto: cryptd - ... |
278 279 280 |
if (unlikely(err == -EINPROGRESS)) goto out; |
36b3875a9 crypto: cryptd - ... |
281 |
skcipher_request_set_sync_tfm(subreq, child); |
4e0958d19 crypto: cryptd - ... |
282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 |
skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL); skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, req->iv); err = crypto_skcipher_decrypt(subreq); skcipher_request_zero(subreq); req->base.complete = rctx->complete; out: cryptd_skcipher_complete(req, err); } static int cryptd_skcipher_enqueue(struct skcipher_request *req, crypto_completion_t compl) { struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req); struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); struct cryptd_queue *queue; queue = cryptd_get_queue(crypto_skcipher_tfm(tfm)); rctx->complete = req->base.complete; req->base.complete = compl; return cryptd_enqueue_request(queue, &req->base); } static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req) { return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt); } static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req) { return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt); } static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm) { struct skcipher_instance *inst = skcipher_alg_instance(tfm); struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst); struct crypto_skcipher_spawn *spawn = &ictx->spawn; struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm); struct crypto_skcipher *cipher; cipher = crypto_spawn_skcipher(spawn); if (IS_ERR(cipher)) return PTR_ERR(cipher); |
36b3875a9 crypto: cryptd - ... |
331 |
ctx->child = (struct crypto_sync_skcipher *)cipher; |
4e0958d19 crypto: cryptd - ... |
332 333 334 335 336 337 338 339 |
crypto_skcipher_set_reqsize( tfm, sizeof(struct cryptd_skcipher_request_ctx)); return 0; } static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm) { struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm); |
36b3875a9 crypto: cryptd - ... |
340 |
crypto_free_sync_skcipher(ctx->child); |
4e0958d19 crypto: cryptd - ... |
341 342 343 344 345 346 347 |
} static void cryptd_skcipher_free(struct skcipher_instance *inst) { struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst); crypto_drop_skcipher(&ctx->spawn); |
1a0fad630 crypto: cryptd - ... |
348 |
kfree(inst); |
4e0958d19 crypto: cryptd - ... |
349 350 351 352 |
} static int cryptd_create_skcipher(struct crypto_template *tmpl, struct rtattr **tb, |
7bcb2c99f crypto: algapi - ... |
353 |
struct crypto_attr_type *algt, |
4e0958d19 crypto: cryptd - ... |
354 355 356 357 358 |
struct cryptd_queue *queue) { struct skcipherd_instance_ctx *ctx; struct skcipher_instance *inst; struct skcipher_alg *alg; |
4e0958d19 crypto: cryptd - ... |
359 360 361 |
u32 type; u32 mask; int err; |
7bcb2c99f crypto: algapi - ... |
362 |
cryptd_type_and_mask(algt, &type, &mask); |
4e0958d19 crypto: cryptd - ... |
363 |
|
4e0958d19 crypto: cryptd - ... |
364 365 366 367 368 369 |
inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); if (!inst) return -ENOMEM; ctx = skcipher_instance_ctx(inst); ctx->queue = queue; |
b9f76dddb crypto: skcipher ... |
370 |
err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst), |
b8c0d74a7 crypto: cryptd - ... |
371 |
crypto_attr_alg_name(tb[1]), type, mask); |
4e0958d19 crypto: cryptd - ... |
372 |
if (err) |
b8c0d74a7 crypto: cryptd - ... |
373 |
goto err_free_inst; |
4e0958d19 crypto: cryptd - ... |
374 375 376 377 |
alg = crypto_spawn_skcipher_alg(&ctx->spawn); err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base); if (err) |
b8c0d74a7 crypto: cryptd - ... |
378 |
goto err_free_inst; |
4e0958d19 crypto: cryptd - ... |
379 |
|
7bcb2c99f crypto: algapi - ... |
380 381 |
inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC | (alg->base.cra_flags & CRYPTO_ALG_INTERNAL); |
4e0958d19 crypto: cryptd - ... |
382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 |
inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg); inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg); inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg); inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg); inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx); inst->alg.init = cryptd_skcipher_init_tfm; inst->alg.exit = cryptd_skcipher_exit_tfm; inst->alg.setkey = cryptd_skcipher_setkey; inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue; inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue; inst->free = cryptd_skcipher_free; err = skcipher_register_instance(tmpl, inst); if (err) { |
b8c0d74a7 crypto: cryptd - ... |
400 401 |
err_free_inst: cryptd_skcipher_free(inst); |
4e0958d19 crypto: cryptd - ... |
402 403 404 |
} return err; } |
b8a28251c [CRYPTO] cryptd: ... |
405 406 407 |
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm) { struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); |
46309d893 crypto: cryptd - ... |
408 409 |
struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst); struct crypto_shash_spawn *spawn = &ictx->spawn; |
b8a28251c [CRYPTO] cryptd: ... |
410 |
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm); |
46309d893 crypto: cryptd - ... |
411 |
struct crypto_shash *hash; |
b8a28251c [CRYPTO] cryptd: ... |
412 |
|
46309d893 crypto: cryptd - ... |
413 414 415 |
hash = crypto_spawn_shash(spawn); if (IS_ERR(hash)) return PTR_ERR(hash); |
b8a28251c [CRYPTO] cryptd: ... |
416 |
|
46309d893 crypto: cryptd - ... |
417 |
ctx->child = hash; |
0d6669e2b crypto: cryptd - ... |
418 419 420 |
crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm), sizeof(struct cryptd_hash_request_ctx) + crypto_shash_descsize(hash)); |
b8a28251c [CRYPTO] cryptd: ... |
421 422 423 424 425 426 |
return 0; } static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm) { struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm); |
b8a28251c [CRYPTO] cryptd: ... |
427 |
|
46309d893 crypto: cryptd - ... |
428 |
crypto_free_shash(ctx->child); |
b8a28251c [CRYPTO] cryptd: ... |
429 430 431 432 433 434 |
} static int cryptd_hash_setkey(struct crypto_ahash *parent, const u8 *key, unsigned int keylen) { struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent); |
46309d893 crypto: cryptd - ... |
435 |
struct crypto_shash *child = ctx->child; |
b8a28251c [CRYPTO] cryptd: ... |
436 |
|
46309d893 crypto: cryptd - ... |
437 438 439 |
crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK); crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) & CRYPTO_TFM_REQ_MASK); |
af5034e8e crypto: remove pr... |
440 |
return crypto_shash_setkey(child, key, keylen); |
b8a28251c [CRYPTO] cryptd: ... |
441 442 443 |
} static int cryptd_hash_enqueue(struct ahash_request *req, |
3e3dc25fe crypto: Resolve s... |
444 |
crypto_completion_t compl) |
b8a28251c [CRYPTO] cryptd: ... |
445 446 447 |
{ struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); |
254eff771 crypto: cryptd - ... |
448 449 |
struct cryptd_queue *queue = cryptd_get_queue(crypto_ahash_tfm(tfm)); |
b8a28251c [CRYPTO] cryptd: ... |
450 451 |
rctx->complete = req->base.complete; |
3e3dc25fe crypto: Resolve s... |
452 |
req->base.complete = compl; |
b8a28251c [CRYPTO] cryptd: ... |
453 |
|
254eff771 crypto: cryptd - ... |
454 |
return cryptd_enqueue_request(queue, &req->base); |
b8a28251c [CRYPTO] cryptd: ... |
455 |
} |
81760ea6a crypto: cryptd - ... |
456 457 458 459 460 |
static void cryptd_hash_complete(struct ahash_request *req, int err) { struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm); struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); |
43b970fa8 crypto: cryptd - ... |
461 |
int refcnt = refcount_read(&ctx->refcnt); |
81760ea6a crypto: cryptd - ... |
462 463 464 465 |
local_bh_disable(); rctx->complete(&req->base, err); local_bh_enable(); |
43b970fa8 crypto: cryptd - ... |
466 |
if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt)) |
81760ea6a crypto: cryptd - ... |
467 468 |
crypto_free_ahash(tfm); } |
b8a28251c [CRYPTO] cryptd: ... |
469 470 |
static void cryptd_hash_init(struct crypto_async_request *req_async, int err) { |
46309d893 crypto: cryptd - ... |
471 472 473 474 475 |
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); struct crypto_shash *child = ctx->child; struct ahash_request *req = ahash_request_cast(req_async); struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); struct shash_desc *desc = &rctx->desc; |
b8a28251c [CRYPTO] cryptd: ... |
476 477 478 |
if (unlikely(err == -EINPROGRESS)) goto out; |
46309d893 crypto: cryptd - ... |
479 |
desc->tfm = child; |
b8a28251c [CRYPTO] cryptd: ... |
480 |
|
46309d893 crypto: cryptd - ... |
481 |
err = crypto_shash_init(desc); |
b8a28251c [CRYPTO] cryptd: ... |
482 483 484 485 |
req->base.complete = rctx->complete; out: |
81760ea6a crypto: cryptd - ... |
486 |
cryptd_hash_complete(req, err); |
b8a28251c [CRYPTO] cryptd: ... |
487 488 489 490 491 492 493 494 495 |
} static int cryptd_hash_init_enqueue(struct ahash_request *req) { return cryptd_hash_enqueue(req, cryptd_hash_init); } static void cryptd_hash_update(struct crypto_async_request *req_async, int err) { |
46309d893 crypto: cryptd - ... |
496 |
struct ahash_request *req = ahash_request_cast(req_async); |
b8a28251c [CRYPTO] cryptd: ... |
497 |
struct cryptd_hash_request_ctx *rctx; |
b8a28251c [CRYPTO] cryptd: ... |
498 499 500 501 502 |
rctx = ahash_request_ctx(req); if (unlikely(err == -EINPROGRESS)) goto out; |
46309d893 crypto: cryptd - ... |
503 |
err = shash_ahash_update(req, &rctx->desc); |
b8a28251c [CRYPTO] cryptd: ... |
504 505 506 507 |
req->base.complete = rctx->complete; out: |
81760ea6a crypto: cryptd - ... |
508 |
cryptd_hash_complete(req, err); |
b8a28251c [CRYPTO] cryptd: ... |
509 510 511 512 513 514 515 516 517 |
} static int cryptd_hash_update_enqueue(struct ahash_request *req) { return cryptd_hash_enqueue(req, cryptd_hash_update); } static void cryptd_hash_final(struct crypto_async_request *req_async, int err) { |
46309d893 crypto: cryptd - ... |
518 519 |
struct ahash_request *req = ahash_request_cast(req_async); struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); |
b8a28251c [CRYPTO] cryptd: ... |
520 521 522 |
if (unlikely(err == -EINPROGRESS)) goto out; |
46309d893 crypto: cryptd - ... |
523 |
err = crypto_shash_final(&rctx->desc, req->result); |
b8a28251c [CRYPTO] cryptd: ... |
524 525 526 527 |
req->base.complete = rctx->complete; out: |
81760ea6a crypto: cryptd - ... |
528 |
cryptd_hash_complete(req, err); |
b8a28251c [CRYPTO] cryptd: ... |
529 530 531 532 533 534 |
} static int cryptd_hash_final_enqueue(struct ahash_request *req) { return cryptd_hash_enqueue(req, cryptd_hash_final); } |
6fba00d17 crypto: cryptd - ... |
535 536 537 538 539 540 541 542 543 544 545 546 547 |
static void cryptd_hash_finup(struct crypto_async_request *req_async, int err) { struct ahash_request *req = ahash_request_cast(req_async); struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); if (unlikely(err == -EINPROGRESS)) goto out; err = shash_ahash_finup(req, &rctx->desc); req->base.complete = rctx->complete; out: |
81760ea6a crypto: cryptd - ... |
548 |
cryptd_hash_complete(req, err); |
6fba00d17 crypto: cryptd - ... |
549 550 551 552 553 554 |
} static int cryptd_hash_finup_enqueue(struct ahash_request *req) { return cryptd_hash_enqueue(req, cryptd_hash_finup); } |
b8a28251c [CRYPTO] cryptd: ... |
555 556 |
static void cryptd_hash_digest(struct crypto_async_request *req_async, int err) { |
46309d893 crypto: cryptd - ... |
557 558 559 560 561 |
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); struct crypto_shash *child = ctx->child; struct ahash_request *req = ahash_request_cast(req_async); struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); struct shash_desc *desc = &rctx->desc; |
b8a28251c [CRYPTO] cryptd: ... |
562 563 564 |
if (unlikely(err == -EINPROGRESS)) goto out; |
46309d893 crypto: cryptd - ... |
565 |
desc->tfm = child; |
b8a28251c [CRYPTO] cryptd: ... |
566 |
|
46309d893 crypto: cryptd - ... |
567 |
err = shash_ahash_digest(req, desc); |
b8a28251c [CRYPTO] cryptd: ... |
568 569 570 571 |
req->base.complete = rctx->complete; out: |
81760ea6a crypto: cryptd - ... |
572 |
cryptd_hash_complete(req, err); |
b8a28251c [CRYPTO] cryptd: ... |
573 574 575 576 577 578 |
} static int cryptd_hash_digest_enqueue(struct ahash_request *req) { return cryptd_hash_enqueue(req, cryptd_hash_digest); } |
6fba00d17 crypto: cryptd - ... |
579 580 581 582 583 584 585 586 587 |
static int cryptd_hash_export(struct ahash_request *req, void *out) { struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); return crypto_shash_export(&rctx->desc, out); } static int cryptd_hash_import(struct ahash_request *req, const void *in) { |
0bd222359 crypto: cryptd - ... |
588 589 590 591 592 |
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm); struct shash_desc *desc = cryptd_shash_desc(req); desc->tfm = ctx->child; |
6fba00d17 crypto: cryptd - ... |
593 |
|
0bd222359 crypto: cryptd - ... |
594 |
return crypto_shash_import(desc, in); |
6fba00d17 crypto: cryptd - ... |
595 |
} |
758ec5ac5 crypto: cryptd - ... |
596 597 598 599 600 601 602 |
static void cryptd_hash_free(struct ahash_instance *inst) { struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst); crypto_drop_shash(&ctx->spawn); kfree(inst); } |
9cd899a32 crypto: cryptd - ... |
603 |
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb, |
7bcb2c99f crypto: algapi - ... |
604 |
struct crypto_attr_type *algt, |
9cd899a32 crypto: cryptd - ... |
605 |
struct cryptd_queue *queue) |
b8a28251c [CRYPTO] cryptd: ... |
606 |
{ |
46309d893 crypto: cryptd - ... |
607 |
struct hashd_instance_ctx *ctx; |
0b535adfb crypto: cryptd - ... |
608 |
struct ahash_instance *inst; |
218c5035f crypto: cryptd - ... |
609 |
struct shash_alg *alg; |
7bcb2c99f crypto: algapi - ... |
610 611 |
u32 type; u32 mask; |
46309d893 crypto: cryptd - ... |
612 |
int err; |
b8a28251c [CRYPTO] cryptd: ... |
613 |
|
7bcb2c99f crypto: algapi - ... |
614 |
cryptd_type_and_mask(algt, &type, &mask); |
466a7b9e3 crypto: cryptd - ... |
615 |
|
218c5035f crypto: cryptd - ... |
616 617 618 |
inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); if (!inst) return -ENOMEM; |
b8a28251c [CRYPTO] cryptd: ... |
619 |
|
0b535adfb crypto: cryptd - ... |
620 |
ctx = ahash_instance_ctx(inst); |
46309d893 crypto: cryptd - ... |
621 |
ctx->queue = queue; |
218c5035f crypto: cryptd - ... |
622 623 |
err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst), crypto_attr_alg_name(tb[1]), type, mask); |
46309d893 crypto: cryptd - ... |
624 |
if (err) |
218c5035f crypto: cryptd - ... |
625 626 627 628 629 630 |
goto err_free_inst; alg = crypto_spawn_shash_alg(&ctx->spawn); err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base); if (err) goto err_free_inst; |
46309d893 crypto: cryptd - ... |
631 |
|
7bcb2c99f crypto: algapi - ... |
632 633 |
inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC | (alg->base.cra_flags & (CRYPTO_ALG_INTERNAL| |
218c5035f crypto: cryptd - ... |
634 |
CRYPTO_ALG_OPTIONAL_KEY)); |
218c5035f crypto: cryptd - ... |
635 636 |
inst->alg.halg.digestsize = alg->digestsize; inst->alg.halg.statesize = alg->statesize; |
0b535adfb crypto: cryptd - ... |
637 |
inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx); |
b8a28251c [CRYPTO] cryptd: ... |
638 |
|
0b535adfb crypto: cryptd - ... |
639 640 |
inst->alg.halg.base.cra_init = cryptd_hash_init_tfm; inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm; |
b8a28251c [CRYPTO] cryptd: ... |
641 |
|
0b535adfb crypto: cryptd - ... |
642 643 644 |
inst->alg.init = cryptd_hash_init_enqueue; inst->alg.update = cryptd_hash_update_enqueue; inst->alg.final = cryptd_hash_final_enqueue; |
6fba00d17 crypto: cryptd - ... |
645 646 647 |
inst->alg.finup = cryptd_hash_finup_enqueue; inst->alg.export = cryptd_hash_export; inst->alg.import = cryptd_hash_import; |
218c5035f crypto: cryptd - ... |
648 |
if (crypto_shash_alg_has_setkey(alg)) |
841a3ff32 crypto: cryptd - ... |
649 |
inst->alg.setkey = cryptd_hash_setkey; |
0b535adfb crypto: cryptd - ... |
650 |
inst->alg.digest = cryptd_hash_digest_enqueue; |
b8a28251c [CRYPTO] cryptd: ... |
651 |
|
758ec5ac5 crypto: cryptd - ... |
652 |
inst->free = cryptd_hash_free; |
0b535adfb crypto: cryptd - ... |
653 |
err = ahash_register_instance(tmpl, inst); |
9cd899a32 crypto: cryptd - ... |
654 |
if (err) { |
218c5035f crypto: cryptd - ... |
655 |
err_free_inst: |
b8c0d74a7 crypto: cryptd - ... |
656 |
cryptd_hash_free(inst); |
9cd899a32 crypto: cryptd - ... |
657 |
} |
9cd899a32 crypto: cryptd - ... |
658 |
return err; |
b8a28251c [CRYPTO] cryptd: ... |
659 |
} |
92b9876bd crypto: cryptd - ... |
660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 |
static int cryptd_aead_setkey(struct crypto_aead *parent, const u8 *key, unsigned int keylen) { struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent); struct crypto_aead *child = ctx->child; return crypto_aead_setkey(child, key, keylen); } static int cryptd_aead_setauthsize(struct crypto_aead *parent, unsigned int authsize) { struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent); struct crypto_aead *child = ctx->child; return crypto_aead_setauthsize(child, authsize); } |
298c926c6 crypto: cryptd - ... |
677 678 679 680 681 682 |
static void cryptd_aead_crypt(struct aead_request *req, struct crypto_aead *child, int err, int (*crypt)(struct aead_request *req)) { struct cryptd_aead_request_ctx *rctx; |
81760ea6a crypto: cryptd - ... |
683 |
struct cryptd_aead_ctx *ctx; |
ec9f2006f crypto: cryptd - ... |
684 |
crypto_completion_t compl; |
81760ea6a crypto: cryptd - ... |
685 686 |
struct crypto_aead *tfm; int refcnt; |
ec9f2006f crypto: cryptd - ... |
687 |
|
298c926c6 crypto: cryptd - ... |
688 |
rctx = aead_request_ctx(req); |
ec9f2006f crypto: cryptd - ... |
689 |
compl = rctx->complete; |
298c926c6 crypto: cryptd - ... |
690 |
|
31bd44e76 crypto: cryptd - ... |
691 |
tfm = crypto_aead_reqtfm(req); |
298c926c6 crypto: cryptd - ... |
692 693 694 695 |
if (unlikely(err == -EINPROGRESS)) goto out; aead_request_set_tfm(req, child); err = crypt( req ); |
81760ea6a crypto: cryptd - ... |
696 |
|
298c926c6 crypto: cryptd - ... |
697 |
out: |
81760ea6a crypto: cryptd - ... |
698 |
ctx = crypto_aead_ctx(tfm); |
43b970fa8 crypto: cryptd - ... |
699 |
refcnt = refcount_read(&ctx->refcnt); |
81760ea6a crypto: cryptd - ... |
700 |
|
298c926c6 crypto: cryptd - ... |
701 |
local_bh_disable(); |
ec9f2006f crypto: cryptd - ... |
702 |
compl(&req->base, err); |
298c926c6 crypto: cryptd - ... |
703 |
local_bh_enable(); |
81760ea6a crypto: cryptd - ... |
704 |
|
43b970fa8 crypto: cryptd - ... |
705 |
if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt)) |
81760ea6a crypto: cryptd - ... |
706 |
crypto_free_aead(tfm); |
298c926c6 crypto: cryptd - ... |
707 708 709 710 711 712 713 714 715 |
} static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err) { struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm); struct crypto_aead *child = ctx->child; struct aead_request *req; req = container_of(areq, struct aead_request, base); |
ba3749a71 crypto: cryptd - ... |
716 |
cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt); |
298c926c6 crypto: cryptd - ... |
717 718 719 720 721 722 723 724 725 |
} static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err) { struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm); struct crypto_aead *child = ctx->child; struct aead_request *req; req = container_of(areq, struct aead_request, base); |
ba3749a71 crypto: cryptd - ... |
726 |
cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt); |
298c926c6 crypto: cryptd - ... |
727 728 729 |
} static int cryptd_aead_enqueue(struct aead_request *req, |
3e3dc25fe crypto: Resolve s... |
730 |
crypto_completion_t compl) |
298c926c6 crypto: cryptd - ... |
731 732 733 734 735 736 |
{ struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req); struct crypto_aead *tfm = crypto_aead_reqtfm(req); struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm)); rctx->complete = req->base.complete; |
3e3dc25fe crypto: Resolve s... |
737 |
req->base.complete = compl; |
298c926c6 crypto: cryptd - ... |
738 739 740 741 742 743 744 745 746 747 748 749 |
return cryptd_enqueue_request(queue, &req->base); } static int cryptd_aead_encrypt_enqueue(struct aead_request *req) { return cryptd_aead_enqueue(req, cryptd_aead_encrypt ); } static int cryptd_aead_decrypt_enqueue(struct aead_request *req) { return cryptd_aead_enqueue(req, cryptd_aead_decrypt ); } |
f614e546f crypto: cryptd - ... |
750 |
static int cryptd_aead_init_tfm(struct crypto_aead *tfm) |
298c926c6 crypto: cryptd - ... |
751 |
{ |
f614e546f crypto: cryptd - ... |
752 753 |
struct aead_instance *inst = aead_alg_instance(tfm); struct aead_instance_ctx *ictx = aead_instance_ctx(inst); |
298c926c6 crypto: cryptd - ... |
754 |
struct crypto_aead_spawn *spawn = &ictx->aead_spawn; |
f614e546f crypto: cryptd - ... |
755 |
struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm); |
298c926c6 crypto: cryptd - ... |
756 757 758 759 760 |
struct crypto_aead *cipher; cipher = crypto_spawn_aead(spawn); if (IS_ERR(cipher)) return PTR_ERR(cipher); |
298c926c6 crypto: cryptd - ... |
761 |
ctx->child = cipher; |
ec9f2006f crypto: cryptd - ... |
762 763 764 |
crypto_aead_set_reqsize( tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx), crypto_aead_reqsize(cipher))); |
298c926c6 crypto: cryptd - ... |
765 766 |
return 0; } |
f614e546f crypto: cryptd - ... |
767 |
static void cryptd_aead_exit_tfm(struct crypto_aead *tfm) |
298c926c6 crypto: cryptd - ... |
768 |
{ |
f614e546f crypto: cryptd - ... |
769 |
struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm); |
298c926c6 crypto: cryptd - ... |
770 771 |
crypto_free_aead(ctx->child); } |
758ec5ac5 crypto: cryptd - ... |
772 773 774 775 776 777 778 |
static void cryptd_aead_free(struct aead_instance *inst) { struct aead_instance_ctx *ctx = aead_instance_ctx(inst); crypto_drop_aead(&ctx->aead_spawn); kfree(inst); } |
/*
 * Instantiate a "cryptd(<aead>)" template instance: grab the underlying
 * AEAD named in tb[1], derive the instance's properties from it and
 * register the new instance.  Returns 0 or a negative errno.
 */
static int cryptd_create_aead(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	/* Instance and its context are allocated in one block. */
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	/* cryptd is always asynchronous; inherit INTERNAL from the child. */
	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	inst->free = cryptd_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
		/* Shared error path: also reached via the gotos above. */
err_free_inst:
		cryptd_aead_free(inst);
	}
	return err;
}
/* The single cryptd request queue shared by all template instances. */
static struct cryptd_queue queue;
124b53d02 [CRYPTO] cryptd: ... |
830 |
|
/*
 * Top-level template ->create hook: dispatch on the requested algorithm
 * type to the matching instance constructor.  All instances share the one
 * global cryptd queue.
 */
static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, algt, &queue);
	}

	/* Any other algorithm type is not supported by cryptd. */
	return -EINVAL;
}
/* The "cryptd" template itself; instances are built by cryptd_create(). */
static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.module = THIS_MODULE,
};
4e0958d19 crypto: cryptd - ... |
854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 |
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name, u32 type, u32 mask) { char cryptd_alg_name[CRYPTO_MAX_ALG_NAME]; struct cryptd_skcipher_ctx *ctx; struct crypto_skcipher *tfm; if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME, "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME) return ERR_PTR(-EINVAL); tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask); if (IS_ERR(tfm)) return ERR_CAST(tfm); if (tfm->base.__crt_alg->cra_module != THIS_MODULE) { crypto_free_skcipher(tfm); return ERR_PTR(-EINVAL); } ctx = crypto_skcipher_ctx(tfm); |
43b970fa8 crypto: cryptd - ... |
875 |
refcount_set(&ctx->refcnt, 1); |
4e0958d19 crypto: cryptd - ... |
876 877 878 879 880 881 882 883 |
return container_of(tfm, struct cryptd_skcipher, base); } EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher); struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm) { struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base); |
36b3875a9 crypto: cryptd - ... |
884 |
return &ctx->child->base; |
4e0958d19 crypto: cryptd - ... |
885 886 887 888 889 890 |
} EXPORT_SYMBOL_GPL(cryptd_skcipher_child); bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm) { struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base); |
43b970fa8 crypto: cryptd - ... |
891 |
return refcount_read(&ctx->refcnt) - 1; |
4e0958d19 crypto: cryptd - ... |
892 893 894 895 896 897 |
} EXPORT_SYMBOL_GPL(cryptd_skcipher_queued); void cryptd_free_skcipher(struct cryptd_skcipher *tfm) { struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base); |
43b970fa8 crypto: cryptd - ... |
898 |
if (refcount_dec_and_test(&ctx->refcnt)) |
4e0958d19 crypto: cryptd - ... |
899 900 901 |
crypto_free_skcipher(&tfm->base); } EXPORT_SYMBOL_GPL(cryptd_free_skcipher); |
ace136636 crypto: cryptd - ... |
902 903 904 905 |
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name, u32 type, u32 mask) { char cryptd_alg_name[CRYPTO_MAX_ALG_NAME]; |
81760ea6a crypto: cryptd - ... |
906 |
struct cryptd_hash_ctx *ctx; |
ace136636 crypto: cryptd - ... |
907 908 909 910 911 912 913 914 915 916 917 918 |
struct crypto_ahash *tfm; if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME, "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME) return ERR_PTR(-EINVAL); tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask); if (IS_ERR(tfm)) return ERR_CAST(tfm); if (tfm->base.__crt_alg->cra_module != THIS_MODULE) { crypto_free_ahash(tfm); return ERR_PTR(-EINVAL); } |
81760ea6a crypto: cryptd - ... |
919 |
ctx = crypto_ahash_ctx(tfm); |
43b970fa8 crypto: cryptd - ... |
920 |
refcount_set(&ctx->refcnt, 1); |
81760ea6a crypto: cryptd - ... |
921 |
|
ace136636 crypto: cryptd - ... |
922 923 924 925 926 927 928 929 930 931 932 |
return __cryptd_ahash_cast(tfm); } EXPORT_SYMBOL_GPL(cryptd_alloc_ahash); struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm) { struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base); return ctx->child; } EXPORT_SYMBOL_GPL(cryptd_ahash_child); |
0e1227d35 crypto: ghash - A... |
933 934 935 936 937 938 |
struct shash_desc *cryptd_shash_desc(struct ahash_request *req) { struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); return &rctx->desc; } EXPORT_SYMBOL_GPL(cryptd_shash_desc); |
81760ea6a crypto: cryptd - ... |
939 940 941 |
bool cryptd_ahash_queued(struct cryptd_ahash *tfm) { struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base); |
43b970fa8 crypto: cryptd - ... |
942 |
return refcount_read(&ctx->refcnt) - 1; |
81760ea6a crypto: cryptd - ... |
943 944 |
} EXPORT_SYMBOL_GPL(cryptd_ahash_queued); |
ace136636 crypto: cryptd - ... |
945 946 |
void cryptd_free_ahash(struct cryptd_ahash *tfm) { |
81760ea6a crypto: cryptd - ... |
947 |
struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base); |
43b970fa8 crypto: cryptd - ... |
948 |
if (refcount_dec_and_test(&ctx->refcnt)) |
81760ea6a crypto: cryptd - ... |
949 |
crypto_free_ahash(&tfm->base); |
ace136636 crypto: cryptd - ... |
950 951 |
} EXPORT_SYMBOL_GPL(cryptd_free_ahash); |
298c926c6 crypto: cryptd - ... |
952 953 954 955 |
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name, u32 type, u32 mask) { char cryptd_alg_name[CRYPTO_MAX_ALG_NAME]; |
81760ea6a crypto: cryptd - ... |
956 |
struct cryptd_aead_ctx *ctx; |
298c926c6 crypto: cryptd - ... |
957 958 959 960 961 962 963 964 965 966 967 968 |
struct crypto_aead *tfm; if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME, "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME) return ERR_PTR(-EINVAL); tfm = crypto_alloc_aead(cryptd_alg_name, type, mask); if (IS_ERR(tfm)) return ERR_CAST(tfm); if (tfm->base.__crt_alg->cra_module != THIS_MODULE) { crypto_free_aead(tfm); return ERR_PTR(-EINVAL); } |
81760ea6a crypto: cryptd - ... |
969 970 |
ctx = crypto_aead_ctx(tfm); |
43b970fa8 crypto: cryptd - ... |
971 |
refcount_set(&ctx->refcnt, 1); |
81760ea6a crypto: cryptd - ... |
972 |
|
298c926c6 crypto: cryptd - ... |
973 974 975 976 977 978 979 980 981 982 983 |
return __cryptd_aead_cast(tfm); } EXPORT_SYMBOL_GPL(cryptd_alloc_aead); struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm) { struct cryptd_aead_ctx *ctx; ctx = crypto_aead_ctx(&tfm->base); return ctx->child; } EXPORT_SYMBOL_GPL(cryptd_aead_child); |
81760ea6a crypto: cryptd - ... |
984 985 986 |
bool cryptd_aead_queued(struct cryptd_aead *tfm) { struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base); |
43b970fa8 crypto: cryptd - ... |
987 |
return refcount_read(&ctx->refcnt) - 1; |
81760ea6a crypto: cryptd - ... |
988 989 |
} EXPORT_SYMBOL_GPL(cryptd_aead_queued); |
298c926c6 crypto: cryptd - ... |
990 991 |
void cryptd_free_aead(struct cryptd_aead *tfm) { |
81760ea6a crypto: cryptd - ... |
992 |
struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base); |
43b970fa8 crypto: cryptd - ... |
993 |
if (refcount_dec_and_test(&ctx->refcnt)) |
81760ea6a crypto: cryptd - ... |
994 |
crypto_free_aead(&tfm->base); |
298c926c6 crypto: cryptd - ... |
995 996 |
} EXPORT_SYMBOL_GPL(cryptd_free_aead); |
/*
 * Module init: bring up the workqueue and the request queue, then
 * register the "cryptd" template.  Tears down in reverse on failure.
 */
static int __init cryptd_init(void)
{
	int err;

	/* NOTE(review): WQ_MEM_RECLAIM presumably because crypto work can
	 * sit on memory-reclaim I/O paths — confirm against callers. */
	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	/* Queue depth is tunable via the cryptd_max_cpu_qlen module param. */
	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);
err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}

/* Module exit: destroy the workqueue first, then the queues and template. */
static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}
/* Registered at subsys_initcall time so cryptd is available before the
 * regular module_init-level crypto users run. */
subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");