crypto/cryptd.c

  /*
   * Software async crypto daemon.
   *
   * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
   *
   * Added AEAD support to cryptd.
   *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
   *             Adrian Hoban <adrian.hoban@intel.com>
   *             Gabriele Paoloni <gabriele.paoloni@intel.com>
   *             Aidan O'Mahony (aidan.o.mahony@intel.com)
   *    Copyright (c) 2010, Intel Corporation.
   *
   * This program is free software; you can redistribute it and/or modify it
   * under the terms of the GNU General Public License as published by the Free
   * Software Foundation; either version 2 of the License, or (at your option)
   * any later version.
   *
   */
  
  #include <crypto/algapi.h>
  #include <crypto/internal/hash.h>
  #include <crypto/internal/aead.h>
  #include <crypto/cryptd.h>
  #include <crypto/crypto_wq.h>
  #include <linux/err.h>
  #include <linux/init.h>
  #include <linux/kernel.h>
  #include <linux/list.h>
  #include <linux/module.h>
  #include <linux/scatterlist.h>
  #include <linux/sched.h>
  #include <linux/slab.h>

  #define CRYPTD_MAX_CPU_QLEN 100
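
  /*
   * Requests are queued per CPU: each cryptd_cpu_queue holds a crypto_queue
   * and a work item that drains it on the kcrypto_wq workqueue.
   */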

  struct cryptd_cpu_queue {
  	struct crypto_queue queue;
  	struct work_struct work;
  };
  
  struct cryptd_queue {
  	struct cryptd_cpu_queue __percpu *cpu_queue;
  };
  
  struct cryptd_instance_ctx {
  	struct crypto_spawn spawn;
  	struct cryptd_queue *queue;
  };
  struct hashd_instance_ctx {
  	struct crypto_shash_spawn spawn;
  	struct cryptd_queue *queue;
  };
  struct aead_instance_ctx {
  	struct crypto_aead_spawn aead_spawn;
  	struct cryptd_queue *queue;
  };
  struct cryptd_blkcipher_ctx {
  	struct crypto_blkcipher *child;
  };
  
  struct cryptd_blkcipher_request_ctx {
  	crypto_completion_t complete;
  };
  struct cryptd_hash_ctx {
  	struct crypto_shash *child;
  };
  
  struct cryptd_hash_request_ctx {
  	crypto_completion_t complete;
  	struct shash_desc desc;
  };

  struct cryptd_aead_ctx {
  	struct crypto_aead *child;
  };
  
  struct cryptd_aead_request_ctx {
  	crypto_completion_t complete;
  };
  static void cryptd_queue_worker(struct work_struct *work);
  
  static int cryptd_init_queue(struct cryptd_queue *queue,
  			     unsigned int max_cpu_qlen)
  {
  	int cpu;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
  	if (!queue->cpu_queue)
  		return -ENOMEM;
  	for_each_possible_cpu(cpu) {
  		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
  		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
  		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
  	}
  	return 0;
  }
  
  static void cryptd_fini_queue(struct cryptd_queue *queue)
  {
  	int cpu;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	for_each_possible_cpu(cpu) {
  		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
  		BUG_ON(cpu_queue->queue.qlen);
  	}
  	free_percpu(queue->cpu_queue);
  }
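
  /*
   * Queue a request on the local CPU's queue and kick that CPU's worker.
   * The CPU is pinned (get_cpu/put_cpu) so the work item is scheduled on
   * the CPU that enqueued the request.
   */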
  
  static int cryptd_enqueue_request(struct cryptd_queue *queue,
  				  struct crypto_async_request *request)
  {
  	int cpu, err;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	cpu = get_cpu();
  	cpu_queue = this_cpu_ptr(queue->cpu_queue);
  	err = crypto_enqueue_request(&cpu_queue->queue, request);
  	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
  	put_cpu();
  
  	return err;
  }
  
  /* Called in workqueue context: do one unit of real crypto work (via
   * req->complete) and reschedule itself if there is more work to
   * do. */
  static void cryptd_queue_worker(struct work_struct *work)
  {
  	struct cryptd_cpu_queue *cpu_queue;
  	struct crypto_async_request *req, *backlog;
  
  	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
  	/*
  	 * Only handle one request at a time to avoid hogging the crypto
  	 * workqueue.  preempt_disable/enable prevents this worker from being
  	 * preempted by cryptd_enqueue_request(); local_bh_disable/enable
  	 * prevents cryptd_enqueue_request() from being entered from softirq
  	 * context.
  	 */
  	local_bh_disable();
  	preempt_disable();
  	backlog = crypto_get_backlog(&cpu_queue->queue);
  	req = crypto_dequeue_request(&cpu_queue->queue);
  	preempt_enable();
  	local_bh_enable();
  
  	if (!req)
  		return;
  
  	if (backlog)
  		backlog->complete(backlog, -EINPROGRESS);
  	req->complete(req, 0);
  
  	if (cpu_queue->queue.qlen)
  		queue_work(kcrypto_wq, &cpu_queue->work);
  }
  
  static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	return ictx->queue;
  }
  
  static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
  				   const u8 *key, unsigned int keylen)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
  	struct crypto_blkcipher *child = ctx->child;
  	int err;
  
  	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
  					  CRYPTO_TFM_REQ_MASK);
  	err = crypto_blkcipher_setkey(child, key, keylen);
  	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
  					    CRYPTO_TFM_RES_MASK);
  	return err;
  }
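
  /*
   * Run the underlying synchronous blkcipher for a queued request and then
   * complete the original request with softirqs (BHs) disabled.
   */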
  
  static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
  				   struct crypto_blkcipher *child,
  				   int err,
  				   int (*crypt)(struct blkcipher_desc *desc,
  						struct scatterlist *dst,
  						struct scatterlist *src,
  						unsigned int len))
  {
  	struct cryptd_blkcipher_request_ctx *rctx;
  	struct blkcipher_desc desc;
  
  	rctx = ablkcipher_request_ctx(req);
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  
  	desc.tfm = child;
  	desc.info = req->info;
  	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
  
  	err = crypt(&desc, req->dst, req->src, req->nbytes);
  
  	req->base.complete = rctx->complete;
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
  	struct crypto_blkcipher *child = ctx->child;
  
  	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
  			       crypto_blkcipher_crt(child)->encrypt);
  }
  
  static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
  	struct crypto_blkcipher *child = ctx->child;
  
  	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
  			       crypto_blkcipher_crt(child)->decrypt);
  }
  
  static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
  				    crypto_completion_t complete)
  {
  	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
  	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
  	struct cryptd_queue *queue;

  	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
  	rctx->complete = req->base.complete;
  	req->base.complete = complete;
  	return cryptd_enqueue_request(queue, &req->base);
  }
  
  static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
  {
  	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
  }
  
  static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
  {
  	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
  }
  
  static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_spawn *spawn = &ictx->spawn;
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_blkcipher *cipher;
  
  	cipher = crypto_spawn_blkcipher(spawn);
  	if (IS_ERR(cipher))
  		return PTR_ERR(cipher);
  
  	ctx->child = cipher;
  	tfm->crt_ablkcipher.reqsize =
  		sizeof(struct cryptd_blkcipher_request_ctx);
  	return 0;
  }
  
  static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
  
  	crypto_free_blkcipher(ctx->child);
  }
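
  /*
   * Allocate a crypto_instance with 'head' bytes in front of it and 'tail'
   * bytes of context after it, name it "cryptd(<driver-name>)" and copy the
   * blocksize/alignmask of the wrapped algorithm at a boosted priority.
   */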
  static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
  				   unsigned int tail)
  {
  	char *p;
  	struct crypto_instance *inst;
  	int err;
  	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
  	if (!p)
  		return ERR_PTR(-ENOMEM);
  
  	inst = (void *)(p + head);
  
  	err = -ENAMETOOLONG;
  	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
  		goto out_free_inst;
  	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
  
  	inst->alg.cra_priority = alg->cra_priority + 50;
  	inst->alg.cra_blocksize = alg->cra_blocksize;
  	inst->alg.cra_alignmask = alg->cra_alignmask;
  
  out:
  	return p;
  
  out_free_inst:
  	kfree(p);
  	p = ERR_PTR(err);
  	goto out;
  }
  static int cryptd_create_blkcipher(struct crypto_template *tmpl,
  				   struct rtattr **tb,
  				   struct cryptd_queue *queue)
  {
  	struct cryptd_instance_ctx *ctx;
  	struct crypto_instance *inst;
  	struct crypto_alg *alg;
  	int err;
  
  	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
  				  CRYPTO_ALG_TYPE_MASK);
  	if (IS_ERR(alg))
  		return PTR_ERR(alg);

  	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
  	err = PTR_ERR(inst);
  	if (IS_ERR(inst))
  		goto out_put_alg;
  	ctx = crypto_instance_ctx(inst);
  	ctx->queue = queue;
  
  	err = crypto_init_spawn(&ctx->spawn, alg, inst,
  				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
  	if (err)
  		goto out_free_inst;
  	inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
  	inst->alg.cra_type = &crypto_ablkcipher_type;
  
  	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
  	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
  	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
  	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;
  	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);
  
  	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
  	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;
  
  	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
  	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
  	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;
  	err = crypto_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_spawn(&ctx->spawn);
  out_free_inst:
  		kfree(inst);
  	}
  out_put_alg:
  	crypto_mod_put(alg);
  	return err;
  }
  static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_shash_spawn *spawn = &ictx->spawn;
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_shash *hash;

  	hash = crypto_spawn_shash(spawn);
  	if (IS_ERR(hash))
  		return PTR_ERR(hash);

  	ctx->child = hash;
  	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
  				 sizeof(struct cryptd_hash_request_ctx) +
  				 crypto_shash_descsize(hash));
  	return 0;
  }
  
  static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

  	crypto_free_shash(ctx->child);
  }
  
  static int cryptd_hash_setkey(struct crypto_ahash *parent,
  				   const u8 *key, unsigned int keylen)
  {
  	struct cryptd_hash_ctx *ctx   = crypto_ahash_ctx(parent);
  	struct crypto_shash *child = ctx->child;
  	int err;
  	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
  				      CRYPTO_TFM_REQ_MASK);
  	err = crypto_shash_setkey(child, key, keylen);
  	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
  				       CRYPTO_TFM_RES_MASK);
  	return err;
  }
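
  /*
   * Defer a hash request to the cryptd queue; the caller's completion is
   * saved in the request context and restored before the request completes.
   */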
  
  static int cryptd_hash_enqueue(struct ahash_request *req,
  				crypto_completion_t complete)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
  	struct cryptd_queue *queue =
  		cryptd_get_queue(crypto_ahash_tfm(tfm));
  
  	rctx->complete = req->base.complete;
  	req->base.complete = complete;
  	return cryptd_enqueue_request(queue, &req->base);
  }
  
  static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
  {
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
  	struct crypto_shash *child = ctx->child;
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct shash_desc *desc = &rctx->desc;
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	desc->tfm = child;
  	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

  	err = crypto_shash_init(desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_init_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_init);
  }
  
  static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
  {
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx;
  
  	rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	err = shash_ahash_update(req, &rctx->desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_update_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_update);
  }
  
  static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
  {
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	err = crypto_shash_final(&rctx->desc, req->result);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_final_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_final);
  }
  static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
  {
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  
  	err = shash_ahash_finup(req, &rctx->desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_finup_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_finup);
  }
  static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
  {
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
  	struct crypto_shash *child = ctx->child;
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct shash_desc *desc = &rctx->desc;
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	desc->tfm = child;
  	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

  	err = shash_ahash_digest(req, desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_digest_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_digest);
  }
  static int cryptd_hash_export(struct ahash_request *req, void *out)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	return crypto_shash_export(&rctx->desc, out);
  }
  
  static int cryptd_hash_import(struct ahash_request *req, const void *in)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	return crypto_shash_import(&rctx->desc, in);
  }
  static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
  			      struct cryptd_queue *queue)
  {
  	struct hashd_instance_ctx *ctx;
  	struct ahash_instance *inst;
  	struct shash_alg *salg;
  	struct crypto_alg *alg;
  	int err;

  	salg = shash_attr_alg(tb[1], 0, 0);
  	if (IS_ERR(salg))
  		return PTR_ERR(salg);

  	alg = &salg->base;
  	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
  				     sizeof(*ctx));
  	err = PTR_ERR(inst);
  	if (IS_ERR(inst))
  		goto out_put_alg;
  	ctx = ahash_instance_ctx(inst);
  	ctx->queue = queue;
  	err = crypto_init_shash_spawn(&ctx->spawn, salg,
  				      ahash_crypto_instance(inst));
  	if (err)
  		goto out_free_inst;
  	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC;

  	inst->alg.halg.digestsize = salg->digestsize;
  	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

  	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
  	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

  	inst->alg.init   = cryptd_hash_init_enqueue;
  	inst->alg.update = cryptd_hash_update_enqueue;
  	inst->alg.final  = cryptd_hash_final_enqueue;
  	inst->alg.finup  = cryptd_hash_finup_enqueue;
  	inst->alg.export = cryptd_hash_export;
  	inst->alg.import = cryptd_hash_import;
  	inst->alg.setkey = cryptd_hash_setkey;
  	inst->alg.digest = cryptd_hash_digest_enqueue;

  	err = ahash_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_shash(&ctx->spawn);
  out_free_inst:
  		kfree(inst);
  	}
  out_put_alg:
  	crypto_mod_put(alg);
  	return err;
  }
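
  /*
   * Run the underlying AEAD transform's encrypt/decrypt for a queued request
   * and complete the original request with softirqs (BHs) disabled.
   */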
  static void cryptd_aead_crypt(struct aead_request *req,
  			struct crypto_aead *child,
  			int err,
  			int (*crypt)(struct aead_request *req))
  {
  	struct cryptd_aead_request_ctx *rctx;
  	rctx = aead_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	aead_request_set_tfm(req, child);
  	err = crypt(req);
  	req->base.complete = rctx->complete;
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
  	struct crypto_aead *child = ctx->child;
  	struct aead_request *req;
  
  	req = container_of(areq, struct aead_request, base);
  	cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->encrypt);
  }
  
  static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
  	struct crypto_aead *child = ctx->child;
  	struct aead_request *req;
  
  	req = container_of(areq, struct aead_request, base);
  	cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->decrypt);
  }
  
  static int cryptd_aead_enqueue(struct aead_request *req,
  				    crypto_completion_t complete)
  {
  	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
  	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
  
  	rctx->complete = req->base.complete;
  	req->base.complete = complete;
  	return cryptd_enqueue_request(queue, &req->base);
  }
  
  static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
  {
  	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
  }
  
  static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
  {
  	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
  }
  
  static int cryptd_aead_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct aead_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_aead *cipher;
  
  	cipher = crypto_spawn_aead(spawn);
  	if (IS_ERR(cipher))
  		return PTR_ERR(cipher);
  
  	crypto_aead_set_flags(cipher, CRYPTO_TFM_REQ_MAY_SLEEP);
  	ctx->child = cipher;
  	tfm->crt_aead.reqsize = sizeof(struct cryptd_aead_request_ctx);
  	return 0;
  }
  
  static void cryptd_aead_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
  	crypto_free_aead(ctx->child);
  }
  
  static int cryptd_create_aead(struct crypto_template *tmpl,
  		              struct rtattr **tb,
  			      struct cryptd_queue *queue)
  {
  	struct aead_instance_ctx *ctx;
  	struct crypto_instance *inst;
  	struct crypto_alg *alg;
  	int err;
  
  	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_AEAD,
  				CRYPTO_ALG_TYPE_MASK);
  	if (IS_ERR(alg))
  		return PTR_ERR(alg);
  
  	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
  	err = PTR_ERR(inst);
  	if (IS_ERR(inst))
  		goto out_put_alg;
  
  	ctx = crypto_instance_ctx(inst);
  	ctx->queue = queue;
  
  	err = crypto_init_spawn(&ctx->aead_spawn.base, alg, inst,
  			CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
  	if (err)
  		goto out_free_inst;
  
  	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
  	inst->alg.cra_type = alg->cra_type;
  	inst->alg.cra_ctxsize = sizeof(struct cryptd_aead_ctx);
  	inst->alg.cra_init = cryptd_aead_init_tfm;
  	inst->alg.cra_exit = cryptd_aead_exit_tfm;
  	inst->alg.cra_aead.setkey      = alg->cra_aead.setkey;
  	inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
  	inst->alg.cra_aead.geniv       = alg->cra_aead.geniv;
  	inst->alg.cra_aead.ivsize      = alg->cra_aead.ivsize;
  	inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
  	inst->alg.cra_aead.encrypt     = cryptd_aead_encrypt_enqueue;
  	inst->alg.cra_aead.decrypt     = cryptd_aead_decrypt_enqueue;
  	inst->alg.cra_aead.givencrypt  = alg->cra_aead.givencrypt;
  	inst->alg.cra_aead.givdecrypt  = alg->cra_aead.givdecrypt;
  
  	err = crypto_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_spawn(&ctx->aead_spawn.base);
  out_free_inst:
  		kfree(inst);
  	}
  out_put_alg:
  	crypto_mod_put(alg);
  	return err;
  }
  static struct cryptd_queue queue;
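
  /*
   * Template construction entry point: dispatch to the blkcipher, hash or
   * AEAD instance constructor based on the requested algorithm type.
   */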

  static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
  {
  	struct crypto_attr_type *algt;
  
  	algt = crypto_get_attr_type(tb);
  	if (IS_ERR(algt))
  		return PTR_ERR(algt);
  
  	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
  	case CRYPTO_ALG_TYPE_BLKCIPHER:
  		return cryptd_create_blkcipher(tmpl, tb, &queue);
  	case CRYPTO_ALG_TYPE_DIGEST:
  		return cryptd_create_hash(tmpl, tb, &queue);
  	case CRYPTO_ALG_TYPE_AEAD:
  		return cryptd_create_aead(tmpl, tb, &queue);
  	}
  	return -EINVAL;
  }
  
  static void cryptd_free(struct crypto_instance *inst)
  {
  	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
  	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
  	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);
  
  	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
  	case CRYPTO_ALG_TYPE_AHASH:
  		crypto_drop_shash(&hctx->spawn);
  		kfree(ahash_instance(inst));
  		return;
  	case CRYPTO_ALG_TYPE_AEAD:
  		crypto_drop_spawn(&aead_ctx->aead_spawn.base);
  		kfree(inst);
  		return;
  	default:
  		crypto_drop_spawn(&ctx->spawn);
  		kfree(inst);
  	}
  }
  
  static struct crypto_template cryptd_tmpl = {
  	.name = "cryptd",
  	.create = cryptd_create,
  	.free = cryptd_free,
  	.module = THIS_MODULE,
  };
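
  /*
   * Helpers for callers that want a "cryptd(...)" wrapper directly.  Only
   * transforms provided by this module are accepted, so the casts to the
   * cryptd-specific handle types below are safe.
   */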
  struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
  						  u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_tfm *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
  	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
  	mask &= ~CRYPTO_ALG_TYPE_MASK;
  	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
  	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_tfm(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
  
  struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);
  
  void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
  {
  	crypto_free_ablkcipher(&tfm->base);
  }
  EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
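
  /*
   * Minimal usage sketch (not part of this file), assuming a synchronous
   * "cbc(aes)" implementation is available to wrap:
   *
   *	struct cryptd_ablkcipher *ctfm;
   *
   *	ctfm = cryptd_alloc_ablkcipher("cbc(aes)", 0, 0);
   *	if (IS_ERR(ctfm))
   *		return PTR_ERR(ctfm);
   *	... submit ablkcipher requests against &ctfm->base ...
   *	cryptd_free_ablkcipher(ctfm);
   */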
  struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
  					u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_ahash *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_ahash(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  
  	return __cryptd_ahash_cast(tfm);
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
  
  struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
  {
  	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
  
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_ahash_child);
  struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	return &rctx->desc;
  }
  EXPORT_SYMBOL_GPL(cryptd_shash_desc);
  void cryptd_free_ahash(struct cryptd_ahash *tfm)
  {
  	crypto_free_ahash(&tfm->base);
  }
  EXPORT_SYMBOL_GPL(cryptd_free_ahash);
  struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
  						  u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_aead *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_aead(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  	return __cryptd_aead_cast(tfm);
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
  
  struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
  {
  	struct cryptd_aead_ctx *ctx;
  	ctx = crypto_aead_ctx(&tfm->base);
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_aead_child);
  
  void cryptd_free_aead(struct cryptd_aead *tfm)
  {
  	crypto_free_aead(&tfm->base);
  }
  EXPORT_SYMBOL_GPL(cryptd_free_aead);
  static int __init cryptd_init(void)
  {
  	int err;
  	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
  	if (err)
  		return err;
  
  	err = crypto_register_template(&cryptd_tmpl);
  	if (err)
  		cryptd_fini_queue(&queue);
  
  	return err;
  }
  
  static void __exit cryptd_exit(void)
  {
  	cryptd_fini_queue(&queue);
  	crypto_unregister_template(&cryptd_tmpl);
  }
  subsys_initcall(cryptd_init);
  module_exit(cryptd_exit);
  
  MODULE_LICENSE("GPL");
  MODULE_DESCRIPTION("Software async crypto daemon");