crypto/cryptd.c

  /*
   * Software async crypto daemon.
   *
   * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
   *
   * Added AEAD support to cryptd.
   *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
   *             Adrian Hoban <adrian.hoban@intel.com>
   *             Gabriele Paoloni <gabriele.paoloni@intel.com>
   *             Aidan O'Mahony (aidan.o.mahony@intel.com)
   *    Copyright (c) 2010, Intel Corporation.
   *
   * This program is free software; you can redistribute it and/or modify it
   * under the terms of the GNU General Public License as published by the Free
   * Software Foundation; either version 2 of the License, or (at your option)
   * any later version.
   *
   */
  
  #include <crypto/algapi.h>
  #include <crypto/internal/hash.h>
  #include <crypto/internal/aead.h>
  #include <crypto/cryptd.h>
  #include <crypto/crypto_wq.h>
  #include <linux/err.h>
  #include <linux/init.h>
  #include <linux/kernel.h>
  #include <linux/list.h>
  #include <linux/module.h>
  #include <linux/scatterlist.h>
  #include <linux/sched.h>
  #include <linux/slab.h>

  #define CRYPTD_MAX_CPU_QLEN 100

  struct cryptd_cpu_queue {
  	struct crypto_queue queue;
  	struct work_struct work;
  };
  
  struct cryptd_queue {
  	struct cryptd_cpu_queue __percpu *cpu_queue;
  };
  
  struct cryptd_instance_ctx {
  	struct crypto_spawn spawn;
  	struct cryptd_queue *queue;
  };
  struct hashd_instance_ctx {
  	struct crypto_shash_spawn spawn;
  	struct cryptd_queue *queue;
  };
  struct aead_instance_ctx {
  	struct crypto_aead_spawn aead_spawn;
  	struct cryptd_queue *queue;
  };
  struct cryptd_blkcipher_ctx {
  	struct crypto_blkcipher *child;
  };
  
  struct cryptd_blkcipher_request_ctx {
  	crypto_completion_t complete;
  };
  struct cryptd_hash_ctx {
  	struct crypto_shash *child;
  };
  
  struct cryptd_hash_request_ctx {
  	crypto_completion_t complete;
  	struct shash_desc desc;
  };

  struct cryptd_aead_ctx {
  	struct crypto_aead *child;
  };
  
  struct cryptd_aead_request_ctx {
  	crypto_completion_t complete;
  };
  static void cryptd_queue_worker(struct work_struct *work);
  
  static int cryptd_init_queue(struct cryptd_queue *queue,
  			     unsigned int max_cpu_qlen)
  {
  	int cpu;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
  	if (!queue->cpu_queue)
  		return -ENOMEM;
  	for_each_possible_cpu(cpu) {
  		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
  		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
  		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
  	}
  	return 0;
  }
  
  static void cryptd_fini_queue(struct cryptd_queue *queue)
  {
  	int cpu;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	for_each_possible_cpu(cpu) {
  		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
  		BUG_ON(cpu_queue->queue.qlen);
  	}
  	free_percpu(queue->cpu_queue);
  }
  
  static int cryptd_enqueue_request(struct cryptd_queue *queue,
  				  struct crypto_async_request *request)
  {
  	int cpu, err;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	cpu = get_cpu();
  	cpu_queue = this_cpu_ptr(queue->cpu_queue);
  	err = crypto_enqueue_request(&cpu_queue->queue, request);
  	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
  	put_cpu();
  
  	return err;
  }
  
  /* Called in workqueue context: do one real unit of crypto work (via
   * req->complete) and reschedule itself if there is more work to
   * do. */
  static void cryptd_queue_worker(struct work_struct *work)
  {
  	struct cryptd_cpu_queue *cpu_queue;
  	struct crypto_async_request *req, *backlog;
  
  	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
  	/* Only handle one request at a time to avoid hogging the crypto
  	 * workqueue. preempt_disable/enable is used to prevent
  	 * being preempted by cryptd_enqueue_request(). */
  	preempt_disable();
  	backlog = crypto_get_backlog(&cpu_queue->queue);
  	req = crypto_dequeue_request(&cpu_queue->queue);
  	preempt_enable();
  
  	if (!req)
  		return;
  
  	if (backlog)
  		backlog->complete(backlog, -EINPROGRESS);
  	req->complete(req, 0);
  
  	if (cpu_queue->queue.qlen)
  		queue_work(kcrypto_wq, &cpu_queue->work);
  }
  
  static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	return ictx->queue;
  }
  
  static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
  				   const u8 *key, unsigned int keylen)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
  	struct crypto_blkcipher *child = ctx->child;
  	int err;
  
  	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
  					  CRYPTO_TFM_REQ_MASK);
  	err = crypto_blkcipher_setkey(child, key, keylen);
  	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
  					    CRYPTO_TFM_RES_MASK);
  	return err;
  }
  
  static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
  				   struct crypto_blkcipher *child,
  				   int err,
  				   int (*crypt)(struct blkcipher_desc *desc,
  						struct scatterlist *dst,
  						struct scatterlist *src,
  						unsigned int len))
  {
  	struct cryptd_blkcipher_request_ctx *rctx;
  	struct blkcipher_desc desc;
  
  	rctx = ablkcipher_request_ctx(req);
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  
  	desc.tfm = child;
  	desc.info = req->info;
  	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
  
  	err = crypt(&desc, req->dst, req->src, req->nbytes);
  
  	req->base.complete = rctx->complete;
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
  	struct crypto_blkcipher *child = ctx->child;
  
  	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
  			       crypto_blkcipher_crt(child)->encrypt);
  }
  
  static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
  	struct crypto_blkcipher *child = ctx->child;
  
  	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
  			       crypto_blkcipher_crt(child)->decrypt);
  }
  
  static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
  				    crypto_completion_t complete)
  {
  	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
  	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
  	struct cryptd_queue *queue;

  	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
  	rctx->complete = req->base.complete;
  	req->base.complete = complete;
  	return cryptd_enqueue_request(queue, &req->base);
  }
  
  static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
  {
  	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
  }
  
  static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
  {
  	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
  }
  
  static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_spawn *spawn = &ictx->spawn;
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_blkcipher *cipher;
  
  	cipher = crypto_spawn_blkcipher(spawn);
  	if (IS_ERR(cipher))
  		return PTR_ERR(cipher);
  
  	ctx->child = cipher;
  	tfm->crt_ablkcipher.reqsize =
  		sizeof(struct cryptd_blkcipher_request_ctx);
  	return 0;
  }
  
  static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
  
  	crypto_free_blkcipher(ctx->child);
  }
  static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
  				   unsigned int tail)
  {
  	char *p;
  	struct crypto_instance *inst;
  	int err;
  	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
  	if (!p)
  		return ERR_PTR(-ENOMEM);
  
  	inst = (void *)(p + head);
  
  	err = -ENAMETOOLONG;
  	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
  		goto out_free_inst;
  	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
  
  	inst->alg.cra_priority = alg->cra_priority + 50;
  	inst->alg.cra_blocksize = alg->cra_blocksize;
  	inst->alg.cra_alignmask = alg->cra_alignmask;
  
  out:
  	return p;
  
  out_free_inst:
  	kfree(p);
  	p = ERR_PTR(err);
  	goto out;
  }
  static int cryptd_create_blkcipher(struct crypto_template *tmpl,
  				   struct rtattr **tb,
  				   struct cryptd_queue *queue)
  {
  	struct cryptd_instance_ctx *ctx;
  	struct crypto_instance *inst;
  	struct crypto_alg *alg;
  	int err;
  
  	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
  				  CRYPTO_ALG_TYPE_MASK);
  	if (IS_ERR(alg))
  		return PTR_ERR(alg);

  	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
  	err = PTR_ERR(inst);
  	if (IS_ERR(inst))
  		goto out_put_alg;
  	ctx = crypto_instance_ctx(inst);
  	ctx->queue = queue;
  
  	err = crypto_init_spawn(&ctx->spawn, alg, inst,
  				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
  	if (err)
  		goto out_free_inst;
  	inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
  	inst->alg.cra_type = &crypto_ablkcipher_type;
  
  	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
  	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
  	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
  	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;
  	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);
  
  	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
  	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;
  
  	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
  	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
  	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;
  	err = crypto_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_spawn(&ctx->spawn);
  out_free_inst:
  		kfree(inst);
  	}
  out_put_alg:
  	crypto_mod_put(alg);
  	return err;
  }
  static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_shash_spawn *spawn = &ictx->spawn;
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_shash *hash;

  	hash = crypto_spawn_shash(spawn);
  	if (IS_ERR(hash))
  		return PTR_ERR(hash);

  	ctx->child = hash;
  	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
  				 sizeof(struct cryptd_hash_request_ctx) +
  				 crypto_shash_descsize(hash));
  	return 0;
  }
  
  static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

  	crypto_free_shash(ctx->child);
  }
  
  static int cryptd_hash_setkey(struct crypto_ahash *parent,
  				   const u8 *key, unsigned int keylen)
  {
  	struct cryptd_hash_ctx *ctx   = crypto_ahash_ctx(parent);
  	struct crypto_shash *child = ctx->child;
  	int err;
  	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
  				      CRYPTO_TFM_REQ_MASK);
  	err = crypto_shash_setkey(child, key, keylen);
  	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
  				       CRYPTO_TFM_RES_MASK);
  	return err;
  }
  
  static int cryptd_hash_enqueue(struct ahash_request *req,
  				crypto_completion_t complete)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
  	struct cryptd_queue *queue =
  		cryptd_get_queue(crypto_ahash_tfm(tfm));
  
  	rctx->complete = req->base.complete;
  	req->base.complete = complete;
  	return cryptd_enqueue_request(queue, &req->base);
  }
  
  static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
  {
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
  	struct crypto_shash *child = ctx->child;
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct shash_desc *desc = &rctx->desc;
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	desc->tfm = child;
  	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

  	err = crypto_shash_init(desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_init_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_init);
  }
  
  static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
  {
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx;
  
  	rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	err = shash_ahash_update(req, &rctx->desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_update_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_update);
  }
  
  static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
  {
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	err = crypto_shash_final(&rctx->desc, req->result);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_final_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_final);
  }
  static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
  {
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  
  	err = shash_ahash_finup(req, &rctx->desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_finup_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_finup);
  }
  static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
  {
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
  	struct crypto_shash *child = ctx->child;
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct shash_desc *desc = &rctx->desc;
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	desc->tfm = child;
  	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

  	err = shash_ahash_digest(req, desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_digest_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_digest);
  }
  static int cryptd_hash_export(struct ahash_request *req, void *out)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	return crypto_shash_export(&rctx->desc, out);
  }
  
  static int cryptd_hash_import(struct ahash_request *req, const void *in)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	return crypto_shash_import(&rctx->desc, in);
  }
  static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
  			      struct cryptd_queue *queue)
  {
  	struct hashd_instance_ctx *ctx;
  	struct ahash_instance *inst;
  	struct shash_alg *salg;
  	struct crypto_alg *alg;
  	int err;

  	salg = shash_attr_alg(tb[1], 0, 0);
  	if (IS_ERR(salg))
  		return PTR_ERR(salg);

  	alg = &salg->base;
  	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
  				     sizeof(*ctx));
  	err = PTR_ERR(inst);
  	if (IS_ERR(inst))
  		goto out_put_alg;
  	ctx = ahash_instance_ctx(inst);
  	ctx->queue = queue;
  	err = crypto_init_shash_spawn(&ctx->spawn, salg,
  				      ahash_crypto_instance(inst));
  	if (err)
  		goto out_free_inst;
  	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC;

  	inst->alg.halg.digestsize = salg->digestsize;
  	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

  	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
  	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

  	inst->alg.init   = cryptd_hash_init_enqueue;
  	inst->alg.update = cryptd_hash_update_enqueue;
  	inst->alg.final  = cryptd_hash_final_enqueue;
  	inst->alg.finup  = cryptd_hash_finup_enqueue;
  	inst->alg.export = cryptd_hash_export;
  	inst->alg.import = cryptd_hash_import;
  	inst->alg.setkey = cryptd_hash_setkey;
  	inst->alg.digest = cryptd_hash_digest_enqueue;

  	err = ahash_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_shash(&ctx->spawn);
  out_free_inst:
  		kfree(inst);
  	}
  out_put_alg:
  	crypto_mod_put(alg);
  	return err;
  }
  static void cryptd_aead_crypt(struct aead_request *req,
  			struct crypto_aead *child,
  			int err,
  			int (*crypt)(struct aead_request *req))
  {
  	struct cryptd_aead_request_ctx *rctx;
  	rctx = aead_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	aead_request_set_tfm(req, child);
  	err = crypt(req);
  	req->base.complete = rctx->complete;
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
  	struct crypto_aead *child = ctx->child;
  	struct aead_request *req;
  
  	req = container_of(areq, struct aead_request, base);
  	cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->encrypt);
  }
  
  static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
  	struct crypto_aead *child = ctx->child;
  	struct aead_request *req;
  
  	req = container_of(areq, struct aead_request, base);
  	cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->decrypt);
  }
  
  static int cryptd_aead_enqueue(struct aead_request *req,
  				    crypto_completion_t complete)
  {
  	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
  	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
  
  	rctx->complete = req->base.complete;
  	req->base.complete = complete;
  	return cryptd_enqueue_request(queue, &req->base);
  }
  
  static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
  {
  	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
  }
  
  static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
  {
  	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
  }
  
  static int cryptd_aead_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct aead_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_aead *cipher;
  
  	cipher = crypto_spawn_aead(spawn);
  	if (IS_ERR(cipher))
  		return PTR_ERR(cipher);
  
  	crypto_aead_set_flags(cipher, CRYPTO_TFM_REQ_MAY_SLEEP);
  	ctx->child = cipher;
  	tfm->crt_aead.reqsize = sizeof(struct cryptd_aead_request_ctx);
  	return 0;
  }
  
  static void cryptd_aead_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
  	crypto_free_aead(ctx->child);
  }
  
  static int cryptd_create_aead(struct crypto_template *tmpl,
  		              struct rtattr **tb,
  			      struct cryptd_queue *queue)
  {
  	struct aead_instance_ctx *ctx;
  	struct crypto_instance *inst;
  	struct crypto_alg *alg;
  	int err;
  
  	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_AEAD,
  				CRYPTO_ALG_TYPE_MASK);
  	if (IS_ERR(alg))
  		return PTR_ERR(alg);
  
  	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
  	err = PTR_ERR(inst);
  	if (IS_ERR(inst))
  		goto out_put_alg;
  
  	ctx = crypto_instance_ctx(inst);
  	ctx->queue = queue;
  
  	err = crypto_init_spawn(&ctx->aead_spawn.base, alg, inst,
  			CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
  	if (err)
  		goto out_free_inst;
  
  	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
  	inst->alg.cra_type = alg->cra_type;
  	inst->alg.cra_ctxsize = sizeof(struct cryptd_aead_ctx);
  	inst->alg.cra_init = cryptd_aead_init_tfm;
  	inst->alg.cra_exit = cryptd_aead_exit_tfm;
  	inst->alg.cra_aead.setkey      = alg->cra_aead.setkey;
  	inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
  	inst->alg.cra_aead.geniv       = alg->cra_aead.geniv;
  	inst->alg.cra_aead.ivsize      = alg->cra_aead.ivsize;
  	inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
  	inst->alg.cra_aead.encrypt     = cryptd_aead_encrypt_enqueue;
  	inst->alg.cra_aead.decrypt     = cryptd_aead_decrypt_enqueue;
  	inst->alg.cra_aead.givencrypt  = alg->cra_aead.givencrypt;
  	inst->alg.cra_aead.givdecrypt  = alg->cra_aead.givdecrypt;
  
  	err = crypto_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_spawn(&ctx->aead_spawn.base);
  out_free_inst:
  		kfree(inst);
  	}
  out_put_alg:
  	crypto_mod_put(alg);
  	return err;
  }
  static struct cryptd_queue queue;

  static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
  {
  	struct crypto_attr_type *algt;
  
  	algt = crypto_get_attr_type(tb);
  	if (IS_ERR(algt))
  		return PTR_ERR(algt);
  
  	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
  	case CRYPTO_ALG_TYPE_BLKCIPHER:
  		return cryptd_create_blkcipher(tmpl, tb, &queue);
  	case CRYPTO_ALG_TYPE_DIGEST:
  		return cryptd_create_hash(tmpl, tb, &queue);
  	case CRYPTO_ALG_TYPE_AEAD:
  		return cryptd_create_aead(tmpl, tb, &queue);
  	}
  	return -EINVAL;
  }
  
  static void cryptd_free(struct crypto_instance *inst)
  {
  	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
  	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
  	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);
  
  	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
  	case CRYPTO_ALG_TYPE_AHASH:
  		crypto_drop_shash(&hctx->spawn);
  		kfree(ahash_instance(inst));
  		return;
  	case CRYPTO_ALG_TYPE_AEAD:
  		crypto_drop_spawn(&aead_ctx->aead_spawn.base);
  		kfree(inst);
  		return;
  	default:
  		crypto_drop_spawn(&ctx->spawn);
  		kfree(inst);
  	}
  }
  
  static struct crypto_template cryptd_tmpl = {
  	.name = "cryptd",
  	.create = cryptd_create,
  	.free = cryptd_free,
  	.module = THIS_MODULE,
  };
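
  /*
   * Illustrative note (not part of the original source): once the "cryptd"
   * template above is registered, any synchronous algorithm can be wrapped
   * into an asynchronous one by name.  A minimal sketch, error handling
   * mostly omitted:
   *
   *	struct crypto_ablkcipher *tfm;
   *
   *	tfm = crypto_alloc_ablkcipher("cryptd(cbc(aes))", 0, 0);
   *	if (IS_ERR(tfm))
   *		return PTR_ERR(tfm);
   *	...
   *	crypto_free_ablkcipher(tfm);
   */
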
  struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
  						  u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_tfm *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
  	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
  	mask &= ~CRYPTO_ALG_TYPE_MASK;
  	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
  	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_tfm(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
  
  struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);
  
  void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
  {
  	crypto_free_ablkcipher(&tfm->base);
  }
  EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
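
  /*
   * Illustrative sketch (not part of the original source): a driver that
   * wraps its synchronous cipher with cryptd typically does something like
   * the following; the algorithm name is an assumed placeholder.
   *
   *	struct cryptd_ablkcipher *cryptd_tfm;
   *
   *	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
   *	if (IS_ERR(cryptd_tfm))
   *		return PTR_ERR(cryptd_tfm);
   *	...
   *	cryptd_free_ablkcipher(cryptd_tfm);
   */
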
  struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
  					u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_ahash *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_ahash(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  
  	return __cryptd_ahash_cast(tfm);
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
  
  struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
  {
  	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
  
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_ahash_child);
  struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	return &rctx->desc;
  }
  EXPORT_SYMBOL_GPL(cryptd_shash_desc);
  void cryptd_free_ahash(struct cryptd_ahash *tfm)
  {
  	crypto_free_ahash(&tfm->base);
  }
  EXPORT_SYMBOL_GPL(cryptd_free_ahash);
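
  /*
   * Illustrative sketch (not part of the original source): an accelerated
   * hash driver hands its synchronous shash to cryptd in the same way; the
   * algorithm name below is an assumption for the example.
   *
   *	struct cryptd_ahash *cryptd_tfm;
   *
   *	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni", 0, 0);
   *	if (IS_ERR(cryptd_tfm))
   *		return PTR_ERR(cryptd_tfm);
   *	...
   *	cryptd_free_ahash(cryptd_tfm);
   */
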
  struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
  						  u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_aead *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_aead(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  	return __cryptd_aead_cast(tfm);
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
  
  struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
  {
  	struct cryptd_aead_ctx *ctx;
  	ctx = crypto_aead_ctx(&tfm->base);
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_aead_child);
  
  void cryptd_free_aead(struct cryptd_aead *tfm)
  {
  	crypto_free_aead(&tfm->base);
  }
  EXPORT_SYMBOL_GPL(cryptd_free_aead);
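
  /*
   * Illustrative sketch (not part of the original source): AEAD users
   * follow the same pattern; the algorithm name is an assumed placeholder.
   *
   *	struct cryptd_aead *cryptd_tfm;
   *
   *	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni", 0, 0);
   *	if (IS_ERR(cryptd_tfm))
   *		return PTR_ERR(cryptd_tfm);
   *	...
   *	cryptd_free_aead(cryptd_tfm);
   */
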
  static int __init cryptd_init(void)
  {
  	int err;
  	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
  	if (err)
  		return err;
  
  	err = crypto_register_template(&cryptd_tmpl);
  	if (err)
  		cryptd_fini_queue(&queue);
  
  	return err;
  }
  
  static void __exit cryptd_exit(void)
  {
  	cryptd_fini_queue(&queue);
  	crypto_unregister_template(&cryptd_tmpl);
  }
  subsys_initcall(cryptd_init);
  module_exit(cryptd_exit);
  
  MODULE_LICENSE("GPL");
  MODULE_DESCRIPTION("Software async crypto daemon");