crypto/cryptd.c

  /*
   * Software async crypto daemon.
   *
   * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
   *
   * Added AEAD support to cryptd.
   *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
   *             Adrian Hoban <adrian.hoban@intel.com>
   *             Gabriele Paoloni <gabriele.paoloni@intel.com>
   *             Aidan O'Mahony (aidan.o.mahony@intel.com)
   *    Copyright (c) 2010, Intel Corporation.
   *
   * This program is free software; you can redistribute it and/or modify it
   * under the terms of the GNU General Public License as published by the Free
   * Software Foundation; either version 2 of the License, or (at your option)
   * any later version.
   *
   */
  
  #include <crypto/algapi.h>
  #include <crypto/internal/hash.h>
  #include <crypto/internal/aead.h>
  #include <crypto/cryptd.h>
  #include <crypto/crypto_wq.h>
  #include <linux/err.h>
  #include <linux/init.h>
  #include <linux/kernel.h>
  #include <linux/list.h>
  #include <linux/module.h>
  #include <linux/scatterlist.h>
  #include <linux/sched.h>
  #include <linux/slab.h>

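  /*
   * Upper bound on the number of requests sitting in one CPU's queue;
   * passed to crypto_init_queue() in cryptd_init_queue() below.
   */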
  #define CRYPTD_MAX_CPU_QLEN 100

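  /*
   * Requests are queued per CPU: each cryptd_cpu_queue pairs a crypto_queue
   * with the work_struct that drains it on the kcrypto_wq workqueue.
   */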
  struct cryptd_cpu_queue {
  	struct crypto_queue queue;
  	struct work_struct work;
  };
  
  struct cryptd_queue {
  	struct cryptd_cpu_queue __percpu *cpu_queue;
  };
  
  struct cryptd_instance_ctx {
  	struct crypto_spawn spawn;
  	struct cryptd_queue *queue;
  };
  struct hashd_instance_ctx {
  	struct crypto_shash_spawn spawn;
  	struct cryptd_queue *queue;
  };
  struct aead_instance_ctx {
  	struct crypto_aead_spawn aead_spawn;
  	struct cryptd_queue *queue;
  };
  struct cryptd_blkcipher_ctx {
  	struct crypto_blkcipher *child;
  };
  
  struct cryptd_blkcipher_request_ctx {
  	crypto_completion_t complete;
  };
  struct cryptd_hash_ctx {
  	struct crypto_shash *child;
  };
  
  struct cryptd_hash_request_ctx {
  	crypto_completion_t complete;
  	struct shash_desc desc;
  };

  struct cryptd_aead_ctx {
  	struct crypto_aead *child;
  };
  
  struct cryptd_aead_request_ctx {
  	crypto_completion_t complete;
  };
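
  /*
   * Request flow: cryptd_enqueue_request() puts a request on the submitting
   * CPU's queue and schedules that CPU's work item on kcrypto_wq.
   * cryptd_queue_worker() then dequeues one request at a time and invokes
   * the completion handler installed at enqueue time, which performs the
   * synchronous operation and finally calls the caller's original completion
   * saved in the matching *_request_ctx.
   */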
  static void cryptd_queue_worker(struct work_struct *work);
  
  static int cryptd_init_queue(struct cryptd_queue *queue,
  			     unsigned int max_cpu_qlen)
  {
  	int cpu;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
  	if (!queue->cpu_queue)
  		return -ENOMEM;
  	for_each_possible_cpu(cpu) {
  		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
  		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
  		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
  	}
  	return 0;
  }
  
  static void cryptd_fini_queue(struct cryptd_queue *queue)
  {
  	int cpu;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	for_each_possible_cpu(cpu) {
  		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
  		BUG_ON(cpu_queue->queue.qlen);
  	}
  	free_percpu(queue->cpu_queue);
  }
  
  static int cryptd_enqueue_request(struct cryptd_queue *queue,
  				  struct crypto_async_request *request)
  {
  	int cpu, err;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	cpu = get_cpu();
  	cpu_queue = this_cpu_ptr(queue->cpu_queue);
  	err = crypto_enqueue_request(&cpu_queue->queue, request);
  	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
  	put_cpu();
  
  	return err;
  }
  
  /* Called in workqueue context: do one real crypto operation (via
   * req->complete) and reschedule itself if there is more work to do. */
  static void cryptd_queue_worker(struct work_struct *work)
  {
  	struct cryptd_cpu_queue *cpu_queue;
  	struct crypto_async_request *req, *backlog;
  
  	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
  	/*
  	 * Only handle one request at a time to avoid hogging the crypto
  	 * workqueue.  preempt_disable/enable prevents this worker from being
  	 * preempted by a task calling cryptd_enqueue_request(), and
  	 * local_bh_disable/enable keeps cryptd_enqueue_request() running in
  	 * softirq context from touching the queue at the same time.
  	 */
  	local_bh_disable();
  	preempt_disable();
  	backlog = crypto_get_backlog(&cpu_queue->queue);
  	req = crypto_dequeue_request(&cpu_queue->queue);
  	preempt_enable();
  	local_bh_enable();
  
  	if (!req)
  		return;
  
  	if (backlog)
  		backlog->complete(backlog, -EINPROGRESS);
  	req->complete(req, 0);
  
  	if (cpu_queue->queue.qlen)
  		queue_work(kcrypto_wq, &cpu_queue->work);
  }
  
  static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	return ictx->queue;
  }
  
  static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
  				   const u8 *key, unsigned int keylen)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
  	struct crypto_blkcipher *child = ctx->child;
  	int err;
  
  	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
  					  CRYPTO_TFM_REQ_MASK);
  	err = crypto_blkcipher_setkey(child, key, keylen);
  	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
  					    CRYPTO_TFM_RES_MASK);
  	return err;
  }
  
  static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
  				   struct crypto_blkcipher *child,
  				   int err,
  				   int (*crypt)(struct blkcipher_desc *desc,
  						struct scatterlist *dst,
  						struct scatterlist *src,
  						unsigned int len))
  {
  	struct cryptd_blkcipher_request_ctx *rctx;
  	struct blkcipher_desc desc;
  
  	rctx = ablkcipher_request_ctx(req);
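
  	/* An err of -EINPROGRESS is the backlog notification issued by
  	 * cryptd_queue_worker(); pass it straight through to the caller. */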
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  
  	desc.tfm = child;
  	desc.info = req->info;
  	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
  
  	err = crypt(&desc, req->dst, req->src, req->nbytes);
  
  	req->base.complete = rctx->complete;
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
  	struct crypto_blkcipher *child = ctx->child;
  
  	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
  			       crypto_blkcipher_crt(child)->encrypt);
  }
  
  static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
  	struct crypto_blkcipher *child = ctx->child;
  
  	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
  			       crypto_blkcipher_crt(child)->decrypt);
  }
  
  static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
  				    crypto_completion_t compl)
  {
  	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
  	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
  	struct cryptd_queue *queue;

  	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
  	rctx->complete = req->base.complete;
  	req->base.complete = compl;

  	return cryptd_enqueue_request(queue, &req->base);
  }
  
  static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
  {
  	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
  }
  
  static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
  {
  	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
  }
  
  static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_spawn *spawn = &ictx->spawn;
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_blkcipher *cipher;
  
  	cipher = crypto_spawn_blkcipher(spawn);
  	if (IS_ERR(cipher))
  		return PTR_ERR(cipher);
  
  	ctx->child = cipher;
  	tfm->crt_ablkcipher.reqsize =
  		sizeof(struct cryptd_blkcipher_request_ctx);
  	return 0;
  }
  
  static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
  
  	crypto_free_blkcipher(ctx->child);
  }
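
  /*
   * Allocate a crypto_instance with @head bytes reserved in front of it
   * and @tail bytes of instance context behind it, and fill in the
   * "cryptd(...)" driver name plus the basic cra_* fields copied from @alg.
   */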
  static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
  				   unsigned int tail)
  {
  	char *p;
  	struct crypto_instance *inst;
  	int err;
  	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
  	if (!p)
  		return ERR_PTR(-ENOMEM);
  
  	inst = (void *)(p + head);
  
  	err = -ENAMETOOLONG;
  	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
  		goto out_free_inst;
  	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
  
  	inst->alg.cra_priority = alg->cra_priority + 50;
  	inst->alg.cra_blocksize = alg->cra_blocksize;
  	inst->alg.cra_alignmask = alg->cra_alignmask;
  
  out:
  	return p;
  
  out_free_inst:
  	kfree(p);
  	p = ERR_PTR(err);
  	goto out;
  }
  static int cryptd_create_blkcipher(struct crypto_template *tmpl,
  				   struct rtattr **tb,
  				   struct cryptd_queue *queue)
  {
  	struct cryptd_instance_ctx *ctx;
  	struct crypto_instance *inst;
  	struct crypto_alg *alg;
  	int err;
  
  	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
  				  CRYPTO_ALG_TYPE_MASK);
  	if (IS_ERR(alg))
  		return PTR_ERR(alg);

  	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
  	err = PTR_ERR(inst);
  	if (IS_ERR(inst))
  		goto out_put_alg;
  	ctx = crypto_instance_ctx(inst);
  	ctx->queue = queue;
  
  	err = crypto_init_spawn(&ctx->spawn, alg, inst,
  				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
  	if (err)
  		goto out_free_inst;
  	inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
  	inst->alg.cra_type = &crypto_ablkcipher_type;
  
  	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
  	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
  	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
  	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;
  	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);
  
  	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
  	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;
  
  	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
  	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
  	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;
  	err = crypto_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_spawn(&ctx->spawn);
  out_free_inst:
  		kfree(inst);
  	}
  out_put_alg:
  	crypto_mod_put(alg);
  	return err;
  }
  static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_shash_spawn *spawn = &ictx->spawn;
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_shash *hash;

  	hash = crypto_spawn_shash(spawn);
  	if (IS_ERR(hash))
  		return PTR_ERR(hash);

  	ctx->child = hash;
  	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
  				 sizeof(struct cryptd_hash_request_ctx) +
  				 crypto_shash_descsize(hash));
  	return 0;
  }
  
  static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

  	crypto_free_shash(ctx->child);
  }
  
  static int cryptd_hash_setkey(struct crypto_ahash *parent,
  				   const u8 *key, unsigned int keylen)
  {
  	struct cryptd_hash_ctx *ctx   = crypto_ahash_ctx(parent);
  	struct crypto_shash *child = ctx->child;
  	int err;
  	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
  				      CRYPTO_TFM_REQ_MASK);
  	err = crypto_shash_setkey(child, key, keylen);
  	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
  				       CRYPTO_TFM_RES_MASK);
  	return err;
  }
  
  static int cryptd_hash_enqueue(struct ahash_request *req,
  				crypto_completion_t compl)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
  	struct cryptd_queue *queue =
  		cryptd_get_queue(crypto_ahash_tfm(tfm));
  
  	rctx->complete = req->base.complete;
  	req->base.complete = compl;

  	return cryptd_enqueue_request(queue, &req->base);
  }
  
  static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
  {
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
  	struct crypto_shash *child = ctx->child;
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct shash_desc *desc = &rctx->desc;
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	desc->tfm = child;
  	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

  	err = crypto_shash_init(desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_init_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_init);
  }
  
  static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
  {
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx;
  
  	rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	err = shash_ahash_update(req, &rctx->desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_update_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_update);
  }
  
  static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
  {
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	err = crypto_shash_final(&rctx->desc, req->result);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_final_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_final);
  }
  static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
  {
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  
  	err = shash_ahash_finup(req, &rctx->desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_finup_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_finup);
  }
  static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
  {
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
  	struct crypto_shash *child = ctx->child;
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct shash_desc *desc = &rctx->desc;
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	desc->tfm = child;
  	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

  	err = shash_ahash_digest(req, desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_digest_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_digest);
  }
  static int cryptd_hash_export(struct ahash_request *req, void *out)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	return crypto_shash_export(&rctx->desc, out);
  }
  
  static int cryptd_hash_import(struct ahash_request *req, const void *in)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	return crypto_shash_import(&rctx->desc, in);
  }
  static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
  			      struct cryptd_queue *queue)
  {
  	struct hashd_instance_ctx *ctx;
  	struct ahash_instance *inst;
  	struct shash_alg *salg;
  	struct crypto_alg *alg;
  	int err;

  	salg = shash_attr_alg(tb[1], 0, 0);
  	if (IS_ERR(salg))
  		return PTR_ERR(salg);

  	alg = &salg->base;
  	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
  				     sizeof(*ctx));
  	err = PTR_ERR(inst);
  	if (IS_ERR(inst))
  		goto out_put_alg;
  	ctx = ahash_instance_ctx(inst);
  	ctx->queue = queue;
  	err = crypto_init_shash_spawn(&ctx->spawn, salg,
  				      ahash_crypto_instance(inst));
  	if (err)
  		goto out_free_inst;
  	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC;

  	inst->alg.halg.digestsize = salg->digestsize;
  	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

  	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
  	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

  	inst->alg.init   = cryptd_hash_init_enqueue;
  	inst->alg.update = cryptd_hash_update_enqueue;
  	inst->alg.final  = cryptd_hash_final_enqueue;
  	inst->alg.finup  = cryptd_hash_finup_enqueue;
  	inst->alg.export = cryptd_hash_export;
  	inst->alg.import = cryptd_hash_import;
  	inst->alg.setkey = cryptd_hash_setkey;
  	inst->alg.digest = cryptd_hash_digest_enqueue;

  	err = ahash_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_shash(&ctx->spawn);
  out_free_inst:
  		kfree(inst);
  	}
  out_put_alg:
  	crypto_mod_put(alg);
  	return err;
  }
  static void cryptd_aead_crypt(struct aead_request *req,
  			struct crypto_aead *child,
  			int err,
  			int (*crypt)(struct aead_request *req))
  {
  	struct cryptd_aead_request_ctx *rctx;
  	rctx = aead_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	aead_request_set_tfm(req, child);
  	err = crypt(req);
  	req->base.complete = rctx->complete;
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
  	struct crypto_aead *child = ctx->child;
  	struct aead_request *req;
  
  	req = container_of(areq, struct aead_request, base);
  	cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->encrypt);
  }
  
  static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
  	struct crypto_aead *child = ctx->child;
  	struct aead_request *req;
  
  	req = container_of(areq, struct aead_request, base);
  	cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->decrypt);
  }
  
  static int cryptd_aead_enqueue(struct aead_request *req,
  				    crypto_completion_t compl)
  {
  	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
  	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
  
  	rctx->complete = req->base.complete;
  	req->base.complete = compl;
  	return cryptd_enqueue_request(queue, &req->base);
  }
  
  static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
  {
  	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
  }
  
  static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
  {
  	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
  }
  
  static int cryptd_aead_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct aead_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_aead *cipher;
  
  	cipher = crypto_spawn_aead(spawn);
  	if (IS_ERR(cipher))
  		return PTR_ERR(cipher);
  
  	crypto_aead_set_flags(cipher, CRYPTO_TFM_REQ_MAY_SLEEP);
  	ctx->child = cipher;
  	tfm->crt_aead.reqsize = sizeof(struct cryptd_aead_request_ctx);
  	return 0;
  }
  
  static void cryptd_aead_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
  	crypto_free_aead(ctx->child);
  }
  
  static int cryptd_create_aead(struct crypto_template *tmpl,
  		              struct rtattr **tb,
  			      struct cryptd_queue *queue)
  {
  	struct aead_instance_ctx *ctx;
  	struct crypto_instance *inst;
  	struct crypto_alg *alg;
  	int err;
  
  	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_AEAD,
  				CRYPTO_ALG_TYPE_MASK);
  	if (IS_ERR(alg))
  		return PTR_ERR(alg);
  
  	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
  	err = PTR_ERR(inst);
  	if (IS_ERR(inst))
  		goto out_put_alg;
  
  	ctx = crypto_instance_ctx(inst);
  	ctx->queue = queue;
  
  	err = crypto_init_spawn(&ctx->aead_spawn.base, alg, inst,
  			CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
  	if (err)
  		goto out_free_inst;
  
  	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
  	inst->alg.cra_type = alg->cra_type;
  	inst->alg.cra_ctxsize = sizeof(struct cryptd_aead_ctx);
  	inst->alg.cra_init = cryptd_aead_init_tfm;
  	inst->alg.cra_exit = cryptd_aead_exit_tfm;
  	inst->alg.cra_aead.setkey      = alg->cra_aead.setkey;
  	inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
  	inst->alg.cra_aead.geniv       = alg->cra_aead.geniv;
  	inst->alg.cra_aead.ivsize      = alg->cra_aead.ivsize;
  	inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
  	inst->alg.cra_aead.encrypt     = cryptd_aead_encrypt_enqueue;
  	inst->alg.cra_aead.decrypt     = cryptd_aead_decrypt_enqueue;
  	inst->alg.cra_aead.givencrypt  = alg->cra_aead.givencrypt;
  	inst->alg.cra_aead.givdecrypt  = alg->cra_aead.givdecrypt;
  
  	err = crypto_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_spawn(&ctx->aead_spawn.base);
  out_free_inst:
  		kfree(inst);
  	}
  out_put_alg:
  	crypto_mod_put(alg);
  	return err;
  }
  static struct cryptd_queue queue;

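  /*
   * Template entry point: builds the "cryptd(<alg>)" instance, picking the
   * blkcipher, hash or AEAD constructor from the wrapped algorithm's type.
   */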
  static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
  {
  	struct crypto_attr_type *algt;
  
  	algt = crypto_get_attr_type(tb);
  	if (IS_ERR(algt))
  		return PTR_ERR(algt);
  
  	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
  	case CRYPTO_ALG_TYPE_BLKCIPHER:
  		return cryptd_create_blkcipher(tmpl, tb, &queue);
  	case CRYPTO_ALG_TYPE_DIGEST:
  		return cryptd_create_hash(tmpl, tb, &queue);
  	case CRYPTO_ALG_TYPE_AEAD:
  		return cryptd_create_aead(tmpl, tb, &queue);
  	}
  	return -EINVAL;
  }
  
  static void cryptd_free(struct crypto_instance *inst)
  {
  	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
  	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
  	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);
  
  	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
  	case CRYPTO_ALG_TYPE_AHASH:
  		crypto_drop_shash(&hctx->spawn);
  		kfree(ahash_instance(inst));
  		return;
  	case CRYPTO_ALG_TYPE_AEAD:
  		crypto_drop_spawn(&aead_ctx->aead_spawn.base);
  		kfree(inst);
  		return;
  	default:
  		crypto_drop_spawn(&ctx->spawn);
  		kfree(inst);
  	}
  }
  
  static struct crypto_template cryptd_tmpl = {
  	.name = "cryptd",
  	.create = cryptd_create,
  	.free = cryptd_free,
  	.module = THIS_MODULE,
  };
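
  /*
   * The helpers below are exported so that drivers (for example the x86
   * AES-NI and CLMUL GHASH glue code) can wrap a synchronous transform in
   * cryptd explicitly and later reach the underlying child transform again.
   */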
  struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
  						  u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_tfm *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
  	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
  	mask &= ~CRYPTO_ALG_TYPE_MASK;
  	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
  	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_tfm(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
  
  struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);
  
  void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
  {
  	crypto_free_ablkcipher(&tfm->base);
  }
  EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
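
  /*
   * Typical use of the helpers above (an illustrative sketch; the "cbc(aes)"
   * algorithm name and the surrounding error handling are assumptions, not
   * taken from this file):
   *
   *	struct cryptd_ablkcipher *ctfm;
   *
   *	ctfm = cryptd_alloc_ablkcipher("cbc(aes)", 0, 0);
   *	if (IS_ERR(ctfm))
   *		return PTR_ERR(ctfm);
   *
   * Requests issued against &ctfm->base are then run from kcrypto_wq in
   * process context, cryptd_ablkcipher_child(ctfm) returns the synchronous
   * blkcipher doing the actual work, and cryptd_free_ablkcipher(ctfm)
   * releases the wrapper.
   */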
  struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
  					u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_ahash *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_ahash(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  
  	return __cryptd_ahash_cast(tfm);
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
  
  struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
  {
  	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
  
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_ahash_child);
  struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	return &rctx->desc;
  }
  EXPORT_SYMBOL_GPL(cryptd_shash_desc);
  void cryptd_free_ahash(struct cryptd_ahash *tfm)
  {
  	crypto_free_ahash(&tfm->base);
  }
  EXPORT_SYMBOL_GPL(cryptd_free_ahash);
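
  /*
   * The ahash helpers follow the same pattern, e.g. (the "ghash" name here is
   * only an example) cryptd_alloc_ahash("ghash", 0, 0) yields an async
   * "cryptd(ghash)" wrapper, cryptd_ahash_child() exposes the shash that does
   * the work, and cryptd_shash_desc() hands back the per-request shash_desc
   * for callers able to complete a request synchronously themselves.
   */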
  struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
  						  u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_aead *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_aead(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  	return __cryptd_aead_cast(tfm);
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
  
  struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
  {
  	struct cryptd_aead_ctx *ctx;
  	ctx = crypto_aead_ctx(&tfm->base);
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_aead_child);
  
  void cryptd_free_aead(struct cryptd_aead *tfm)
  {
  	crypto_free_aead(&tfm->base);
  }
  EXPORT_SYMBOL_GPL(cryptd_free_aead);
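
  /*
   * The AEAD helpers mirror the two groups above.  Note that
   * cryptd_aead_init_tfm() sets CRYPTO_TFM_REQ_MAY_SLEEP on the child,
   * which is safe because cryptd only invokes it from process context.
   */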
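
  /*
   * Registered with subsys_initcall() so the "cryptd" template is available
   * early, before potential users instantiate cryptd wrappers during their
   * own initialisation.
   */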
  static int __init cryptd_init(void)
  {
  	int err;
  	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
  	if (err)
  		return err;
  
  	err = crypto_register_template(&cryptd_tmpl);
  	if (err)
  		cryptd_fini_queue(&queue);
  
  	return err;
  }
  
  static void __exit cryptd_exit(void)
  {
  	cryptd_fini_queue(&queue);
  	crypto_unregister_template(&cryptd_tmpl);
  }
  subsys_initcall(cryptd_init);
  module_exit(cryptd_exit);
  
  MODULE_LICENSE("GPL");
  MODULE_DESCRIPTION("Software async crypto daemon");
  MODULE_ALIAS_CRYPTO("cryptd");