Blame view

crypto/cryptd.c 25.7 KB
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
1
2
3
4
5
  /*
   * Software async crypto daemon.
   *
   * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
   *
298c926c6   Adrian Hoban   crypto: cryptd - ...
6
7
8
9
10
11
12
   * Added AEAD support to cryptd.
   *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
   *             Adrian Hoban <adrian.hoban@intel.com>
   *             Gabriele Paoloni <gabriele.paoloni@intel.com>
   *             Aidan O'Mahony (aidan.o.mahony@intel.com)
   *    Copyright (c) 2010, Intel Corporation.
   *
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
13
14
15
16
17
18
19
20
   * This program is free software; you can redistribute it and/or modify it
   * under the terms of the GNU General Public License as published by the Free
   * Software Foundation; either version 2 of the License, or (at your option)
   * any later version.
   *
   */
  
  #include <crypto/algapi.h>
18e33e6d5   Herbert Xu   crypto: hash - Mo...
21
  #include <crypto/internal/hash.h>
298c926c6   Adrian Hoban   crypto: cryptd - ...
22
  #include <crypto/internal/aead.h>
1cac2cbc7   Huang Ying   crypto: cryptd - ...
23
  #include <crypto/cryptd.h>
254eff771   Huang Ying   crypto: cryptd - ...
24
  #include <crypto/crypto_wq.h>
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
25
26
27
  #include <linux/err.h>
  #include <linux/init.h>
  #include <linux/kernel.h>
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
28
29
  #include <linux/list.h>
  #include <linux/module.h>
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
30
31
32
  #include <linux/scatterlist.h>
  #include <linux/sched.h>
  #include <linux/slab.h>
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
33

254eff771   Huang Ying   crypto: cryptd - ...
34
  #define CRYPTD_MAX_CPU_QLEN 100
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
35

254eff771   Huang Ying   crypto: cryptd - ...
36
  struct cryptd_cpu_queue {
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
37
  	struct crypto_queue queue;
254eff771   Huang Ying   crypto: cryptd - ...
38
39
40
41
  	struct work_struct work;
  };
  
  struct cryptd_queue {
a29d8b8e2   Tejun Heo   percpu: add __per...
42
  	struct cryptd_cpu_queue __percpu *cpu_queue;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
43
44
45
46
  };
  
  struct cryptd_instance_ctx {
  	struct crypto_spawn spawn;
254eff771   Huang Ying   crypto: cryptd - ...
47
  	struct cryptd_queue *queue;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
48
  };
46309d893   Herbert Xu   crypto: cryptd - ...
49
50
51
52
  struct hashd_instance_ctx {
  	struct crypto_shash_spawn spawn;
  	struct cryptd_queue *queue;
  };
298c926c6   Adrian Hoban   crypto: cryptd - ...
53
54
55
56
  struct aead_instance_ctx {
  	struct crypto_aead_spawn aead_spawn;
  	struct cryptd_queue *queue;
  };
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
57
58
59
60
61
62
63
  struct cryptd_blkcipher_ctx {
  	struct crypto_blkcipher *child;
  };
  
  struct cryptd_blkcipher_request_ctx {
  	crypto_completion_t complete;
  };
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
64
  struct cryptd_hash_ctx {
46309d893   Herbert Xu   crypto: cryptd - ...
65
  	struct crypto_shash *child;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
66
67
68
69
  };
  
  struct cryptd_hash_request_ctx {
  	crypto_completion_t complete;
46309d893   Herbert Xu   crypto: cryptd - ...
70
  	struct shash_desc desc;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
71
  };
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
72

298c926c6   Adrian Hoban   crypto: cryptd - ...
73
74
75
76
77
78
79
  struct cryptd_aead_ctx {
  	struct crypto_aead *child;
  };
  
  struct cryptd_aead_request_ctx {
  	crypto_completion_t complete;
  };
254eff771   Huang Ying   crypto: cryptd - ...
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
  static void cryptd_queue_worker(struct work_struct *work);
  
  static int cryptd_init_queue(struct cryptd_queue *queue,
  			     unsigned int max_cpu_qlen)
  {
  	int cpu;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
  	if (!queue->cpu_queue)
  		return -ENOMEM;
  	for_each_possible_cpu(cpu) {
  		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
  		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
  		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
  	}
  	return 0;
  }
  
  static void cryptd_fini_queue(struct cryptd_queue *queue)
  {
  	int cpu;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	for_each_possible_cpu(cpu) {
  		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
  		BUG_ON(cpu_queue->queue.qlen);
  	}
  	free_percpu(queue->cpu_queue);
  }
  
  static int cryptd_enqueue_request(struct cryptd_queue *queue,
  				  struct crypto_async_request *request)
  {
  	int cpu, err;
  	struct cryptd_cpu_queue *cpu_queue;
  
  	cpu = get_cpu();
0b44f4861   Christoph Lameter   this_cpu: Use thi...
118
  	cpu_queue = this_cpu_ptr(queue->cpu_queue);
254eff771   Huang Ying   crypto: cryptd - ...
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
  	err = crypto_enqueue_request(&cpu_queue->queue, request);
  	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
  	put_cpu();
  
  	return err;
  }
  
  /* Called in workqueue context, do one real cryption work (via
   * req->complete) and reschedule itself if there are more work to
   * do. */
  static void cryptd_queue_worker(struct work_struct *work)
  {
  	struct cryptd_cpu_queue *cpu_queue;
  	struct crypto_async_request *req, *backlog;
  
  	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
9efade1b3   Jussi Kivilinna   crypto: cryptd - ...
135
136
137
138
139
140
141
  	/*
  	 * Only handle one request at a time to avoid hogging crypto workqueue.
  	 * preempt_disable/enable is used to prevent being preempted by
  	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
  	 * cryptd_enqueue_request() being accessed from software interrupts.
  	 */
  	local_bh_disable();
254eff771   Huang Ying   crypto: cryptd - ...
142
143
144
145
  	preempt_disable();
  	backlog = crypto_get_backlog(&cpu_queue->queue);
  	req = crypto_dequeue_request(&cpu_queue->queue);
  	preempt_enable();
9efade1b3   Jussi Kivilinna   crypto: cryptd - ...
146
  	local_bh_enable();
254eff771   Huang Ying   crypto: cryptd - ...
147
148
149
150
151
152
153
154
155
156
157
158
159
  
  	if (!req)
  		return;
  
  	if (backlog)
  		backlog->complete(backlog, -EINPROGRESS);
  	req->complete(req, 0);
  
  	if (cpu_queue->queue.qlen)
  		queue_work(kcrypto_wq, &cpu_queue->work);
  }
  
  static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
160
161
162
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
254eff771   Huang Ying   crypto: cryptd - ...
163
  	return ictx->queue;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
164
  }
466a7b9e3   Stephan Mueller   crypto: cryptd - ...
165
166
167
168
169
170
171
172
173
174
175
176
177
  static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
  					 u32 *mask)
  {
  	struct crypto_attr_type *algt;
  
  	algt = crypto_get_attr_type(tb);
  	if (IS_ERR(algt))
  		return;
  	if ((algt->type & CRYPTO_ALG_INTERNAL))
  		*type |= CRYPTO_ALG_INTERNAL;
  	if ((algt->mask & CRYPTO_ALG_INTERNAL))
  		*mask |= CRYPTO_ALG_INTERNAL;
  }
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
  static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
  				   const u8 *key, unsigned int keylen)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
  	struct crypto_blkcipher *child = ctx->child;
  	int err;
  
  	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
  					  CRYPTO_TFM_REQ_MASK);
  	err = crypto_blkcipher_setkey(child, key, keylen);
  	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
  					    CRYPTO_TFM_RES_MASK);
  	return err;
  }
  
  static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
  				   struct crypto_blkcipher *child,
  				   int err,
  				   int (*crypt)(struct blkcipher_desc *desc,
  						struct scatterlist *dst,
  						struct scatterlist *src,
  						unsigned int len))
  {
  	struct cryptd_blkcipher_request_ctx *rctx;
  	struct blkcipher_desc desc;
  
  	rctx = ablkcipher_request_ctx(req);
93aa7f8a1   Herbert Xu   [CRYPTO] cryptd: ...
206
207
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
208
209
210
211
212
213
214
215
  
  	desc.tfm = child;
  	desc.info = req->info;
  	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
  
  	err = crypt(&desc, req->dst, req->src, req->nbytes);
  
  	req->base.complete = rctx->complete;
93aa7f8a1   Herbert Xu   [CRYPTO] cryptd: ...
216
  out:
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
217
  	local_bh_disable();
93aa7f8a1   Herbert Xu   [CRYPTO] cryptd: ...
218
  	rctx->complete(&req->base, err);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
  	local_bh_enable();
  }
  
  static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
  	struct crypto_blkcipher *child = ctx->child;
  
  	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
  			       crypto_blkcipher_crt(child)->encrypt);
  }
  
  static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
  	struct crypto_blkcipher *child = ctx->child;
  
  	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
  			       crypto_blkcipher_crt(child)->decrypt);
  }
  
  static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
3e3dc25fe   Mark Rustad   crypto: Resolve s...
241
  				    crypto_completion_t compl)
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
242
243
244
  {
  	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
  	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
254eff771   Huang Ying   crypto: cryptd - ...
245
  	struct cryptd_queue *queue;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
246

254eff771   Huang Ying   crypto: cryptd - ...
247
  	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
248
  	rctx->complete = req->base.complete;
3e3dc25fe   Mark Rustad   crypto: Resolve s...
249
  	req->base.complete = compl;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
250

254eff771   Huang Ying   crypto: cryptd - ...
251
  	return cryptd_enqueue_request(queue, &req->base);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
  }
  
  static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
  {
  	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
  }
  
  static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
  {
  	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
  }
  
  static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_spawn *spawn = &ictx->spawn;
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_blkcipher *cipher;
  
  	cipher = crypto_spawn_blkcipher(spawn);
  	if (IS_ERR(cipher))
  		return PTR_ERR(cipher);
  
  	ctx->child = cipher;
  	tfm->crt_ablkcipher.reqsize =
  		sizeof(struct cryptd_blkcipher_request_ctx);
  	return 0;
  }
  
  static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
285
286
287
  
  	crypto_free_blkcipher(ctx->child);
  }
0b535adfb   Herbert Xu   crypto: cryptd - ...
288
289
  static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
  				   unsigned int tail)
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
290
  {
0b535adfb   Herbert Xu   crypto: cryptd - ...
291
  	char *p;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
292
  	struct crypto_instance *inst;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
293
  	int err;
0b535adfb   Herbert Xu   crypto: cryptd - ...
294
295
296
297
298
  	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
  	if (!p)
  		return ERR_PTR(-ENOMEM);
  
  	inst = (void *)(p + head);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
299
300
301
302
303
  
  	err = -ENAMETOOLONG;
  	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
  		goto out_free_inst;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
304
305
306
307
308
309
310
  	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
  
  	inst->alg.cra_priority = alg->cra_priority + 50;
  	inst->alg.cra_blocksize = alg->cra_blocksize;
  	inst->alg.cra_alignmask = alg->cra_alignmask;
  
  out:
0b535adfb   Herbert Xu   crypto: cryptd - ...
311
  	return p;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
312
313
  
  out_free_inst:
0b535adfb   Herbert Xu   crypto: cryptd - ...
314
315
  	kfree(p);
  	p = ERR_PTR(err);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
316
317
  	goto out;
  }
9cd899a32   Herbert Xu   crypto: cryptd - ...
318
319
320
  static int cryptd_create_blkcipher(struct crypto_template *tmpl,
  				   struct rtattr **tb,
  				   struct cryptd_queue *queue)
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
321
  {
46309d893   Herbert Xu   crypto: cryptd - ...
322
  	struct cryptd_instance_ctx *ctx;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
323
324
  	struct crypto_instance *inst;
  	struct crypto_alg *alg;
466a7b9e3   Stephan Mueller   crypto: cryptd - ...
325
326
  	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
  	u32 mask = CRYPTO_ALG_TYPE_MASK;
46309d893   Herbert Xu   crypto: cryptd - ...
327
  	int err;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
328

466a7b9e3   Stephan Mueller   crypto: cryptd - ...
329
330
331
  	cryptd_check_internal(tb, &type, &mask);
  
  	alg = crypto_get_attr_alg(tb, type, mask);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
332
  	if (IS_ERR(alg))
9cd899a32   Herbert Xu   crypto: cryptd - ...
333
  		return PTR_ERR(alg);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
334

0b535adfb   Herbert Xu   crypto: cryptd - ...
335
  	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
05ed8758f   Steffen Klassert   crypto: cryptd - ...
336
  	err = PTR_ERR(inst);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
337
338
  	if (IS_ERR(inst))
  		goto out_put_alg;
46309d893   Herbert Xu   crypto: cryptd - ...
339
340
341
342
343
344
345
  	ctx = crypto_instance_ctx(inst);
  	ctx->queue = queue;
  
  	err = crypto_init_spawn(&ctx->spawn, alg, inst,
  				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
  	if (err)
  		goto out_free_inst;
466a7b9e3   Stephan Mueller   crypto: cryptd - ...
346
347
348
349
  	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
  	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
  		type |= CRYPTO_ALG_INTERNAL;
  	inst->alg.cra_flags = type;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
350
351
352
353
354
  	inst->alg.cra_type = &crypto_ablkcipher_type;
  
  	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
  	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
  	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
927eead52   Herbert Xu   [CRYPTO] cryptd: ...
355
  	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
356
357
358
359
360
361
362
363
  	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);
  
  	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
  	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;
  
  	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
  	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
  	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;
9cd899a32   Herbert Xu   crypto: cryptd - ...
364
365
366
367
368
369
  	err = crypto_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_spawn(&ctx->spawn);
  out_free_inst:
  		kfree(inst);
  	}
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
370
371
  out_put_alg:
  	crypto_mod_put(alg);
9cd899a32   Herbert Xu   crypto: cryptd - ...
372
  	return err;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
373
  }
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
374
375
376
  static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
46309d893   Herbert Xu   crypto: cryptd - ...
377
378
  	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_shash_spawn *spawn = &ictx->spawn;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
379
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
46309d893   Herbert Xu   crypto: cryptd - ...
380
  	struct crypto_shash *hash;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
381

46309d893   Herbert Xu   crypto: cryptd - ...
382
383
384
  	hash = crypto_spawn_shash(spawn);
  	if (IS_ERR(hash))
  		return PTR_ERR(hash);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
385

46309d893   Herbert Xu   crypto: cryptd - ...
386
  	ctx->child = hash;
0d6669e2b   Herbert Xu   crypto: cryptd - ...
387
388
389
  	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
  				 sizeof(struct cryptd_hash_request_ctx) +
  				 crypto_shash_descsize(hash));
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
390
391
392
393
394
395
  	return 0;
  }
  
  static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
396

46309d893   Herbert Xu   crypto: cryptd - ...
397
  	crypto_free_shash(ctx->child);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
398
399
400
401
402
403
  }
  
  static int cryptd_hash_setkey(struct crypto_ahash *parent,
  				   const u8 *key, unsigned int keylen)
  {
  	struct cryptd_hash_ctx *ctx   = crypto_ahash_ctx(parent);
46309d893   Herbert Xu   crypto: cryptd - ...
404
  	struct crypto_shash *child = ctx->child;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
405
  	int err;
46309d893   Herbert Xu   crypto: cryptd - ...
406
407
408
409
410
411
  	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
  				      CRYPTO_TFM_REQ_MASK);
  	err = crypto_shash_setkey(child, key, keylen);
  	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
  				       CRYPTO_TFM_RES_MASK);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
412
413
414
415
  	return err;
  }
  
  static int cryptd_hash_enqueue(struct ahash_request *req,
3e3dc25fe   Mark Rustad   crypto: Resolve s...
416
  				crypto_completion_t compl)
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
417
418
419
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
254eff771   Huang Ying   crypto: cryptd - ...
420
421
  	struct cryptd_queue *queue =
  		cryptd_get_queue(crypto_ahash_tfm(tfm));
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
422
423
  
  	rctx->complete = req->base.complete;
3e3dc25fe   Mark Rustad   crypto: Resolve s...
424
  	req->base.complete = compl;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
425

254eff771   Huang Ying   crypto: cryptd - ...
426
  	return cryptd_enqueue_request(queue, &req->base);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
427
428
429
430
  }
  
  static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
  {
46309d893   Herbert Xu   crypto: cryptd - ...
431
432
433
434
435
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
  	struct crypto_shash *child = ctx->child;
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct shash_desc *desc = &rctx->desc;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
436
437
438
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
46309d893   Herbert Xu   crypto: cryptd - ...
439
440
  	desc->tfm = child;
  	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
441

46309d893   Herbert Xu   crypto: cryptd - ...
442
  	err = crypto_shash_init(desc);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_init_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_init);
  }
  
  static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
  {
46309d893   Herbert Xu   crypto: cryptd - ...
459
  	struct ahash_request *req = ahash_request_cast(req_async);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
460
  	struct cryptd_hash_request_ctx *rctx;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
461
462
463
464
465
  
  	rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
46309d893   Herbert Xu   crypto: cryptd - ...
466
  	err = shash_ahash_update(req, &rctx->desc);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_update_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_update);
  }
  
  static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
  {
46309d893   Herbert Xu   crypto: cryptd - ...
483
484
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
485
486
487
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
46309d893   Herbert Xu   crypto: cryptd - ...
488
  	err = crypto_shash_final(&rctx->desc, req->result);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
489
490
491
492
493
494
495
496
497
498
499
500
501
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_final_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_final);
  }
6fba00d17   Herbert Xu   crypto: cryptd - ...
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
  static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
  {
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  
  	err = shash_ahash_finup(req, &rctx->desc);
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_finup_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_finup);
  }
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
524
525
  static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
  {
46309d893   Herbert Xu   crypto: cryptd - ...
526
527
528
529
530
  	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
  	struct crypto_shash *child = ctx->child;
  	struct ahash_request *req = ahash_request_cast(req_async);
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  	struct shash_desc *desc = &rctx->desc;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
531
532
533
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
46309d893   Herbert Xu   crypto: cryptd - ...
534
535
  	desc->tfm = child;
  	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
536

46309d893   Herbert Xu   crypto: cryptd - ...
537
  	err = shash_ahash_digest(req, desc);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
538
539
540
541
542
543
544
545
546
547
548
549
550
  
  	req->base.complete = rctx->complete;
  
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static int cryptd_hash_digest_enqueue(struct ahash_request *req)
  {
  	return cryptd_hash_enqueue(req, cryptd_hash_digest);
  }
6fba00d17   Herbert Xu   crypto: cryptd - ...
551
552
553
554
555
556
557
558
559
560
561
562
563
  static int cryptd_hash_export(struct ahash_request *req, void *out)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	return crypto_shash_export(&rctx->desc, out);
  }
  
  static int cryptd_hash_import(struct ahash_request *req, const void *in)
  {
  	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
  
  	return crypto_shash_import(&rctx->desc, in);
  }
9cd899a32   Herbert Xu   crypto: cryptd - ...
564
565
  static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
  			      struct cryptd_queue *queue)
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
566
  {
46309d893   Herbert Xu   crypto: cryptd - ...
567
  	struct hashd_instance_ctx *ctx;
0b535adfb   Herbert Xu   crypto: cryptd - ...
568
  	struct ahash_instance *inst;
46309d893   Herbert Xu   crypto: cryptd - ...
569
  	struct shash_alg *salg;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
570
  	struct crypto_alg *alg;
466a7b9e3   Stephan Mueller   crypto: cryptd - ...
571
572
  	u32 type = 0;
  	u32 mask = 0;
46309d893   Herbert Xu   crypto: cryptd - ...
573
  	int err;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
574

466a7b9e3   Stephan Mueller   crypto: cryptd - ...
575
576
577
  	cryptd_check_internal(tb, &type, &mask);
  
  	salg = shash_attr_alg(tb[1], type, mask);
46309d893   Herbert Xu   crypto: cryptd - ...
578
  	if (IS_ERR(salg))
9cd899a32   Herbert Xu   crypto: cryptd - ...
579
  		return PTR_ERR(salg);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
580

46309d893   Herbert Xu   crypto: cryptd - ...
581
  	alg = &salg->base;
0b535adfb   Herbert Xu   crypto: cryptd - ...
582
583
  	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
  				     sizeof(*ctx));
05ed8758f   Steffen Klassert   crypto: cryptd - ...
584
  	err = PTR_ERR(inst);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
585
586
  	if (IS_ERR(inst))
  		goto out_put_alg;
0b535adfb   Herbert Xu   crypto: cryptd - ...
587
  	ctx = ahash_instance_ctx(inst);
46309d893   Herbert Xu   crypto: cryptd - ...
588
  	ctx->queue = queue;
0b535adfb   Herbert Xu   crypto: cryptd - ...
589
590
  	err = crypto_init_shash_spawn(&ctx->spawn, salg,
  				      ahash_crypto_instance(inst));
46309d893   Herbert Xu   crypto: cryptd - ...
591
592
  	if (err)
  		goto out_free_inst;
466a7b9e3   Stephan Mueller   crypto: cryptd - ...
593
594
595
596
  	type = CRYPTO_ALG_ASYNC;
  	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
  		type |= CRYPTO_ALG_INTERNAL;
  	inst->alg.halg.base.cra_flags = type;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
597

0b535adfb   Herbert Xu   crypto: cryptd - ...
598
599
  	inst->alg.halg.digestsize = salg->digestsize;
  	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
600

0b535adfb   Herbert Xu   crypto: cryptd - ...
601
602
  	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
  	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
603

0b535adfb   Herbert Xu   crypto: cryptd - ...
604
605
606
  	inst->alg.init   = cryptd_hash_init_enqueue;
  	inst->alg.update = cryptd_hash_update_enqueue;
  	inst->alg.final  = cryptd_hash_final_enqueue;
6fba00d17   Herbert Xu   crypto: cryptd - ...
607
608
609
  	inst->alg.finup  = cryptd_hash_finup_enqueue;
  	inst->alg.export = cryptd_hash_export;
  	inst->alg.import = cryptd_hash_import;
0b535adfb   Herbert Xu   crypto: cryptd - ...
610
611
  	inst->alg.setkey = cryptd_hash_setkey;
  	inst->alg.digest = cryptd_hash_digest_enqueue;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
612

0b535adfb   Herbert Xu   crypto: cryptd - ...
613
  	err = ahash_register_instance(tmpl, inst);
9cd899a32   Herbert Xu   crypto: cryptd - ...
614
615
616
617
618
  	if (err) {
  		crypto_drop_shash(&ctx->spawn);
  out_free_inst:
  		kfree(inst);
  	}
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
619
620
  out_put_alg:
  	crypto_mod_put(alg);
9cd899a32   Herbert Xu   crypto: cryptd - ...
621
  	return err;
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
622
  }
298c926c6   Adrian Hoban   crypto: cryptd - ...
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
  static void cryptd_aead_crypt(struct aead_request *req,
  			struct crypto_aead *child,
  			int err,
  			int (*crypt)(struct aead_request *req))
  {
  	struct cryptd_aead_request_ctx *rctx;
  	rctx = aead_request_ctx(req);
  
  	if (unlikely(err == -EINPROGRESS))
  		goto out;
  	aead_request_set_tfm(req, child);
  	err = crypt( req );
  	req->base.complete = rctx->complete;
  out:
  	local_bh_disable();
  	rctx->complete(&req->base, err);
  	local_bh_enable();
  }
  
  static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
  	struct crypto_aead *child = ctx->child;
  	struct aead_request *req;
  
  	req = container_of(areq, struct aead_request, base);
  	cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->encrypt);
  }
  
  static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
  	struct crypto_aead *child = ctx->child;
  	struct aead_request *req;
  
  	req = container_of(areq, struct aead_request, base);
  	cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->decrypt);
  }
  
  static int cryptd_aead_enqueue(struct aead_request *req,
3e3dc25fe   Mark Rustad   crypto: Resolve s...
663
  				    crypto_completion_t compl)
298c926c6   Adrian Hoban   crypto: cryptd - ...
664
665
666
667
668
669
  {
  	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
  	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
  
  	rctx->complete = req->base.complete;
3e3dc25fe   Mark Rustad   crypto: Resolve s...
670
  	req->base.complete = compl;
298c926c6   Adrian Hoban   crypto: cryptd - ...
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
  	return cryptd_enqueue_request(queue, &req->base);
  }
  
  static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
  {
  	return cryptd_aead_enqueue(req, cryptd_aead_encrypt );
  }
  
  static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
  {
  	return cryptd_aead_enqueue(req, cryptd_aead_decrypt );
  }
  
  static int cryptd_aead_init_tfm(struct crypto_tfm *tfm)
  {
  	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
  	struct aead_instance_ctx *ictx = crypto_instance_ctx(inst);
  	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
  	struct crypto_aead *cipher;
  
  	cipher = crypto_spawn_aead(spawn);
  	if (IS_ERR(cipher))
  		return PTR_ERR(cipher);
  
  	crypto_aead_set_flags(cipher, CRYPTO_TFM_REQ_MAY_SLEEP);
  	ctx->child = cipher;
  	tfm->crt_aead.reqsize = sizeof(struct cryptd_aead_request_ctx);
  	return 0;
  }
  
  static void cryptd_aead_exit_tfm(struct crypto_tfm *tfm)
  {
  	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
  	crypto_free_aead(ctx->child);
  }
  
  static int cryptd_create_aead(struct crypto_template *tmpl,
  		              struct rtattr **tb,
  			      struct cryptd_queue *queue)
  {
  	struct aead_instance_ctx *ctx;
  	struct crypto_instance *inst;
  	struct crypto_alg *alg;
466a7b9e3   Stephan Mueller   crypto: cryptd - ...
715
716
  	u32 type = CRYPTO_ALG_TYPE_AEAD;
  	u32 mask = CRYPTO_ALG_TYPE_MASK;
298c926c6   Adrian Hoban   crypto: cryptd - ...
717
  	int err;
466a7b9e3   Stephan Mueller   crypto: cryptd - ...
718
719
720
  	cryptd_check_internal(tb, &type, &mask);
  
  	alg = crypto_get_attr_alg(tb, type, mask);
298c926c6   Adrian Hoban   crypto: cryptd - ...
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
          if (IS_ERR(alg))
  		return PTR_ERR(alg);
  
  	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
  	err = PTR_ERR(inst);
  	if (IS_ERR(inst))
  		goto out_put_alg;
  
  	ctx = crypto_instance_ctx(inst);
  	ctx->queue = queue;
  
  	err = crypto_init_spawn(&ctx->aead_spawn.base, alg, inst,
  			CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
  	if (err)
  		goto out_free_inst;
466a7b9e3   Stephan Mueller   crypto: cryptd - ...
736
737
738
739
  	type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
  	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
  		type |= CRYPTO_ALG_INTERNAL;
  	inst->alg.cra_flags = type;
298c926c6   Adrian Hoban   crypto: cryptd - ...
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
  	inst->alg.cra_type = alg->cra_type;
  	inst->alg.cra_ctxsize = sizeof(struct cryptd_aead_ctx);
  	inst->alg.cra_init = cryptd_aead_init_tfm;
  	inst->alg.cra_exit = cryptd_aead_exit_tfm;
  	inst->alg.cra_aead.setkey      = alg->cra_aead.setkey;
  	inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
  	inst->alg.cra_aead.geniv       = alg->cra_aead.geniv;
  	inst->alg.cra_aead.ivsize      = alg->cra_aead.ivsize;
  	inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
  	inst->alg.cra_aead.encrypt     = cryptd_aead_encrypt_enqueue;
  	inst->alg.cra_aead.decrypt     = cryptd_aead_decrypt_enqueue;
  	inst->alg.cra_aead.givencrypt  = alg->cra_aead.givencrypt;
  	inst->alg.cra_aead.givdecrypt  = alg->cra_aead.givdecrypt;
  
  	err = crypto_register_instance(tmpl, inst);
  	if (err) {
  		crypto_drop_spawn(&ctx->aead_spawn.base);
  out_free_inst:
  		kfree(inst);
  	}
  out_put_alg:
  	crypto_mod_put(alg);
  	return err;
  }
254eff771   Huang Ying   crypto: cryptd - ...
764
/* Module-wide request queue shared by all cryptd instances; set up in
 * cryptd_init() and torn down in cryptd_exit(). */
static struct cryptd_queue queue;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
765

9cd899a32   Herbert Xu   crypto: cryptd - ...
766
  static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
767
768
769
770
771
  {
  	struct crypto_attr_type *algt;
  
  	algt = crypto_get_attr_type(tb);
  	if (IS_ERR(algt))
9cd899a32   Herbert Xu   crypto: cryptd - ...
772
  		return PTR_ERR(algt);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
773
774
775
  
  	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
  	case CRYPTO_ALG_TYPE_BLKCIPHER:
9cd899a32   Herbert Xu   crypto: cryptd - ...
776
  		return cryptd_create_blkcipher(tmpl, tb, &queue);
b8a28251c   Loc Ho   [CRYPTO] cryptd: ...
777
  	case CRYPTO_ALG_TYPE_DIGEST:
9cd899a32   Herbert Xu   crypto: cryptd - ...
778
  		return cryptd_create_hash(tmpl, tb, &queue);
298c926c6   Adrian Hoban   crypto: cryptd - ...
779
780
  	case CRYPTO_ALG_TYPE_AEAD:
  		return cryptd_create_aead(tmpl, tb, &queue);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
781
  	}
9cd899a32   Herbert Xu   crypto: cryptd - ...
782
  	return -EINVAL;
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
783
784
785
786
787
  }
  
  static void cryptd_free(struct crypto_instance *inst)
  {
  	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
0b535adfb   Herbert Xu   crypto: cryptd - ...
788
  	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
298c926c6   Adrian Hoban   crypto: cryptd - ...
789
  	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);
0b535adfb   Herbert Xu   crypto: cryptd - ...
790
791
792
793
794
795
  
  	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
  	case CRYPTO_ALG_TYPE_AHASH:
  		crypto_drop_shash(&hctx->spawn);
  		kfree(ahash_instance(inst));
  		return;
298c926c6   Adrian Hoban   crypto: cryptd - ...
796
797
798
799
800
801
802
  	case CRYPTO_ALG_TYPE_AEAD:
  		crypto_drop_spawn(&aead_ctx->aead_spawn.base);
  		kfree(inst);
  		return;
  	default:
  		crypto_drop_spawn(&ctx->spawn);
  		kfree(inst);
0b535adfb   Herbert Xu   crypto: cryptd - ...
803
  	}
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
804
805
806
807
  }
  
/* The "cryptd" template: wraps sync algorithms into async queued ones. */
static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};
1cac2cbc7   Huang Ying   crypto: cryptd - ...
812
813
814
815
  struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
  						  u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
505fd21d6   Huang Ying   crypto: cryptd - ...
816
  	struct crypto_tfm *tfm;
1cac2cbc7   Huang Ying   crypto: cryptd - ...
817
818
819
820
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
505fd21d6   Huang Ying   crypto: cryptd - ...
821
822
823
824
825
  	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
  	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
  	mask &= ~CRYPTO_ALG_TYPE_MASK;
  	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
  	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
1cac2cbc7   Huang Ying   crypto: cryptd - ...
826
827
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
505fd21d6   Huang Ying   crypto: cryptd - ...
828
829
  	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_tfm(tfm);
1cac2cbc7   Huang Ying   crypto: cryptd - ...
830
831
  		return ERR_PTR(-EINVAL);
  	}
505fd21d6   Huang Ying   crypto: cryptd - ...
832
  	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
1cac2cbc7   Huang Ying   crypto: cryptd - ...
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
  
  struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
  {
  	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);
  
/* Drop the caller's reference on a cryptd ablkcipher handle. */
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
ace136636   Huang Ying   crypto: cryptd - ...
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
  struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
  					u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_ahash *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_ahash(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  
  	return __cryptd_ahash_cast(tfm);
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
  
/* Return the synchronous shash wrapped by a cryptd ahash. */
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);
0e1227d35   Huang Ying   crypto: ghash - A...
876
877
878
879
880
881
/* Return the shash descriptor embedded in a cryptd hash request context. */
struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);
ace136636   Huang Ying   crypto: cryptd - ...
882
883
884
885
886
/* Drop the caller's reference on a cryptd ahash handle. */
void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
298c926c6   Adrian Hoban   crypto: cryptd - ...
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
  struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
  						  u32 type, u32 mask)
  {
  	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
  	struct crypto_aead *tfm;
  
  	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
  		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
  		return ERR_PTR(-EINVAL);
  	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
  	if (IS_ERR(tfm))
  		return ERR_CAST(tfm);
  	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
  		crypto_free_aead(tfm);
  		return ERR_PTR(-EINVAL);
  	}
  	return __cryptd_aead_cast(tfm);
  }
  EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
  
  struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
  {
  	struct cryptd_aead_ctx *ctx;
  	ctx = crypto_aead_ctx(&tfm->base);
  	return ctx->child;
  }
  EXPORT_SYMBOL_GPL(cryptd_aead_child);
  
/* Drop the caller's reference on a cryptd AEAD handle. */
void cryptd_free_aead(struct cryptd_aead *tfm)
{
	crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
920
921
922
  static int __init cryptd_init(void)
  {
  	int err;
254eff771   Huang Ying   crypto: cryptd - ...
923
  	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
924
925
926
927
928
  	if (err)
  		return err;
  
  	err = crypto_register_template(&cryptd_tmpl);
  	if (err)
254eff771   Huang Ying   crypto: cryptd - ...
929
  		cryptd_fini_queue(&queue);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
930
931
932
933
934
935
  
  	return err;
  }
  
  static void __exit cryptd_exit(void)
  {
254eff771   Huang Ying   crypto: cryptd - ...
936
  	cryptd_fini_queue(&queue);
124b53d02   Herbert Xu   [CRYPTO] cryptd: ...
937
938
  	crypto_unregister_template(&cryptd_tmpl);
  }
b2bac6acf   Herbert Xu   crypto: cryptd - ...
939
/* Registered at subsys_initcall time so other crypto code can stack on
 * cryptd early during boot. */
subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");