crypto/crypto_engine.c
  // SPDX-License-Identifier: GPL-2.0-or-later
  /*
   * Handle async block request by crypto hardware engine.
   *
   * Copyright (C) 2016 Linaro, Inc.
   *
   * Author: Baolin Wang <baolin.wang@linaro.org>
   */
  
  #include <linux/err.h>
  #include <linux/delay.h>
  #include <crypto/engine.h>
  #include <uapi/linux/sched/types.h>
  #include "internal.h"
  
  #define CRYPTO_ENGINE_MAX_QLEN 10
  /**
   * crypto_finalize_request - finalize one request if the request is done
   * @engine: the hardware engine
   * @req: the request that needs to be finalized
   * @err: error number
   */
  static void crypto_finalize_request(struct crypto_engine *engine,
  			     struct crypto_async_request *req, int err)
  {
  	unsigned long flags;
  	bool finalize_cur_req = false;
  	int ret;
  	struct crypto_engine_ctx *enginectx;
  
  	spin_lock_irqsave(&engine->queue_lock, flags);
  	if (engine->cur_req == req)
  		finalize_cur_req = true;
  	spin_unlock_irqrestore(&engine->queue_lock, flags);
  
  	if (finalize_cur_req) {
  		enginectx = crypto_tfm_ctx(req->tfm);
  		if (engine->cur_req_prepared &&
  		    enginectx->op.unprepare_request) {
  			ret = enginectx->op.unprepare_request(engine, req);
  			if (ret)
  				dev_err(engine->dev, "failed to unprepare request
  ");
  		}
  		spin_lock_irqsave(&engine->queue_lock, flags);
  		engine->cur_req = NULL;
  		engine->cur_req_prepared = false;
  		spin_unlock_irqrestore(&engine->queue_lock, flags);
  	}
  
  	req->complete(req, err);
  
  	kthread_queue_work(engine->kworker, &engine->pump_requests);
  }
  
  /**
   * crypto_pump_requests - dequeue one request from engine queue to process
   * @engine: the hardware engine
   * @in_kthread: true if we are in the context of the request pump thread
   *
   * This function checks if there is any request in the engine queue that
   * needs processing and, if so, calls out to the driver to initialize the
   * hardware and handle each request.
   */
  static void crypto_pump_requests(struct crypto_engine *engine,
  				 bool in_kthread)
  {
  	struct crypto_async_request *async_req, *backlog;
  	unsigned long flags;
  	bool was_busy = false;
  	int ret;
  	struct crypto_engine_ctx *enginectx;
  
  	spin_lock_irqsave(&engine->queue_lock, flags);
  
  	/* Make sure we are not already running a request */
  	if (engine->cur_req)
  		goto out;
  
  	/* If another context is idling then defer */
  	if (engine->idling) {
  		kthread_queue_work(engine->kworker, &engine->pump_requests);
  		goto out;
  	}
  
  	/* Check if the engine queue is idle */
  	if (!crypto_queue_len(&engine->queue) || !engine->running) {
  		if (!engine->busy)
  			goto out;
  
  		/* Only do teardown in the thread */
  		if (!in_kthread) {
  			kthread_queue_work(engine->kworker,
  					   &engine->pump_requests);
  			goto out;
  		}
  
  		engine->busy = false;
  		engine->idling = true;
  		spin_unlock_irqrestore(&engine->queue_lock, flags);
  
  		if (engine->unprepare_crypt_hardware &&
  		    engine->unprepare_crypt_hardware(engine))
  			dev_err(engine->dev, "failed to unprepare crypt hardware
  ");
  
  		spin_lock_irqsave(&engine->queue_lock, flags);
  		engine->idling = false;
  		goto out;
  	}
  
  	/* Get the first request from the engine queue to handle */
  	backlog = crypto_get_backlog(&engine->queue);
  	async_req = crypto_dequeue_request(&engine->queue);
  	if (!async_req)
  		goto out;
  	engine->cur_req = async_req;
  	if (backlog)
  		backlog->complete(backlog, -EINPROGRESS);
  
  	if (engine->busy)
  		was_busy = true;
  	else
  		engine->busy = true;
  
  	spin_unlock_irqrestore(&engine->queue_lock, flags);
  
  	/* At this point we have successfully dequeued a request to process */
  	if (!was_busy && engine->prepare_crypt_hardware) {
  		ret = engine->prepare_crypt_hardware(engine);
  		if (ret) {
  			dev_err(engine->dev, "failed to prepare crypt hardware
  ");
  			goto req_err;
  		}
  	}
  	enginectx = crypto_tfm_ctx(async_req->tfm);
  
  	if (enginectx->op.prepare_request) {
  		ret = enginectx->op.prepare_request(engine, async_req);
  		if (ret) {
  			dev_err(engine->dev, "failed to prepare request: %d
  ",
  				ret);
  			goto req_err;
  		}
  		engine->cur_req_prepared = true;
  	}
  	if (!enginectx->op.do_one_request) {
  		dev_err(engine->dev, "failed to do request
  ");
  		ret = -EINVAL;
  		goto req_err;
  	}
  	ret = enginectx->op.do_one_request(engine, async_req);
  	if (ret) {
  		dev_err(engine->dev, "Failed to do one request from queue: %d
  ", ret);
  		goto req_err;
  	}
  	return;
  
  req_err:
  	crypto_finalize_request(engine, async_req, ret);
  	return;
  
  out:
  	spin_unlock_irqrestore(&engine->queue_lock, flags);
  }
  
  static void crypto_pump_work(struct kthread_work *work)
  {
  	struct crypto_engine *engine =
  		container_of(work, struct crypto_engine, pump_requests);
  
  	crypto_pump_requests(engine, true);
  }
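  
  /*
   * Illustrative sketch, not part of this file: for crypto_pump_requests()
   * above to find the per-request hooks, a driver's transform context must
   * begin with a struct crypto_engine_ctx, since the pump reaches it via
   * crypto_tfm_ctx().  All "my_*" names are hypothetical, and an skcipher
   * driver would also need crypto/internal/skcipher.h for
   * crypto_skcipher_ctx().
   */
  struct my_tfm_ctx {
  	struct crypto_engine_ctx enginectx;	/* must be the first member */
  	struct crypto_engine *engine;		/* set from driver data at init */
  	/* driver-private keys, DMA handles, etc. follow */
  };
  
  static int my_do_one_request(struct crypto_engine *engine, void *areq)
  {
  	struct skcipher_request *req = container_of(areq,
  						    struct skcipher_request,
  						    base);
  
  	/*
  	 * Program the hardware with req->src, req->dst and req->cryptlen
  	 * here; the result is reported later through
  	 * crypto_finalize_skcipher_request() from the driver's completion
  	 * path.
  	 */
  	return 0;
  }
  
  static int my_init_tfm(struct crypto_skcipher *tfm)
  {
  	struct my_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
  
  	ctx->enginectx.op.do_one_request = my_do_one_request;
  	ctx->enginectx.op.prepare_request = NULL;
  	ctx->enginectx.op.unprepare_request = NULL;
  	return 0;
  }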
  
  /**
   * crypto_transfer_request - transfer the new request into the engine queue
   * @engine: the hardware engine
   * @req: the request that needs to be listed into the engine queue
   * @need_pump: whether to kick the request pump after enqueueing the request
   */
  static int crypto_transfer_request(struct crypto_engine *engine,
  				   struct crypto_async_request *req,
  				   bool need_pump)
  {
  	unsigned long flags;
  	int ret;
  
  	spin_lock_irqsave(&engine->queue_lock, flags);
  
  	if (!engine->running) {
  		spin_unlock_irqrestore(&engine->queue_lock, flags);
  		return -ESHUTDOWN;
  	}
  	ret = crypto_enqueue_request(&engine->queue, req);
  
  	if (!engine->busy && need_pump)
  		kthread_queue_work(engine->kworker, &engine->pump_requests);
  
  	spin_unlock_irqrestore(&engine->queue_lock, flags);
  	return ret;
  }
  
  /**
   * crypto_transfer_request_to_engine - transfer one request to
   * the engine queue
   * @engine: the hardware engine
   * @req: the request that needs to be listed into the engine queue
   */
  static int crypto_transfer_request_to_engine(struct crypto_engine *engine,
  					     struct crypto_async_request *req)
  {
  	return crypto_transfer_request(engine, req, true);
  }
  
  /**
   * crypto_transfer_ablkcipher_request_to_engine - transfer one ablkcipher_request
   * into the engine queue
   * @engine: the hardware engine
   * @req: the request that needs to be listed into the engine queue
   * TODO: Remove this function when skcipher conversion is finished
   */
  int crypto_transfer_ablkcipher_request_to_engine(struct crypto_engine *engine,
  						 struct ablkcipher_request *req)
  {
  	return crypto_transfer_request_to_engine(engine, &req->base);
  }
  EXPORT_SYMBOL_GPL(crypto_transfer_ablkcipher_request_to_engine);

  /**
   * crypto_transfer_aead_request_to_engine - transfer one aead_request
   * into the engine queue
   * @engine: the hardware engine
   * @req: the request that needs to be listed into the engine queue
   */
  int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
  					   struct aead_request *req)
  {
  	return crypto_transfer_request_to_engine(engine, &req->base);
  }
  EXPORT_SYMBOL_GPL(crypto_transfer_aead_request_to_engine);

  /**
   * crypto_transfer_akcipher_request_to_engine - transfer one akcipher_request
   * into the engine queue
   * @engine: the hardware engine
   * @req: the request that needs to be listed into the engine queue
   */
  int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
  					       struct akcipher_request *req)
  {
  	return crypto_transfer_request_to_engine(engine, &req->base);
  }
  EXPORT_SYMBOL_GPL(crypto_transfer_akcipher_request_to_engine);
  
  /**
   * crypto_transfer_hash_request_to_engine - transfer one ahash_request
   * into the engine queue
   * @engine: the hardware engine
   * @req: the request that needs to be listed into the engine queue
   */
  int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
  					   struct ahash_request *req)
  {
  	return crypto_transfer_request_to_engine(engine, &req->base);
  }
  EXPORT_SYMBOL_GPL(crypto_transfer_hash_request_to_engine);
  
  /**
   * crypto_transfer_skcipher_request_to_engine - transfer one skcipher_request
   * into the engine queue
   * @engine: the hardware engine
   * @req: the request that needs to be listed into the engine queue
   */
  int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
  					       struct skcipher_request *req)
  {
  	return crypto_transfer_request_to_engine(engine, &req->base);
  }
  EXPORT_SYMBOL_GPL(crypto_transfer_skcipher_request_to_engine);
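  
  /*
   * Illustrative sketch, not part of this file: an skcipher implementation's
   * .encrypt (or .decrypt) callback usually just hands the request to the
   * engine and propagates the enqueue status (-EINPROGRESS, or -EBUSY for a
   * backlogged request).  "my_tfm_ctx" and its "engine" pointer are
   * hypothetical driver state.
   */
  static int my_skcipher_encrypt(struct skcipher_request *req)
  {
  	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  	struct my_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
  
  	/* Queue the request; crypto_pump_requests() will pick it up. */
  	return crypto_transfer_skcipher_request_to_engine(ctx->engine, req);
  }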
  
  /**
   * crypto_finalize_ablkcipher_request - finalize one ablkcipher_request if
   * the request is done
   * @engine: the hardware engine
   * @req: the request that needs to be finalized
   * @err: error number
   * TODO: Remove this function when skcipher conversion is finished
   */
  void crypto_finalize_ablkcipher_request(struct crypto_engine *engine,
  					struct ablkcipher_request *req, int err)
  {
  	return crypto_finalize_request(engine, &req->base, err);
  }
  EXPORT_SYMBOL_GPL(crypto_finalize_ablkcipher_request);

  /**
   * crypto_finalize_aead_request - finalize one aead_request if
   * the request is done
   * @engine: the hardware engine
   * @req: the request that needs to be finalized
   * @err: error number
   */
  void crypto_finalize_aead_request(struct crypto_engine *engine,
  				  struct aead_request *req, int err)
  {
  	return crypto_finalize_request(engine, &req->base, err);
  }
  EXPORT_SYMBOL_GPL(crypto_finalize_aead_request);

  /**
   * crypto_finalize_akcipher_request - finalize one akcipher_request if
   * the request is done
   * @engine: the hardware engine
   * @req: the request that needs to be finalized
   * @err: error number
   */
  void crypto_finalize_akcipher_request(struct crypto_engine *engine,
  				      struct akcipher_request *req, int err)
  {
  	return crypto_finalize_request(engine, &req->base, err);
  }
  EXPORT_SYMBOL_GPL(crypto_finalize_akcipher_request);
  
  /**
   * crypto_finalize_hash_request - finalize one ahash_request if
   * the request is done
   * @engine: the hardware engine
   * @req: the request that needs to be finalized
   * @err: error number
   */
  void crypto_finalize_hash_request(struct crypto_engine *engine,
  				  struct ahash_request *req, int err)
  {
  	return crypto_finalize_request(engine, &req->base, err);
  }
  EXPORT_SYMBOL_GPL(crypto_finalize_hash_request);
  
  /**
   * crypto_finalize_skcipher_request - finalize one skcipher_request if
   * the request is done
   * @engine: the hardware engine
   * @req: the request that needs to be finalized
   * @err: error number
   */
  void crypto_finalize_skcipher_request(struct crypto_engine *engine,
  				      struct skcipher_request *req, int err)
  {
  	return crypto_finalize_request(engine, &req->base, err);
  }
  EXPORT_SYMBOL_GPL(crypto_finalize_skcipher_request);
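  
  /*
   * Illustrative sketch, not part of this file: when the hardware signals
   * completion, the driver's interrupt (or completion) handler reports the
   * result back to the engine, which then unprepares the request, runs its
   * completion callback and restarts the pump.  "my_device",
   * "my_read_status()" and the saved "cur_req" are hypothetical; a real
   * handler also needs linux/interrupt.h.
   */
  static irqreturn_t my_done_irq(int irq, void *data)
  {
  	struct my_device *mydev = data;
  	int err = my_read_status(mydev) ? -EIO : 0;
  
  	crypto_finalize_skcipher_request(mydev->engine, mydev->cur_req, err);
  	return IRQ_HANDLED;
  }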
  
  /**
   * crypto_engine_start - start the hardware engine
   * @engine: the hardware engine that needs to be started
   *
   * Return 0 on success, else a negative error code.
   */
  int crypto_engine_start(struct crypto_engine *engine)
  {
  	unsigned long flags;
  
  	spin_lock_irqsave(&engine->queue_lock, flags);
  
  	if (engine->running || engine->busy) {
  		spin_unlock_irqrestore(&engine->queue_lock, flags);
  		return -EBUSY;
  	}
  
  	engine->running = true;
  	spin_unlock_irqrestore(&engine->queue_lock, flags);
  	kthread_queue_work(engine->kworker, &engine->pump_requests);
  
  	return 0;
  }
  EXPORT_SYMBOL_GPL(crypto_engine_start);
  
  /**
   * crypto_engine_stop - stop the hardware engine
   * @engine: the hardware engine that needs to be stopped
   *
   * Return 0 on success, else a negative error code.
   */
  int crypto_engine_stop(struct crypto_engine *engine)
  {
  	unsigned long flags;
  	unsigned int limit = 500;
  	int ret = 0;
  
  	spin_lock_irqsave(&engine->queue_lock, flags);
  
  	/*
  	 * If the engine queue is not empty or the engine is busy, wait
  	 * for a while for the pending requests to be processed.
  	 */
  	while ((crypto_queue_len(&engine->queue) || engine->busy) && limit--) {
  		spin_unlock_irqrestore(&engine->queue_lock, flags);
  		msleep(20);
  		spin_lock_irqsave(&engine->queue_lock, flags);
  	}
  
  	if (crypto_queue_len(&engine->queue) || engine->busy)
  		ret = -EBUSY;
  	else
  		engine->running = false;
  
  	spin_unlock_irqrestore(&engine->queue_lock, flags);
  
  	if (ret)
  		dev_warn(engine->dev, "could not stop engine
  ");
  
  	return ret;
  }
  EXPORT_SYMBOL_GPL(crypto_engine_stop);
  
  /**
   * crypto_engine_alloc_init - allocate crypto hardware engine structure and
   * initialize it.
   * @dev: the device attached with one hardware engine
   * @rt: whether this queue is set to run as a realtime task
   *
   * This must be called from context that can sleep.
   * Return: the crypto engine structure on success, else NULL.
   */
  struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt)
  {
  	struct sched_param param = { .sched_priority = MAX_RT_PRIO / 2 };
  	struct crypto_engine *engine;
  
  	if (!dev)
  		return NULL;
  
  	engine = devm_kzalloc(dev, sizeof(*engine), GFP_KERNEL);
  	if (!engine)
  		return NULL;
  	engine->dev = dev;
  	engine->rt = rt;
  	engine->running = false;
  	engine->busy = false;
  	engine->idling = false;
  	engine->cur_req_prepared = false;
  	engine->priv_data = dev;
  	snprintf(engine->name, sizeof(engine->name),
  		 "%s-engine", dev_name(dev));
  
  	crypto_init_queue(&engine->queue, CRYPTO_ENGINE_MAX_QLEN);
  	spin_lock_init(&engine->queue_lock);
  	engine->kworker = kthread_create_worker(0, "%s", engine->name);
  	if (IS_ERR(engine->kworker)) {
  		dev_err(dev, "failed to create crypto request pump task
  ");
  		return NULL;
  	}
  	kthread_init_work(&engine->pump_requests, crypto_pump_work);
  
  	if (engine->rt) {
  		dev_info(dev, "will run requests pump with realtime priority
  ");
  		sched_setscheduler(engine->kworker->task, SCHED_FIFO, &param);
  	}
  
  	return engine;
  }
  EXPORT_SYMBOL_GPL(crypto_engine_alloc_init);
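  
  /*
   * Illustrative sketch, not part of this file: typical probe-time setup.
   * The engine structure is devm-allocated, so on remove the driver only
   * calls crypto_engine_exit() below, which drains the queue and destroys
   * the kworker.  "my_device" and "my_engine_setup" are hypothetical.
   */
  static int my_engine_setup(struct device *dev, struct my_device *mydev)
  {
  	/* true: run the request pump with realtime priority */
  	mydev->engine = crypto_engine_alloc_init(dev, true);
  	if (!mydev->engine)
  		return -ENOMEM;
  
  	/* Let the pump thread start dequeuing requests. */
  	return crypto_engine_start(mydev->engine);
  }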
  
  /**
   * crypto_engine_exit - free the resources of hardware engine when exit
   * @engine: the hardware engine that needs to be freed
   *
   * Return 0 for success.
   */
  int crypto_engine_exit(struct crypto_engine *engine)
  {
  	int ret;
  
  	ret = crypto_engine_stop(engine);
  	if (ret)
  		return ret;
  	kthread_destroy_worker(engine->kworker);
  
  	return 0;
  }
  EXPORT_SYMBOL_GPL(crypto_engine_exit);
  
  MODULE_LICENSE("GPL");
  MODULE_DESCRIPTION("Crypto hardware engine framework");