Commit 66f6ce5e52f2f209d5bf1f06167cec888f4f4c13
1 parent
093900c2b9
Exists in
master
and in
4 other branches
crypto: ahash - Add unaligned handling and default operations
This patch exports the finup operation where available and adds a default finup operation for ahash. The operations final, finup and digest will now also deal with unaligned result pointers by copying the result. Finally, export/import operations will now be exported too. Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Showing 4 changed files with 263 additions and 22 deletions Side-by-side Diff
crypto/ahash.c
... | ... | @@ -24,6 +24,13 @@ |
24 | 24 | |
25 | 25 | #include "internal.h" |
26 | 26 | |
27 | +struct ahash_request_priv { | |
28 | + crypto_completion_t complete; | |
29 | + void *data; | |
30 | + u8 *result; | |
31 | + void *ubuf[] CRYPTO_MINALIGN_ATTR; | |
32 | +}; | |
33 | + | |
27 | 34 | static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash) |
28 | 35 | { |
29 | 36 | return container_of(crypto_hash_alg_common(hash), struct ahash_alg, |
... | ... | @@ -156,7 +163,7 @@ |
156 | 163 | return ret; |
157 | 164 | } |
158 | 165 | |
159 | -static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key, | |
166 | +int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, | |
160 | 167 | unsigned int keylen) |
161 | 168 | { |
162 | 169 | struct ahash_alg *ahash = crypto_ahash_alg(tfm); |
... | ... | @@ -167,6 +174,7 @@ |
167 | 174 | |
168 | 175 | return ahash->setkey(tfm, key, keylen); |
169 | 176 | } |
177 | +EXPORT_SYMBOL_GPL(crypto_ahash_setkey); | |
170 | 178 | |
171 | 179 | static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, |
172 | 180 | unsigned int keylen) |
173 | 181 | |
174 | 182 | |
175 | 183 | |
... | ... | @@ -174,19 +182,209 @@ |
174 | 182 | return -ENOSYS; |
175 | 183 | } |
176 | 184 | |
185 | +static inline unsigned int ahash_align_buffer_size(unsigned len, | |
186 | + unsigned long mask) | |
187 | +{ | |
188 | + return len + (mask & ~(crypto_tfm_ctx_alignment() - 1)); | |
189 | +} | |
190 | + | |
191 | +static void ahash_op_unaligned_finish(struct ahash_request *req, int err) | |
192 | +{ | |
193 | + struct ahash_request_priv *priv = req->priv; | |
194 | + | |
195 | + if (err == -EINPROGRESS) | |
196 | + return; | |
197 | + | |
198 | + if (!err) | |
199 | + memcpy(priv->result, req->result, | |
200 | + crypto_ahash_digestsize(crypto_ahash_reqtfm(req))); | |
201 | + | |
202 | + kzfree(priv); | |
203 | +} | |
204 | + | |
205 | +static void ahash_op_unaligned_done(struct crypto_async_request *req, int err) | |
206 | +{ | |
207 | + struct ahash_request *areq = req->data; | |
208 | + struct ahash_request_priv *priv = areq->priv; | |
209 | + crypto_completion_t complete = priv->complete; | |
210 | + void *data = priv->data; | |
211 | + | |
212 | + ahash_op_unaligned_finish(areq, err); | |
213 | + | |
214 | + complete(data, err); | |
215 | +} | |
216 | + | |
217 | +static int ahash_op_unaligned(struct ahash_request *req, | |
218 | + int (*op)(struct ahash_request *)) | |
219 | +{ | |
220 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); | |
221 | + unsigned long alignmask = crypto_ahash_alignmask(tfm); | |
222 | + unsigned int ds = crypto_ahash_digestsize(tfm); | |
223 | + struct ahash_request_priv *priv; | |
224 | + int err; | |
225 | + | |
226 | + priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask), | |
227 | + (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? | |
228 | + GFP_KERNEL : GFP_ATOMIC); | 
229 | + if (!priv) | |
230 | + return -ENOMEM; | |
231 | + | |
232 | + priv->result = req->result; | |
233 | + priv->complete = req->base.complete; | |
234 | + priv->data = req->base.data; | |
235 | + | |
236 | + req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1); | |
237 | + req->base.complete = ahash_op_unaligned_done; | |
238 | + req->base.data = req; | |
239 | + req->priv = priv; | |
240 | + | |
241 | + err = op(req); | |
242 | + ahash_op_unaligned_finish(req, err); | |
243 | + | |
244 | + return err; | |
245 | +} | |
246 | + | |
247 | +static int crypto_ahash_op(struct ahash_request *req, | |
248 | + int (*op)(struct ahash_request *)) | |
249 | +{ | |
250 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); | |
251 | + unsigned long alignmask = crypto_ahash_alignmask(tfm); | |
252 | + | |
253 | + if ((unsigned long)req->result & alignmask) | |
254 | + return ahash_op_unaligned(req, op); | |
255 | + | |
256 | + return op(req); | |
257 | +} | |
258 | + | |
259 | +int crypto_ahash_final(struct ahash_request *req) | |
260 | +{ | |
261 | + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final); | |
262 | +} | |
263 | +EXPORT_SYMBOL_GPL(crypto_ahash_final); | |
264 | + | |
265 | +int crypto_ahash_finup(struct ahash_request *req) | |
266 | +{ | |
267 | + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup); | |
268 | +} | |
269 | +EXPORT_SYMBOL_GPL(crypto_ahash_finup); | |
270 | + | |
271 | +int crypto_ahash_digest(struct ahash_request *req) | |
272 | +{ | |
273 | + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest); | |
274 | +} | |
275 | +EXPORT_SYMBOL_GPL(crypto_ahash_digest); | |
276 | + | |
277 | +static void ahash_def_finup_finish2(struct ahash_request *req, int err) | |
278 | +{ | |
279 | + struct ahash_request_priv *priv = req->priv; | |
280 | + | |
281 | + if (err == -EINPROGRESS) | |
282 | + return; | |
283 | + | |
284 | + if (!err) | |
285 | + memcpy(priv->result, req->result, | |
286 | + crypto_ahash_digestsize(crypto_ahash_reqtfm(req))); | |
287 | + | |
288 | + kzfree(priv); | |
289 | +} | |
290 | + | |
291 | +static void ahash_def_finup_done2(struct crypto_async_request *req, int err) | |
292 | +{ | |
293 | + struct ahash_request *areq = req->data; | |
294 | + struct ahash_request_priv *priv = areq->priv; | |
295 | + crypto_completion_t complete = priv->complete; | |
296 | + void *data = priv->data; | |
297 | + | |
298 | + ahash_def_finup_finish2(areq, err); | |
299 | + | |
300 | + complete(data, err); | |
301 | +} | |
302 | + | |
303 | +static int ahash_def_finup_finish1(struct ahash_request *req, int err) | |
304 | +{ | |
305 | + if (err) | |
306 | + goto out; | |
307 | + | |
308 | + req->base.complete = ahash_def_finup_done2; | |
309 | + req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | |
310 | + err = crypto_ahash_reqtfm(req)->final(req); | |
311 | + | |
312 | +out: | |
313 | + ahash_def_finup_finish2(req, err); | |
314 | + return err; | |
315 | +} | |
316 | + | |
317 | +static void ahash_def_finup_done1(struct crypto_async_request *req, int err) | |
318 | +{ | |
319 | + struct ahash_request *areq = req->data; | |
320 | + struct ahash_request_priv *priv = areq->priv; | |
321 | + crypto_completion_t complete = priv->complete; | |
322 | + void *data = priv->data; | |
323 | + | |
324 | + err = ahash_def_finup_finish1(areq, err); | |
325 | + | |
326 | + complete(data, err); | |
327 | +} | |
328 | + | |
329 | +static int ahash_def_finup(struct ahash_request *req) | |
330 | +{ | |
331 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); | |
332 | + unsigned long alignmask = crypto_ahash_alignmask(tfm); | |
333 | + unsigned int ds = crypto_ahash_digestsize(tfm); | |
334 | + struct ahash_request_priv *priv; | |
335 | + | |
336 | + priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask), | |
337 | + (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? | |
338 | + GFP_KERNEL : GFP_ATOMIC); | 
339 | + if (!priv) | |
340 | + return -ENOMEM; | |
341 | + | |
342 | + priv->result = req->result; | |
343 | + priv->complete = req->base.complete; | |
344 | + priv->data = req->base.data; | |
345 | + | |
346 | + req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1); | |
347 | + req->base.complete = ahash_def_finup_done1; | |
348 | + req->base.data = req; | |
349 | + req->priv = priv; | |
350 | + | |
351 | + return ahash_def_finup_finish1(req, tfm->update(req)); | |
352 | +} | |
353 | + | |
354 | +static int ahash_no_export(struct ahash_request *req, void *out) | |
355 | +{ | |
356 | + return -ENOSYS; | |
357 | +} | |
358 | + | |
359 | +static int ahash_no_import(struct ahash_request *req, const void *in) | |
360 | +{ | |
361 | + return -ENOSYS; | |
362 | +} | |
363 | + | |
177 | 364 | static int crypto_ahash_init_tfm(struct crypto_tfm *tfm) |
178 | 365 | { |
179 | 366 | struct crypto_ahash *hash = __crypto_ahash_cast(tfm); |
180 | 367 | struct ahash_alg *alg = crypto_ahash_alg(hash); |
181 | 368 | |
369 | + hash->setkey = ahash_nosetkey; | |
370 | + hash->export = ahash_no_export; | |
371 | + hash->import = ahash_no_import; | |
372 | + | |
182 | 373 | if (tfm->__crt_alg->cra_type != &crypto_ahash_type) |
183 | 374 | return crypto_init_shash_ops_async(tfm); |
184 | 375 | |
185 | 376 | hash->init = alg->init; |
186 | 377 | hash->update = alg->update; |
187 | - hash->final = alg->final; | |
378 | + hash->final = alg->final; | |
379 | + hash->finup = alg->finup ?: ahash_def_finup; | |
188 | 380 | hash->digest = alg->digest; |
189 | - hash->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey; | |
381 | + | |
382 | + if (alg->setkey) | |
383 | + hash->setkey = alg->setkey; | |
384 | + if (alg->export) | |
385 | + hash->export = alg->export; | |
386 | + if (alg->import) | |
387 | + hash->import = alg->import; | |
190 | 388 | |
191 | 389 | return 0; |
192 | 390 | } |
crypto/shash.c
... | ... | @@ -235,6 +235,33 @@ |
235 | 235 | return crypto_shash_final(ahash_request_ctx(req), req->result); |
236 | 236 | } |
237 | 237 | |
238 | +int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc) | |
239 | +{ | |
240 | + struct crypto_hash_walk walk; | |
241 | + int nbytes; | |
242 | + | |
243 | + for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; | |
244 | + nbytes = crypto_hash_walk_done(&walk, nbytes)) | |
245 | + nbytes = crypto_hash_walk_last(&walk) ? | |
246 | + crypto_shash_finup(desc, walk.data, nbytes, | |
247 | + req->result) : | |
248 | + crypto_shash_update(desc, walk.data, nbytes); | |
249 | + | |
250 | + return nbytes; | |
251 | +} | |
252 | +EXPORT_SYMBOL_GPL(shash_ahash_finup); | |
253 | + | |
254 | +static int shash_async_finup(struct ahash_request *req) | |
255 | +{ | |
256 | + struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); | |
257 | + struct shash_desc *desc = ahash_request_ctx(req); | |
258 | + | |
259 | + desc->tfm = *ctx; | |
260 | + desc->flags = req->base.flags; | |
261 | + | |
262 | + return shash_ahash_finup(req, desc); | |
263 | +} | |
264 | + | |
238 | 265 | int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc) |
239 | 266 | { |
240 | 267 | struct scatterlist *sg = req->src; |
... | ... | @@ -252,8 +279,7 @@ |
252 | 279 | crypto_yield(desc->flags); |
253 | 280 | } else |
254 | 281 | err = crypto_shash_init(desc) ?: |
255 | - shash_ahash_update(req, desc) ?: | |
256 | - crypto_shash_final(desc, req->result); | |
282 | + shash_ahash_finup(req, desc); | |
257 | 283 | |
258 | 284 | return err; |
259 | 285 | } |
... | ... | @@ -270,6 +296,16 @@ |
270 | 296 | return shash_ahash_digest(req, desc); |
271 | 297 | } |
272 | 298 | |
299 | +static int shash_async_export(struct ahash_request *req, void *out) | |
300 | +{ | |
301 | + return crypto_shash_export(ahash_request_ctx(req), out); | |
302 | +} | |
303 | + | |
304 | +static int shash_async_import(struct ahash_request *req, const void *in) | |
305 | +{ | |
306 | + return crypto_shash_import(ahash_request_ctx(req), in); | |
307 | +} | |
308 | + | |
273 | 309 | static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm) |
274 | 310 | { |
275 | 311 | struct crypto_shash **ctx = crypto_tfm_ctx(tfm); |
... | ... | @@ -280,6 +316,7 @@ |
280 | 316 | int crypto_init_shash_ops_async(struct crypto_tfm *tfm) |
281 | 317 | { |
282 | 318 | struct crypto_alg *calg = tfm->__crt_alg; |
319 | + struct shash_alg *alg = __crypto_shash_alg(calg); | |
283 | 320 | struct crypto_ahash *crt = __crypto_ahash_cast(tfm); |
284 | 321 | struct crypto_shash **ctx = crypto_tfm_ctx(tfm); |
285 | 322 | struct crypto_shash *shash; |
286 | 323 | |
... | ... | @@ -298,9 +335,16 @@ |
298 | 335 | |
299 | 336 | crt->init = shash_async_init; |
300 | 337 | crt->update = shash_async_update; |
301 | - crt->final = shash_async_final; | |
338 | + crt->final = shash_async_final; | |
339 | + crt->finup = shash_async_finup; | |
302 | 340 | crt->digest = shash_async_digest; |
303 | - crt->setkey = shash_async_setkey; | |
341 | + | |
342 | + if (alg->setkey) | |
343 | + crt->setkey = shash_async_setkey; | |
344 | + if (alg->export) | |
345 | + crt->export = shash_async_export; | |
346 | + if (alg->import) | 
347 | + crt->import = shash_async_import; | |
304 | 348 | |
305 | 349 | crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash); |
306 | 350 |
include/crypto/hash.h
... | ... | @@ -31,6 +31,9 @@ |
31 | 31 | struct scatterlist *src; |
32 | 32 | u8 *result; |
33 | 33 | |
34 | + /* This field may only be used by the ahash API code. */ | |
35 | + void *priv; | |
36 | + | |
34 | 37 | void *__ctx[] CRYPTO_MINALIGN_ATTR; |
35 | 38 | }; |
36 | 39 | |
37 | 40 | |
... | ... | @@ -175,17 +178,12 @@ |
175 | 178 | return req->__ctx; |
176 | 179 | } |
177 | 180 | |
178 | -static inline int crypto_ahash_setkey(struct crypto_ahash *tfm, | |
179 | - const u8 *key, unsigned int keylen) | |
180 | -{ | |
181 | - return tfm->setkey(tfm, key, keylen); | |
182 | -} | |
181 | +int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, | |
182 | + unsigned int keylen); | |
183 | +int crypto_ahash_finup(struct ahash_request *req); | |
184 | +int crypto_ahash_final(struct ahash_request *req); | |
185 | +int crypto_ahash_digest(struct ahash_request *req); | |
183 | 186 | |
184 | -static inline int crypto_ahash_digest(struct ahash_request *req) | |
185 | -{ | |
186 | - return crypto_ahash_reqtfm(req)->digest(req); | |
187 | -} | |
188 | - | |
189 | 187 | static inline int crypto_ahash_export(struct ahash_request *req, void *out) |
190 | 188 | { |
191 | 189 | return crypto_ahash_reqtfm(req)->export(req, out); |
... | ... | @@ -204,11 +202,6 @@ |
204 | 202 | static inline int crypto_ahash_update(struct ahash_request *req) |
205 | 203 | { |
206 | 204 | return crypto_ahash_reqtfm(req)->update(req); |
207 | -} | |
208 | - | |
209 | -static inline int crypto_ahash_final(struct ahash_request *req) | |
210 | -{ | |
211 | - return crypto_ahash_reqtfm(req)->final(req); | |
212 | 205 | } |
213 | 206 | |
214 | 207 | static inline void ahash_request_set_tfm(struct ahash_request *req, |
include/crypto/internal/hash.h
... | ... | @@ -59,6 +59,11 @@ |
59 | 59 | struct crypto_hash_walk *walk, |
60 | 60 | struct scatterlist *sg, unsigned int len); |
61 | 61 | |
62 | +static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk) | |
63 | +{ | |
64 | + return !(walk->entrylen | walk->total); | |
65 | +} | |
66 | + | |
62 | 67 | int crypto_register_ahash(struct ahash_alg *alg); |
63 | 68 | int crypto_unregister_ahash(struct ahash_alg *alg); |
64 | 69 | int ahash_register_instance(struct crypto_template *tmpl, |
... | ... | @@ -94,6 +99,7 @@ |
94 | 99 | struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask); |
95 | 100 | |
96 | 101 | int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc); |
102 | +int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc); | |
97 | 103 | int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc); |
98 | 104 | |
99 | 105 | int crypto_init_shash_ops_async(struct crypto_tfm *tfm); |