Commit 40725181b74be6b0e3bdc8c05bd1e0b9873ec5cc
Committed by: David S. Miller
Parent: c774e93e21
Exists in: master and 39 other branches
[CRYPTO] Add support for low-level multi-block operations
This patch adds hooks for cipher algorithms to implement multi-block ECB/CBC operations directly. This is expected to provide significant performance boosts to the VIA PadLock.

It could also be used for improving software implementations such as AES, where operating on multiple blocks at a time may enable certain optimisations.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: David S. Miller <davem@davemloft.net>
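For illustration, a driver that can process a run of blocks in a single call would supply one of the new hooks along the following lines. This is a minimal sketch under stated assumptions, not code from this patch: my_encrypt_ecb and my_encrypt_block are placeholder names, and only crypto_tfm_ctx(), crypto_tfm_alg_blocksize() and the cipher_desc layout introduced below are taken from the tree.

        /*
         * Hypothetical cia_encrypt_ecb implementation (sketch only).  The
         * walker in crypto/cipher.c hands prfn at least one full block, so
         * the loop simply processes whole blocks back to back.
         */
        static unsigned int my_encrypt_ecb(const struct cipher_desc *desc, u8 *dst,
                                           const u8 *src, unsigned int nbytes)
        {
                const unsigned int bsize = crypto_tfm_alg_blocksize(desc->tfm);
                void *ctx = crypto_tfm_ctx(desc->tfm);
                unsigned int done = 0;

                do {
                        my_encrypt_block(ctx, dst + done, src + done);  /* placeholder single-block routine */
                        done += bsize;
                } while (nbytes - done >= bsize);

                return done;    /* number of bytes actually processed */
        }

A driver would then point .cia_encrypt_ecb at such a routine in its struct cipher_alg; algorithms that leave the new fields NULL keep the existing one-block-at-a-time behaviour.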
Showing 3 changed files with 45 additions and 26 deletions
crypto/cipher.c
@@ -23,14 +23,6 @@
 #include "internal.h"
 #include "scatterwalk.h"
 
-struct cipher_desc {
-        struct crypto_tfm *tfm;
-        void (*crfn)(void *ctx, u8 *dst, const u8 *src);
-        unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
-                             const u8 *src, unsigned int nbytes);
-        void *info;
-};
-
 static inline void xor_64(u8 *a, const u8 *b)
 {
         ((u32 *)a)[0] ^= ((u32 *)b)[0];
@@ -224,10 +216,11 @@
                        struct scatterlist *src, unsigned int nbytes)
 {
         struct cipher_desc desc;
+        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
-        desc.crfn = tfm->__crt_alg->cra_cipher.cia_encrypt;
-        desc.prfn = ecb_process;
+        desc.crfn = cipher->cia_encrypt;
+        desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;
 
         return crypt(&desc, dst, src, nbytes);
 }
@@ -238,10 +231,11 @@
                        unsigned int nbytes)
 {
         struct cipher_desc desc;
+        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
-        desc.crfn = tfm->__crt_alg->cra_cipher.cia_decrypt;
-        desc.prfn = ecb_process;
+        desc.crfn = cipher->cia_decrypt;
+        desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;
 
         return crypt(&desc, dst, src, nbytes);
 }
@@ -252,10 +246,11 @@
                        unsigned int nbytes)
 {
         struct cipher_desc desc;
+        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
-        desc.crfn = tfm->__crt_alg->cra_cipher.cia_encrypt;
-        desc.prfn = cbc_process_encrypt;
+        desc.crfn = cipher->cia_encrypt;
+        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
         desc.info = tfm->crt_cipher.cit_iv;
 
         return crypt(&desc, dst, src, nbytes);
@@ -267,10 +262,11 @@
                        unsigned int nbytes, u8 *iv)
 {
         struct cipher_desc desc;
+        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
-        desc.crfn = tfm->__crt_alg->cra_cipher.cia_encrypt;
-        desc.prfn = cbc_process_encrypt;
+        desc.crfn = cipher->cia_encrypt;
+        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
         desc.info = iv;
 
         return crypt(&desc, dst, src, nbytes);
@@ -282,10 +278,11 @@
                        unsigned int nbytes)
 {
         struct cipher_desc desc;
+        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
-        desc.crfn = tfm->__crt_alg->cra_cipher.cia_decrypt;
-        desc.prfn = cbc_process_decrypt;
+        desc.crfn = cipher->cia_decrypt;
+        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
         desc.info = tfm->crt_cipher.cit_iv;
 
         return crypt(&desc, dst, src, nbytes);
@@ -297,10 +294,11 @@
                        unsigned int nbytes, u8 *iv)
 {
         struct cipher_desc desc;
+        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
-        desc.crfn = tfm->__crt_alg->cra_cipher.cia_decrypt;
-        desc.prfn = cbc_process_decrypt;
+        desc.crfn = cipher->cia_decrypt;
+        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
         desc.info = iv;
 
         return crypt(&desc, dst, src, nbytes);
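A note on the `?:` spelling used in the hunks above: it is the GNU C conditional operator with the middle operand omitted, so the driver-provided multi-block hook is used when present and the generic per-block routine remains the fallback. The two assignments below are equivalent, except that the left-hand operand is only evaluated once:

        desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;
        desc.prfn = cipher->cia_encrypt_ecb ? cipher->cia_encrypt_ecb : ecb_process;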
crypto/internal.h
@@ -42,11 +42,6 @@
                 cond_resched();
 }
 
-static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
-{
-        return (void *)&tfm[1];
-}
-
 struct crypto_alg *crypto_alg_lookup(const char *name);
 
 /* A far more intelligent version of this is planned.  For now, just
include/linux/crypto.h
@@ -61,7 +61,16 @@
 #define CRYPTO_DIR_DECRYPT      0
 
 struct scatterlist;
+struct crypto_tfm;
 
+struct cipher_desc {
+        struct crypto_tfm *tfm;
+        void (*crfn)(void *ctx, u8 *dst, const u8 *src);
+        unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
+                             const u8 *src, unsigned int nbytes);
+        void *info;
+};
+
 /*
  * Algorithms: modular crypto algorithm implementations, managed
  * via crypto_register_alg() and crypto_unregister_alg().
@@ -73,6 +82,19 @@
                           unsigned int keylen, u32 *flags);
         void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src);
         void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src);
+
+        unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
+                                        u8 *dst, const u8 *src,
+                                        unsigned int nbytes);
+        unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
+                                        u8 *dst, const u8 *src,
+                                        unsigned int nbytes);
+        unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
+                                        u8 *dst, const u8 *src,
+                                        unsigned int nbytes);
+        unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
+                                        u8 *dst, const u8 *src,
+                                        unsigned int nbytes);
 };
 
 struct digest_alg {
@@ -136,7 +158,6 @@
  * and core processing logic.  Managed via crypto_alloc_tfm() and
  * crypto_free_tfm(), as well as the various helpers below.
  */
-struct crypto_tfm;
 
 struct cipher_tfm {
         void *cit_iv;
@@ -264,6 +285,11 @@
 {
         BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
         return tfm->__crt_alg->cra_digest.dia_digestsize;
+}
+
+static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
+{
+        return (void *)&tfm[1];
 }
 
 /*
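For context, callers of the crypto API are unaffected by this patch: crypto_cipher_encrypt() and friends select the multi-block hook internally when the algorithm offers one. A hedged usage sketch against the API of this kernel generation ("aes" is only an example algorithm name; key, buf and nbytes are assumed to be set up elsewhere, and error handling is mostly elided):

        struct scatterlist sg;
        struct crypto_tfm *tfm;

        tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_ECB);
        if (!tfm)
                return -ENOMEM;

        crypto_cipher_setkey(tfm, key, 16);

        /* Describe one flat buffer with a single scatterlist entry. */
        sg.page = virt_to_page(buf);
        sg.offset = offset_in_page(buf);
        sg.length = nbytes;

        /* Takes the cia_encrypt_ecb path if the driver provides it,
         * otherwise falls back to the generic ecb_process loop. */
        crypto_cipher_encrypt(tfm, &sg, &sg, nbytes);

        crypto_free_tfm(tfm);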