Commit d0b9007a27206fe944d9db72e13dab157b8e118c
1 parent: 50b6544e13
Exists in master and in 7 other branches
[CRYPTO] pcbc: Use crypto_xor
This patch replaces the custom xor in PCBC with the generic crypto_xor. It changes the operations for in-place encryption slightly to avoid calling crypto_xor with tmpbuf, since tmpbuf is not necessarily aligned.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
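For orientation, here is a minimal sketch of what the encrypt-segment loop in the patch computes, assuming crypto_xor(dst, src, n) simply XORs n bytes of src into dst. xor_block() and encrypt_block() are illustrative stand-ins, not kernel APIs; this is not the kernel code itself.

#include <string.h>

typedef unsigned char u8;

/* Byte-wise stand-in for crypto_xor(): dst[i] ^= src[i] for bs bytes. */
static void xor_block(u8 *dst, const u8 *src, unsigned int bs)
{
        while (bs--)
                *dst++ ^= *src++;
}

/*
 * One walk segment of PCBC encryption: C[i] = E(P[i] ^ chain),
 * next chain = P[i] ^ C[i].  encrypt_block() stands in for the
 * underlying cipher's single-block encrypt.
 */
static void pcbc_encrypt_segment(void (*encrypt_block)(u8 *dst, const u8 *src),
                                 u8 *dst, const u8 *src, u8 *iv,
                                 unsigned int nbytes, unsigned int bs)
{
        while (nbytes >= bs) {
                xor_block(iv, src, bs);        /* iv = chain ^ P[i]        */
                encrypt_block(dst, iv);        /* C[i] = E(chain ^ P[i])   */
                memcpy(iv, dst, bs);           /* iv = C[i]                */
                xor_block(iv, src, bs);        /* next chain = C[i] ^ P[i] */
                src += bs;
                dst += bs;
                nbytes -= bs;
        }
}

The in-place variant keeps a copy of the plaintext block in a stack buffer (tmpbuf); after this patch that copy is only ever read and written with memcpy(), so crypto_xor() never operates on the possibly unaligned stack buffer.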
Showing 1 changed file with 24 additions and 81 deletions
crypto/pcbc.c
@@ -24,7 +24,6 @@

 struct crypto_pcbc_ctx {
         struct crypto_cipher *child;
-        void (*xor)(u8 *dst, const u8 *src, unsigned int bs);
 };

 static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key,
@@ -45,9 +44,7 @@

 static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc,
                                        struct blkcipher_walk *walk,
-                                       struct crypto_cipher *tfm,
-                                       void (*xor)(u8 *, const u8 *,
-                                                   unsigned int))
+                                       struct crypto_cipher *tfm)
 {
         void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                 crypto_cipher_alg(tfm)->cia_encrypt;
@@ -58,10 +55,10 @@
         u8 *iv = walk->iv;

         do {
-                xor(iv, src, bsize);
+                crypto_xor(iv, src, bsize);
                 fn(crypto_cipher_tfm(tfm), dst, iv);
                 memcpy(iv, dst, bsize);
-                xor(iv, src, bsize);
+                crypto_xor(iv, src, bsize);

                 src += bsize;
                 dst += bsize;
@@ -72,9 +69,7 @@

 static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc,
                                        struct blkcipher_walk *walk,
-                                       struct crypto_cipher *tfm,
-                                       void (*xor)(u8 *, const u8 *,
-                                                   unsigned int))
+                                       struct crypto_cipher *tfm)
 {
         void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                 crypto_cipher_alg(tfm)->cia_encrypt;
@@ -86,10 +81,10 @@

         do {
                 memcpy(tmpbuf, src, bsize);
-                xor(iv, tmpbuf, bsize);
+                crypto_xor(iv, src, bsize);
                 fn(crypto_cipher_tfm(tfm), src, iv);
-                memcpy(iv, src, bsize);
-                xor(iv, tmpbuf, bsize);
+                memcpy(iv, tmpbuf, bsize);
+                crypto_xor(iv, src, bsize);

                 src += bsize;
         } while ((nbytes -= bsize) >= bsize);
@@ -107,7 +102,6 @@
         struct crypto_blkcipher *tfm = desc->tfm;
         struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
         struct crypto_cipher *child = ctx->child;
-        void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
         int err;

         blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -115,11 +109,11 @@

         while ((nbytes = walk.nbytes)) {
                 if (walk.src.virt.addr == walk.dst.virt.addr)
-                        nbytes = crypto_pcbc_encrypt_inplace(desc, &walk, child,
-                                                             xor);
+                        nbytes = crypto_pcbc_encrypt_inplace(desc, &walk,
+                                                             child);
                 else
-                        nbytes = crypto_pcbc_encrypt_segment(desc, &walk, child,
-                                                             xor);
+                        nbytes = crypto_pcbc_encrypt_segment(desc, &walk,
+                                                             child);
                 err = blkcipher_walk_done(desc, &walk, nbytes);
         }

@@ -128,9 +122,7 @@

 static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc,
                                        struct blkcipher_walk *walk,
-                                       struct crypto_cipher *tfm,
-                                       void (*xor)(u8 *, const u8 *,
-                                                   unsigned int))
+                                       struct crypto_cipher *tfm)
 {
         void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                 crypto_cipher_alg(tfm)->cia_decrypt;
@@ -142,9 +134,9 @@

         do {
                 fn(crypto_cipher_tfm(tfm), dst, src);
-                xor(dst, iv, bsize);
+                crypto_xor(dst, iv, bsize);
                 memcpy(iv, src, bsize);
-                xor(iv, dst, bsize);
+                crypto_xor(iv, dst, bsize);

                 src += bsize;
                 dst += bsize;
@@ -157,9 +149,7 @@

 static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc,
                                        struct blkcipher_walk *walk,
-                                       struct crypto_cipher *tfm,
-                                       void (*xor)(u8 *, const u8 *,
-                                                   unsigned int))
+                                       struct crypto_cipher *tfm)
 {
         void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                 crypto_cipher_alg(tfm)->cia_decrypt;
@@ -172,9 +162,9 @@
         do {
                 memcpy(tmpbuf, src, bsize);
                 fn(crypto_cipher_tfm(tfm), src, src);
-                xor(src, iv, bsize);
+                crypto_xor(src, iv, bsize);
                 memcpy(iv, tmpbuf, bsize);
-                xor(iv, src, bsize);
+                crypto_xor(iv, src, bsize);

                 src += bsize;
         } while ((nbytes -= bsize) >= bsize);
@@ -192,7 +182,6 @@
         struct crypto_blkcipher *tfm = desc->tfm;
         struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
         struct crypto_cipher *child = ctx->child;
-        void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
         int err;

         blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -200,48 +189,17 @@

         while ((nbytes = walk.nbytes)) {
                 if (walk.src.virt.addr == walk.dst.virt.addr)
-                        nbytes = crypto_pcbc_decrypt_inplace(desc, &walk, child,
-                                                             xor);
+                        nbytes = crypto_pcbc_decrypt_inplace(desc, &walk,
+                                                             child);
                 else
-                        nbytes = crypto_pcbc_decrypt_segment(desc, &walk, child,
-                                                             xor);
+                        nbytes = crypto_pcbc_decrypt_segment(desc, &walk,
+                                                             child);
                 err = blkcipher_walk_done(desc, &walk, nbytes);
         }

         return err;
 }

-static void xor_byte(u8 *a, const u8 *b, unsigned int bs)
-{
-        do {
-                *a++ ^= *b++;
-        } while (--bs);
-}
-
-static void xor_quad(u8 *dst, const u8 *src, unsigned int bs)
-{
-        u32 *a = (u32 *)dst;
-        u32 *b = (u32 *)src;
-
-        do {
-                *a++ ^= *b++;
-        } while ((bs -= 4));
-}
-
-static void xor_64(u8 *a, const u8 *b, unsigned int bs)
-{
-        ((u32 *)a)[0] ^= ((u32 *)b)[0];
-        ((u32 *)a)[1] ^= ((u32 *)b)[1];
-}
-
-static void xor_128(u8 *a, const u8 *b, unsigned int bs)
-{
-        ((u32 *)a)[0] ^= ((u32 *)b)[0];
-        ((u32 *)a)[1] ^= ((u32 *)b)[1];
-        ((u32 *)a)[2] ^= ((u32 *)b)[2];
-        ((u32 *)a)[3] ^= ((u32 *)b)[3];
-}
-
 static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm)
 {
         struct crypto_instance *inst = (void *)tfm->__crt_alg;
@@ -249,22 +207,6 @@
         struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm);
         struct crypto_cipher *cipher;

-        switch (crypto_tfm_alg_blocksize(tfm)) {
-        case 8:
-                ctx->xor = xor_64;
-                break;
-
-        case 16:
-                ctx->xor = xor_128;
-                break;
-
-        default:
-                if (crypto_tfm_alg_blocksize(tfm) % 4)
-                        ctx->xor = xor_byte;
-                else
-                        ctx->xor = xor_quad;
-        }
-
         cipher = crypto_spawn_cipher(spawn);
         if (IS_ERR(cipher))
                 return PTR_ERR(cipher);
@@ -304,8 +246,9 @@
         inst->alg.cra_alignmask = alg->cra_alignmask;
         inst->alg.cra_type = &crypto_blkcipher_type;

-        if (!(alg->cra_blocksize % 4))
-                inst->alg.cra_alignmask |= 3;
+        /* We access the data as u32s when xoring. */
+        inst->alg.cra_alignmask |= __alignof__(u32) - 1;
+
         inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
         inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
         inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
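A note on the final hunk: crypto_xor() processes the buffers a u32 at a time, which is why the instance's cra_alignmask is unconditionally raised to __alignof__(u32) - 1, and why the in-place paths above were rearranged so that the stack-local tmpbuf (which the alignmask cannot cover) is only ever touched with memcpy(). Below is a hedged sketch of such a word-wise XOR; xor_words() is an illustration of the idea, not the kernel's actual crypto_xor() implementation.

#include <stdint.h>

/*
 * XOR size bytes of src into dst, a 32-bit word at a time.  Valid only
 * when dst and src are 4-byte aligned, which is exactly what the
 * cra_alignmask change guarantees for the walk buffers and the IV, but
 * not for an arbitrary stack buffer.
 */
static void xor_words(uint8_t *dst, const uint8_t *src, unsigned int size)
{
        uint32_t *d = (uint32_t *)dst;
        const uint32_t *s = (const uint32_t *)src;

        for (; size >= 4; size -= 4)
                *d++ ^= *s++;

        /* A full implementation would fall back to byte XORs for any tail. */
        dst = (uint8_t *)d;
        src = (const uint8_t *)s;
        while (size--)
                *dst++ ^= *src++;
}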