Commit 84c911523020a2e39b307a2da26ee1886b7214fe

Authored by Herbert Xu
1 parent 5311f248b7

[CRYPTO] gcm: Add support for async ciphers

This patch adds the necessary changes for GCM to be used with async
ciphers.  This would allow it to be used with hardware devices that
support CTR.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Showing 1 changed file with 112 additions and 78 deletions (side-by-side diff)

... ... @@ -11,6 +11,7 @@
11 11 #include <crypto/algapi.h>
12 12 #include <crypto/gf128mul.h>
13 13 #include <crypto/scatterwalk.h>
  14 +#include <linux/completion.h>
14 15 #include <linux/err.h>
15 16 #include <linux/init.h>
16 17 #include <linux/kernel.h>
17 18  
... ... @@ -38,11 +39,17 @@
38 39 struct crypto_gcm_req_priv_ctx {
39 40 u8 auth_tag[16];
40 41 u8 iauth_tag[16];
41   - u8 counter[16];
  42 + struct scatterlist src[2];
  43 + struct scatterlist dst[2];
42 44 struct crypto_gcm_ghash_ctx ghash;
43 45 struct ablkcipher_request abreq;
44 46 };
45 47  
  48 +struct crypto_gcm_setkey_result {
  49 + int err;
  50 + struct completion completion;
  51 +};
  52 +
46 53 static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
47 54 struct aead_request *req)
48 55 {
49 56  
50 57  
51 58  
... ... @@ -158,33 +165,15 @@
158 165 crypto_xor(dst, buf, 16);
159 166 }
160 167  
161   -static inline void crypto_gcm_set_counter(u8 *counterblock, u32 value)
  168 +static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
162 169 {
163   - *((u32 *)&counterblock[12]) = cpu_to_be32(value + 1);
164   -}
  170 + struct crypto_gcm_setkey_result *result = req->data;
165 171  
166   -static int crypto_gcm_encrypt_counter(struct crypto_aead *aead, u8 *block,
167   - u32 value, const u8 *iv)
168   -{
169   - struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
170   - struct crypto_ablkcipher *ctr = ctx->ctr;
171   - struct ablkcipher_request req;
172   - struct scatterlist sg;
173   - u8 counterblock[16];
  172 + if (err == -EINPROGRESS)
  173 + return;
174 174  
175   - if (iv == NULL)
176   - memset(counterblock, 0, 12);
177   - else
178   - memcpy(counterblock, iv, 12);
179   -
180   - crypto_gcm_set_counter(counterblock, value);
181   -
182   - sg_init_one(&sg, block, 16);
183   - ablkcipher_request_set_tfm(&req, ctr);
184   - ablkcipher_request_set_crypt(&req, &sg, &sg, 16, counterblock);
185   - ablkcipher_request_set_callback(&req, 0, NULL, NULL);
186   - memset(block, 0, 16);
187   - return crypto_ablkcipher_encrypt(&req);
  175 + result->err = err;
  176 + complete(&result->completion);
188 177 }
189 178  
190 179 static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
191 180  
192 181  
193 182  
194 183  
195 184  
196 185  
197 186  
198 187  
199 188  
200 189  
201 190  
202 191  
203 192  
... ... @@ -192,73 +181,103 @@
192 181 {
193 182 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
194 183 struct crypto_ablkcipher *ctr = ctx->ctr;
195   - int alignmask = crypto_ablkcipher_alignmask(ctr);
196   - u8 alignbuf[16+alignmask];
197   - u8 *hash = (u8 *)ALIGN((unsigned long)alignbuf, alignmask+1);
198   - int err = 0;
  184 + struct {
  185 + be128 hash;
  186 + u8 iv[8];
199 187  
  188 + struct crypto_gcm_setkey_result result;
  189 +
  190 + struct scatterlist sg[1];
  191 + struct ablkcipher_request req;
  192 + } *data;
  193 + int err;
  194 +
200 195 crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
201 196 crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
202 197 CRYPTO_TFM_REQ_MASK);
203 198  
204 199 err = crypto_ablkcipher_setkey(ctr, key, keylen);
205 200 if (err)
206   - goto out;
  201 + return err;
207 202  
208 203 crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
209 204 CRYPTO_TFM_RES_MASK);
210 205  
211   - err = crypto_gcm_encrypt_counter(aead, hash, -1, NULL);
  206 + data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
  207 + GFP_KERNEL);
  208 + if (!data)
  209 + return -ENOMEM;
  210 +
  211 + init_completion(&data->result.completion);
  212 + sg_init_one(data->sg, &data->hash, sizeof(data->hash));
  213 + ablkcipher_request_set_tfm(&data->req, ctr);
  214 + ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
  215 + CRYPTO_TFM_REQ_MAY_BACKLOG,
  216 + crypto_gcm_setkey_done,
  217 + &data->result);
  218 + ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
  219 + sizeof(data->hash), data->iv);
  220 +
  221 + err = crypto_ablkcipher_encrypt(&data->req);
  222 + if (err == -EINPROGRESS || err == -EBUSY) {
  223 + err = wait_for_completion_interruptible(
  224 + &data->result.completion);
  225 + if (!err)
  226 + err = data->result.err;
  227 + }
  228 +
212 229 if (err)
213 230 goto out;
214 231  
215 232 if (ctx->gf128 != NULL)
216 233 gf128mul_free_4k(ctx->gf128);
217 234  
218   - ctx->gf128 = gf128mul_init_4k_lle((be128 *)hash);
  235 + ctx->gf128 = gf128mul_init_4k_lle(&data->hash);
219 236  
220 237 if (ctx->gf128 == NULL)
221 238 err = -ENOMEM;
222 239  
223   - out:
  240 +out:
  241 + kfree(data);
224 242 return err;
225 243 }
226 244  
227   -static int crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
228   - struct aead_request *req,
229   - unsigned int cryptlen,
230   - void (*done)(struct crypto_async_request *,
231   - int))
  245 +static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
  246 + struct aead_request *req,
  247 + unsigned int cryptlen)
232 248 {
233 249 struct crypto_aead *aead = crypto_aead_reqtfm(req);
234 250 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
235 251 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
236 252 u32 flags = req->base.tfm->crt_flags;
237   - u8 *auth_tag = pctx->auth_tag;
238   - u8 *counter = pctx->counter;
239 253 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
240   - int err = 0;
  254 + struct scatterlist *dst;
  255 + __be32 counter = cpu_to_be32(1);
241 256  
242   - ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
243   - ablkcipher_request_set_callback(ablk_req, aead_request_flags(req),
244   - done, req);
245   - ablkcipher_request_set_crypt(ablk_req, req->src, req->dst,
246   - cryptlen, counter);
  257 + memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
  258 + memcpy(req->iv + 12, &counter, 4);
247 259  
248   - err = crypto_gcm_encrypt_counter(aead, auth_tag, 0, req->iv);
249   - if (err)
250   - goto out;
  260 + sg_init_table(pctx->src, 2);
  261 + sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
  262 + scatterwalk_sg_chain(pctx->src, 2, req->src);
251 263  
252   - memcpy(counter, req->iv, 12);
253   - crypto_gcm_set_counter(counter, 1);
  264 + dst = pctx->src;
  265 + if (req->src != req->dst) {
  266 + sg_init_table(pctx->dst, 2);
  267 + sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
  268 + scatterwalk_sg_chain(pctx->dst, 2, req->dst);
  269 + dst = pctx->dst;
  270 + }
254 271  
  272 + ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
  273 + ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
  274 + cryptlen + sizeof(pctx->auth_tag),
  275 + req->iv);
  276 +
255 277 crypto_gcm_ghash_init(ghash, flags, ctx->gf128);
256 278  
257 279 crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
258 280 crypto_gcm_ghash_flush(ghash);
259   -
260   - out:
261   - return err;
262 281 }
263 282  
264 283 static int crypto_gcm_hash(struct aead_request *req)
265 284  
266 285  
267 286  
268 287  
... ... @@ -291,25 +310,44 @@
291 310 {
292 311 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
293 312 struct ablkcipher_request *abreq = &pctx->abreq;
294   - int err = 0;
  313 + int err;
295 314  
296   - err = crypto_gcm_init_crypt(abreq, req, req->cryptlen,
297   - crypto_gcm_encrypt_done);
  315 + crypto_gcm_init_crypt(abreq, req, req->cryptlen);
  316 + ablkcipher_request_set_callback(abreq, aead_request_flags(req),
  317 + crypto_gcm_encrypt_done, req);
  318 +
  319 + err = crypto_ablkcipher_encrypt(abreq);
298 320 if (err)
299 321 return err;
300 322  
301   - if (req->cryptlen) {
302   - err = crypto_ablkcipher_encrypt(abreq);
303   - if (err)
304   - return err;
305   - }
306   -
307 323 return crypto_gcm_hash(req);
308 324 }
309 325  
  326 +static int crypto_gcm_verify(struct aead_request *req)
  327 +{
  328 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
  329 + struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  330 + struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
  331 + u8 *auth_tag = pctx->auth_tag;
  332 + u8 *iauth_tag = pctx->iauth_tag;
  333 + unsigned int authsize = crypto_aead_authsize(aead);
  334 + unsigned int cryptlen = req->cryptlen - authsize;
  335 +
  336 + crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
  337 +
  338 + authsize = crypto_aead_authsize(aead);
  339 + scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
  340 + return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
  341 +}
  342 +
310 343 static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
311 344 {
312   - aead_request_complete(areq->data, err);
  345 + struct aead_request *req = areq->data;
  346 +
  347 + if (!err)
  348 + err = crypto_gcm_verify(req);
  349 +
  350 + aead_request_complete(req, err);
313 351 }
314 352  
315 353 static int crypto_gcm_decrypt(struct aead_request *req)
... ... @@ -317,8 +355,6 @@
317 355 struct crypto_aead *aead = crypto_aead_reqtfm(req);
318 356 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
319 357 struct ablkcipher_request *abreq = &pctx->abreq;
320   - u8 *auth_tag = pctx->auth_tag;
321   - u8 *iauth_tag = pctx->iauth_tag;
322 358 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
323 359 unsigned int cryptlen = req->cryptlen;
324 360 unsigned int authsize = crypto_aead_authsize(aead);
325 361  
326 362  
327 363  
... ... @@ -328,19 +364,17 @@
328 364 return -EINVAL;
329 365 cryptlen -= authsize;
330 366  
331   - err = crypto_gcm_init_crypt(abreq, req, cryptlen,
332   - crypto_gcm_decrypt_done);
333   - if (err)
334   - return err;
  367 + crypto_gcm_init_crypt(abreq, req, cryptlen);
  368 + ablkcipher_request_set_callback(abreq, aead_request_flags(req),
  369 + crypto_gcm_decrypt_done, req);
335 370  
336 371 crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
337   - crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
338 372  
339   - scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
340   - if (memcmp(iauth_tag, auth_tag, authsize))
341   - return -EBADMSG;
  373 + err = crypto_ablkcipher_decrypt(abreq);
  374 + if (err)
  375 + return err;
342 376  
343   - return crypto_ablkcipher_decrypt(abreq);
  377 + return crypto_gcm_verify(req);
344 378 }
345 379  
346 380 static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
... ... @@ -436,7 +470,7 @@
436 470 inst->alg.cra_blocksize = 16;
437 471 inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
438 472 inst->alg.cra_type = &crypto_aead_type;
439   - inst->alg.cra_aead.ivsize = 12;
  473 + inst->alg.cra_aead.ivsize = 16;
440 474 inst->alg.cra_aead.maxauthsize = 16;
441 475 inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
442 476 inst->alg.cra_init = crypto_gcm_init_tfm;