commit 84c911523020a2e39b307a2da26ee1886b7214fe
Author: Herbert Xu
Parent: 5311f248b7

[CRYPTO] gcm: Add support for async ciphers

This patch adds the changes necessary for GCM to be used with async
ciphers.  This allows it to be used with hardware devices that
support CTR.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Showing 1 changed file with 112 additions and 78 deletions.
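The behavioural change for callers is that setkey/encrypt/decrypt may now
return -EINPROGRESS (or -EBUSY when backlogged) and finish later through a
callback.  For reference, here is a minimal caller-side sketch of the
completion-wait pattern, mirroring the crypto_gcm_setkey_done() helper this
patch adds; the struct and function names are illustrative, not part of the
patch:

/*
 * Illustrative only (not part of the patch): driving an async crypto
 * request synchronously via a completion.
 */
#include <linux/completion.h>
#include <linux/crypto.h>

struct sync_wait {
	struct completion completion;
	int err;
};

static void sync_wait_done(struct crypto_async_request *req, int err)
{
	struct sync_wait *wait = req->data;

	if (err == -EINPROGRESS)	/* backlogged request has started */
		return;

	wait->err = err;
	complete(&wait->completion);
}

/* Feed the return value of crypto_ablkcipher_encrypt() and friends here. */
static int sync_wait_result(struct sync_wait *wait, int err)
{
	if (err == -EINPROGRESS || err == -EBUSY) {
		wait_for_completion(&wait->completion);
		err = wait->err;
	}
	return err;
}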

diff --git a/crypto/gcm.c b/crypto/gcm.c

 /*
  * GCM: Galois/Counter Mode.
  *
  * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 as published
  * by the Free Software Foundation.
  */
 
 #include <crypto/algapi.h>
 #include <crypto/gf128mul.h>
 #include <crypto/scatterwalk.h>
+#include <linux/completion.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
 #include <linux/slab.h>
 
 #include "internal.h"
 
 struct gcm_instance_ctx {
 	struct crypto_spawn ctr;
 };
 
 struct crypto_gcm_ctx {
 	struct crypto_ablkcipher *ctr;
 	struct gf128mul_4k *gf128;
 };
 
 struct crypto_gcm_ghash_ctx {
 	u32 bytes;
 	u32 flags;
 	struct gf128mul_4k *gf128;
 	u8 buffer[16];
 };
 
 struct crypto_gcm_req_priv_ctx {
 	u8 auth_tag[16];
 	u8 iauth_tag[16];
-	u8 counter[16];
+	struct scatterlist src[2];
+	struct scatterlist dst[2];
 	struct crypto_gcm_ghash_ctx ghash;
 	struct ablkcipher_request abreq;
 };
 
+struct crypto_gcm_setkey_result {
+	int err;
+	struct completion completion;
+};
+
 static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
 	struct aead_request *req)
 {
 	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
 
 	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
 }
 
 static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
 				  struct gf128mul_4k *gf128)
 {
 	ctx->bytes = 0;
 	ctx->flags = flags;
 	ctx->gf128 = gf128;
 	memset(ctx->buffer, 0, 16);
 }
 
 static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
 				    const u8 *src, unsigned int srclen)
 {
 	u8 *dst = ctx->buffer;
 
 	if (ctx->bytes) {
 		int n = min(srclen, ctx->bytes);
 		u8 *pos = dst + (16 - ctx->bytes);
 
 		ctx->bytes -= n;
 		srclen -= n;
 
 		while (n--)
 			*pos++ ^= *src++;
 
 		if (!ctx->bytes)
 			gf128mul_4k_lle((be128 *)dst, ctx->gf128);
 	}
 
 	while (srclen >= 16) {
 		crypto_xor(dst, src, 16);
 		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
 		src += 16;
 		srclen -= 16;
 	}
 
 	if (srclen) {
 		ctx->bytes = 16 - srclen;
 		while (srclen--)
 			*dst++ ^= *src++;
 	}
 }
 
 static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
 				       struct scatterlist *sg, int len)
 {
 	struct scatter_walk walk;
 	u8 *src;
 	int n;
 
 	if (!len)
 		return;
 
 	scatterwalk_start(&walk, sg);
 
 	while (len) {
 		n = scatterwalk_clamp(&walk, len);
 
 		if (!n) {
 			scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
 			n = scatterwalk_clamp(&walk, len);
 		}
 
 		src = scatterwalk_map(&walk, 0);
 
 		crypto_gcm_ghash_update(ctx, src, n);
 		len -= n;
 
 		scatterwalk_unmap(src, 0);
 		scatterwalk_advance(&walk, n);
 		scatterwalk_done(&walk, 0, len);
 		if (len)
 			crypto_yield(ctx->flags);
 	}
 }
 
 static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
 {
 	u8 *dst = ctx->buffer;
 
 	if (ctx->bytes) {
 		u8 *tmp = dst + (16 - ctx->bytes);
 
 		while (ctx->bytes--)
 			*tmp++ ^= 0;
 
 		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
 	}
 
 	ctx->bytes = 0;
 }
 
 static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
 				       unsigned int authlen,
 				       unsigned int cryptlen, u8 *dst)
 {
 	u8 *buf = ctx->buffer;
 	u128 lengths;
 
 	lengths.a = cpu_to_be64(authlen * 8);
 	lengths.b = cpu_to_be64(cryptlen * 8);
 
 	crypto_gcm_ghash_flush(ctx);
 	crypto_xor(buf, (u8 *)&lengths, 16);
 	gf128mul_4k_lle((be128 *)buf, ctx->gf128);
 	crypto_xor(dst, buf, 16);
 }
 
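As context for crypto_gcm_ghash_final_xor() above: it implements the closing
GHASH step from the GCM specification (NIST SP 800-38D notation; X_m is the
running digest in ctx->buffer, H the hash subkey behind ctx->gf128, len() a
64-bit bit count):

    S = \bigl(X_m \oplus (\mathrm{len}(A) \,\Vert\, \mathrm{len}(C))\bigr) \cdot H

The final crypto_xor(dst, buf, 16) folds S into dst, which by that point
already holds the encrypted first counter block, yielding the tag
T = E_K(Y_0) \oplus S.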
-static inline void crypto_gcm_set_counter(u8 *counterblock, u32 value)
+static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
 {
-	*((u32 *)&counterblock[12]) = cpu_to_be32(value + 1);
-}
+	struct crypto_gcm_setkey_result *result = req->data;
 
-static int crypto_gcm_encrypt_counter(struct crypto_aead *aead, u8 *block,
-				      u32 value, const u8 *iv)
-{
-	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
-	struct crypto_ablkcipher *ctr = ctx->ctr;
-	struct ablkcipher_request req;
-	struct scatterlist sg;
-	u8 counterblock[16];
+	if (err == -EINPROGRESS)
+		return;
 
-	if (iv == NULL)
-		memset(counterblock, 0, 12);
-	else
-		memcpy(counterblock, iv, 12);
-
-	crypto_gcm_set_counter(counterblock, value);
-
-	sg_init_one(&sg, block, 16);
-	ablkcipher_request_set_tfm(&req, ctr);
-	ablkcipher_request_set_crypt(&req, &sg, &sg, 16, counterblock);
-	ablkcipher_request_set_callback(&req, 0, NULL, NULL);
-	memset(block, 0, 16);
-	return crypto_ablkcipher_encrypt(&req);
+	result->err = err;
+	complete(&result->completion);
 }
 
 static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
 			     unsigned int keylen)
 {
 	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
 	struct crypto_ablkcipher *ctr = ctx->ctr;
-	int alignmask = crypto_ablkcipher_alignmask(ctr);
-	u8 alignbuf[16+alignmask];
-	u8 *hash = (u8 *)ALIGN((unsigned long)alignbuf, alignmask+1);
-	int err = 0;
+	struct {
+		be128 hash;
+		u8 iv[8];
 
+		struct crypto_gcm_setkey_result result;
+
+		struct scatterlist sg[1];
+		struct ablkcipher_request req;
+	} *data;
+	int err;
+
 	crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
 	crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
 				    CRYPTO_TFM_REQ_MASK);
 
 	err = crypto_ablkcipher_setkey(ctr, key, keylen);
 	if (err)
-		goto out;
+		return err;
 
 	crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
 			      CRYPTO_TFM_RES_MASK);
 
-	err = crypto_gcm_encrypt_counter(aead, hash, -1, NULL);
+	data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
+		       GFP_KERNEL);
+	if (!data)
+		return -ENOMEM;
+
+	init_completion(&data->result.completion);
+	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
+	ablkcipher_request_set_tfm(&data->req, ctr);
+	ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
+						    CRYPTO_TFM_REQ_MAY_BACKLOG,
+					crypto_gcm_setkey_done,
+					&data->result);
+	ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
+				     sizeof(data->hash), data->iv);
+
+	err = crypto_ablkcipher_encrypt(&data->req);
+	if (err == -EINPROGRESS || err == -EBUSY) {
+		err = wait_for_completion_interruptible(
+			&data->result.completion);
+		if (!err)
+			err = data->result.err;
+	}
+
 	if (err)
 		goto out;
 
 	if (ctx->gf128 != NULL)
 		gf128mul_free_4k(ctx->gf128);
 
-	ctx->gf128 = gf128mul_init_4k_lle((be128 *)hash);
+	ctx->gf128 = gf128mul_init_4k_lle(&data->hash);
 
 	if (ctx->gf128 == NULL)
 		err = -ENOMEM;
 
 out:
+	kfree(data);
 	return err;
 }
 
-static int crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
+static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
 				 struct aead_request *req,
-				 unsigned int cryptlen,
-				 void (*done)(struct crypto_async_request *,
-					      int))
+				 unsigned int cryptlen)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
 	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
 	u32 flags = req->base.tfm->crt_flags;
-	u8 *auth_tag = pctx->auth_tag;
-	u8 *counter = pctx->counter;
 	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
-	int err = 0;
+	struct scatterlist *dst;
+	__be32 counter = cpu_to_be32(1);
 
-	ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
-	ablkcipher_request_set_callback(ablk_req, aead_request_flags(req),
-					done, req);
-	ablkcipher_request_set_crypt(ablk_req, req->src, req->dst,
-				     cryptlen, counter);
+	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
+	memcpy(req->iv + 12, &counter, 4);
 
-	err = crypto_gcm_encrypt_counter(aead, auth_tag, 0, req->iv);
-	if (err)
-		goto out;
+	sg_init_table(pctx->src, 2);
+	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
+	scatterwalk_sg_chain(pctx->src, 2, req->src);
 
-	memcpy(counter, req->iv, 12);
-	crypto_gcm_set_counter(counter, 1);
+	dst = pctx->src;
+	if (req->src != req->dst) {
+		sg_init_table(pctx->dst, 2);
+		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
+		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
+		dst = pctx->dst;
+	}
 
+	ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
+	ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
+				     cryptlen + sizeof(pctx->auth_tag),
+				     req->iv);
+
 	crypto_gcm_ghash_init(ghash, flags, ctx->gf128);
 
 	crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
 	crypto_gcm_ghash_flush(ghash);
-
-out:
-	return err;
 }
 
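Two things in crypto_gcm_init_crypt() above are worth spelling out.  First,
ivsize grows from 12 to 16 later in the patch: req->iv now carries the whole
first counter block Y_0, with the template writing the big-endian 32-bit 1
into its last four bytes, so the CTR spawn can consume it directly.  Second,
the zero-filled 16-byte auth_tag buffer is chained ahead of the caller's
scatterlist, so a single CTR request over cryptlen + 16 bytes produces both
E_K(Y_0) in auth_tag and the processed payload.  A minimal sketch of that
chaining pattern, with illustrative names and the same era's scatterlist API:

/*
 * Illustrative only: prepending a private buffer to a caller's
 * scatterlist, as crypto_gcm_init_crypt() does with auth_tag.
 */
#include <crypto/scatterwalk.h>
#include <linux/scatterlist.h>

static void chain_tag_before_payload(struct scatterlist chain[2],
				     u8 *tag, unsigned int taglen,
				     struct scatterlist *payload)
{
	sg_init_table(chain, 2);		 /* slot 0 = tag, slot 1 = link */
	sg_set_buf(chain, tag, taglen);
	scatterwalk_sg_chain(chain, 2, payload); /* no copy, just a link */
}

The chaining costs no copies: the second sg slot is only a link entry
pointing at the caller's list.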
 static int crypto_gcm_hash(struct aead_request *req)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
 	u8 *auth_tag = pctx->auth_tag;
 	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
 
 	crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
 	crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
 				   auth_tag);
 
 	scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
 				 crypto_aead_authsize(aead), 1);
 	return 0;
 }
 
 static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
 {
 	struct aead_request *req = areq->data;
 
 	if (!err)
 		err = crypto_gcm_hash(req);
 
 	aead_request_complete(req, err);
 }
 
 static int crypto_gcm_encrypt(struct aead_request *req)
 {
 	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
 	struct ablkcipher_request *abreq = &pctx->abreq;
-	int err = 0;
+	int err;
 
-	err = crypto_gcm_init_crypt(abreq, req, req->cryptlen,
-				    crypto_gcm_encrypt_done);
+	crypto_gcm_init_crypt(abreq, req, req->cryptlen);
+	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+					crypto_gcm_encrypt_done, req);
+
+	err = crypto_ablkcipher_encrypt(abreq);
 	if (err)
 		return err;
 
-	if (req->cryptlen) {
-		err = crypto_ablkcipher_encrypt(abreq);
-		if (err)
-			return err;
-	}
-
 	return crypto_gcm_hash(req);
 }
 
+static int crypto_gcm_verify(struct aead_request *req)
+{
+	struct crypto_aead *aead = crypto_aead_reqtfm(req);
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
+	u8 *auth_tag = pctx->auth_tag;
+	u8 *iauth_tag = pctx->iauth_tag;
+	unsigned int authsize = crypto_aead_authsize(aead);
+	unsigned int cryptlen = req->cryptlen - authsize;
+
+	crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
+
+	authsize = crypto_aead_authsize(aead);
+	scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
+	return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
+}
+
 static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
 {
-	aead_request_complete(areq->data, err);
+	struct aead_request *req = areq->data;
+
+	if (!err)
+		err = crypto_gcm_verify(req);
+
+	aead_request_complete(req, err);
 }
 
 static int crypto_gcm_decrypt(struct aead_request *req)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
 	struct ablkcipher_request *abreq = &pctx->abreq;
-	u8 *auth_tag = pctx->auth_tag;
-	u8 *iauth_tag = pctx->iauth_tag;
 	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
 	unsigned int cryptlen = req->cryptlen;
 	unsigned int authsize = crypto_aead_authsize(aead);
 	int err;
 
 	if (cryptlen < authsize)
 		return -EINVAL;
 	cryptlen -= authsize;
 
-	err = crypto_gcm_init_crypt(abreq, req, cryptlen,
-				    crypto_gcm_decrypt_done);
-	if (err)
-		return err;
+	crypto_gcm_init_crypt(abreq, req, cryptlen);
+	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+					crypto_gcm_decrypt_done, req);
 
 	crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
-	crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
 
-	scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
-	if (memcmp(iauth_tag, auth_tag, authsize))
-		return -EBADMSG;
+	err = crypto_ablkcipher_decrypt(abreq);
+	if (err)
+		return err;
 
-	return crypto_ablkcipher_decrypt(abreq);
+	return crypto_gcm_verify(req);
 }
 
 static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_instance *inst = (void *)tfm->__crt_alg;
 	struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
 	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct crypto_ablkcipher *ctr;
 	unsigned long align;
 	int err;
 
 	ctr = crypto_spawn_ablkcipher(&ictx->ctr);
 	err = PTR_ERR(ctr);
 	if (IS_ERR(ctr))
 		return err;
 
 	ctx->ctr = ctr;
 	ctx->gf128 = NULL;
 
 	align = crypto_tfm_alg_alignmask(tfm);
 	align &= ~(crypto_tfm_ctx_alignment() - 1);
 	tfm->crt_aead.reqsize = align +
 				sizeof(struct crypto_gcm_req_priv_ctx) +
 				crypto_ablkcipher_reqsize(ctr);
 
 	return 0;
 }
 
 static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	if (ctx->gf128 != NULL)
 		gf128mul_free_4k(ctx->gf128);
 
 	crypto_free_ablkcipher(ctx->ctr);
 }
 
 static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
 {
 	struct crypto_instance *inst;
 	struct crypto_alg *ctr;
 	struct crypto_alg *cipher;
 	struct gcm_instance_ctx *ctx;
 	int err;
 	char ctr_name[CRYPTO_MAX_ALG_NAME];
 
 	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD);
 	if (err)
 		return ERR_PTR(err);
 
 	cipher = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
 				 CRYPTO_ALG_TYPE_MASK);
 
 	inst = ERR_PTR(PTR_ERR(cipher));
 	if (IS_ERR(cipher))
 		return inst;
 
 	inst = ERR_PTR(-ENAMETOOLONG);
 	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
 		     cipher->cra_name) >= CRYPTO_MAX_ALG_NAME)
 		return inst;
 
 	ctr = crypto_alg_mod_lookup(ctr_name, CRYPTO_ALG_TYPE_BLKCIPHER,
 				    CRYPTO_ALG_TYPE_MASK);
 
 	if (IS_ERR(ctr))
 		return ERR_PTR(PTR_ERR(ctr));
 
 	if (cipher->cra_blocksize != 16)
 		goto out_put_ctr;
 
 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
 	err = -ENOMEM;
 	if (!inst)
 		goto out_put_ctr;
 
 	err = -ENAMETOOLONG;
 	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
 		     "gcm(%s)", cipher->cra_name) >= CRYPTO_MAX_ALG_NAME ||
 	    snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
 		     "gcm(%s)", cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
 		goto err_free_inst;
 
 
 	ctx = crypto_instance_ctx(inst);
 	err = crypto_init_spawn(&ctx->ctr, ctr, inst, CRYPTO_ALG_TYPE_MASK);
 	if (err)
 		goto err_free_inst;
 
 	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
 	inst->alg.cra_priority = ctr->cra_priority;
 	inst->alg.cra_blocksize = 16;
 	inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
 	inst->alg.cra_type = &crypto_aead_type;
-	inst->alg.cra_aead.ivsize = 12;
+	inst->alg.cra_aead.ivsize = 16;
 	inst->alg.cra_aead.maxauthsize = 16;
 	inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
 	inst->alg.cra_init = crypto_gcm_init_tfm;
 	inst->alg.cra_exit = crypto_gcm_exit_tfm;
 	inst->alg.cra_aead.setkey = crypto_gcm_setkey;
 	inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
 	inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
 
 out:
 	crypto_mod_put(ctr);
 	return inst;