Commit 5311f248b7764ba8b59e6d477355f766e5609686

Authored by Herbert Xu
Parent: 653ebd9c85

[CRYPTO] ctr: Refactor into ctr and rfc3686

As discussed previously, this patch moves the basic CTR functionality
into a chainable algorithm called ctr.  The IPsec-specific variant of
it is now placed on top with the name rfc3686.

So ctr(aes) gives a chainable cipher with IV size 16 while the IPsec
variant will be called rfc3686(ctr(aes)).  This patch also adjusts
gcm accordingly.
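
For illustration only (not part of the patch): a minimal sketch of how a
blkcipher user of this era might drive the new template. The names here are
hypothetical; the one real requirement is that the final four bytes of the
key are the RFC 3686 nonce, which rfc3686's setkey strips off:

	/* Hypothetical usage sketch; most error handling elided. */
	struct crypto_blkcipher *tfm;
	u8 key[16 + 4];		/* AES-128 key followed by 4-byte nonce */
	u8 iv[8];		/* per-request IV (CTR_RFC3686_IV_SIZE) */

	tfm = crypto_alloc_blkcipher("rfc3686(ctr(aes))", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	get_random_bytes(key, sizeof(key));
	crypto_blkcipher_setkey(tfm, key, sizeof(key));
	crypto_blkcipher_set_iv(tfm, iv, sizeof(iv));
	/* ... crypto_blkcipher_encrypt() on scatterlists ... */
	crypto_free_blkcipher(tfm);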

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

4 changed files with 233 additions and 136 deletions:
crypto/ctr.c, crypto/gcm.c, crypto/tcrypt.c, include/crypto/ctr.h

crypto/ctr.c

@@ -11,6 +11,7 @@
  */
 
 #include <crypto/algapi.h>
+#include <crypto/ctr.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -19,36 +20,22 @@
 #include <linux/scatterlist.h>
 #include <linux/slab.h>
 
-struct ctr_instance_ctx {
-	struct crypto_spawn alg;
-	unsigned int noncesize;
-	unsigned int ivsize;
-	unsigned int countersize;
-};
-
 struct crypto_ctr_ctx {
 	struct crypto_cipher *child;
-	u8 *nonce;
 };
 
+struct crypto_rfc3686_ctx {
+	struct crypto_blkcipher *child;
+	u8 nonce[CTR_RFC3686_NONCE_SIZE];
+};
+
 static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
			     unsigned int keylen)
 {
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
-	struct ctr_instance_ctx *ictx =
-		crypto_instance_ctx(crypto_tfm_alg_instance(parent));
-	unsigned int noncelen = ictx->noncesize;
-	int err = 0;
+	int err;
 
-	/* the nonce is stored in bytes at end of key */
-	if (keylen < noncelen)
-		return -EINVAL;
-
-	memcpy(ctx->nonce, key + (keylen - noncelen), noncelen);
-
-	keylen -= noncelen;
-
	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
@@ -60,11 +47,13 @@
 }
 
 static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
-				   struct crypto_cipher *tfm, u8 *ctrblk,
-				   unsigned int countersize)
+				   struct crypto_cipher *tfm)
 {
	unsigned int bsize = crypto_cipher_blocksize(tfm);
-	u8 *keystream = ctrblk + bsize;
+	unsigned long alignmask = crypto_cipher_alignmask(tfm);
+	u8 *ctrblk = walk->iv;
+	u8 tmp[bsize + alignmask];
+	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;
@@ -72,15 +61,17 @@
	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
+
+	crypto_inc(ctrblk, bsize);
 }
 
 static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
-				    struct crypto_cipher *tfm, u8 *ctrblk,
-				    unsigned int countersize)
+				    struct crypto_cipher *tfm)
 {
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
+	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;
@@ -91,7 +82,7 @@
		crypto_xor(dst, src, bsize);
 
		/* increment counter in counterblock */
-		crypto_inc(ctrblk + bsize - countersize, countersize);
+		crypto_inc(ctrblk, bsize);
 
		src += bsize;
		dst += bsize;
@@ -101,15 +92,17 @@
 }
 
 static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
-				    struct crypto_cipher *tfm, u8 *ctrblk,
-				    unsigned int countersize)
+				    struct crypto_cipher *tfm)
 {
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
+	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
+	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
-	u8 *keystream = ctrblk + bsize;
+	u8 tmp[bsize + alignmask];
+	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
 
	do {
		/* create keystream */
@@ -117,7 +110,7 @@
		crypto_xor(src, keystream, bsize);
 
		/* increment counter in counterblock */
-		crypto_inc(ctrblk + bsize - countersize, countersize);
+		crypto_inc(ctrblk, bsize);
 
		src += bsize;
	} while ((nbytes -= bsize) >= bsize);
@@ -134,41 +127,22 @@
	struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	unsigned int bsize = crypto_cipher_blocksize(child);
-	struct ctr_instance_ctx *ictx =
-		crypto_instance_ctx(crypto_tfm_alg_instance(&tfm->base));
-	unsigned long alignmask = crypto_cipher_alignmask(child) |
-				  (__alignof__(u32) - 1);
-	u8 cblk[bsize * 2 + alignmask];
-	u8 *counterblk = (u8 *)ALIGN((unsigned long)cblk, alignmask + 1);
	int err;
 
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, bsize);
 
-	/* set up counter block */
-	memset(counterblk, 0 , bsize);
-	memcpy(counterblk, ctx->nonce, ictx->noncesize);
-	memcpy(counterblk + ictx->noncesize, walk.iv, ictx->ivsize);
-
-	/* initialize counter portion of counter block */
-	crypto_inc(counterblk + bsize - ictx->countersize, ictx->countersize);
-
	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
-			nbytes = crypto_ctr_crypt_inplace(&walk, child,
-							  counterblk,
-							  ictx->countersize);
+			nbytes = crypto_ctr_crypt_inplace(&walk, child);
		else
-			nbytes = crypto_ctr_crypt_segment(&walk, child,
-							  counterblk,
-							  ictx->countersize);
+			nbytes = crypto_ctr_crypt_segment(&walk, child);
 
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
 
	if (walk.nbytes) {
-		crypto_ctr_crypt_final(&walk, child, counterblk,
-				       ictx->countersize);
+		crypto_ctr_crypt_final(&walk, child);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
 
@@ -178,15 +152,11 @@
 static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
 {
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
-	struct ctr_instance_ctx *ictx = crypto_instance_ctx(inst);
+	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;
 
-	ctx->nonce = kzalloc(ictx->noncesize, GFP_KERNEL);
-	if (!ctx->nonce)
-		return -ENOMEM;
-
-	cipher = crypto_spawn_cipher(&ictx->alg);
+	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);
 
@@ -199,7 +169,6 @@
 {
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	kfree(ctx->nonce);
	crypto_free_cipher(ctx->child);
 }
 
@@ -207,10 +176,6 @@
 {
	struct crypto_instance *inst;
	struct crypto_alg *alg;
-	struct ctr_instance_ctx *ictx;
-	unsigned int noncesize;
-	unsigned int ivsize;
-	unsigned int countersize;
	int err;
 
	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
@@ -222,71 +187,28 @@
	if (IS_ERR(alg))
		return ERR_PTR(PTR_ERR(alg));
 
-	err = crypto_attr_u32(tb[2], &noncesize);
-	if (err)
-		goto out_put_alg;
-
-	err = crypto_attr_u32(tb[3], &ivsize);
-	if (err)
-		goto out_put_alg;
-
-	err = crypto_attr_u32(tb[4], &countersize);
-	if (err)
-		goto out_put_alg;
-
-	/* verify size of nonce + iv + counter
-	 * counter must be >= 4 bytes.
-	 */
+	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
-	if (((noncesize + ivsize + countersize) < alg->cra_blocksize) ||
-	    ((noncesize + ivsize) > alg->cra_blocksize) ||
-	    (countersize > alg->cra_blocksize) || (countersize < 4))
+	if (alg->cra_blocksize < 4)
		goto out_put_alg;
 
	/* If this is false we'd fail the alignment of crypto_inc. */
-	if ((alg->cra_blocksize - countersize) % 4)
+	if (alg->cra_blocksize % 4)
		goto out_put_alg;
 
-	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
-	err = -ENOMEM;
-	if (!inst)
-		goto out_put_alg;
+	inst = crypto_alloc_instance("ctr", alg);
+	if (IS_ERR(inst))
+		goto out;
 
-	err = -ENAMETOOLONG;
-	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
-		     "ctr(%s,%u,%u,%u)", alg->cra_name, noncesize,
-		     ivsize, countersize) >= CRYPTO_MAX_ALG_NAME) {
-		goto err_free_inst;
-	}
-
-	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
-		     "ctr(%s,%u,%u,%u)", alg->cra_driver_name, noncesize,
-		     ivsize, countersize) >= CRYPTO_MAX_ALG_NAME) {
-		goto err_free_inst;
-	}
-
-	ictx = crypto_instance_ctx(inst);
-	ictx->noncesize = noncesize;
-	ictx->ivsize = ivsize;
-	ictx->countersize = countersize;
-
-	err = crypto_init_spawn(&ictx->alg, alg, inst,
-				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
-	if (err)
-		goto err_free_inst;
-
-	err = 0;
	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1);
	inst->alg.cra_type = &crypto_blkcipher_type;
 
-	inst->alg.cra_blkcipher.ivsize = ivsize;
-	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize
-					      + noncesize;
-	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize
-					      + noncesize;
+	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
+	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
+	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
 
	inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);
 
@@ -297,24 +219,18 @@
	inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;
 
-err_free_inst:
-	if (err)
-		kfree(inst);
-
-out_put_alg:
+out:
	crypto_mod_put(alg);
-
-	if (err)
-		inst = ERR_PTR(err);
-
	return inst;
+
+out_put_alg:
+	inst = ERR_PTR(err);
+	goto out;
 }
 
 static void crypto_ctr_free(struct crypto_instance *inst)
 {
-	struct ctr_instance_ctx *ictx = crypto_instance_ctx(inst);
-
-	crypto_drop_spawn(&ictx->alg);
+	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
 }
 
@@ -325,13 +241,174 @@
	.module = THIS_MODULE,
 };
 
+static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key,
+				 unsigned int keylen)
+{
+	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(parent);
+	struct crypto_blkcipher *child = ctx->child;
+	int err;
+
+	/* the nonce is stored in bytes at end of key */
+	if (keylen < CTR_RFC3686_NONCE_SIZE)
+		return -EINVAL;
+
+	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
+	       CTR_RFC3686_NONCE_SIZE);
+
+	keylen -= CTR_RFC3686_NONCE_SIZE;
+
+	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+	crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) &
+					  CRYPTO_TFM_REQ_MASK);
+	err = crypto_blkcipher_setkey(child, key, keylen);
+	crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) &
+				     CRYPTO_TFM_RES_MASK);
+
+	return err;
+}
+
+static int crypto_rfc3686_crypt(struct blkcipher_desc *desc,
+				struct scatterlist *dst,
+				struct scatterlist *src, unsigned int nbytes)
+{
+	struct crypto_blkcipher *tfm = desc->tfm;
+	struct crypto_rfc3686_ctx *ctx = crypto_blkcipher_ctx(tfm);
+	struct crypto_blkcipher *child = ctx->child;
+	unsigned long alignmask = crypto_blkcipher_alignmask(tfm);
+	u8 ivblk[CTR_RFC3686_BLOCK_SIZE + alignmask];
+	u8 *iv = PTR_ALIGN(ivblk + 0, alignmask + 1);
+	u8 *info = desc->info;
+	int err;
+
+	/* set up counter block */
+	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
+	memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);
+
+	/* initialize counter portion of counter block */
+	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
+		cpu_to_be32(1);
+
+	desc->tfm = child;
+	desc->info = iv;
+	err = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
+	desc->tfm = tfm;
+	desc->info = info;
+
+	return err;
+}
+
+static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_instance *inst = (void *)tfm->__crt_alg;
+	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct crypto_blkcipher *cipher;
+
+	cipher = crypto_spawn_blkcipher(spawn);
+	if (IS_ERR(cipher))
+		return PTR_ERR(cipher);
+
+	ctx->child = cipher;
+
+	return 0;
+}
+
+static void crypto_rfc3686_exit_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_blkcipher(ctx->child);
+}
+
+static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb)
+{
+	struct crypto_instance *inst;
+	struct crypto_alg *alg;
+	int err;
+
+	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
+	if (err)
+		return ERR_PTR(err);
+
+	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
+			      CRYPTO_ALG_TYPE_MASK);
+	err = PTR_ERR(alg);
+	if (IS_ERR(alg))
+		return ERR_PTR(err);
+
+	/* We only support 16-byte blocks. */
+	err = -EINVAL;
+	if (alg->cra_blkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE)
+		goto out_put_alg;
+
+	/* Not a stream cipher? */
+	if (alg->cra_blocksize != 1)
+		goto out_put_alg;
+
+	inst = crypto_alloc_instance("rfc3686", alg);
+	if (IS_ERR(inst))
+		goto out;
+
+	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
+	inst->alg.cra_priority = alg->cra_priority;
+	inst->alg.cra_blocksize = 1;
+	inst->alg.cra_alignmask = alg->cra_alignmask;
+	inst->alg.cra_type = &crypto_blkcipher_type;
+
+	inst->alg.cra_blkcipher.ivsize = CTR_RFC3686_IV_SIZE;
+	inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize
+					      + CTR_RFC3686_NONCE_SIZE;
+	inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize
+					      + CTR_RFC3686_NONCE_SIZE;
+
+	inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);
+
+	inst->alg.cra_init = crypto_rfc3686_init_tfm;
+	inst->alg.cra_exit = crypto_rfc3686_exit_tfm;
+
+	inst->alg.cra_blkcipher.setkey = crypto_rfc3686_setkey;
+	inst->alg.cra_blkcipher.encrypt = crypto_rfc3686_crypt;
+	inst->alg.cra_blkcipher.decrypt = crypto_rfc3686_crypt;
+
+out:
+	crypto_mod_put(alg);
+	return inst;
+
+out_put_alg:
+	inst = ERR_PTR(err);
+	goto out;
+}
+
+static struct crypto_template crypto_rfc3686_tmpl = {
+	.name = "rfc3686",
+	.alloc = crypto_rfc3686_alloc,
+	.free = crypto_ctr_free,
+	.module = THIS_MODULE,
+};
+
 static int __init crypto_ctr_module_init(void)
 {
-	return crypto_register_template(&crypto_ctr_tmpl);
+	int err;
+
+	err = crypto_register_template(&crypto_ctr_tmpl);
+	if (err)
+		goto out;
+
+	err = crypto_register_template(&crypto_rfc3686_tmpl);
+	if (err)
+		goto out_drop_ctr;
+
+out:
+	return err;
+
+out_drop_ctr:
+	crypto_unregister_template(&crypto_ctr_tmpl);
+	goto out;
 }
 
 static void __exit crypto_ctr_module_exit(void)
 {
+	crypto_unregister_template(&crypto_rfc3686_tmpl);
	crypto_unregister_template(&crypto_ctr_tmpl);
 }
 
@@ -340,4 +417,5 @@
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("CTR Counter block mode");
+MODULE_ALIAS("rfc3686");
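
Note: with the nonce/IV/counter split gone, the ctr template treats the whole
block-sized counter block as a single big-endian integer and steps it with
crypto_inc(ctrblk, bsize). A rough model of that increment, for reference
only; the kernel's crypto_inc() actually walks aligned u32 words, which is
why the template rejects block sizes that are not a multiple of 4:

	/* Model of crypto_inc(): big-endian increment with byte carry. */
	static void ctr_inc_model(u8 *block, unsigned int size)
	{
		unsigned int i;

		for (i = size; i > 0; i--)
			if (++block[i - 1] != 0)
				break;	/* no carry out, done */
	}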
crypto/gcm.c

@@ -160,7 +160,7 @@
 
 static inline void crypto_gcm_set_counter(u8 *counterblock, u32 value)
 {
-	*((u32 *)&counterblock[12]) = cpu_to_be32(value);
+	*((u32 *)&counterblock[12]) = cpu_to_be32(value + 1);
 }
 
 static int crypto_gcm_encrypt_counter(struct crypto_aead *aead, u8 *block,
@@ -400,9 +400,8 @@
		return inst;
 
	inst = ERR_PTR(-ENAMETOOLONG);
-	if (snprintf(
-		    ctr_name, CRYPTO_MAX_ALG_NAME,
-		    "ctr(%s,0,16,4)", cipher->cra_name) >= CRYPTO_MAX_ALG_NAME)
+	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
+		     cipher->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return inst;
 
	ctr = crypto_alg_mod_lookup(ctr_name, CRYPTO_ALG_TYPE_BLKCIPHER,
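
The value + 1 in crypto_gcm_set_counter() makes up for a behavioural change:
the old parameterized template (instantiated by gcm as "ctr(aes,0,16,4)")
incremented the counter portion once while setting up the counter block,
whereas the plain ctr template encrypts the counter block exactly as supplied
in the IV.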
crypto/tcrypt.c

@@ -1193,9 +1193,9 @@
			    AES_XTS_ENC_TEST_VECTORS);
		test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
			    AES_XTS_DEC_TEST_VECTORS);
-		test_cipher("ctr(aes,4,8,4)", ENCRYPT, aes_ctr_enc_tv_template,
+		test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
			    AES_CTR_ENC_TEST_VECTORS);
-		test_cipher("ctr(aes,4,8,4)", DECRYPT, aes_ctr_dec_tv_template,
+		test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
			    AES_CTR_DEC_TEST_VECTORS);
		test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template,
			  AES_GCM_ENC_TEST_VECTORS);
@@ -1394,9 +1394,9 @@
			    AES_XTS_ENC_TEST_VECTORS);
		test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
			    AES_XTS_DEC_TEST_VECTORS);
-		test_cipher("ctr(aes,4,8,4)", ENCRYPT, aes_ctr_enc_tv_template,
+		test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
			    AES_CTR_ENC_TEST_VECTORS);
-		test_cipher("ctr(aes,4,8,4)", DECRYPT, aes_ctr_dec_tv_template,
+		test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
			    AES_CTR_DEC_TEST_VECTORS);
		break;
 
include/crypto/ctr.h
@@ -0,0 +1,20 @@
+/*
+ * CTR: Counter mode
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_CTR_H
+#define _CRYPTO_CTR_H
+
+#define CTR_RFC3686_NONCE_SIZE 4
+#define CTR_RFC3686_IV_SIZE 8
+#define CTR_RFC3686_BLOCK_SIZE 16
+
+#endif /* _CRYPTO_CTR_H */
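
For reference, these constants describe the 16-byte counter block that
crypto_rfc3686_crypt() assembles. A sketch of the layout, with nonce and iv
standing for the per-key nonce and the per-request IV:

	/*
	 * RFC 3686 counter block:
	 *
	 *	| nonce (4 bytes) | IV (8 bytes) | counter (4 bytes, BE) |
	 *
	 * The counter starts at 1 for the first block of each request.
	 */
	u8 ctrblk[CTR_RFC3686_BLOCK_SIZE];

	memcpy(ctrblk, nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(ctrblk + CTR_RFC3686_NONCE_SIZE, iv, CTR_RFC3686_IV_SIZE);
	*(__be32 *)(ctrblk + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);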