Commit 4a49b499dfa0c9e42be6d6fdd771f3434c776278

Authored by Joy Latten
Committed by Herbert Xu
1 parent d29ce988ae

[CRYPTO] ccm: Added CCM mode

This patch adds Counter with CBC-MAC (CCM) support.
RFC 3610 and NIST Special Publication 800-38C were referenced.

Signed-off-by: Joy Latten <latten@austin.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
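
For reference, a minimal standalone sketch of the B_0 block formatting defined by RFC 3610 / NIST SP 800-38C, which format_input() and set_msg_len() below implement; the helper name and the userspace framing are illustrative only, not part of the patch:

/*
 * Illustrative sketch: format the CCM B_0 block the way
 * format_input()/set_msg_len() do.  L is the size of the length field
 * (2..8), M the tag length (4..16, even), and the nonce occupies the
 * remaining 15 - L bytes of the 16-byte block.
 */
#include <stdint.h>
#include <string.h>

static int ccm_format_b0(uint8_t b0[16], const uint8_t *nonce,
                         unsigned int L, unsigned int M,
                         uint64_t msglen, int have_assoc)
{
        unsigned int i;

        if (L < 2 || L > 8 || M < 4 || M > 16 || (M & 1))
                return -1;

        /* Flags byte: Adata bit, M' = (M - 2) / 2, L' = L - 1 */
        b0[0] = (have_assoc ? 0x40 : 0) | (((M - 2) / 2) << 3) | (L - 1);

        memcpy(b0 + 1, nonce, 15 - L);          /* nonce N */

        for (i = 0; i < L; i++) {               /* big-endian l(m) */
                b0[15 - i] = msglen & 0xff;
                msglen >>= 8;
        }

        return msglen ? -1 : 0;                 /* message too long for L */
}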

Showing 3 changed files with 897 additions and 0 deletions

crypto/Kconfig
... ... @@ -220,6 +220,13 @@
220 220 Support for Galois/Counter Mode (GCM) and Galois Message
221 221 Authentication Code (GMAC). Required for IPSec.
222 222  
  223 +config CRYPTO_CCM
  224 + tristate "CCM support"
  225 + select CRYPTO_CTR
  226 + select CRYPTO_AEAD
  227 + help
  228 + Support for Counter with CBC MAC. Required for IPsec.
  229 +
223 230 config CRYPTO_CRYPTD
224 231 tristate "Software async crypto daemon"
225 232 select CRYPTO_BLKCIPHER

crypto/Makefile
... ... @@ -39,6 +39,7 @@
39 39 obj-$(CONFIG_CRYPTO_XTS) += xts.o
40 40 obj-$(CONFIG_CRYPTO_CTR) += ctr.o
41 41 obj-$(CONFIG_CRYPTO_GCM) += gcm.o
  42 +obj-$(CONFIG_CRYPTO_CCM) += ccm.o
42 43 obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o
43 44 obj-$(CONFIG_CRYPTO_DES) += des_generic.o
44 45 obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o

crypto/ccm.c (new file)
  1 +/*
  2 + * CCM: Counter with CBC-MAC
  3 + *
  4 + * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
  5 + *
  6 + * This program is free software; you can redistribute it and/or modify it
  7 + * under the terms of the GNU General Public License as published by the Free
  8 + * Software Foundation; either version 2 of the License, or (at your option)
  9 + * any later version.
  10 + *
  11 + */
  12 +
  13 +#include <crypto/internal/aead.h>
  14 +#include <crypto/internal/skcipher.h>
  15 +#include <crypto/scatterwalk.h>
  16 +#include <linux/err.h>
  17 +#include <linux/init.h>
  18 +#include <linux/kernel.h>
  19 +#include <linux/module.h>
  20 +#include <linux/slab.h>
  21 +
  22 +#include "internal.h"
  23 +
  24 +struct ccm_instance_ctx {
  25 + struct crypto_skcipher_spawn ctr;
  26 + struct crypto_spawn cipher;
  27 +};
  28 +
  29 +struct crypto_ccm_ctx {
  30 + struct crypto_cipher *cipher;
  31 + struct crypto_ablkcipher *ctr;
  32 +};
  33 +
  34 +struct crypto_rfc4309_ctx {
  35 + struct crypto_aead *child;
  36 + u8 nonce[3];
  37 +};
  38 +
  39 +struct crypto_ccm_req_priv_ctx {
  40 + u8 odata[16];
  41 + u8 idata[16];
  42 + u8 auth_tag[16];
  43 + u32 ilen;
  44 + u32 flags;
  45 + struct scatterlist src[2];
  46 + struct scatterlist dst[2];
  47 + struct ablkcipher_request abreq;
  48 +};
  49 +
  50 +static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
  51 + struct aead_request *req)
  52 +{
  53 + unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
  54 +
  55 + return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
  56 +}
  57 +
  58 +static int set_msg_len(u8 *block, unsigned int msglen, int csize)
  59 +{
  60 + __be32 data;
  61 +
  62 + memset(block, 0, csize);
  63 + block += csize;
  64 +
  65 + if (csize >= 4)
  66 + csize = 4;
  67 + else if (msglen > (1 << (8 * csize)))
  68 + return -EOVERFLOW;
  69 +
  70 + data = cpu_to_be32(msglen);
  71 + memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
  72 +
  73 + return 0;
  74 +}
  75 +
  76 +static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
  77 + unsigned int keylen)
  78 +{
  79 + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  80 + struct crypto_ablkcipher *ctr = ctx->ctr;
  81 + struct crypto_cipher *tfm = ctx->cipher;
  82 + int err = 0;
  83 +
  84 + crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
  85 + crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
  86 + CRYPTO_TFM_REQ_MASK);
  87 + err = crypto_ablkcipher_setkey(ctr, key, keylen);
  88 + crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
  89 + CRYPTO_TFM_RES_MASK);
  90 + if (err)
  91 + goto out;
  92 +
  93 + crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK);
  94 + crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) &
  95 + CRYPTO_TFM_REQ_MASK);
  96 + err = crypto_cipher_setkey(tfm, key, keylen);
  97 + crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) &
  98 + CRYPTO_TFM_RES_MASK);
  99 +
  100 +out:
  101 + return err;
  102 +}
  103 +
  104 +static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
  105 + unsigned int authsize)
  106 +{
  107 + switch (authsize) {
  108 + case 4:
  109 + case 6:
  110 + case 8:
  111 + case 10:
  112 + case 12:
  113 + case 14:
  114 + case 16:
  115 + break;
  116 + default:
  117 + return -EINVAL;
  118 + }
  119 +
  120 + return 0;
  121 +}
  122 +
  123 +static int format_input(u8 *info, struct aead_request *req,
  124 + unsigned int cryptlen)
  125 +{
  126 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
  127 + unsigned int lp = req->iv[0];
  128 + unsigned int l = lp + 1;
  129 + unsigned int m;
  130 +
  131 + m = crypto_aead_authsize(aead);
  132 +
  133 + memcpy(info, req->iv, 16);
  134 +
  135 + /* format control info per RFC 3610 and
  136 + * NIST Special Publication 800-38C
  137 + */
  138 + *info |= (8 * ((m - 2) / 2));
  139 + if (req->assoclen)
  140 + *info |= 64;
  141 +
  142 + return set_msg_len(info + 16 - l, cryptlen, l);
  143 +}
  144 +
  145 +static int format_adata(u8 *adata, unsigned int a)
  146 +{
  147 + int len = 0;
  148 +
  149 + /* add control info for associated data
  150 + * RFC 3610 and NIST Special Publication 800-38C
  151 + */
  152 + if (a < 65280) {
  153 + *(__be16 *)adata = cpu_to_be16(a);
  154 + len = 2;
  155 + } else {
  156 + *(__be16 *)adata = cpu_to_be16(0xfffe);
  157 + *(__be32 *)&adata[2] = cpu_to_be32(a);
  158 + len = 6;
  159 + }
  160 +
  161 + return len;
  162 +}
  163 +
  164 +static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n,
  165 + struct crypto_ccm_req_priv_ctx *pctx)
  166 +{
  167 + unsigned int bs = 16;
  168 + u8 *odata = pctx->odata;
  169 + u8 *idata = pctx->idata;
  170 + int datalen, getlen;
  171 +
  172 + datalen = n;
  173 +
  174 + /* first time in here, block may be partially filled. */
  175 + getlen = bs - pctx->ilen;
  176 + if (datalen >= getlen) {
  177 + memcpy(idata + pctx->ilen, data, getlen);
  178 + crypto_xor(odata, idata, bs);
  179 + crypto_cipher_encrypt_one(tfm, odata, odata);
  180 + datalen -= getlen;
  181 + data += getlen;
  182 + pctx->ilen = 0;
  183 + }
  184 +
  185 + /* now encrypt rest of data */
  186 + while (datalen >= bs) {
  187 + crypto_xor(odata, data, bs);
  188 + crypto_cipher_encrypt_one(tfm, odata, odata);
  189 +
  190 + datalen -= bs;
  191 + data += bs;
  192 + }
  193 +
  194 + /* check and see if there's leftover data that wasn't
  195 + * enough to fill a block.
  196 + */
  197 + if (datalen) {
  198 + memcpy(idata + pctx->ilen, data, datalen);
  199 + pctx->ilen += datalen;
  200 + }
  201 +}
  202 +
  203 +static void get_data_to_compute(struct crypto_cipher *tfm,
  204 + struct crypto_ccm_req_priv_ctx *pctx,
  205 + struct scatterlist *sg, unsigned int len)
  206 +{
  207 + struct scatter_walk walk;
  208 + u8 *data_src;
  209 + int n;
  210 +
  211 + scatterwalk_start(&walk, sg);
  212 +
  213 + while (len) {
  214 + n = scatterwalk_clamp(&walk, len);
  215 + if (!n) {
  216 + scatterwalk_start(&walk, sg_next(walk.sg));
  217 + n = scatterwalk_clamp(&walk, len);
  218 + }
  219 + data_src = scatterwalk_map(&walk, 0);
  220 +
  221 + compute_mac(tfm, data_src, n, pctx);
  222 + len -= n;
  223 +
  224 + scatterwalk_unmap(data_src, 0);
  225 + scatterwalk_advance(&walk, n);
  226 + scatterwalk_done(&walk, 0, len);
  227 + if (len)
  228 + crypto_yield(pctx->flags);
  229 + }
  230 +
  231 + /* any leftover data needs to be padded and then encrypted */
  232 + if (pctx->ilen) {
  233 + int padlen;
  234 + u8 *odata = pctx->odata;
  235 + u8 *idata = pctx->idata;
  236 +
  237 + padlen = 16 - pctx->ilen;
  238 + memset(idata + pctx->ilen, 0, padlen);
  239 + crypto_xor(odata, idata, 16);
  240 + crypto_cipher_encrypt_one(tfm, odata, odata);
  241 + pctx->ilen = 0;
  242 + }
  243 +}
  244 +
  245 +static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
  246 + unsigned int cryptlen)
  247 +{
  248 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
  249 + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  250 + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  251 + struct crypto_cipher *cipher = ctx->cipher;
  252 + unsigned int assoclen = req->assoclen;
  253 + u8 *odata = pctx->odata;
  254 + u8 *idata = pctx->idata;
  255 + int err;
  256 +
  257 + /* format control data for input */
  258 + err = format_input(odata, req, cryptlen);
  259 + if (err)
  260 + goto out;
  261 +
  262 + /* encrypt first block to use as start in computing mac */
  263 + crypto_cipher_encrypt_one(cipher, odata, odata);
  264 +
  265 + /* format associated data and compute into mac */
  266 + if (assoclen) {
  267 + pctx->ilen = format_adata(idata, assoclen);
  268 + get_data_to_compute(cipher, pctx, req->assoc, req->assoclen);
  269 + }
  270 +
  271 + /* compute plaintext into mac */
  272 + get_data_to_compute(cipher, pctx, plain, cryptlen);
  273 +
  274 +out:
  275 + return err;
  276 +}
  277 +
  278 +static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
  279 +{
  280 + struct aead_request *req = areq->data;
  281 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
  282 + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  283 + u8 *odata = pctx->odata;
  284 +
  285 + if (!err)
  286 + scatterwalk_map_and_copy(odata, req->dst, req->cryptlen,
  287 + crypto_aead_authsize(aead), 1);
  288 + aead_request_complete(req, err);
  289 +}
  290 +
  291 +static inline int crypto_ccm_check_iv(const u8 *iv)
  292 +{
  293 + /* 2 <= L <= 8, so 1 <= L' <= 7. */
  294 + if (1 > iv[0] || iv[0] > 7)
  295 + return -EINVAL;
  296 +
  297 + return 0;
  298 +}
  299 +
  300 +static int crypto_ccm_encrypt(struct aead_request *req)
  301 +{
  302 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
  303 + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  304 + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  305 + struct ablkcipher_request *abreq = &pctx->abreq;
  306 + struct scatterlist *dst;
  307 + unsigned int cryptlen = req->cryptlen;
  308 + u8 *odata = pctx->odata;
  309 + u8 *iv = req->iv;
  310 + int err;
  311 +
  312 + err = crypto_ccm_check_iv(iv);
  313 + if (err)
  314 + return err;
  315 +
  316 + pctx->flags = aead_request_flags(req);
  317 +
  318 + err = crypto_ccm_auth(req, req->src, cryptlen);
  319 + if (err)
  320 + return err;
  321 +
  322 + /* Note: RFC 3610 and NIST 800-38C require a counter value of
  323 + * zero to encrypt the auth tag.
  324 + */
  325 + memset(iv + 15 - iv[0], 0, iv[0] + 1);
  326 +
  327 + sg_init_table(pctx->src, 2);
  328 + sg_set_buf(pctx->src, odata, 16);
  329 + scatterwalk_sg_chain(pctx->src, 2, req->src);
  330 +
  331 + dst = pctx->src;
  332 + if (req->src != req->dst) {
  333 + sg_init_table(pctx->dst, 2);
  334 + sg_set_buf(pctx->dst, odata, 16);
  335 + scatterwalk_sg_chain(pctx->dst, 2, req->dst);
  336 + dst = pctx->dst;
  337 + }
  338 +
  339 + ablkcipher_request_set_tfm(abreq, ctx->ctr);
  340 + ablkcipher_request_set_callback(abreq, pctx->flags,
  341 + crypto_ccm_encrypt_done, req);
  342 + ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
  343 + err = crypto_ablkcipher_encrypt(abreq);
  344 + if (err)
  345 + return err;
  346 +
  347 + /* copy authtag to end of dst */
  348 + scatterwalk_map_and_copy(odata, req->dst, cryptlen,
  349 + crypto_aead_authsize(aead), 1);
  350 + return err;
  351 +}
  352 +
  353 +static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
  354 + int err)
  355 +{
  356 + struct aead_request *req = areq->data;
  357 + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  358 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
  359 + unsigned int authsize = crypto_aead_authsize(aead);
  360 + unsigned int cryptlen = req->cryptlen - authsize;
  361 +
  362 + if (!err) {
  363 + err = crypto_ccm_auth(req, req->dst, cryptlen);
  364 + if (!err && memcmp(pctx->auth_tag, pctx->odata, authsize))
  365 + err = -EBADMSG;
  366 + }
  367 + aead_request_complete(req, err);
  368 +}
  369 +
  370 +static int crypto_ccm_decrypt(struct aead_request *req)
  371 +{
  372 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
  373 + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  374 + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  375 + struct ablkcipher_request *abreq = &pctx->abreq;
  376 + struct scatterlist *dst;
  377 + unsigned int authsize = crypto_aead_authsize(aead);
  378 + unsigned int cryptlen = req->cryptlen;
  379 + u8 *authtag = pctx->auth_tag;
  380 + u8 *odata = pctx->odata;
  381 + u8 *iv = req->iv;
  382 + int err;
  383 +
  384 + if (cryptlen < authsize)
  385 + return -EINVAL;
  386 + cryptlen -= authsize;
  387 +
  388 + err = crypto_ccm_check_iv(iv);
  389 + if (err)
  390 + return err;
  391 +
  392 + pctx->flags = aead_request_flags(req);
  393 +
  394 + scatterwalk_map_and_copy(authtag, req->src, cryptlen, authsize, 0);
  395 +
  396 + memset(iv + 15 - iv[0], 0, iv[0] + 1);
  397 +
  398 + sg_init_table(pctx->src, 2);
  399 + sg_set_buf(pctx->src, authtag, 16);
  400 + scatterwalk_sg_chain(pctx->src, 2, req->src);
  401 +
  402 + dst = pctx->src;
  403 + if (req->src != req->dst) {
  404 + sg_init_table(pctx->dst, 2);
  405 + sg_set_buf(pctx->dst, authtag, 16);
  406 + scatterwalk_sg_chain(pctx->dst, 2, req->dst);
  407 + dst = pctx->dst;
  408 + }
  409 +
  410 + ablkcipher_request_set_tfm(abreq, ctx->ctr);
  411 + ablkcipher_request_set_callback(abreq, pctx->flags,
  412 + crypto_ccm_decrypt_done, req);
  413 + ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
  414 + err = crypto_ablkcipher_decrypt(abreq);
  415 + if (err)
  416 + return err;
  417 +
  418 + err = crypto_ccm_auth(req, req->dst, cryptlen);
  419 + if (err)
  420 + return err;
  421 +
  422 + /* verify */
  423 + if (memcmp(authtag, odata, authsize))
  424 + return -EBADMSG;
  425 +
  426 + return err;
  427 +}
  428 +
  429 +static int crypto_ccm_init_tfm(struct crypto_tfm *tfm)
  430 +{
  431 + struct crypto_instance *inst = (void *)tfm->__crt_alg;
  432 + struct ccm_instance_ctx *ictx = crypto_instance_ctx(inst);
  433 + struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm);
  434 + struct crypto_cipher *cipher;
  435 + struct crypto_ablkcipher *ctr;
  436 + unsigned long align;
  437 + int err;
  438 +
  439 + cipher = crypto_spawn_cipher(&ictx->cipher);
  440 + if (IS_ERR(cipher))
  441 + return PTR_ERR(cipher);
  442 +
  443 + ctr = crypto_spawn_skcipher(&ictx->ctr);
  444 + err = PTR_ERR(ctr);
  445 + if (IS_ERR(ctr))
  446 + goto err_free_cipher;
  447 +
  448 + ctx->cipher = cipher;
  449 + ctx->ctr = ctr;
  450 +
  451 + align = crypto_tfm_alg_alignmask(tfm);
  452 + align &= ~(crypto_tfm_ctx_alignment() - 1);
  453 + tfm->crt_aead.reqsize = align +
  454 + sizeof(struct crypto_ccm_req_priv_ctx) +
  455 + crypto_ablkcipher_reqsize(ctr);
  456 +
  457 + return 0;
  458 +
  459 +err_free_cipher:
  460 + crypto_free_cipher(cipher);
  461 + return err;
  462 +}
  463 +
  464 +static void crypto_ccm_exit_tfm(struct crypto_tfm *tfm)
  465 +{
  466 + struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm);
  467 +
  468 + crypto_free_cipher(ctx->cipher);
  469 + crypto_free_ablkcipher(ctx->ctr);
  470 +}
  471 +
  472 +static struct crypto_instance *crypto_ccm_alloc_common(struct rtattr **tb,
  473 + const char *full_name,
  474 + const char *ctr_name,
  475 + const char *cipher_name)
  476 +{
  477 + struct crypto_attr_type *algt;
  478 + struct crypto_instance *inst;
  479 + struct crypto_alg *ctr;
  480 + struct crypto_alg *cipher;
  481 + struct ccm_instance_ctx *ictx;
  482 + int err;
  483 +
  484 + algt = crypto_get_attr_type(tb);
  485 + err = PTR_ERR(algt);
  486 + if (IS_ERR(algt))
  487 + return ERR_PTR(err);
  488 +
  489 + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
  490 + return ERR_PTR(-EINVAL);
  491 +
  492 + cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER,
  493 + CRYPTO_ALG_TYPE_MASK);
  494 + err = PTR_ERR(cipher);
  495 + if (IS_ERR(cipher))
  496 + return ERR_PTR(err);
  497 +
  498 + err = -EINVAL;
  499 + if (cipher->cra_blocksize != 16)
  500 + goto out_put_cipher;
  501 +
  502 + inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
  503 + err = -ENOMEM;
  504 + if (!inst)
  505 + goto out_put_cipher;
  506 +
  507 + ictx = crypto_instance_ctx(inst);
  508 +
  509 + err = crypto_init_spawn(&ictx->cipher, cipher, inst,
  510 + CRYPTO_ALG_TYPE_MASK);
  511 + if (err)
  512 + goto err_free_inst;
  513 +
  514 + crypto_set_skcipher_spawn(&ictx->ctr, inst);
  515 + err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
  516 + crypto_requires_sync(algt->type,
  517 + algt->mask));
  518 + if (err)
  519 + goto err_drop_cipher;
  520 +
  521 + ctr = crypto_skcipher_spawn_alg(&ictx->ctr);
  522 +
  523 + /* Not a stream cipher? */
  524 + err = -EINVAL;
  525 + if (ctr->cra_blocksize != 1)
  526 + goto err_drop_ctr;
  527 +
  528 + /* We want the real thing! */
  529 + if (ctr->cra_ablkcipher.ivsize != 16)
  530 + goto err_drop_ctr;
  531 +
  532 + err = -ENAMETOOLONG;
  533 + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
  534 + "ccm_base(%s,%s)", ctr->cra_driver_name,
  535 + cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
  536 + goto err_drop_ctr;
  537 +
  538 + memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);
  539 +
  540 + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
  541 + inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
  542 + inst->alg.cra_priority = cipher->cra_priority + ctr->cra_priority;
  543 + inst->alg.cra_blocksize = 1;
  544 + inst->alg.cra_alignmask = cipher->cra_alignmask | ctr->cra_alignmask |
  545 + (__alignof__(u32) - 1);
  546 + inst->alg.cra_type = &crypto_aead_type;
  547 + inst->alg.cra_aead.ivsize = 16;
  548 + inst->alg.cra_aead.maxauthsize = 16;
  549 + inst->alg.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
  550 + inst->alg.cra_init = crypto_ccm_init_tfm;
  551 + inst->alg.cra_exit = crypto_ccm_exit_tfm;
  552 + inst->alg.cra_aead.setkey = crypto_ccm_setkey;
  553 + inst->alg.cra_aead.setauthsize = crypto_ccm_setauthsize;
  554 + inst->alg.cra_aead.encrypt = crypto_ccm_encrypt;
  555 + inst->alg.cra_aead.decrypt = crypto_ccm_decrypt;
  556 +
  557 +out:
  558 + crypto_mod_put(cipher);
  559 + return inst;
  560 +
  561 +err_drop_ctr:
  562 + crypto_drop_skcipher(&ictx->ctr);
  563 +err_drop_cipher:
  564 + crypto_drop_spawn(&ictx->cipher);
  565 +err_free_inst:
  566 + kfree(inst);
  567 +out_put_cipher:
  568 + inst = ERR_PTR(err);
  569 + goto out;
  570 +}
  571 +
  572 +static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb)
  573 +{
  574 + int err;
  575 + const char *cipher_name;
  576 + char ctr_name[CRYPTO_MAX_ALG_NAME];
  577 + char full_name[CRYPTO_MAX_ALG_NAME];
  578 +
  579 + cipher_name = crypto_attr_alg_name(tb[1]);
  580 + err = PTR_ERR(cipher_name);
  581 + if (IS_ERR(cipher_name))
  582 + return ERR_PTR(err);
  583 +
  584 + if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
  585 + cipher_name) >= CRYPTO_MAX_ALG_NAME)
  586 + return ERR_PTR(-ENAMETOOLONG);
  587 +
  588 + if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
  589 + CRYPTO_MAX_ALG_NAME)
  590 + return ERR_PTR(-ENAMETOOLONG);
  591 +
  592 + return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name);
  593 +}
  594 +
  595 +static void crypto_ccm_free(struct crypto_instance *inst)
  596 +{
  597 + struct ccm_instance_ctx *ctx = crypto_instance_ctx(inst);
  598 +
  599 + crypto_drop_spawn(&ctx->cipher);
  600 + crypto_drop_skcipher(&ctx->ctr);
  601 + kfree(inst);
  602 +}
  603 +
  604 +static struct crypto_template crypto_ccm_tmpl = {
  605 + .name = "ccm",
  606 + .alloc = crypto_ccm_alloc,
  607 + .free = crypto_ccm_free,
  608 + .module = THIS_MODULE,
  609 +};
  610 +
  611 +static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb)
  612 +{
  613 + int err;
  614 + const char *ctr_name;
  615 + const char *cipher_name;
  616 + char full_name[CRYPTO_MAX_ALG_NAME];
  617 +
  618 + ctr_name = crypto_attr_alg_name(tb[1]);
  619 + err = PTR_ERR(ctr_name);
  620 + if (IS_ERR(ctr_name))
  621 + return ERR_PTR(err);
  622 +
  623 + cipher_name = crypto_attr_alg_name(tb[2]);
  624 + err = PTR_ERR(cipher_name);
  625 + if (IS_ERR(cipher_name))
  626 + return ERR_PTR(err);
  627 +
  628 + if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
  629 + ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
  630 + return ERR_PTR(-ENAMETOOLONG);
  631 +
  632 + return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name);
  633 +}
  634 +
  635 +static struct crypto_template crypto_ccm_base_tmpl = {
  636 + .name = "ccm_base",
  637 + .alloc = crypto_ccm_base_alloc,
  638 + .free = crypto_ccm_free,
  639 + .module = THIS_MODULE,
  640 +};
  641 +
  642 +static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
  643 + unsigned int keylen)
  644 +{
  645 + struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
  646 + struct crypto_aead *child = ctx->child;
  647 + int err;
  648 +
  649 + if (keylen < 3)
  650 + return -EINVAL;
  651 +
  652 + keylen -= 3;
  653 + memcpy(ctx->nonce, key + keylen, 3);
  654 +
  655 + crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  656 + crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
  657 + CRYPTO_TFM_REQ_MASK);
  658 + err = crypto_aead_setkey(child, key, keylen);
  659 + crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
  660 + CRYPTO_TFM_RES_MASK);
  661 +
  662 + return err;
  663 +}
  664 +
  665 +static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
  666 + unsigned int authsize)
  667 +{
  668 + struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
  669 +
  670 + switch (authsize) {
  671 + case 8:
  672 + case 12:
  673 + case 16:
  674 + break;
  675 + default:
  676 + return -EINVAL;
  677 + }
  678 +
  679 + return crypto_aead_setauthsize(ctx->child, authsize);
  680 +}
  681 +
  682 +static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
  683 +{
  684 + struct aead_request *subreq = aead_request_ctx(req);
  685 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
  686 + struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
  687 + struct crypto_aead *child = ctx->child;
  688 + u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
  689 + crypto_aead_alignmask(child) + 1);
  690 +
  691 + /* L' */
  692 + iv[0] = 3;
  693 +
  694 + memcpy(iv + 1, ctx->nonce, 3);
  695 + memcpy(iv + 4, req->iv, 8);
  696 +
  697 + aead_request_set_tfm(subreq, child);
  698 + aead_request_set_callback(subreq, req->base.flags, req->base.complete,
  699 + req->base.data);
  700 + aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
  701 + aead_request_set_assoc(subreq, req->assoc, req->assoclen);
  702 +
  703 + return subreq;
  704 +}
  705 +
  706 +static int crypto_rfc4309_encrypt(struct aead_request *req)
  707 +{
  708 + req = crypto_rfc4309_crypt(req);
  709 +
  710 + return crypto_aead_encrypt(req);
  711 +}
  712 +
  713 +static int crypto_rfc4309_decrypt(struct aead_request *req)
  714 +{
  715 + req = crypto_rfc4309_crypt(req);
  716 +
  717 + return crypto_aead_decrypt(req);
  718 +}
  719 +
  720 +static int crypto_rfc4309_init_tfm(struct crypto_tfm *tfm)
  721 +{
  722 + struct crypto_instance *inst = (void *)tfm->__crt_alg;
  723 + struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
  724 + struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm);
  725 + struct crypto_aead *aead;
  726 + unsigned long align;
  727 +
  728 + aead = crypto_spawn_aead(spawn);
  729 + if (IS_ERR(aead))
  730 + return PTR_ERR(aead);
  731 +
  732 + ctx->child = aead;
  733 +
  734 + align = crypto_aead_alignmask(aead);
  735 + align &= ~(crypto_tfm_ctx_alignment() - 1);
  736 + tfm->crt_aead.reqsize = sizeof(struct aead_request) +
  737 + ALIGN(crypto_aead_reqsize(aead),
  738 + crypto_tfm_ctx_alignment()) +
  739 + align + 16;
  740 +
  741 + return 0;
  742 +}
  743 +
  744 +static void crypto_rfc4309_exit_tfm(struct crypto_tfm *tfm)
  745 +{
  746 + struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm);
  747 +
  748 + crypto_free_aead(ctx->child);
  749 +}
  750 +
  751 +static struct crypto_instance *crypto_rfc4309_alloc(struct rtattr **tb)
  752 +{
  753 + struct crypto_attr_type *algt;
  754 + struct crypto_instance *inst;
  755 + struct crypto_aead_spawn *spawn;
  756 + struct crypto_alg *alg;
  757 + const char *ccm_name;
  758 + int err;
  759 +
  760 + algt = crypto_get_attr_type(tb);
  761 + err = PTR_ERR(algt);
  762 + if (IS_ERR(algt))
  763 + return ERR_PTR(err);
  764 +
  765 + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
  766 + return ERR_PTR(-EINVAL);
  767 +
  768 + ccm_name = crypto_attr_alg_name(tb[1]);
  769 + err = PTR_ERR(ccm_name);
  770 + if (IS_ERR(ccm_name))
  771 + return ERR_PTR(err);
  772 +
  773 + inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
  774 + if (!inst)
  775 + return ERR_PTR(-ENOMEM);
  776 +
  777 + spawn = crypto_instance_ctx(inst);
  778 + crypto_set_aead_spawn(spawn, inst);
  779 + err = crypto_grab_aead(spawn, ccm_name, 0,
  780 + crypto_requires_sync(algt->type, algt->mask));
  781 + if (err)
  782 + goto out_free_inst;
  783 +
  784 + alg = crypto_aead_spawn_alg(spawn);
  785 +
  786 + err = -EINVAL;
  787 +
  788 + /* We only support 16-byte blocks. */
  789 + if (alg->cra_aead.ivsize != 16)
  790 + goto out_drop_alg;
  791 +
  792 + /* Not a stream cipher? */
  793 + if (alg->cra_blocksize != 1)
  794 + goto out_drop_alg;
  795 +
  796 + err = -ENAMETOOLONG;
  797 + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
  798 + "rfc4309(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
  799 + snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
  800 + "rfc4309(%s)", alg->cra_driver_name) >=
  801 + CRYPTO_MAX_ALG_NAME)
  802 + goto out_drop_alg;
  803 +
  804 + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
  805 + inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
  806 + inst->alg.cra_priority = alg->cra_priority;
  807 + inst->alg.cra_blocksize = 1;
  808 + inst->alg.cra_alignmask = alg->cra_alignmask;
  809 + inst->alg.cra_type = &crypto_nivaead_type;
  810 +
  811 + inst->alg.cra_aead.ivsize = 8;
  812 + inst->alg.cra_aead.maxauthsize = 16;
  813 +
  814 + inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);
  815 +
  816 + inst->alg.cra_init = crypto_rfc4309_init_tfm;
  817 + inst->alg.cra_exit = crypto_rfc4309_exit_tfm;
  818 +
  819 + inst->alg.cra_aead.setkey = crypto_rfc4309_setkey;
  820 + inst->alg.cra_aead.setauthsize = crypto_rfc4309_setauthsize;
  821 + inst->alg.cra_aead.encrypt = crypto_rfc4309_encrypt;
  822 + inst->alg.cra_aead.decrypt = crypto_rfc4309_decrypt;
  823 +
  824 + inst->alg.cra_aead.geniv = "seqiv";
  825 +
  826 +out:
  827 + return inst;
  828 +
  829 +out_drop_alg:
  830 + crypto_drop_aead(spawn);
  831 +out_free_inst:
  832 + kfree(inst);
  833 + inst = ERR_PTR(err);
  834 + goto out;
  835 +}
  836 +
  837 +static void crypto_rfc4309_free(struct crypto_instance *inst)
  838 +{
  839 + crypto_drop_spawn(crypto_instance_ctx(inst));
  840 + kfree(inst);
  841 +}
  842 +
  843 +static struct crypto_template crypto_rfc4309_tmpl = {
  844 + .name = "rfc4309",
  845 + .alloc = crypto_rfc4309_alloc,
  846 + .free = crypto_rfc4309_free,
  847 + .module = THIS_MODULE,
  848 +};
  849 +
  850 +static int __init crypto_ccm_module_init(void)
  851 +{
  852 + int err;
  853 +
  854 + err = crypto_register_template(&crypto_ccm_base_tmpl);
  855 + if (err)
  856 + goto out;
  857 +
  858 + err = crypto_register_template(&crypto_ccm_tmpl);
  859 + if (err)
  860 + goto out_undo_base;
  861 +
  862 + err = crypto_register_template(&crypto_rfc4309_tmpl);
  863 + if (err)
  864 + goto out_undo_ccm;
  865 +
  866 +out:
  867 + return err;
  868 +
  869 +out_undo_ccm:
  870 + crypto_unregister_template(&crypto_ccm_tmpl);
  871 +out_undo_base:
  872 + crypto_unregister_template(&crypto_ccm_base_tmpl);
  873 + goto out;
  874 +}
  875 +
  876 +static void __exit crypto_ccm_module_exit(void)
  877 +{
  878 + crypto_unregister_template(&crypto_rfc4309_tmpl);
  879 + crypto_unregister_template(&crypto_ccm_tmpl);
  880 + crypto_unregister_template(&crypto_ccm_base_tmpl);
  881 +}
  882 +
  883 +module_init(crypto_ccm_module_init);
  884 +module_exit(crypto_ccm_module_exit);
  885 +
  886 +MODULE_LICENSE("GPL");
  887 +MODULE_DESCRIPTION("Counter with CBC MAC");
  888 +MODULE_ALIAS("ccm_base");
  889 +MODULE_ALIAS("rfc4309");