Blame view
crypto/keywrap.c
9.51 KB
e28facde3
|
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 |
/* * Key Wrapping: RFC3394 / NIST SP800-38F * * Copyright (C) 2015, Stephan Mueller <smueller@chronox.de> * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, and the entire permission notice in its entirety, * including the disclaimer of warranties. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote * products derived from this software without specific prior * written permission. * * ALTERNATIVELY, this product may be distributed under the terms of * the GNU General Public License, in which case the provisions of the GPL2 * are required INSTEAD OF the above restrictions. (This clause is * necessary due to a potential bad interaction between the GPL and * the restrictions contained in a BSD-style copyright.) * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF * WHICH ARE HEREBY DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE * USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH * DAMAGE. */ /* * Note for using key wrapping: * * * The result of the encryption operation is the ciphertext starting * with the 2nd semiblock. 
The first semiblock is provided as the IV. * The IV used to start the encryption operation is the default IV. * * * The input for the decryption is the first semiblock handed in as an * IV. The ciphertext is the data starting with the 2nd semiblock. The * return code of the decryption operation will be EBADMSG in case an * integrity error occurs. * * To obtain the full result of an encryption as expected by SP800-38F, the * caller must allocate a buffer of plaintext + 8 bytes: * * unsigned int datalen = ptlen + crypto_skcipher_ivsize(tfm); * u8 data[datalen]; * u8 *iv = data; * u8 *pt = data + crypto_skcipher_ivsize(tfm); * <ensure that pt contains the plaintext of size ptlen> |
6b611d98c
|
59 |
* sg_init_one(&sg, pt, ptlen); |
e28facde3
|
60 61 62 63 64 65 66 67 68 69 70 71 72 |
* skcipher_request_set_crypt(req, &sg, &sg, ptlen, iv); * * ==> After encryption, data now contains full KW result as per SP800-38F. * * In case of decryption, ciphertext now already has the expected length * and must be segmented appropriately: * * unsigned int datalen = CTLEN; * u8 data[datalen]; * <ensure that data contains full ciphertext> * u8 *iv = data; * u8 *ct = data + crypto_skcipher_ivsize(tfm); * unsigned int ctlen = datalen - crypto_skcipher_ivsize(tfm); |
6b611d98c
|
73 74 |
* sg_init_one(&sg, ct, ctlen); * skcipher_request_set_crypt(req, &sg, &sg, ctlen, iv); |
e28facde3
|
75 76 77 78 79 80 81 82 83 84 85 86 87 88 |
* * ==> After decryption (which hopefully does not return EBADMSG), the ct * pointer now points to the plaintext of size ctlen. * * Note 2: KWP is not implemented as this would defy in-place operation. * If somebody wants to wrap non-aligned data, he should simply pad * the input with zeros to fill it up to the 8 byte boundary. */ #include <linux/module.h> #include <linux/crypto.h> #include <linux/scatterlist.h> #include <crypto/scatterwalk.h> #include <crypto/internal/skcipher.h> |
/*
 * One key-wrapping working block. The cipher in KW operates on a
 * 128-bit quantity built from two 64-bit "semiblocks": A carries the
 * running integrity/IV value, R carries the data semiblock currently
 * being wrapped or unwrapped. Both members are big-endian because the
 * underlying block cipher consumes the struct as a raw 16-byte buffer.
 */
struct crypto_kw_block {
#define SEMIBSIZE 8	/* size of one semiblock in bytes */
	__be64 A;
	__be64 R;
};
e28facde3
|
94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 |
/* * Fast forward the SGL to the "end" length minus SEMIBSIZE. * The start in the SGL defined by the fast-forward is returned with * the walk variable */ static void crypto_kw_scatterlist_ff(struct scatter_walk *walk, struct scatterlist *sg, unsigned int end) { unsigned int skip = 0; /* The caller should only operate on full SEMIBLOCKs. */ BUG_ON(end < SEMIBSIZE); skip = end - SEMIBSIZE; while (sg) { if (sg->length > skip) { scatterwalk_start(walk, sg); scatterwalk_advance(walk, skip); break; } else skip -= sg->length; sg = sg_next(sg); } } |
/*
 * KW unwrap (decrypt) per RFC3394 / NIST SP800-38F.
 *
 * The IV handed in via req->iv is the first ciphertext semiblock; the
 * scatterlist data is the remaining ciphertext. The six unwrap rounds
 * process the semiblocks from the end of the buffer towards the front.
 *
 * Return: 0 on success, -EINVAL if the input is shorter than two
 * semiblocks or not semiblock-aligned, -EBADMSG if the recovered A
 * value does not match the SP800-38F default IV (integrity failure).
 */
static int crypto_kw_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct crypto_kw_block block;
	struct scatterlist *src, *dst;
	/* Counter starts at 6n (n = number of semiblocks) and counts down. */
	u64 t = 6 * ((req->cryptlen) >> 3);
	unsigned int i;
	int ret = 0;

	/*
	 * Require at least 2 semiblocks (note, the 3rd semiblock that is
	 * required by SP800-38F is the IV).
	 */
	if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)
		return -EINVAL;

	/* Place the IV into block A */
	memcpy(&block.A, req->iv, SEMIBSIZE);

	/*
	 * src scatterlist is read-only. dst scatterlist is r/w. During the
	 * first loop, src points to req->src and dst to req->dst. For any
	 * subsequent round, the code operates on req->dst only.
	 */
	src = req->src;
	dst = req->dst;

	for (i = 0; i < 6; i++) {
		struct scatter_walk src_walk, dst_walk;
		unsigned int nbytes = req->cryptlen;

		while (nbytes) {
			/*
			 * Unwrapping walks backwards: position the walk at
			 * the semiblock that ends at offset nbytes.
			 */
			crypto_kw_scatterlist_ff(&src_walk, src, nbytes);
			/* get the source block */
			scatterwalk_copychunks(&block.R, &src_walk, SEMIBSIZE,
					       false);

			/* perform KW operation: modify IV with counter */
			block.A ^= cpu_to_be64(t);
			t--;
			/* perform KW operation: decrypt block */
			crypto_cipher_decrypt_one(cipher, (u8 *)&block,
						  (u8 *)&block);

			/* move pointer by nbytes in the SGL */
			crypto_kw_scatterlist_ff(&dst_walk, dst, nbytes);
			/* Copy block->R into place */
			scatterwalk_copychunks(&block.R, &dst_walk, SEMIBSIZE,
					       true);

			nbytes -= SEMIBSIZE;
		}

		/* we now start to operate on the dst SGL only */
		src = req->dst;
		dst = req->dst;
	}

	/* Perform authentication check against the default IV */
	if (block.A != cpu_to_be64(0xa6a6a6a6a6a6a6a6ULL))
		ret = -EBADMSG;

	/* Wipe key-dependent intermediate state from the stack. */
	memzero_explicit(&block, sizeof(struct crypto_kw_block));

	return ret;
}
/*
 * KW wrap (encrypt) per RFC3394 / NIST SP800-38F.
 *
 * The plaintext semiblocks in the scatterlist are wrapped in place over
 * six rounds; the final A semiblock (the first ciphertext semiblock as
 * defined by SP800-38F) is written to req->iv for the caller to pick up.
 *
 * Return: 0 on success, -EINVAL if the input is shorter than two
 * semiblocks or not semiblock-aligned.
 */
static int crypto_kw_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct crypto_kw_block block;
	struct scatterlist *src, *dst;
	/* Wrapping counts the counter upwards from 1. */
	u64 t = 1;
	unsigned int i;

	/*
	 * Require at least 2 semiblocks (note, the 3rd semiblock that is
	 * required by SP800-38F is the IV that occupies the first semiblock.
	 * This means that the dst memory must be one semiblock larger than src.
	 * Also ensure that the given data is aligned to semiblock.
	 */
	if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)
		return -EINVAL;

	/*
	 * Place the predefined IV into block A -- for encrypt, the caller
	 * does not need to provide an IV, but he needs to fetch the final IV.
	 */
	block.A = cpu_to_be64(0xa6a6a6a6a6a6a6a6ULL);

	/*
	 * src scatterlist is read-only. dst scatterlist is r/w. During the
	 * first loop, src points to req->src and dst to req->dst. For any
	 * subsequent round, the code operates on req->dst only.
	 */
	src = req->src;
	dst = req->dst;

	for (i = 0; i < 6; i++) {
		struct scatter_walk src_walk, dst_walk;
		unsigned int nbytes = req->cryptlen;

		/* Wrapping walks forwards, so start both walks at offset 0. */
		scatterwalk_start(&src_walk, src);
		scatterwalk_start(&dst_walk, dst);

		while (nbytes) {
			/* get the source block */
			scatterwalk_copychunks(&block.R, &src_walk, SEMIBSIZE,
					       false);

			/* perform KW operation: encrypt block */
			crypto_cipher_encrypt_one(cipher, (u8 *)&block,
						  (u8 *)&block);
			/* perform KW operation: modify IV with counter */
			block.A ^= cpu_to_be64(t);
			t++;

			/* Copy block->R into place */
			scatterwalk_copychunks(&block.R, &dst_walk, SEMIBSIZE,
					       true);

			nbytes -= SEMIBSIZE;
		}

		/* we now start to operate on the dst SGL only */
		src = req->dst;
		dst = req->dst;
	}

	/* establish the IV for the caller to pick up */
	memcpy(req->iv, &block.A, SEMIBSIZE);

	/* Wipe key-dependent intermediate state from the stack. */
	memzero_explicit(&block, sizeof(struct crypto_kw_block));

	return 0;
}
6b611d98c
|
254 |
static int crypto_kw_create(struct crypto_template *tmpl, struct rtattr **tb) |
e28facde3
|
255 |
{ |
6b611d98c
|
256 257 |
struct skcipher_instance *inst; struct crypto_alg *alg; |
e28facde3
|
258 |
int err; |
6b611d98c
|
259 260 261 |
inst = skcipher_alloc_instance_simple(tmpl, tb, &alg); if (IS_ERR(inst)) return PTR_ERR(inst); |
e28facde3
|
262 |
|
6b611d98c
|
263 |
err = -EINVAL; |
e28facde3
|
264 265 |
/* Section 5.1 requirement for KW */ if (alg->cra_blocksize != sizeof(struct crypto_kw_block)) |
6b611d98c
|
266 |
goto out_free_inst; |
e28facde3
|
267 |
|
6b611d98c
|
268 269 270 |
inst->alg.base.cra_blocksize = SEMIBSIZE; inst->alg.base.cra_alignmask = 0; inst->alg.ivsize = SEMIBSIZE; |
e28facde3
|
271 |
|
6b611d98c
|
272 273 |
inst->alg.encrypt = crypto_kw_encrypt; inst->alg.decrypt = crypto_kw_decrypt; |
e28facde3
|
274 |
|
6b611d98c
|
275 276 277 278 |
err = skcipher_register_instance(tmpl, inst); if (err) goto out_free_inst; goto out_put_alg; |
e28facde3
|
279 |
|
6b611d98c
|
280 281 282 |
out_free_inst: inst->free(inst); out_put_alg: |
e28facde3
|
283 |
crypto_mod_put(alg); |
6b611d98c
|
284 |
return err; |
e28facde3
|
285 286 287 288 |
} static struct crypto_template crypto_kw_tmpl = { .name = "kw", |
6b611d98c
|
289 |
.create = crypto_kw_create, |
e28facde3
|
290 291 292 293 294 295 296 297 298 299 300 301 |
.module = THIS_MODULE, }; static int __init crypto_kw_init(void) { return crypto_register_template(&crypto_kw_tmpl); } static void __exit crypto_kw_exit(void) { crypto_unregister_template(&crypto_kw_tmpl); } |
c4741b230
|
302 |
subsys_initcall(crypto_kw_init); |
e28facde3
|
303 304 305 306 307 308 |
module_exit(crypto_kw_exit); MODULE_LICENSE("Dual BSD/GPL"); MODULE_AUTHOR("Stephan Mueller <smueller@chronox.de>"); MODULE_DESCRIPTION("Key Wrapping (RFC3394 / NIST SP800-38F)"); MODULE_ALIAS_CRYPTO("kw"); |