Blame view
include/crypto/ctr.h
1.42 KB
2874c5fd2 treewide: Replace... |
1 |
/* SPDX-License-Identifier: GPL-2.0-or-later */ |
5311f248b [CRYPTO] ctr: Ref... |
2 3 4 5 |
/*
 * CTR: Counter mode
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
5311f248b [CRYPTO] ctr: Ref... |
6 7 8 9 |
*/ #ifndef _CRYPTO_CTR_H #define _CRYPTO_CTR_H |
d9ec772d9 crypto: ctr - add... |
10 11 12 13 |
#include <crypto/algapi.h> #include <crypto/internal/skcipher.h> #include <linux/string.h> #include <linux/types.h> |
5311f248b [CRYPTO] ctr: Ref... |
14 15 16 |
#define CTR_RFC3686_NONCE_SIZE 4 #define CTR_RFC3686_IV_SIZE 8 #define CTR_RFC3686_BLOCK_SIZE 16 |
d9ec772d9 crypto: ctr - add... |
17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 |
/*
 * crypto_ctr_encrypt_walk - CTR-mode encrypt/decrypt a skcipher request
 * using a caller-supplied single-block cipher function.
 *
 * @req: the skcipher request whose source data is transformed into the
 *       destination scatterlist; walk.iv holds the running counter block
 * @fn:  encrypts exactly one cipher block: fn(tfm, counter_in, keystream_out)
 *
 * For each chunk the walk exposes, a keystream block is produced from the
 * current counter via @fn, XORed into the output, and the counter is
 * incremented with crypto_inc().  Since CTR is an XOR stream, the same
 * routine serves for both encryption and decryption.
 *
 * Returns 0 on success or a negative errno from the walk (or -EINVAL for
 * a non-power-of-2 block size).
 */
static inline int crypto_ctr_encrypt_walk(struct skcipher_request *req,
					  void (*fn)(struct crypto_skcipher *,
						     const u8 *, u8 *))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	int blocksize = crypto_skcipher_chunksize(tfm);
	u8 buf[MAX_CIPHER_BLOCKSIZE];	/* one keystream block from fn() */
	struct skcipher_walk walk;
	int err;

	/* avoid integer division due to variable blocksize parameter */
	if (WARN_ON_ONCE(!is_power_of_2(blocksize)))
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		int nbytes = walk.nbytes;
		int tail = 0;

		/*
		 * On intermediate steps, only process whole blocks; the
		 * sub-block tail is handed back to skcipher_walk_done() so
		 * it reappears at the start of the next step.  The final
		 * step (nbytes == walk.total) may include a partial block.
		 */
		if (nbytes < walk.total) {
			tail = walk.nbytes & (blocksize - 1);
			nbytes -= tail;
		}

		do {
			/* bsize < blocksize only for a final partial block */
			int bsize = min(nbytes, blocksize);

			/* keystream = E(counter); then dst = src ^ keystream */
			fn(tfm, walk.iv, buf);

			crypto_xor_cpy(dst, src, buf, bsize);
			/* advance the counter block for the next keystream */
			crypto_inc(walk.iv, blocksize);

			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (nbytes > 0);

		err = skcipher_walk_done(&walk, tail);
	}

	return err;
}
5311f248b [CRYPTO] ctr: Ref... |
61 |
#endif /* _CRYPTO_CTR_H */ |