Commit 95477377995aefa2ec1654a9a3777bd57ea99146

Authored by Herbert Xu
Committed by David S. Miller
1 parent 40725181b7

[CRYPTO] Add alignmask for low-level cipher implementations

The VIA Padlock device requires the input and output buffers to
be aligned on 16-byte boundaries.  This patch adds the alignmask
attribute for low-level cipher implementations to indicate their
alignment requirements.
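
As a sketch only (the driver name and surrounding fields are
hypothetical, not taken from the padlock driver), an implementation
with a 16-byte requirement would declare the new attribute like this:

	static struct crypto_alg my_cipher_alg = {
		.cra_name      = "mycipher",
		.cra_flags     = CRYPTO_ALG_TYPE_CIPHER,
		.cra_blocksize = 16,
		.cra_alignmask = 15,	/* buffers on 16-byte boundaries */
		/* cra_ctxsize, cra_module, cra_u etc. as usual */
	};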

The mid-level crypt() function will copy the input/output buffers
if they are not aligned correctly before they are passed to the
low-level implementation.
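
The alignment test itself is just a bitwise AND with the mask; a
minimal sketch (the real check operates on the scatterwalk offset, see
scatterwalk_aligned() below):

	/* nonzero iff p is not on an (alignmask + 1)-byte boundary */
	static inline unsigned long misaligned(const void *p,
					       unsigned int alignmask)
	{
		return (unsigned long)p & alignmask;
	}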

Strictly speaking, some of the software implementations require
the buffers to be aligned on 4-byte boundaries as they do 32-bit
loads.  However, it is not clear whether it is better to copy
the buffers or pay the penalty for unaligned loads/stores.
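
For illustration (not from any particular cipher), such code reads the
input as 32-bit words:

	u32 w = *(const u32 *)in;	/* assumes 4-byte alignment; may
					   trap or be slow when unaligned
					   on strict-alignment CPUs */

Setting cra_alignmask to 3 would instead make the mid-level code copy
such buffers.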

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: David S. Miller <davem@davemloft.net>

Showing 4 changed files with 49 additions and 7 deletions

crypto/api.c
... ... @@ -168,6 +168,12 @@
168 168 {
169 169 int ret = 0;
170 170 struct crypto_alg *q;
  171 +
  172 + if (alg->cra_alignmask & (alg->cra_alignmask + 1))
  173 + return -EINVAL;
  174 +
  175 + if (alg->cra_alignmask > PAGE_SIZE)
  176 + return -EINVAL;
171 177  
172 178 down_write(&crypto_alg_sem);
173 179  
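
The first check accepts only masks of the form 2^n - 1: adding 1 to
such a mask carries past every set bit, so the AND is zero.  Worked
values, as a sketch:

	/* valid:   15 (0b01111) & 16 (0b10000) == 0            */
	/* invalid: 12 (0b01100) & 13 (0b01101) == 12, rejected */

The second check bounds the mask, since crypt() below bounces
misaligned data through a single page.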
crypto/cipher.c
... ... @@ -41,8 +41,10 @@
41 41 struct scatter_walk *in,
42 42 struct scatter_walk *out, unsigned int bsize)
43 43 {
44   - u8 src[bsize];
45   - u8 dst[bsize];
  44 + unsigned int alignmask = desc->tfm->__crt_alg->cra_alignmask;
  45 + u8 buffer[bsize * 2 + alignmask];
  46 + u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
  47 + u8 *dst = src + bsize;
46 48 unsigned int n;
47 49  
48 50 n = scatterwalk_copychunks(src, in, bsize, 0);
49 51  
50 52  
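
crypt_slow() over-allocates the on-stack buffer by alignmask bytes and
rounds its start up to the required boundary.  ALIGN() is the usual
kernel round-up macro; the arithmetic, as a sketch:

	/* ALIGN(x, a) == ((x + a - 1) & ~(a - 1)) for power-of-two a,
	 * so with alignmask == 15 at most 15 bytes are skipped, which
	 * the "+ alignmask" in the buffer size pays for */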
... ... @@ -59,15 +61,24 @@
59 61 static inline unsigned int crypt_fast(const struct cipher_desc *desc,
60 62 struct scatter_walk *in,
61 63 struct scatter_walk *out,
62   - unsigned int nbytes)
  64 + unsigned int nbytes, u8 *tmp)
63 65 {
64 66 u8 *src, *dst;
65 67  
66 68 src = in->data;
67 69 dst = scatterwalk_samebuf(in, out) ? src : out->data;
68 70  
  71 + if (tmp) {
  72 + memcpy(tmp, in->data, nbytes);
  73 + src = tmp;
  74 + dst = tmp;
  75 + }
  76 +
69 77 nbytes = desc->prfn(desc, dst, src, nbytes);
70 78  
  79 + if (tmp)
  80 + memcpy(out->data, tmp, nbytes);
  81 +
71 82 scatterwalk_advance(in, nbytes);
72 83 scatterwalk_advance(out, nbytes);
73 84  
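
When tmp is non-NULL, crypt_fast() falls back to a classic bounce
buffer: copy in, transform in place in the aligned page, copy out.
Condensed, the flow above is:

	memcpy(tmp, in->data, nbytes);		/* unaligned src -> page */
	nbytes = desc->prfn(desc, tmp, tmp, nbytes);
	memcpy(out->data, tmp, nbytes);		/* page -> unaligned dst */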
... ... @@ -87,6 +98,8 @@
87 98 struct scatter_walk walk_in, walk_out;
88 99 struct crypto_tfm *tfm = desc->tfm;
89 100 const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
  101 + unsigned int alignmask = tfm->__crt_alg->cra_alignmask;
  102 + unsigned long buffer = 0;
90 103  
91 104 if (!nbytes)
92 105 return 0;
93 106  
94 107  
95 108  
... ... @@ -100,16 +113,27 @@
100 113 scatterwalk_start(&walk_out, dst);
101 114  
102 115 for(;;) {
103   - unsigned int n;
  116 + unsigned int n = nbytes;
  117 + u8 *tmp = NULL;
104 118  
  119 + if (!scatterwalk_aligned(&walk_in, alignmask) ||
  120 + !scatterwalk_aligned(&walk_out, alignmask)) {
  121 + if (!buffer) {
  122 + buffer = __get_free_page(GFP_ATOMIC);
  123 + if (!buffer)
  124 + n = 0;
  125 + }
  126 + tmp = (u8 *)buffer;
  127 + }
  128 +
105 129 scatterwalk_map(&walk_in, 0);
106 130 scatterwalk_map(&walk_out, 1);
107 131  
108   - n = scatterwalk_clamp(&walk_in, nbytes);
  132 + n = scatterwalk_clamp(&walk_in, n);
109 133 n = scatterwalk_clamp(&walk_out, n);
110 134  
111 135 if (likely(n >= bsize))
112   - n = crypt_fast(desc, &walk_in, &walk_out, n);
  136 + n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
113 137 else
114 138 n = crypt_slow(desc, &walk_in, &walk_out, bsize);
115 139  
116 140  
... ... @@ -119,10 +143,15 @@
119 143 scatterwalk_done(&walk_out, 1, nbytes);
120 144  
121 145 if (!nbytes)
122   - return 0;
  146 + break;
123 147  
124 148 crypto_yield(tfm);
125 149 }
  150 +
  151 + if (buffer)
  152 + free_page(buffer);
  153 +
  154 + return 0;
126 155 }
127 156  
128 157 static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
crypto/scatterwalk.h
... ... @@ -55,6 +55,12 @@
55 55 walk->len_this_segment -= nbytes;
56 56 }
57 57  
  58 +static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk,
  59 + unsigned int alignmask)
  60 +{
  61 + return !(walk->offset & alignmask);
  62 +}
  63 +
58 64 void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg);
59 65 int scatterwalk_copychunks(void *buf, struct scatter_walk *walk, size_t nbytes, int out);
60 66 void scatterwalk_map(struct scatter_walk *walk, int out);
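
scatterwalk_aligned() applies the same mask convention to the walk
offset: with alignmask == 15, offset 32 passes (32 & 15 == 0) while
offset 20 does not (20 & 15 == 4), which is what sends crypt() down
the bounce-buffer path.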
include/linux/crypto.h
... ... @@ -124,6 +124,7 @@
124 124 u32 cra_flags;
125 125 unsigned int cra_blocksize;
126 126 unsigned int cra_ctxsize;
  127 + unsigned int cra_alignmask;
127 128 const char cra_name[CRYPTO_MAX_ALG_NAME];
128 129  
129 130 union {