Blame view
drivers/crypto/padlock-sha.c
14.7 KB
2874c5fd2 treewide: Replace... |
1 |
// SPDX-License-Identifier: GPL-2.0-or-later |
6c8332751 [CRYPTO] padlock:... |
2 3 4 5 6 7 |
/* * Cryptographic API. * * Support for VIA PadLock hardware crypto engine. * * Copyright (c) 2006 Michal Ludvig <michal@logix.cz> |
6c8332751 [CRYPTO] padlock:... |
8 |
*/ |
7d0246082 crypto: padlock -... |
9 |
#include <crypto/internal/hash.h> |
214930887 crypto: padlock -... |
10 |
#include <crypto/padlock.h> |
5265eeb2b [CRYPTO] sha: Add... |
11 |
#include <crypto/sha.h> |
6010439f4 [CRYPTO] padlock:... |
12 |
#include <linux/err.h> |
6c8332751 [CRYPTO] padlock:... |
13 14 15 |
#include <linux/module.h> #include <linux/init.h> #include <linux/errno.h> |
6c8332751 [CRYPTO] padlock:... |
16 17 18 |
#include <linux/interrupt.h> #include <linux/kernel.h> #include <linux/scatterlist.h> |
3bd391f05 crypto: Add suppo... |
19 |
#include <asm/cpu_device_id.h> |
df6b35f40 x86/fpu: Rename i... |
20 |
#include <asm/fpu/api.h> |
4c6ab3ee4 crypto: padlock-s... |
21 |
|
bbbee4679 crypto: padlock -... |
22 23 |
/* Per-request state: only the descriptor of the software fallback hash.
 * The fallback's own state is appended behind this (see padlock_init_tfm(),
 * which grows descsize by the fallback's descsize). */
struct padlock_sha_desc {
	struct shash_desc fallback;
};

/* Per-transform context: the allocated software fallback transform. */
struct padlock_sha_ctx {
	struct crypto_shash *fallback;
};
6c8332751 [CRYPTO] padlock:... |
28 |
|
bbbee4679 crypto: padlock -... |
29 |
static int padlock_sha_init(struct shash_desc *desc) |
6c8332751 [CRYPTO] padlock:... |
30 |
{ |
bbbee4679 crypto: padlock -... |
31 32 |
struct padlock_sha_desc *dctx = shash_desc_ctx(desc); struct padlock_sha_ctx *ctx = crypto_shash_ctx(desc->tfm); |
6c8332751 [CRYPTO] padlock:... |
33 |
|
bbbee4679 crypto: padlock -... |
34 |
dctx->fallback.tfm = ctx->fallback; |
bbbee4679 crypto: padlock -... |
35 |
return crypto_shash_init(&dctx->fallback); |
6c8332751 [CRYPTO] padlock:... |
36 |
} |
bbbee4679 crypto: padlock -... |
37 38 |
static int padlock_sha_update(struct shash_desc *desc, const u8 *data, unsigned int length) |
6c8332751 [CRYPTO] padlock:... |
39 |
{ |
bbbee4679 crypto: padlock -... |
40 |
struct padlock_sha_desc *dctx = shash_desc_ctx(desc); |
6c8332751 [CRYPTO] padlock:... |
41 |
|
bbbee4679 crypto: padlock -... |
42 |
return crypto_shash_update(&dctx->fallback, data, length); |
6c8332751 [CRYPTO] padlock:... |
43 |
} |
a8d7ac279 crypto: padlock-s... |
44 45 46 47 48 49 50 51 52 53 54 55 56 |
/* Export the fallback's partial state (used for statesize round-trips). */
static int padlock_sha_export(struct shash_desc *desc, void *out)
{
	struct padlock_sha_desc *req = shash_desc_ctx(desc);

	return crypto_shash_export(&req->fallback, out);
}

/* Import a previously exported state; the fallback tfm pointer must be
 * re-bound first since an imported descriptor may be freshly allocated. */
static int padlock_sha_import(struct shash_desc *desc, const void *in)
{
	struct padlock_sha_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct padlock_sha_desc *req = shash_desc_ctx(desc);

	req->fallback.tfm = tctx->fallback;
	return crypto_shash_import(&req->fallback, in);
}
6c8332751 [CRYPTO] padlock:... |
59 60 61 62 63 64 |
/* Byte-swap each 32-bit word of the engine's result into the caller's
 * output buffer (the hardware leaves words in the opposite endianness). */
static inline void padlock_output_block(uint32_t *src, uint32_t *dst,
					size_t count)
{
	size_t i;

	for (i = 0; i < count; i++)
		dst[i] = swab32(src[i]);
}
bbbee4679 crypto: padlock -... |
65 66 |
/* Finish a SHA-1 digest with the PadLock XSHA1 instruction.
 *
 * Strategy: export the fallback's running state, top the message up to a
 * block boundary through the fallback if needed, then hand the final
 * partial data and the recovered state to the hardware, which performs
 * the last blocks plus padding itself.
 */
static int padlock_sha1_finup(struct shash_desc *desc, const u8 *in,
			      unsigned int count, u8 *out)
{
	/* We can't store directly to *out as it may be unaligned. */
	/* BTW Don't reduce the buffer size below 128 Bytes!
	 *     PadLock microcode needs it that big. */
	char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
		((aligned(STACK_ALIGN)));
	char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
	struct sha1_state state;
	unsigned int space;
	unsigned int leftover;
	int err;

	/* Pull the software fallback's partial state into a local copy. */
	err = crypto_shash_export(&dctx->fallback, &state);
	if (err)
		goto out;

	/* Total length is passed to the asm as unsigned long; if it would
	 * not fit, let the software fallback finish the digest instead. */
	if (state.count + count > ULONG_MAX)
		return crypto_shash_finup(&dctx->fallback, in, count, out);

	/* Bytes already buffered past the last full block; the -1/+1 maps a
	 * multiple of the block size to "full block" rather than 0. */
	leftover = ((state.count - 1) & (SHA1_BLOCK_SIZE - 1)) + 1;
	space =  SHA1_BLOCK_SIZE - leftover;
	if (space) {
		if (count > space) {
			/* Fill the open block via the fallback, then
			 * re-export the now block-aligned state. */
			err = crypto_shash_update(&dctx->fallback, in, space) ?:
			      crypto_shash_export(&dctx->fallback, &state);
			if (err)
				goto out;
			count -= space;
			in += space;
		} else {
			/* Everything fits in the state buffer: hash the
			 * buffered bytes + new bytes as one final chunk and
			 * round state.count down to the block boundary. */
			memcpy(state.buffer + leftover, in, count);
			in = state.buffer;
			count += leftover;
			state.count &= ~(SHA1_BLOCK_SIZE - 1);
		}
	}

	/* Seed the aligned result buffer with the intermediate digest. */
	memcpy(result, &state.state, SHA1_DIGEST_SIZE);

	/* ECX = total message length (for padding), EAX = bytes already
	 * consumed, ESI = final data, EDI = state in/out buffer. */
	asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
		      : \
		      : "c"((unsigned long)state.count + count), \
			"a"((unsigned long)state.count), \
			"S"(in), "D"(result));

	padlock_output_block((uint32_t *)result, (uint32_t *)out, 5);

out:
	return err;
}
bbbee4679 crypto: padlock -... |
117 118 119 120 121 122 123 124 125 |
/* Final with no trailing data: delegate to finup with zero count. */
static int padlock_sha1_final(struct shash_desc *desc, u8 *out)
{
	u8 buf[4];

	return padlock_sha1_finup(desc, buf, 0, out);
}

/* Finish a SHA-256 digest with the PadLock XSHA256 instruction.
 * Mirrors padlock_sha1_finup(): export the fallback state, top up to a
 * block boundary if needed, then let the hardware hash the tail + padding.
 */
static int padlock_sha256_finup(struct shash_desc *desc, const u8 *in,
				unsigned int count, u8 *out)
{
	/* We can't store directly to *out as it may be unaligned. */
	/* BTW Don't reduce the buffer size below 128 Bytes!
	 *     PadLock microcode needs it that big. */
	char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
		((aligned(STACK_ALIGN)));
	char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
	struct sha256_state state;
	unsigned int space;
	unsigned int leftover;
	int err;

	err = crypto_shash_export(&dctx->fallback, &state);
	if (err)
		goto out;

	/* Lengths are passed to the asm as unsigned long; fall back to the
	 * software implementation if the total would not fit. */
	if (state.count + count > ULONG_MAX)
		return crypto_shash_finup(&dctx->fallback, in, count, out);

	leftover = ((state.count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1;
	space =  SHA256_BLOCK_SIZE - leftover;
	if (space) {
		if (count > space) {
			/* Fill the open block via the fallback and re-export
			 * the now block-aligned state. */
			err = crypto_shash_update(&dctx->fallback, in, space) ?:
			      crypto_shash_export(&dctx->fallback, &state);
			if (err)
				goto out;
			count -= space;
			in += space;
		} else {
			/* Hash buffered + new bytes as one final chunk. */
			memcpy(state.buf + leftover, in, count);
			in = state.buf;
			count += leftover;
			/* Was SHA1_BLOCK_SIZE; use the SHA-256 constant for
			 * correctness-by-construction (both are 64 today). */
			state.count &= ~(SHA256_BLOCK_SIZE - 1);
		}
	}

	memcpy(result, &state.state, SHA256_DIGEST_SIZE);

	/* ECX = total length, EAX = bytes already consumed,
	 * ESI = final data, EDI = state in/out buffer. */
	asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
		      : \
		      : "c"((unsigned long)state.count + count), \
			"a"((unsigned long)state.count), \
			"S"(in), "D"(result));

	padlock_output_block((uint32_t *)result, (uint32_t *)out, 8);

out:
	return err;
}
bbbee4679 crypto: padlock -... |
177 |
/* Final with no trailing data: delegate to finup with zero count. */
static int padlock_sha256_final(struct shash_desc *desc, u8 *out)
{
	u8 unused[4];

	return padlock_sha256_finup(desc, unused, 0, out);
}
968956939 crypto: padlock-s... |
183 |
static int padlock_init_tfm(struct crypto_shash *hash) |
6c8332751 [CRYPTO] padlock:... |
184 |
{ |
968956939 crypto: padlock-s... |
185 186 |
const char *fallback_driver_name = crypto_shash_alg_name(hash); struct padlock_sha_ctx *ctx = crypto_shash_ctx(hash); |
7d0246082 crypto: padlock -... |
187 |
struct crypto_shash *fallback_tfm; |
6010439f4 [CRYPTO] padlock:... |
188 |
|
6c8332751 [CRYPTO] padlock:... |
189 |
/* Allocate a fallback and abort if it failed. */ |
7d0246082 crypto: padlock -... |
190 191 |
fallback_tfm = crypto_alloc_shash(fallback_driver_name, 0, CRYPTO_ALG_NEED_FALLBACK); |
6010439f4 [CRYPTO] padlock:... |
192 |
if (IS_ERR(fallback_tfm)) { |
6c8332751 [CRYPTO] padlock:... |
193 194 195 |
printk(KERN_WARNING PFX "Fallback driver '%s' could not be loaded! ", fallback_driver_name); |
968956939 crypto: padlock-s... |
196 |
return PTR_ERR(fallback_tfm); |
6c8332751 [CRYPTO] padlock:... |
197 |
} |
bbbee4679 crypto: padlock -... |
198 199 |
ctx->fallback = fallback_tfm; hash->descsize += crypto_shash_descsize(fallback_tfm); |
6c8332751 [CRYPTO] padlock:... |
200 201 |
return 0; } |
968956939 crypto: padlock-s... |
202 |
/* Release the software fallback allocated in padlock_init_tfm(). */
static void padlock_exit_tfm(struct crypto_shash *hash)
{
	struct padlock_sha_ctx *ctx = crypto_shash_ctx(hash);

	crypto_free_shash(ctx->fallback);
}
bbbee4679 crypto: padlock -... |
208 209 210 211 212 213 |
/* SHA-1 via PadLock XSHA1: updates go to the software fallback, the
 * final blocks are done in hardware by padlock_sha1_finup(). */
static struct shash_alg sha1_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init   	= 	padlock_sha_init,
	.update 	=	padlock_sha_update,
	.finup  	=	padlock_sha1_finup,
	.final  	=	padlock_sha1_final,
	.export		=	padlock_sha_export,
	.import		=	padlock_sha_import,
	.init_tfm	=	padlock_init_tfm,
	.exit_tfm	=	padlock_exit_tfm,
	/* Base size only; init_tfm adds the fallback's descsize. */
	.descsize	=	sizeof(struct padlock_sha_desc),
	/* Exported state is the fallback's plain sha1_state. */
	.statesize	=	sizeof(struct sha1_state),
	.base	=	{
		.cra_name		=	"sha1",
		.cra_driver_name	=	"sha1-padlock",
		.cra_priority		=	PADLOCK_CRA_PRIORITY,
		.cra_flags		=	CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		=	SHA1_BLOCK_SIZE,
		.cra_ctxsize		=	sizeof(struct padlock_sha_ctx),
		.cra_module		=	THIS_MODULE,
	}
};
bbbee4679 crypto: padlock -... |
230 231 232 233 234 235 |
/* SHA-256 via PadLock XSHA256: same split as sha1_alg — software
 * fallback for updates, hardware for the final blocks. */
static struct shash_alg sha256_alg = {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init   	= 	padlock_sha_init,
	.update 	=	padlock_sha_update,
	.finup  	=	padlock_sha256_finup,
	.final  	=	padlock_sha256_final,
	.export		=	padlock_sha_export,
	.import		=	padlock_sha_import,
	.init_tfm	=	padlock_init_tfm,
	.exit_tfm	=	padlock_exit_tfm,
	/* Base size only; init_tfm adds the fallback's descsize. */
	.descsize	=	sizeof(struct padlock_sha_desc),
	.statesize	=	sizeof(struct sha256_state),
	.base	=	{
		.cra_name		=	"sha256",
		.cra_driver_name	=	"sha256-padlock",
		.cra_priority		=	PADLOCK_CRA_PRIORITY,
		.cra_flags		=	CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		=	SHA256_BLOCK_SIZE,
		.cra_ctxsize		=	sizeof(struct padlock_sha_ctx),
		.cra_module		=	THIS_MODULE,
	}
};
0475add3c crypto: padlock -... |
252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 |
/* Add two shash_alg instance for hardware-implemented * * multiple-parts hash supported by VIA Nano Processor.*/ static int padlock_sha1_init_nano(struct shash_desc *desc) { struct sha1_state *sctx = shash_desc_ctx(desc); *sctx = (struct sha1_state){ .state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 }, }; return 0; } static int padlock_sha1_update_nano(struct shash_desc *desc, const u8 *data, unsigned int len) { struct sha1_state *sctx = shash_desc_ctx(desc); unsigned int partial, done; const u8 *src; /*The PHE require the out buffer must 128 bytes and 16-bytes aligned*/ u8 buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__ ((aligned(STACK_ALIGN))); u8 *dst = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT); |
0475add3c crypto: padlock -... |
275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 |
partial = sctx->count & 0x3f; sctx->count += len; done = 0; src = data; memcpy(dst, (u8 *)(sctx->state), SHA1_DIGEST_SIZE); if ((partial + len) >= SHA1_BLOCK_SIZE) { /* Append the bytes in state's buffer to a block to handle */ if (partial) { done = -partial; memcpy(sctx->buffer + partial, data, done + SHA1_BLOCK_SIZE); src = sctx->buffer; |
0475add3c crypto: padlock -... |
290 291 292 |
asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" : "+S"(src), "+D"(dst) \ : "a"((long)-1), "c"((unsigned long)1)); |
0475add3c crypto: padlock -... |
293 294 295 296 297 298 |
done += SHA1_BLOCK_SIZE; src = data + done; } /* Process the left bytes from the input data */ if (len - done >= SHA1_BLOCK_SIZE) { |
0475add3c crypto: padlock -... |
299 300 301 302 |
asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" : "+S"(src), "+D"(dst) : "a"((long)-1), "c"((unsigned long)((len - done) / SHA1_BLOCK_SIZE))); |
0475add3c crypto: padlock -... |
303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 |
done += ((len - done) - (len - done) % SHA1_BLOCK_SIZE); src = data + done; } partial = 0; } memcpy((u8 *)(sctx->state), dst, SHA1_DIGEST_SIZE); memcpy(sctx->buffer + partial, src, len - done); return 0; } static int padlock_sha1_final_nano(struct shash_desc *desc, u8 *out) { struct sha1_state *state = (struct sha1_state *)shash_desc_ctx(desc); unsigned int partial, padlen; __be64 bits; static const u8 padding[64] = { 0x80, }; bits = cpu_to_be64(state->count << 3); /* Pad out to 56 mod 64 */ partial = state->count & 0x3f; padlen = (partial < 56) ? (56 - partial) : ((64+56) - partial); padlock_sha1_update_nano(desc, padding, padlen); /* Append length field bytes */ padlock_sha1_update_nano(desc, (const u8 *)&bits, sizeof(bits)); /* Swap to output */ padlock_output_block((uint32_t *)(state->state), (uint32_t *)out, 5); return 0; } static int padlock_sha256_init_nano(struct shash_desc *desc) { struct sha256_state *sctx = shash_desc_ctx(desc); *sctx = (struct sha256_state){ .state = { SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3, \ SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7}, }; return 0; } static int padlock_sha256_update_nano(struct shash_desc *desc, const u8 *data, unsigned int len) { struct sha256_state *sctx = shash_desc_ctx(desc); unsigned int partial, done; const u8 *src; /*The PHE require the out buffer must 128 bytes and 16-bytes aligned*/ u8 buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__ ((aligned(STACK_ALIGN))); u8 *dst = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT); |
0475add3c crypto: padlock -... |
359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 |
partial = sctx->count & 0x3f; sctx->count += len; done = 0; src = data; memcpy(dst, (u8 *)(sctx->state), SHA256_DIGEST_SIZE); if ((partial + len) >= SHA256_BLOCK_SIZE) { /* Append the bytes in state's buffer to a block to handle */ if (partial) { done = -partial; memcpy(sctx->buf + partial, data, done + SHA256_BLOCK_SIZE); src = sctx->buf; |
0475add3c crypto: padlock -... |
374 375 376 |
asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" : "+S"(src), "+D"(dst) : "a"((long)-1), "c"((unsigned long)1)); |
0475add3c crypto: padlock -... |
377 378 379 380 381 382 |
done += SHA256_BLOCK_SIZE; src = data + done; } /* Process the left bytes from input data*/ if (len - done >= SHA256_BLOCK_SIZE) { |
0475add3c crypto: padlock -... |
383 384 385 386 |
asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" : "+S"(src), "+D"(dst) : "a"((long)-1), "c"((unsigned long)((len - done) / 64))); |
0475add3c crypto: padlock -... |
387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 |
done += ((len - done) - (len - done) % 64); src = data + done; } partial = 0; } memcpy((u8 *)(sctx->state), dst, SHA256_DIGEST_SIZE); memcpy(sctx->buf + partial, src, len - done); return 0; } static int padlock_sha256_final_nano(struct shash_desc *desc, u8 *out) { struct sha256_state *state = (struct sha256_state *)shash_desc_ctx(desc); unsigned int partial, padlen; __be64 bits; static const u8 padding[64] = { 0x80, }; bits = cpu_to_be64(state->count << 3); /* Pad out to 56 mod 64 */ partial = state->count & 0x3f; padlen = (partial < 56) ? (56 - partial) : ((64+56) - partial); padlock_sha256_update_nano(desc, padding, padlen); /* Append length field bytes */ padlock_sha256_update_nano(desc, (const u8 *)&bits, sizeof(bits)); /* Swap to output */ padlock_output_block((uint32_t *)(state->state), (uint32_t *)out, 8); return 0; } static int padlock_sha_export_nano(struct shash_desc *desc, void *out) { int statesize = crypto_shash_statesize(desc->tfm); void *sctx = shash_desc_ctx(desc); memcpy(out, sctx, statesize); return 0; } static int padlock_sha_import_nano(struct shash_desc *desc, const void *in) { int statesize = crypto_shash_statesize(desc->tfm); void *sctx = shash_desc_ctx(desc); memcpy(sctx, in, statesize); return 0; } static struct shash_alg sha1_alg_nano = { .digestsize = SHA1_DIGEST_SIZE, .init = padlock_sha1_init_nano, .update = padlock_sha1_update_nano, .final = padlock_sha1_final_nano, .export = padlock_sha_export_nano, .import = padlock_sha_import_nano, .descsize = sizeof(struct sha1_state), .statesize = sizeof(struct sha1_state), .base = { .cra_name = "sha1", .cra_driver_name = "sha1-padlock-nano", .cra_priority = PADLOCK_CRA_PRIORITY, |
0475add3c crypto: padlock -... |
455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 |
.cra_blocksize = SHA1_BLOCK_SIZE, .cra_module = THIS_MODULE, } }; static struct shash_alg sha256_alg_nano = { .digestsize = SHA256_DIGEST_SIZE, .init = padlock_sha256_init_nano, .update = padlock_sha256_update_nano, .final = padlock_sha256_final_nano, .export = padlock_sha_export_nano, .import = padlock_sha_import_nano, .descsize = sizeof(struct sha256_state), .statesize = sizeof(struct sha256_state), .base = { .cra_name = "sha256", .cra_driver_name = "sha256-padlock-nano", .cra_priority = PADLOCK_CRA_PRIORITY, |
0475add3c crypto: padlock -... |
473 474 475 476 |
.cra_blocksize = SHA256_BLOCK_SIZE, .cra_module = THIS_MODULE, } }; |
16d5cee5c crypto: padlock-s... |
477 |
/* CPUID match table: any x86 CPU advertising the PadLock Hash Engine. */
static const struct x86_cpu_id padlock_sha_ids[] = {
	X86_MATCH_FEATURE(X86_FEATURE_PHE, NULL),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, padlock_sha_ids);
6c8332751 [CRYPTO] padlock:... |
482 483 484 |
/* Probe: require PHE present and enabled, pick the classic or Nano
 * algorithm pair by CPU model, and register both hashes. */
static int __init padlock_init(void)
{
	int rc = -ENODEV;
	struct cpuinfo_x86 *c = &cpu_data(0);
	struct shash_alg *sha1;
	struct shash_alg *sha256;

	if (!x86_match_cpu(padlock_sha_ids) ||
	    !boot_cpu_has(X86_FEATURE_PHE_EN))
		return -ENODEV;

	/* Register the newly added algorithm module if on *
	* VIA Nano processor, or else just do as before */
	if (c->x86_model < 0x0f) {
		sha1 = &sha1_alg;
		sha256 = &sha256_alg;
	} else {
		sha1 = &sha1_alg_nano;
		sha256 = &sha256_alg_nano;
	}

	rc = crypto_register_shash(sha1);
	if (rc)
		goto out;

	rc = crypto_register_shash(sha256);
	if (rc)
		goto out_unreg1;

	/* Restored trailing \n lost in extraction. */
	printk(KERN_NOTICE PFX
		"Using VIA PadLock ACE for SHA1/SHA256 algorithms.\n");

	return 0;

out_unreg1:
	crypto_unregister_shash(sha1);
out:
	printk(KERN_ERR PFX
		"VIA PadLock SHA1/SHA256 initialization failed.\n");
	return rc;
}

/* Teardown: unregister the same pair that padlock_init() registered,
 * selected by the same CPU-model test. */
static void __exit padlock_fini(void)
{
	struct cpuinfo_x86 *c = &cpu_data(0);

	if (c->x86_model >= 0x0f) {
		crypto_unregister_shash(&sha1_alg_nano);
		crypto_unregister_shash(&sha256_alg_nano);
	} else {
		crypto_unregister_shash(&sha1_alg);
		crypto_unregister_shash(&sha256_alg);
	}
}

module_init(padlock_init);
module_exit(padlock_fini);

MODULE_DESCRIPTION("VIA PadLock SHA1/SHA256 algorithms support.");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");

MODULE_ALIAS_CRYPTO("sha1-all");
MODULE_ALIAS_CRYPTO("sha256-all");
MODULE_ALIAS_CRYPTO("sha1-padlock");
MODULE_ALIAS_CRYPTO("sha256-padlock");