arch/x86/crypto/aesni-intel_glue.c  +5 −5

```diff
@@ -82,7 +82,7 @@ static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
 		return -EINVAL;
 	}
 
-	if (irq_fpu_usable())
+	if (!irq_fpu_usable())
 		err = crypto_aes_expand_key(ctx, in_key, key_len);
 	else {
 		kernel_fpu_begin();
@@ -103,7 +103,7 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
 
-	if (irq_fpu_usable())
+	if (!irq_fpu_usable())
 		crypto_aes_encrypt_x86(ctx, dst, src);
 	else {
 		kernel_fpu_begin();
@@ -116,7 +116,7 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
 
-	if (irq_fpu_usable())
+	if (!irq_fpu_usable())
 		crypto_aes_decrypt_x86(ctx, dst, src);
 	else {
 		kernel_fpu_begin();
@@ -342,7 +342,7 @@ static int ablk_encrypt(struct ablkcipher_request *req)
 	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
 	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
 
-	if (irq_fpu_usable()) {
+	if (!irq_fpu_usable()) {
 		struct ablkcipher_request *cryptd_req =
 			ablkcipher_request_ctx(req);
 		memcpy(cryptd_req, req, sizeof(*req));
@@ -363,7 +363,7 @@ static int ablk_decrypt(struct ablkcipher_request *req)
 	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
 	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
 
-	if (irq_fpu_usable()) {
+	if (!irq_fpu_usable()) {
 		struct ablkcipher_request *cryptd_req =
 			ablkcipher_request_ctx(req);
 		memcpy(cryptd_req, req, sizeof(*req));
```
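All five hunks invert the same test. `irq_fpu_usable()` returns true when the FPU/SIMD registers *may* be touched in the current context; the old code had the branches backwards, taking the slow C fallback exactly when AES-NI was safe to use and running the SSE path when it was not. Below is a minimal sketch of the corrected dispatch pattern, using the names shown in the diff plus `aesni_enc()` as an assumed stand-in for the AES-NI assembly entry point; it is kernel-context code, not standalone-compilable:

```c
/* Sketch only. aesni_enc() is assumed here as the AES-NI assembly
 * routine; every other symbol appears in the diff above. */
static void aes_encrypt_sketch(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable()) {
		/* FPU state must not be clobbered here (e.g. we are in an
		 * interrupt that preempted another FPU user): fall back to
		 * the generic C implementation. */
		crypto_aes_encrypt_x86(ctx, dst, src);
	} else {
		/* Safe to use SSE/AES-NI; bracket the call so the kernel
		 * saves and restores the interrupted task's FPU state. */
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}
```

The `ablk_encrypt`/`ablk_decrypt` hunks apply the same rule to the async path: when the FPU is unavailable, the request is copied (`memcpy(cryptd_req, req, sizeof(*req))`) and bounced to a helper thread so it can run later in process context.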
drivers/crypto/padlock-sha.c  +12 −2

```diff
@@ -24,6 +24,12 @@
 #include <asm/i387.h>
 #include "padlock.h"
 
+#ifdef CONFIG_64BIT
+#define STACK_ALIGN 16
+#else
+#define STACK_ALIGN 4
+#endif
+
 struct padlock_sha_desc {
 	struct shash_desc fallback;
 };
@@ -64,7 +70,9 @@ static int padlock_sha1_finup(struct shash_desc *desc, const u8 *in,
 	/* We can't store directly to *out as it may be unaligned. */
 	/* BTW Don't reduce the buffer size below 128 Bytes!
 	 *     PadLock microcode needs it that big. */
-	char result[128] __attribute__ ((aligned(PADLOCK_ALIGNMENT)));
+	char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
+		((aligned(STACK_ALIGN)));
+	char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);
 	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
 	struct sha1_state state;
 	unsigned int space;
@@ -128,7 +136,9 @@ static int padlock_sha256_finup(struct shash_desc *desc, const u8 *in,
 	/* We can't store directly to *out as it may be unaligned. */
 	/* BTW Don't reduce the buffer size below 128 Bytes!
 	 *     PadLock microcode needs it that big. */
-	char result[128] __attribute__ ((aligned(PADLOCK_ALIGNMENT)));
+	char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
+		((aligned(STACK_ALIGN)));
+	char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);
 	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
 	struct sha256_state state;
 	unsigned int space;
```
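The reasoning behind this change: `__attribute__((aligned(N)))` on a stack variable only helps if the stack frame itself is at least N-aligned, and the ABI guarantees just `STACK_ALIGN` bytes (4 on i386, 16 on x86-64). Requesting `PADLOCK_ALIGNMENT` directly could therefore yield a silently misaligned `result`. The fix over-allocates by `PADLOCK_ALIGNMENT - STACK_ALIGN` bytes and rounds the pointer up by hand with `PTR_ALIGN`, which always leaves a fully aligned 128-byte window inside the buffer. A self-contained userspace sketch of the same trick, where `PTR_ALIGN` re-implements the kernel macro and the constants are illustrative rather than the PadLock values:

```c
#include <stdint.h>
#include <stdio.h>

#define ALIGNMENT   16	/* stand-in for PADLOCK_ALIGNMENT */
#define STACK_ALIGN  4	/* worst-case guaranteed stack alignment (i386 ABI) */

/* Round p up to the next multiple of a (a must be a power of two);
 * mirrors the kernel's PTR_ALIGN macro. */
#define PTR_ALIGN(p, a) \
	((void *)(((uintptr_t)(p) + (a) - 1) & ~(uintptr_t)((a) - 1)))

int main(void)
{
	/* Over-allocate so an ALIGNMENT-aligned 128-byte region always
	 * fits, no matter where the compiler places buf. */
	char buf[128 + ALIGNMENT - STACK_ALIGN];
	char *result = PTR_ALIGN(&buf[0], ALIGNMENT);

	printf("buf=%p result=%p rem=%zu\n", (void *)buf, (void *)result,
	       (size_t)((uintptr_t)result % ALIGNMENT));	/* rem is 0 */
	return 0;
}
```

The adjustment `PTR_ALIGN` makes is at most `ALIGNMENT - STACK_ALIGN` bytes, which is exactly the amount of padding added to the array, so the 128-byte requirement the comment insists on is preserved in the worst case.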