crypto: vmx - Adding enable_kernel_vsx() to access VSX instructions
author Leonidas Da Silva Barbosa <leosilva@linux.vnet.ibm.com>
Mon, 13 Jul 2015 16:51:39 +0000 (13:51 -0300)
committer Herbert Xu <herbert@gondor.apana.org.au>
Tue, 14 Jul 2015 06:56:48 +0000 (14:56 +0800)
The vmx-crypto driver makes use of some VSX instructions, which are
only available when VSX is enabled. When VSX is not enabled,
vmx-crypto fails with a VSX unavailable exception.

To fix this, enable_kernel_vsx() calls were added to turn on
VSX instructions for vmx-crypto.
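
For reference, a minimal sketch of the pattern each call site now
follows, condensed from the hunks below rather than quoted verbatim
(the wrapper name is made up for the sketch; enable_kernel_altivec(),
enable_kernel_vsx() and aes_p8_encrypt() are the real interfaces
touched by the patch):

	/*
	 * Sketch only: VSX must be granted alongside AltiVec before the
	 * P8 assembly runs, inside a non-preemptible, pagefault-disabled
	 * section so the vector/VSX state is not lost to a context switch.
	 */
	static void p8_aes_encrypt_sketch(struct p8_aes_ctx *ctx,
					  u8 *dst, const u8 *src)
	{
		preempt_disable();
		pagefault_disable();
		enable_kernel_altivec();
		enable_kernel_vsx();	/* new: allow VSX use in kernel mode */
		aes_p8_encrypt(src, dst, &ctx->enc_key);
		pagefault_enable();
		preempt_enable();
	}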

Signed-off-by: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
drivers/crypto/vmx/aes.c
drivers/crypto/vmx/aes_cbc.c
drivers/crypto/vmx/aes_ctr.c
drivers/crypto/vmx/ghash.c

drivers/crypto/vmx/aes.c
index e79e567e43aacae4584b32c2d7fc9ae1e6c1e300..263af709e53604ee5a049f707d2e9f5795031d1d 100644
@@ -84,6 +84,7 @@ static int p8_aes_setkey(struct crypto_tfm *tfm, const u8 *key,
        preempt_disable();
        pagefault_disable();
        enable_kernel_altivec();
+       enable_kernel_vsx();
        ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
        ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
        pagefault_enable();
@@ -103,6 +104,7 @@ static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
                preempt_disable();
                pagefault_disable();
                enable_kernel_altivec();
+               enable_kernel_vsx();
                aes_p8_encrypt(src, dst, &ctx->enc_key);
                pagefault_enable();
                preempt_enable();
@@ -119,6 +121,7 @@ static void p8_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
                preempt_disable();
                pagefault_disable();
                enable_kernel_altivec();
+               enable_kernel_vsx();
                aes_p8_decrypt(src, dst, &ctx->dec_key);
                pagefault_enable();
                preempt_enable();
drivers/crypto/vmx/aes_cbc.c
index 7299995c78ec3b34ea76e289cf84dc877f1175ef..0b8fe2ec5315fc8253431ca533d953b9c72d7243 100644
@@ -85,6 +85,7 @@ static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
        preempt_disable();
        pagefault_disable();
        enable_kernel_altivec();
+       enable_kernel_vsx();
        ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
        ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
        pagefault_enable();
@@ -115,6 +116,7 @@ static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
                preempt_disable();
                pagefault_disable();
                enable_kernel_altivec();
+               enable_kernel_vsx();
 
                blkcipher_walk_init(&walk, dst, src, nbytes);
                ret = blkcipher_walk_virt(desc, &walk);
@@ -155,6 +157,7 @@ static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
                preempt_disable();
                pagefault_disable();
                enable_kernel_altivec();
+               enable_kernel_vsx();
 
                blkcipher_walk_init(&walk, dst, src, nbytes);
                ret = blkcipher_walk_virt(desc, &walk);
drivers/crypto/vmx/aes_ctr.c
index 7adae42a7b79ea81a5bc35ae2db9db9b6a2437e2..1e754ae4e8509ee490af0d392dafb9f70a97e313 100644
@@ -82,6 +82,7 @@ static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
 
        pagefault_disable();
        enable_kernel_altivec();
+       enable_kernel_vsx();
        ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
        pagefault_enable();
 
@@ -100,6 +101,7 @@ static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
 
        pagefault_disable();
        enable_kernel_altivec();
+       enable_kernel_vsx();
        aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
        pagefault_enable();
 
@@ -131,6 +133,7 @@ static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
                while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                        pagefault_disable();
                        enable_kernel_altivec();
+                       enable_kernel_vsx();
                        aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
                                                    walk.dst.virt.addr,
                                                    (nbytes &
drivers/crypto/vmx/ghash.c
index b5e29002b66678337c54ec7858634d43285c4213..2183a2e77641e0682ca113951430765bcbcca4fc 100644
@@ -119,6 +119,7 @@ static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
        preempt_disable();
        pagefault_disable();
        enable_kernel_altivec();
+       enable_kernel_vsx();
        enable_kernel_fp();
        gcm_init_p8(ctx->htable, (const u64 *) key);
        pagefault_enable();
@@ -149,6 +150,7 @@ static int p8_ghash_update(struct shash_desc *desc,
                        preempt_disable();
                        pagefault_disable();
                        enable_kernel_altivec();
+                       enable_kernel_vsx();
                        enable_kernel_fp();
                        gcm_ghash_p8(dctx->shash, ctx->htable,
                                     dctx->buffer, GHASH_DIGEST_SIZE);
@@ -163,6 +165,7 @@ static int p8_ghash_update(struct shash_desc *desc,
                        preempt_disable();
                        pagefault_disable();
                        enable_kernel_altivec();
+                       enable_kernel_vsx();
                        enable_kernel_fp();
                        gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
                        pagefault_enable();
@@ -193,6 +196,7 @@ static int p8_ghash_final(struct shash_desc *desc, u8 *out)
                        preempt_disable();
                        pagefault_disable();
                        enable_kernel_altivec();
+                       enable_kernel_vsx();
                        enable_kernel_fp();
                        gcm_ghash_p8(dctx->shash, ctx->htable,
                                     dctx->buffer, GHASH_DIGEST_SIZE);