/*
 * aes-armv8-bcm-glue.c - CBC wrapper code for ARMv8 Crypto Extensions AES
 *
 * Based on linux/arch/arm64/crypto/aes-glue.c
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/cpufeature.h>

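/*
 * CBC encrypt/decrypt primitives implemented in ARMv8 Crypto Extensions
 * assembly. 'rk' is the expanded key schedule, 'rounds' the number of AES
 * rounds for the key size, 'blocks' the number of complete 16-byte blocks
 * to process, and 'first' is non-zero only for the first chunk of a walk.
 */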
asmlinkage void aes_v8_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_v8_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 iv[], int first);

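/*
 * Walk the src/dst scatterlists and hand each run of complete blocks to
 * the assembler routine; any partial trailing block is reported back to
 * the blkcipher walk as leftover bytes.
 */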
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	/* kernel_neon_begin(); */
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_v8_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				   first);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	/* kernel_neon_end(); */
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	/* kernel_neon_begin(); */
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_v8_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_dec, rounds, blocks, walk.iv,
				   first);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	/* kernel_neon_end(); */
	return err;
}

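/*
 * Two algorithms are registered: the internal synchronous blkcipher
 * ("__cbc-aes-armv8", priority 0) that does the actual work, and the
 * async "cbc(aes)" wrapper built on ablk_helper that is exposed to users
 * and dispatches to the internal implementation.
 */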
static struct crypto_alg aes_algs[] = { {
	.cra_name = "__cbc-aes-armv8",
	.cra_driver_name = "__driver-cbc-aes-armv8",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_aes_ctx),
	.cra_alignmask = 7,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = crypto_aes_set_key,
		.encrypt = cbc_encrypt,
		.decrypt = cbc_decrypt,
	},
}, {
	.cra_name = "cbc(aes)",
	.cra_driver_name = "cbc-aes-armv8",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 7,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_ablkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = ablk_set_key,
		.encrypt = ablk_encrypt,
		.decrypt = ablk_decrypt,
	}
} };

static int __init aes_init(void)
{
	return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit aes_exit(void)
{
	crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);
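
MODULE_DESCRIPTION("AES-CBC using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");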