/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct ctr_instance_ctx {
	struct crypto_spawn alg;
	unsigned int noncesize;
	unsigned int ivsize;
	unsigned int countersize;
};

struct crypto_ctr_ctx {
	struct crypto_cipher *child;
	u8 *nonce;
};

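/*
 * The key passed to setkey is the underlying cipher key with the nonce
 * appended to it.  For illustration only (not mandated by this file):
 * an instance such as "ctr(aes,4,8,4)" takes a 20-byte key, of which
 * the first 16 bytes become the AES key and the last 4 the nonce.
 */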
static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	struct ctr_instance_ctx *ictx =
		crypto_instance_ctx(crypto_tfm_alg_instance(parent));
	unsigned int noncelen = ictx->noncesize;
	int err = 0;

	/* The nonce is stored at the end of the key. */
	if (keylen < noncelen)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - noncelen), noncelen);

	keylen -= noncelen;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
				     CRYPTO_TFM_RES_MASK);

	return err;
}

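/*
 * CTR turns the block cipher into a stream cipher: each keystream block
 * is the encryption of the current counter block, xored into the data,
 * after which the counter portion is incremented.  Since only the
 * keystream is truncated, a trailing partial block needs no padding,
 * and encryption and decryption are the same operation.
 */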
static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm, u8 *ctrblk,
				    unsigned int countersize)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm) |
				  (__alignof__(u32) - 1);
	u8 ks[bsize + alignmask];
	u8 *keystream = (u8 *)ALIGN((unsigned long)ks, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(keystream, src, min(nbytes, bsize));

		/* copy result into dst */
		memcpy(dst, keystream, min(nbytes, bsize));

		/* increment counter in counterblock */
		crypto_inc(ctrblk + bsize - countersize, countersize);

		if (nbytes < bsize)
			break;

		src += bsize;
		dst += bsize;
		nbytes -= bsize;

	} while (nbytes);

	return 0;
}

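/*
 * In-place variant of the above: the keystream is generated into the
 * local buffer and xored straight into the source pages, so no separate
 * destination copy is needed.
 */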
static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm, u8 *ctrblk,
				    unsigned int countersize)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm) |
				  (__alignof__(u32) - 1);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 ks[bsize + alignmask];
	u8 *keystream = (u8 *)ALIGN((unsigned long)ks, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, min(nbytes, bsize));

		/* increment counter in counterblock */
		crypto_inc(ctrblk + bsize - countersize, countersize);

		if (nbytes < bsize)
			break;

		src += bsize;
		nbytes -= bsize;

	} while (nbytes);

	return 0;
}

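/*
 * The counter block is laid out as nonce || IV || counter, with the
 * counter occupying the last countersize bytes.  The size checks in
 * crypto_ctr_alloc() guarantee the counter never starts past the end
 * of the IV (it may overlap the IV's tail).  The counter is incremented
 * once before the first block is encrypted.
 */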
static int crypto_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	unsigned int bsize = crypto_cipher_blocksize(child);
	struct ctr_instance_ctx *ictx =
		crypto_instance_ctx(crypto_tfm_alg_instance(&tfm->base));
	unsigned long alignmask = crypto_cipher_alignmask(child) |
				  (__alignof__(u32) - 1);
	u8 cblk[bsize + alignmask];
	u8 *counterblk = (u8 *)ALIGN((unsigned long)cblk, alignmask + 1);
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	/* set up counter block */
	memset(counterblk, 0, bsize);
	memcpy(counterblk, ctx->nonce, ictx->noncesize);
	memcpy(counterblk + ictx->noncesize, walk.iv, ictx->ivsize);

	/* initialize counter portion of counter block */
	crypto_inc(counterblk + bsize - ictx->countersize, ictx->countersize);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, child,
							  counterblk,
							  ictx->countersize);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, child,
							  counterblk,
							  ictx->countersize);

		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	return err;
}

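/*
 * Each tfm carries its own nonce buffer (filled in by setkey) and a
 * child cipher transform obtained from the instance's spawn.
 */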
static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct ctr_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	ctx->nonce = kzalloc(ictx->noncesize, GFP_KERNEL);
	if (!ctx->nonce)
		return -ENOMEM;

	cipher = crypto_spawn_cipher(&ictx->alg);
	if (IS_ERR(cipher)) {
		/* cra_exit is not called if init fails, so free here. */
		kfree(ctx->nonce);
		return PTR_ERR(cipher);
	}

	ctx->child = cipher;

	return 0;
}

static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	kfree(ctx->nonce);
	crypto_free_cipher(ctx->child);
}

static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	struct ctr_instance_ctx *ictx;
	unsigned int noncesize;
	unsigned int ivsize;
	unsigned int countersize;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
			      CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_PTR(PTR_ERR(alg));

	err = crypto_attr_u32(tb[2], &noncesize);
	if (err)
		goto out_put_alg;

	err = crypto_attr_u32(tb[3], &ivsize);
	if (err)
		goto out_put_alg;

	err = crypto_attr_u32(tb[4], &countersize);
	if (err)
		goto out_put_alg;

	/* Verify the sizes of the nonce, IV and counter.
	 * The counter must be at least 4 bytes.
	 */
	err = -EINVAL;
	if (((noncesize + ivsize + countersize) < alg->cra_blocksize) ||
	    ((noncesize + ivsize) > alg->cra_blocksize) ||
	    (countersize > alg->cra_blocksize) || (countersize < 4))
		goto out_put_alg;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if ((alg->cra_blocksize - countersize) % 4)
		goto out_put_alg;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
		     "ctr(%s,%u,%u,%u)", alg->cra_name, noncesize,
		     ivsize, countersize) >= CRYPTO_MAX_ALG_NAME) {
		goto err_free_inst;
	}

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ctr(%s,%u,%u,%u)", alg->cra_driver_name, noncesize,
		     ivsize, countersize) >= CRYPTO_MAX_ALG_NAME) {
		goto err_free_inst;
	}

	ictx = crypto_instance_ctx(inst);
	ictx->noncesize = noncesize;
	ictx->ivsize = ivsize;
	ictx->countersize = countersize;

	err = crypto_init_spawn(&ictx->alg, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto err_free_inst;

	err = 0;
	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = __alignof__(u32) - 1;
	inst->alg.cra_type = &crypto_blkcipher_type;

	inst->alg.cra_blkcipher.ivsize = ivsize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize
					      + noncesize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize
					      + noncesize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);

	inst->alg.cra_init = crypto_ctr_init_tfm;
	inst->alg.cra_exit = crypto_ctr_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;

err_free_inst:
	if (err)
		kfree(inst);

out_put_alg:
	crypto_mod_put(alg);

	if (err)
		inst = ERR_PTR(err);

	return inst;
}

static void crypto_ctr_free(struct crypto_instance *inst)
{
	struct ctr_instance_ctx *ictx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ictx->alg);
	kfree(inst);
}

static struct crypto_template crypto_ctr_tmpl = {
	.name = "ctr",
	.alloc = crypto_ctr_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};

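/*
 * Instance names encode the template parameters as
 * ctr(cipher,noncesize,ivsize,countersize).  A minimal usage sketch
 * (illustrative only; assumes an AES cipher is available):
 *
 *	struct crypto_blkcipher *tfm;
 *
 *	tfm = crypto_alloc_blkcipher("ctr(aes,4,8,4)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_blkcipher(tfm);
 */
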
static int __init crypto_ctr_module_init(void)
{
	return crypto_register_template(&crypto_ctr_tmpl);
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_template(&crypto_ctr_tmpl);
}

module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR Counter block mode");