/* crypto/tea.c */
/*
 * Cryptographic API.
 *
 * TEA, XTEA, and XETA crypto algorithms
 *
 * The TEA and Xtended TEA algorithms were developed by David Wheeler
 * and Roger Needham at the Computer Laboratory of Cambridge University.
 *
 * Due to the order of evaluation in XTEA many people have incorrectly
 * implemented it. XETA (XTEA in the wrong order), exists for
 * compatibility with these implementations.
 *
 * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */
21
22 #include <linux/init.h>
23 #include <linux/module.h>
24 #include <linux/mm.h>
25 #include <asm/byteorder.h>
26 #include <asm/scatterlist.h>
27 #include <linux/crypto.h>
28 #include <linux/types.h>
29
#define TEA_KEY_SIZE		16	/* 128-bit key, four 32-bit words */
#define TEA_BLOCK_SIZE		8	/* 64-bit block: two 32-bit halves */
#define TEA_ROUNDS		32	/* cycles; each cycle is two Feistel rounds */
#define TEA_DELTA		0x9e3779b9	/* 2^32 / golden ratio */

#define XTEA_KEY_SIZE		16	/* same key/block geometry as TEA */
#define XTEA_BLOCK_SIZE		8
#define XTEA_ROUNDS		32
#define XTEA_DELTA		0x9e3779b9
/* Per-tfm context for TEA: the expanded key is just the four raw key words. */
struct tea_ctx {
	u32 KEY[4];
};
43
/* Per-tfm context for XTEA/XETA; layout-identical to struct tea_ctx. */
struct xtea_ctx {
	u32 KEY[4];
};
47
48 static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
49 unsigned int key_len)
50 {
51 struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
52 const __le32 *key = (const __le32 *)in_key;
53
54 ctx->KEY[0] = le32_to_cpu(key[0]);
55 ctx->KEY[1] = le32_to_cpu(key[1]);
56 ctx->KEY[2] = le32_to_cpu(key[2]);
57 ctx->KEY[3] = le32_to_cpu(key[3]);
58
59 return 0;
60
61 }
62
63 static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
64 {
65 u32 y, z, n, sum = 0;
66 u32 k0, k1, k2, k3;
67 struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
68 const __le32 *in = (const __le32 *)src;
69 __le32 *out = (__le32 *)dst;
70
71 y = le32_to_cpu(in[0]);
72 z = le32_to_cpu(in[1]);
73
74 k0 = ctx->KEY[0];
75 k1 = ctx->KEY[1];
76 k2 = ctx->KEY[2];
77 k3 = ctx->KEY[3];
78
79 n = TEA_ROUNDS;
80
81 while (n-- > 0) {
82 sum += TEA_DELTA;
83 y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
84 z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
85 }
86
87 out[0] = cpu_to_le32(y);
88 out[1] = cpu_to_le32(z);
89 }
90
91 static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
92 {
93 u32 y, z, n, sum;
94 u32 k0, k1, k2, k3;
95 struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
96 const __le32 *in = (const __le32 *)src;
97 __le32 *out = (__le32 *)dst;
98
99 y = le32_to_cpu(in[0]);
100 z = le32_to_cpu(in[1]);
101
102 k0 = ctx->KEY[0];
103 k1 = ctx->KEY[1];
104 k2 = ctx->KEY[2];
105 k3 = ctx->KEY[3];
106
107 sum = TEA_DELTA << 5;
108
109 n = TEA_ROUNDS;
110
111 while (n-- > 0) {
112 z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
113 y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
114 sum -= TEA_DELTA;
115 }
116
117 out[0] = cpu_to_le32(y);
118 out[1] = cpu_to_le32(z);
119 }
120
121 static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
122 unsigned int key_len)
123 {
124 struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
125 const __le32 *key = (const __le32 *)in_key;
126
127 ctx->KEY[0] = le32_to_cpu(key[0]);
128 ctx->KEY[1] = le32_to_cpu(key[1]);
129 ctx->KEY[2] = le32_to_cpu(key[2]);
130 ctx->KEY[3] = le32_to_cpu(key[3]);
131
132 return 0;
133
134 }
135
136 static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
137 {
138 u32 y, z, sum = 0;
139 u32 limit = XTEA_DELTA * XTEA_ROUNDS;
140 struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
141 const __le32 *in = (const __le32 *)src;
142 __le32 *out = (__le32 *)dst;
143
144 y = le32_to_cpu(in[0]);
145 z = le32_to_cpu(in[1]);
146
147 while (sum != limit) {
148 y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum&3]);
149 sum += XTEA_DELTA;
150 z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 &3]);
151 }
152
153 out[0] = cpu_to_le32(y);
154 out[1] = cpu_to_le32(z);
155 }
156
157 static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
158 {
159 u32 y, z, sum;
160 struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
161 const __le32 *in = (const __le32 *)src;
162 __le32 *out = (__le32 *)dst;
163
164 y = le32_to_cpu(in[0]);
165 z = le32_to_cpu(in[1]);
166
167 sum = XTEA_DELTA * XTEA_ROUNDS;
168
169 while (sum) {
170 z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 & 3]);
171 sum -= XTEA_DELTA;
172 y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
173 }
174
175 out[0] = cpu_to_le32(y);
176 out[1] = cpu_to_le32(z);
177 }
178
179
180 static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
181 {
182 u32 y, z, sum = 0;
183 u32 limit = XTEA_DELTA * XTEA_ROUNDS;
184 struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
185 const __le32 *in = (const __le32 *)src;
186 __le32 *out = (__le32 *)dst;
187
188 y = le32_to_cpu(in[0]);
189 z = le32_to_cpu(in[1]);
190
191 while (sum != limit) {
192 y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum&3];
193 sum += XTEA_DELTA;
194 z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 &3];
195 }
196
197 out[0] = cpu_to_le32(y);
198 out[1] = cpu_to_le32(z);
199 }
200
201 static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
202 {
203 u32 y, z, sum;
204 struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
205 const __le32 *in = (const __le32 *)src;
206 __le32 *out = (__le32 *)dst;
207
208 y = le32_to_cpu(in[0]);
209 z = le32_to_cpu(in[1]);
210
211 sum = XTEA_DELTA * XTEA_ROUNDS;
212
213 while (sum) {
214 z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 & 3];
215 sum -= XTEA_DELTA;
216 y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
217 }
218
219 out[0] = cpu_to_le32(y);
220 out[1] = cpu_to_le32(z);
221 }
222
/* crypto_alg descriptor for classic TEA (simple single-block cipher). */
static struct crypto_alg tea_alg = {
	.cra_name		=	"tea",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	TEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct tea_ctx),
	.cra_alignmask		=	3,	/* keys/blocks accessed as u32 */
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(tea_alg.cra_list),
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	TEA_KEY_SIZE,
	.cia_max_keysize	=	TEA_KEY_SIZE,
	.cia_setkey		= 	tea_setkey,
	.cia_encrypt		=	tea_encrypt,
	.cia_decrypt		=	tea_decrypt } }
};
238
/* crypto_alg descriptor for XTEA (correct round-operation order). */
static struct crypto_alg xtea_alg = {
	.cra_name		=	"xtea",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct xtea_ctx),
	.cra_alignmask		=	3,	/* keys/blocks accessed as u32 */
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(xtea_alg.cra_list),
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	.cia_setkey		= 	xtea_setkey,
	.cia_encrypt		=	xtea_encrypt,
	.cia_decrypt		=	xtea_decrypt } }
};
254
255 static struct crypto_alg xeta_alg = {
256 .cra_name = "xeta",
257 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
258 .cra_blocksize = XTEA_BLOCK_SIZE,
259 .cra_ctxsize = sizeof (struct xtea_ctx),
260 .cra_alignmask = 3,
261 .cra_module = THIS_MODULE,
262 .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list),
263 .cra_u = { .cipher = {
264 .cia_min_keysize = XTEA_KEY_SIZE,
265 .cia_max_keysize = XTEA_KEY_SIZE,
266 .cia_setkey = xtea_setkey,
267 .cia_encrypt = xeta_encrypt,
268 .cia_decrypt = xeta_decrypt } }
269 };
270
271 static int __init init(void)
272 {
273 int ret = 0;
274
275 ret = crypto_register_alg(&tea_alg);
276 if (ret < 0)
277 goto out;
278
279 ret = crypto_register_alg(&xtea_alg);
280 if (ret < 0) {
281 crypto_unregister_alg(&tea_alg);
282 goto out;
283 }
284
285 ret = crypto_register_alg(&xeta_alg);
286 if (ret < 0) {
287 crypto_unregister_alg(&tea_alg);
288 crypto_unregister_alg(&xtea_alg);
289 goto out;
290 }
291
292 out:
293 return ret;
294 }
295
296 static void __exit fini(void)
297 {
298 crypto_unregister_alg(&tea_alg);
299 crypto_unregister_alg(&xtea_alg);
300 crypto_unregister_alg(&xeta_alg);
301 }
302
/* Allow requests for "xtea"/"xeta" to auto-load this module ("tea" is
 * implied by the module name). */
MODULE_ALIAS("xtea");
MODULE_ALIAS("xeta");

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");