/*
 * Poly1305 authenticator algorithm, RFC7539
 *
 * Copyright (C) 2015 Martin Willi
 *
 * Based on public domain code by Andrew Moon and Daniel J. Bernstein.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/poly1305.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/unaligned.h>

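/*
 * Arithmetic helpers for the block loop: mlt() returns the 64-bit product of
 * its operands, sr() shifts a 64-bit value right and truncates the result to
 * 32 bits, and and() masks a value down to a 32-bit limb.
 */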
static inline u64 mlt(u64 a, u64 b)
{
        return a * b;
}

static inline u32 sr(u64 v, u_char n)
{
        return v >> n;
}

static inline u32 and(u32 v, u32 mask)
{
        return v & mask;
}

int crypto_poly1305_init(struct shash_desc *desc)
{
        struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);

        memset(dctx->h, 0, sizeof(dctx->h));
        dctx->buflen = 0;
        dctx->rset = false;
        dctx->sset = false;

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_init);

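/*
 * The 256-bit one-time key is split into r and s. r is clamped as required by
 * RFC 7539 and stored base 2^26 (five 26-bit limbs) so the per-block multiply
 * in poly1305_blocks() never overflows the 64-bit accumulators; s is kept as
 * four 32-bit words and only added in at the very end.
 */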
static void poly1305_setrkey(struct poly1305_desc_ctx *dctx, const u8 *key)
{
        /* r &= 0xffffffc0ffffffc0ffffffc0fffffff */
        dctx->r[0] = (get_unaligned_le32(key +  0) >> 0) & 0x3ffffff;
        dctx->r[1] = (get_unaligned_le32(key +  3) >> 2) & 0x3ffff03;
        dctx->r[2] = (get_unaligned_le32(key +  6) >> 4) & 0x3ffc0ff;
        dctx->r[3] = (get_unaligned_le32(key +  9) >> 6) & 0x3f03fff;
        dctx->r[4] = (get_unaligned_le32(key + 12) >> 8) & 0x00fffff;
}

static void poly1305_setskey(struct poly1305_desc_ctx *dctx, const u8 *key)
{
        dctx->s[0] = get_unaligned_le32(key +  0);
        dctx->s[1] = get_unaligned_le32(key +  4);
        dctx->s[2] = get_unaligned_le32(key +  8);
        dctx->s[3] = get_unaligned_le32(key + 12);
}

/*
 * Poly1305 requires a unique key for each tag, which implies that we can't set
 * it on the tfm that gets accessed by multiple users simultaneously. Instead we
 * expect the key as the first 32 bytes in the update() call.
 */
unsigned int crypto_poly1305_setdesckey(struct poly1305_desc_ctx *dctx,
                                        const u8 *src, unsigned int srclen)
{
        if (!dctx->sset) {
                if (!dctx->rset && srclen >= POLY1305_BLOCK_SIZE) {
                        poly1305_setrkey(dctx, src);
                        src += POLY1305_BLOCK_SIZE;
                        srclen -= POLY1305_BLOCK_SIZE;
                        dctx->rset = true;
                }
                if (srclen >= POLY1305_BLOCK_SIZE) {
                        poly1305_setskey(dctx, src);
                        src += POLY1305_BLOCK_SIZE;
                        srclen -= POLY1305_BLOCK_SIZE;
                        dctx->sset = true;
                }
        }
        return srclen;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_setdesckey);
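
/*
 * Illustrative sketch (not part of this file): a hypothetical in-kernel
 * caller would therefore feed the 32-byte one-time key as the first bytes of
 * the update() stream through the shash API. The identifiers msg, msglen and
 * tag are placeholders, and error handling is omitted.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("poly1305", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 tag[POLY1305_DIGEST_SIZE];
 *
 *	desc->tfm = tfm;
 *	crypto_shash_init(desc);
 *	crypto_shash_update(desc, key, POLY1305_KEY_SIZE);
 *	crypto_shash_update(desc, msg, msglen);
 *	crypto_shash_final(desc, tag);
 *	crypto_free_shash(tfm);
 */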

static unsigned int poly1305_blocks(struct poly1305_desc_ctx *dctx,
                                    const u8 *src, unsigned int srclen,
                                    u32 hibit)
{
        u32 r0, r1, r2, r3, r4;
        u32 s1, s2, s3, s4;
        u32 h0, h1, h2, h3, h4;
        u64 d0, d1, d2, d3, d4;
        unsigned int datalen;

        if (unlikely(!dctx->sset)) {
                datalen = crypto_poly1305_setdesckey(dctx, src, srclen);
                src += srclen - datalen;
                srclen = datalen;
        }

        r0 = dctx->r[0];
        r1 = dctx->r[1];
        r2 = dctx->r[2];
        r3 = dctx->r[3];
        r4 = dctx->r[4];

        s1 = r1 * 5;
        s2 = r2 * 5;
        s3 = r3 * 5;
        s4 = r4 * 5;

        h0 = dctx->h[0];
        h1 = dctx->h[1];
        h2 = dctx->h[2];
        h3 = dctx->h[3];
        h4 = dctx->h[4];

        while (likely(srclen >= POLY1305_BLOCK_SIZE)) {

                /* h += m[i] */
                h0 += (get_unaligned_le32(src +  0) >> 0) & 0x3ffffff;
                h1 += (get_unaligned_le32(src +  3) >> 2) & 0x3ffffff;
                h2 += (get_unaligned_le32(src +  6) >> 4) & 0x3ffffff;
                h3 += (get_unaligned_le32(src +  9) >> 6) & 0x3ffffff;
                h4 += (get_unaligned_le32(src + 12) >> 8) | hibit;

                /* h *= r */
                d0 = mlt(h0, r0) + mlt(h1, s4) + mlt(h2, s3) +
                     mlt(h3, s2) + mlt(h4, s1);
                d1 = mlt(h0, r1) + mlt(h1, r0) + mlt(h2, s4) +
                     mlt(h3, s3) + mlt(h4, s2);
                d2 = mlt(h0, r2) + mlt(h1, r1) + mlt(h2, r0) +
                     mlt(h3, s4) + mlt(h4, s3);
                d3 = mlt(h0, r3) + mlt(h1, r2) + mlt(h2, r1) +
                     mlt(h3, r0) + mlt(h4, s4);
                d4 = mlt(h0, r4) + mlt(h1, r3) + mlt(h2, r2) +
                     mlt(h3, r1) + mlt(h4, r0);

                /* (partial) h %= p */
                d1 += sr(d0, 26);     h0 = and(d0, 0x3ffffff);
                d2 += sr(d1, 26);     h1 = and(d1, 0x3ffffff);
                d3 += sr(d2, 26);     h2 = and(d2, 0x3ffffff);
                d4 += sr(d3, 26);     h3 = and(d3, 0x3ffffff);
                h0 += sr(d4, 26) * 5; h4 = and(d4, 0x3ffffff);
                h1 += h0 >> 26;       h0 = h0 & 0x3ffffff;

                src += POLY1305_BLOCK_SIZE;
                srclen -= POLY1305_BLOCK_SIZE;
        }

        dctx->h[0] = h0;
        dctx->h[1] = h1;
        dctx->h[2] = h2;
        dctx->h[3] = h3;
        dctx->h[4] = h4;

        return srclen;
}

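/*
 * update() buffers any partial block in dctx->buf and only passes whole
 * 16-byte blocks to poly1305_blocks(). Full blocks are processed with
 * hibit == 1 << 24, which sets the 2^128 padding bit in the top base-2^26
 * limb; the final sub-block is instead padded with a 0x01 byte in final()
 * and processed with hibit == 0.
 */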
int crypto_poly1305_update(struct shash_desc *desc,
                           const u8 *src, unsigned int srclen)
{
        struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
        unsigned int bytes;

        if (unlikely(dctx->buflen)) {
                bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
                memcpy(dctx->buf + dctx->buflen, src, bytes);
                src += bytes;
                srclen -= bytes;
                dctx->buflen += bytes;

                if (dctx->buflen == POLY1305_BLOCK_SIZE) {
                        poly1305_blocks(dctx, dctx->buf,
                                        POLY1305_BLOCK_SIZE, 1 << 24);
                        dctx->buflen = 0;
                }
        }

        if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
                bytes = poly1305_blocks(dctx, src, srclen, 1 << 24);
                src += srclen - bytes;
                srclen = bytes;
        }

        if (unlikely(srclen)) {
                dctx->buflen = srclen;
                memcpy(dctx->buf, src, srclen);
        }

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_update);

int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
{
        struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
        __le32 *mac = (__le32 *)dst;
        u32 h0, h1, h2, h3, h4;
        u32 g0, g1, g2, g3, g4;
        u32 mask;
        u64 f = 0;

        if (unlikely(!dctx->sset))
                return -ENOKEY;

        if (unlikely(dctx->buflen)) {
                dctx->buf[dctx->buflen++] = 1;
                memset(dctx->buf + dctx->buflen, 0,
                       POLY1305_BLOCK_SIZE - dctx->buflen);
                poly1305_blocks(dctx, dctx->buf, POLY1305_BLOCK_SIZE, 0);
        }

        /* fully carry h */
        h0 = dctx->h[0];
        h1 = dctx->h[1];
        h2 = dctx->h[2];
        h3 = dctx->h[3];
        h4 = dctx->h[4];

        h2 += (h1 >> 26);     h1 = h1 & 0x3ffffff;
        h3 += (h2 >> 26);     h2 = h2 & 0x3ffffff;
        h4 += (h3 >> 26);     h3 = h3 & 0x3ffffff;
        h0 += (h4 >> 26) * 5; h4 = h4 & 0x3ffffff;
        h1 += (h0 >> 26);     h0 = h0 & 0x3ffffff;

        /* compute h + -p */
        g0 = h0 + 5;
        g1 = h1 + (g0 >> 26); g0 &= 0x3ffffff;
        g2 = h2 + (g1 >> 26); g1 &= 0x3ffffff;
        g3 = h3 + (g2 >> 26); g2 &= 0x3ffffff;
        g4 = h4 + (g3 >> 26) - (1 << 26); g3 &= 0x3ffffff;

        /* select h if h < p, or h + -p if h >= p */
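        /*
         * If the subtraction above borrowed, h < p and the sign bit of g4 is
         * set, so the mask below selects h; otherwise it selects g. Either
         * way the choice is made without branching on secret data.
         */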
        mask = (g4 >> ((sizeof(u32) * 8) - 1)) - 1;
        g0 &= mask;
        g1 &= mask;
        g2 &= mask;
        g3 &= mask;
        g4 &= mask;
        mask = ~mask;
        h0 = (h0 & mask) | g0;
        h1 = (h1 & mask) | g1;
        h2 = (h2 & mask) | g2;
        h3 = (h3 & mask) | g3;
        h4 = (h4 & mask) | g4;

        /* h = h % (2^128) */
        h0 = (h0 >>  0) | (h1 << 26);
        h1 = (h1 >>  6) | (h2 << 20);
        h2 = (h2 >> 12) | (h3 << 14);
        h3 = (h3 >> 18) | (h4 <<  8);

        /* mac = (h + s) % (2^128) */
        f = (f >> 32) + h0 + dctx->s[0]; mac[0] = cpu_to_le32(f);
        f = (f >> 32) + h1 + dctx->s[1]; mac[1] = cpu_to_le32(f);
        f = (f >> 32) + h2 + dctx->s[2]; mac[2] = cpu_to_le32(f);
        f = (f >> 32) + h3 + dctx->s[3]; mac[3] = cpu_to_le32(f);

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_final);

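/*
 * Generic C implementation. The low cra_priority (100) makes it a fallback:
 * architecture-specific poly1305 drivers (e.g. poly1305-simd on x86) register
 * with higher priority and are preferred when available.
 */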
static struct shash_alg poly1305_alg = {
        .digestsize     = POLY1305_DIGEST_SIZE,
        .init           = crypto_poly1305_init,
        .update         = crypto_poly1305_update,
        .final          = crypto_poly1305_final,
        .descsize       = sizeof(struct poly1305_desc_ctx),
        .base           = {
                .cra_name               = "poly1305",
                .cra_driver_name        = "poly1305-generic",
                .cra_priority           = 100,
                .cra_flags              = CRYPTO_ALG_TYPE_SHASH,
                .cra_alignmask          = sizeof(u32) - 1,
                .cra_blocksize          = POLY1305_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        },
};

static int __init poly1305_mod_init(void)
{
        return crypto_register_shash(&poly1305_alg);
}

static void __exit poly1305_mod_exit(void)
{
        crypto_unregister_shash(&poly1305_alg);
}

module_init(poly1305_mod_init);
module_exit(poly1305_mod_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("Poly1305 authenticator");
MODULE_ALIAS_CRYPTO("poly1305");
MODULE_ALIAS_CRYPTO("poly1305-generic");