/* include/linux/crypto.h — Scatterlist Cryptographic API header
 * (gitweb page navigation residue removed; unrelated commit title dropped) */
1 /*
2 * Scatterlist Cryptographic API.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
6 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
7 *
8 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
9 * and Nettle, by Niels Möller.
10 *
11 * This program is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License as published by the Free
13 * Software Foundation; either version 2 of the License, or (at your option)
14 * any later version.
15 *
16 */
17 #ifndef _LINUX_CRYPTO_H
18 #define _LINUX_CRYPTO_H
19
20 #include <linux/atomic.h>
21 #include <linux/kernel.h>
22 #include <linux/list.h>
23 #include <linux/bug.h>
24 #include <linux/slab.h>
25 #include <linux/string.h>
26 #include <linux/uaccess.h>
27
28 /*
29 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
30 * arbitrary modules to be loaded. Loading from userspace may still need the
31 * unprefixed names, so retains those aliases as well.
32 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
33 * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
34 * expands twice on the same line. Instead, use a separate base name for the
35 * alias.
36 */
37 #define MODULE_ALIAS_CRYPTO(name) \
38 __MODULE_INFO(alias, alias_userspace, name); \
39 __MODULE_INFO(alias, alias_crypto, "crypto-" name)
40
41 /*
42 * Algorithm masks and types.
43 */
44 #define CRYPTO_ALG_TYPE_MASK 0x0000000f
45 #define CRYPTO_ALG_TYPE_CIPHER 0x00000001
46 #define CRYPTO_ALG_TYPE_COMPRESS 0x00000002
47 #define CRYPTO_ALG_TYPE_AEAD 0x00000003
48 #define CRYPTO_ALG_TYPE_BLKCIPHER 0x00000004
49 #define CRYPTO_ALG_TYPE_ABLKCIPHER 0x00000005
50 #define CRYPTO_ALG_TYPE_GIVCIPHER 0x00000006
51 #define CRYPTO_ALG_TYPE_DIGEST 0x00000008
52 #define CRYPTO_ALG_TYPE_HASH 0x00000008
53 #define CRYPTO_ALG_TYPE_SHASH 0x00000009
54 #define CRYPTO_ALG_TYPE_AHASH 0x0000000a
55 #define CRYPTO_ALG_TYPE_RNG 0x0000000c
56 #define CRYPTO_ALG_TYPE_PCOMPRESS 0x0000000f
57
58 #define CRYPTO_ALG_TYPE_HASH_MASK 0x0000000e
59 #define CRYPTO_ALG_TYPE_AHASH_MASK 0x0000000c
60 #define CRYPTO_ALG_TYPE_BLKCIPHER_MASK 0x0000000c
61
62 #define CRYPTO_ALG_LARVAL 0x00000010
63 #define CRYPTO_ALG_DEAD 0x00000020
64 #define CRYPTO_ALG_DYING 0x00000040
65 #define CRYPTO_ALG_ASYNC 0x00000080
66
67 /*
68 * Set this bit if and only if the algorithm requires another algorithm of
69 * the same type to handle corner cases.
70 */
71 #define CRYPTO_ALG_NEED_FALLBACK 0x00000100
72
73 /*
74 * This bit is set for symmetric key ciphers that have already been wrapped
75 * with a generic IV generator to prevent them from being wrapped again.
76 */
77 #define CRYPTO_ALG_GENIV 0x00000200
78
79 /*
80 * Set if the algorithm has passed automated run-time testing. Note that
81 * if there is no run-time testing for a given algorithm it is considered
82 * to have passed.
83 */
84
85 #define CRYPTO_ALG_TESTED 0x00000400
86
87 /*
88 * Set if the algorithm is an instance that is build from templates.
89 */
90 #define CRYPTO_ALG_INSTANCE 0x00000800
91
92 /* Set this bit if the algorithm provided is hardware accelerated but
93 * not available to userspace via instruction set or so.
94 */
95 #define CRYPTO_ALG_KERN_DRIVER_ONLY 0x00001000
96
97 /*
98 * Transform masks and values (for crt_flags).
99 */
100 #define CRYPTO_TFM_REQ_MASK 0x000fff00
101 #define CRYPTO_TFM_RES_MASK 0xfff00000
102
103 #define CRYPTO_TFM_REQ_WEAK_KEY 0x00000100
104 #define CRYPTO_TFM_REQ_MAY_SLEEP 0x00000200
105 #define CRYPTO_TFM_REQ_MAY_BACKLOG 0x00000400
106 #define CRYPTO_TFM_RES_WEAK_KEY 0x00100000
107 #define CRYPTO_TFM_RES_BAD_KEY_LEN 0x00200000
108 #define CRYPTO_TFM_RES_BAD_KEY_SCHED 0x00400000
109 #define CRYPTO_TFM_RES_BAD_BLOCK_LEN 0x00800000
110 #define CRYPTO_TFM_RES_BAD_FLAGS 0x01000000
111
112 /*
113 * Miscellaneous stuff.
114 */
115 #define CRYPTO_MAX_ALG_NAME 64
116
117 /*
118 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
119 * declaration) is used to ensure that the crypto_tfm context structure is
120 * aligned correctly for the given architecture so that there are no alignment
121 * faults for C data types. In particular, this is required on platforms such
122 * as arm where pointers are 32-bit aligned but there are data types such as
123 * u64 which require 64-bit alignment.
124 */
125 #define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
126
127 #define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
128
129 struct scatterlist;
130 struct crypto_ablkcipher;
131 struct crypto_async_request;
132 struct crypto_aead;
133 struct crypto_blkcipher;
134 struct crypto_hash;
135 struct crypto_rng;
136 struct crypto_tfm;
137 struct crypto_type;
138 struct aead_givcrypt_request;
139 struct skcipher_givcrypt_request;
140
141 typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
142
143 struct crypto_async_request {
144 struct list_head list;
145 crypto_completion_t complete;
146 void *data;
147 struct crypto_tfm *tfm;
148
149 u32 flags;
150 };
151
152 struct ablkcipher_request {
153 struct crypto_async_request base;
154
155 unsigned int nbytes;
156
157 void *info;
158
159 struct scatterlist *src;
160 struct scatterlist *dst;
161
162 void *__ctx[] CRYPTO_MINALIGN_ATTR;
163 };
164
165 /**
166 * struct aead_request - AEAD request
167 * @base: Common attributes for async crypto requests
168 * @assoclen: Length in bytes of associated data for authentication
169 * @cryptlen: Length of data to be encrypted or decrypted
170 * @iv: Initialisation vector
171 * @assoc: Associated data
172 * @src: Source data
173 * @dst: Destination data
174 * @__ctx: Start of private context data
175 */
176 struct aead_request {
177 struct crypto_async_request base;
178
179 unsigned int assoclen;
180 unsigned int cryptlen;
181
182 u8 *iv;
183
184 struct scatterlist *assoc;
185 struct scatterlist *src;
186 struct scatterlist *dst;
187
188 void *__ctx[] CRYPTO_MINALIGN_ATTR;
189 };
190
191 struct blkcipher_desc {
192 struct crypto_blkcipher *tfm;
193 void *info;
194 u32 flags;
195 };
196
197 struct cipher_desc {
198 struct crypto_tfm *tfm;
199 void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
200 unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
201 const u8 *src, unsigned int nbytes);
202 void *info;
203 };
204
205 struct hash_desc {
206 struct crypto_hash *tfm;
207 u32 flags;
208 };
209
210 /*
211 * Algorithms: modular crypto algorithm implementations, managed
212 * via crypto_register_alg() and crypto_unregister_alg().
213 */
214 struct ablkcipher_alg {
215 int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
216 unsigned int keylen);
217 int (*encrypt)(struct ablkcipher_request *req);
218 int (*decrypt)(struct ablkcipher_request *req);
219 int (*givencrypt)(struct skcipher_givcrypt_request *req);
220 int (*givdecrypt)(struct skcipher_givcrypt_request *req);
221
222 const char *geniv;
223
224 unsigned int min_keysize;
225 unsigned int max_keysize;
226 unsigned int ivsize;
227 };
228
229 struct aead_alg {
230 int (*setkey)(struct crypto_aead *tfm, const u8 *key,
231 unsigned int keylen);
232 int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
233 int (*encrypt)(struct aead_request *req);
234 int (*decrypt)(struct aead_request *req);
235 int (*givencrypt)(struct aead_givcrypt_request *req);
236 int (*givdecrypt)(struct aead_givcrypt_request *req);
237
238 const char *geniv;
239
240 unsigned int ivsize;
241 unsigned int maxauthsize;
242 };
243
244 struct blkcipher_alg {
245 int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
246 unsigned int keylen);
247 int (*encrypt)(struct blkcipher_desc *desc,
248 struct scatterlist *dst, struct scatterlist *src,
249 unsigned int nbytes);
250 int (*decrypt)(struct blkcipher_desc *desc,
251 struct scatterlist *dst, struct scatterlist *src,
252 unsigned int nbytes);
253
254 const char *geniv;
255
256 unsigned int min_keysize;
257 unsigned int max_keysize;
258 unsigned int ivsize;
259 };
260
261 struct cipher_alg {
262 unsigned int cia_min_keysize;
263 unsigned int cia_max_keysize;
264 int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
265 unsigned int keylen);
266 void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
267 void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
268 };
269
270 struct compress_alg {
271 int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
272 unsigned int slen, u8 *dst, unsigned int *dlen);
273 int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
274 unsigned int slen, u8 *dst, unsigned int *dlen);
275 };
276
277 struct rng_alg {
278 int (*rng_make_random)(struct crypto_rng *tfm, u8 *rdata,
279 unsigned int dlen);
280 int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
281
282 unsigned int seedsize;
283 };
284
285
286 #define cra_ablkcipher cra_u.ablkcipher
287 #define cra_aead cra_u.aead
288 #define cra_blkcipher cra_u.blkcipher
289 #define cra_cipher cra_u.cipher
290 #define cra_compress cra_u.compress
291 #define cra_rng cra_u.rng
292
293 struct crypto_alg {
294 struct list_head cra_list;
295 struct list_head cra_users;
296
297 u32 cra_flags;
298 unsigned int cra_blocksize;
299 unsigned int cra_ctxsize;
300 unsigned int cra_alignmask;
301
302 int cra_priority;
303 atomic_t cra_refcnt;
304
305 char cra_name[CRYPTO_MAX_ALG_NAME];
306 char cra_driver_name[CRYPTO_MAX_ALG_NAME];
307
308 const struct crypto_type *cra_type;
309
310 union {
311 struct ablkcipher_alg ablkcipher;
312 struct aead_alg aead;
313 struct blkcipher_alg blkcipher;
314 struct cipher_alg cipher;
315 struct compress_alg compress;
316 struct rng_alg rng;
317 } cra_u;
318
319 int (*cra_init)(struct crypto_tfm *tfm);
320 void (*cra_exit)(struct crypto_tfm *tfm);
321 void (*cra_destroy)(struct crypto_alg *alg);
322
323 struct module *cra_module;
324 };
325
326 /*
327 * Algorithm registration interface.
328 */
329 int crypto_register_alg(struct crypto_alg *alg);
330 int crypto_unregister_alg(struct crypto_alg *alg);
331 int crypto_register_algs(struct crypto_alg *algs, int count);
332 int crypto_unregister_algs(struct crypto_alg *algs, int count);
333
334 /*
335 * Algorithm query interface.
336 */
337 int crypto_has_alg(const char *name, u32 type, u32 mask);
338
339 /*
340 * Transforms: user-instantiated objects which encapsulate algorithms
341 * and core processing logic. Managed via crypto_alloc_*() and
342 * crypto_free_*(), as well as the various helpers below.
343 */
344
345 struct ablkcipher_tfm {
346 int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
347 unsigned int keylen);
348 int (*encrypt)(struct ablkcipher_request *req);
349 int (*decrypt)(struct ablkcipher_request *req);
350 int (*givencrypt)(struct skcipher_givcrypt_request *req);
351 int (*givdecrypt)(struct skcipher_givcrypt_request *req);
352
353 struct crypto_ablkcipher *base;
354
355 unsigned int ivsize;
356 unsigned int reqsize;
357 bool has_setkey;
358 };
359
360 struct aead_tfm {
361 int (*setkey)(struct crypto_aead *tfm, const u8 *key,
362 unsigned int keylen);
363 int (*encrypt)(struct aead_request *req);
364 int (*decrypt)(struct aead_request *req);
365 int (*givencrypt)(struct aead_givcrypt_request *req);
366 int (*givdecrypt)(struct aead_givcrypt_request *req);
367
368 struct crypto_aead *base;
369
370 unsigned int ivsize;
371 unsigned int authsize;
372 unsigned int reqsize;
373 };
374
375 struct blkcipher_tfm {
376 void *iv;
377 int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
378 unsigned int keylen);
379 int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
380 struct scatterlist *src, unsigned int nbytes);
381 int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
382 struct scatterlist *src, unsigned int nbytes);
383 };
384
385 struct cipher_tfm {
386 int (*cit_setkey)(struct crypto_tfm *tfm,
387 const u8 *key, unsigned int keylen);
388 void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
389 void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
390 };
391
392 struct hash_tfm {
393 int (*init)(struct hash_desc *desc);
394 int (*update)(struct hash_desc *desc,
395 struct scatterlist *sg, unsigned int nsg);
396 int (*final)(struct hash_desc *desc, u8 *out);
397 int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
398 unsigned int nsg, u8 *out);
399 int (*setkey)(struct crypto_hash *tfm, const u8 *key,
400 unsigned int keylen);
401 unsigned int digestsize;
402 };
403
404 struct compress_tfm {
405 int (*cot_compress)(struct crypto_tfm *tfm,
406 const u8 *src, unsigned int slen,
407 u8 *dst, unsigned int *dlen);
408 int (*cot_decompress)(struct crypto_tfm *tfm,
409 const u8 *src, unsigned int slen,
410 u8 *dst, unsigned int *dlen);
411 };
412
413 struct rng_tfm {
414 int (*rng_gen_random)(struct crypto_rng *tfm, u8 *rdata,
415 unsigned int dlen);
416 int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
417 };
418
419 #define crt_ablkcipher crt_u.ablkcipher
420 #define crt_aead crt_u.aead
421 #define crt_blkcipher crt_u.blkcipher
422 #define crt_cipher crt_u.cipher
423 #define crt_hash crt_u.hash
424 #define crt_compress crt_u.compress
425 #define crt_rng crt_u.rng
426
427 struct crypto_tfm {
428
429 u32 crt_flags;
430
431 union {
432 struct ablkcipher_tfm ablkcipher;
433 struct aead_tfm aead;
434 struct blkcipher_tfm blkcipher;
435 struct cipher_tfm cipher;
436 struct hash_tfm hash;
437 struct compress_tfm compress;
438 struct rng_tfm rng;
439 } crt_u;
440
441 void (*exit)(struct crypto_tfm *tfm);
442
443 struct crypto_alg *__crt_alg;
444
445 void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
446 };
447
448 struct crypto_ablkcipher {
449 struct crypto_tfm base;
450 };
451
452 struct crypto_aead {
453 struct crypto_tfm base;
454 };
455
456 struct crypto_blkcipher {
457 struct crypto_tfm base;
458 };
459
460 struct crypto_cipher {
461 struct crypto_tfm base;
462 };
463
464 struct crypto_comp {
465 struct crypto_tfm base;
466 };
467
468 struct crypto_hash {
469 struct crypto_tfm base;
470 };
471
472 struct crypto_rng {
473 struct crypto_tfm base;
474 };
475
476 enum {
477 CRYPTOA_UNSPEC,
478 CRYPTOA_ALG,
479 CRYPTOA_TYPE,
480 CRYPTOA_U32,
481 __CRYPTOA_MAX,
482 };
483
484 #define CRYPTOA_MAX (__CRYPTOA_MAX - 1)
485
486 /* Maximum number of (rtattr) parameters for each template. */
487 #define CRYPTO_MAX_ATTRS 32
488
489 struct crypto_attr_alg {
490 char name[CRYPTO_MAX_ALG_NAME];
491 };
492
493 struct crypto_attr_type {
494 u32 type;
495 u32 mask;
496 };
497
498 struct crypto_attr_u32 {
499 u32 num;
500 };
501
502 /*
503 * Transform user interface.
504 */
505
506 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
507 void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);
508
509 static inline void crypto_free_tfm(struct crypto_tfm *tfm)
510 {
511 return crypto_destroy_tfm(tfm, tfm);
512 }
513
514 int alg_test(const char *driver, const char *alg, u32 type, u32 mask);
515
516 /*
517 * Transform helpers which query the underlying algorithm.
518 */
519 static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
520 {
521 return tfm->__crt_alg->cra_name;
522 }
523
524 static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
525 {
526 return tfm->__crt_alg->cra_driver_name;
527 }
528
529 static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
530 {
531 return tfm->__crt_alg->cra_priority;
532 }
533
534 static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
535 {
536 return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
537 }
538
539 static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
540 {
541 return tfm->__crt_alg->cra_blocksize;
542 }
543
544 static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
545 {
546 return tfm->__crt_alg->cra_alignmask;
547 }
548
549 static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
550 {
551 return tfm->crt_flags;
552 }
553
554 static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
555 {
556 tfm->crt_flags |= flags;
557 }
558
559 static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
560 {
561 tfm->crt_flags &= ~flags;
562 }
563
564 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
565 {
566 return tfm->__crt_ctx;
567 }
568
569 static inline unsigned int crypto_tfm_ctx_alignment(void)
570 {
571 struct crypto_tfm *tfm;
572 return __alignof__(tfm->__crt_ctx);
573 }
574
575 /*
576 * API wrappers.
577 */
578 static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
579 struct crypto_tfm *tfm)
580 {
581 return (struct crypto_ablkcipher *)tfm;
582 }
583
584 static inline u32 crypto_skcipher_type(u32 type)
585 {
586 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
587 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
588 return type;
589 }
590
591 static inline u32 crypto_skcipher_mask(u32 mask)
592 {
593 mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
594 mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
595 return mask;
596 }
597
598 struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
599 u32 type, u32 mask);
600
601 static inline struct crypto_tfm *crypto_ablkcipher_tfm(
602 struct crypto_ablkcipher *tfm)
603 {
604 return &tfm->base;
605 }
606
607 static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
608 {
609 crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
610 }
611
612 static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
613 u32 mask)
614 {
615 return crypto_has_alg(alg_name, crypto_skcipher_type(type),
616 crypto_skcipher_mask(mask));
617 }
618
619 static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
620 struct crypto_ablkcipher *tfm)
621 {
622 return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
623 }
624
625 static inline unsigned int crypto_ablkcipher_ivsize(
626 struct crypto_ablkcipher *tfm)
627 {
628 return crypto_ablkcipher_crt(tfm)->ivsize;
629 }
630
631 static inline unsigned int crypto_ablkcipher_blocksize(
632 struct crypto_ablkcipher *tfm)
633 {
634 return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
635 }
636
637 static inline unsigned int crypto_ablkcipher_alignmask(
638 struct crypto_ablkcipher *tfm)
639 {
640 return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
641 }
642
643 static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
644 {
645 return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
646 }
647
648 static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
649 u32 flags)
650 {
651 crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
652 }
653
654 static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
655 u32 flags)
656 {
657 crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
658 }
659
660 static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
661 const u8 *key, unsigned int keylen)
662 {
663 struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);
664
665 return crt->setkey(crt->base, key, keylen);
666 }
667
668 static inline bool crypto_ablkcipher_has_setkey(struct crypto_ablkcipher *tfm)
669 {
670 struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);
671
672 return crt->has_setkey;
673 }
674
675 static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
676 struct ablkcipher_request *req)
677 {
678 return __crypto_ablkcipher_cast(req->base.tfm);
679 }
680
681 static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
682 {
683 struct ablkcipher_tfm *crt =
684 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
685 return crt->encrypt(req);
686 }
687
688 static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
689 {
690 struct ablkcipher_tfm *crt =
691 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
692 return crt->decrypt(req);
693 }
694
695 static inline unsigned int crypto_ablkcipher_reqsize(
696 struct crypto_ablkcipher *tfm)
697 {
698 return crypto_ablkcipher_crt(tfm)->reqsize;
699 }
700
701 static inline void ablkcipher_request_set_tfm(
702 struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
703 {
704 req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
705 }
706
707 static inline struct ablkcipher_request *ablkcipher_request_cast(
708 struct crypto_async_request *req)
709 {
710 return container_of(req, struct ablkcipher_request, base);
711 }
712
713 static inline struct ablkcipher_request *ablkcipher_request_alloc(
714 struct crypto_ablkcipher *tfm, gfp_t gfp)
715 {
716 struct ablkcipher_request *req;
717
718 req = kmalloc(sizeof(struct ablkcipher_request) +
719 crypto_ablkcipher_reqsize(tfm), gfp);
720
721 if (likely(req))
722 ablkcipher_request_set_tfm(req, tfm);
723
724 return req;
725 }
726
727 static inline void ablkcipher_request_free(struct ablkcipher_request *req)
728 {
729 kzfree(req);
730 }
731
732 static inline void ablkcipher_request_set_callback(
733 struct ablkcipher_request *req,
734 u32 flags, crypto_completion_t complete, void *data)
735 {
736 req->base.complete = complete;
737 req->base.data = data;
738 req->base.flags = flags;
739 }
740
741 static inline void ablkcipher_request_set_crypt(
742 struct ablkcipher_request *req,
743 struct scatterlist *src, struct scatterlist *dst,
744 unsigned int nbytes, void *iv)
745 {
746 req->src = src;
747 req->dst = dst;
748 req->nbytes = nbytes;
749 req->info = iv;
750 }
751
752 static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
753 {
754 return (struct crypto_aead *)tfm;
755 }
756
757 struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);
758
759 static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
760 {
761 return &tfm->base;
762 }
763
764 static inline void crypto_free_aead(struct crypto_aead *tfm)
765 {
766 crypto_free_tfm(crypto_aead_tfm(tfm));
767 }
768
769 static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
770 {
771 return &crypto_aead_tfm(tfm)->crt_aead;
772 }
773
774 static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
775 {
776 return crypto_aead_crt(tfm)->ivsize;
777 }
778
779 static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
780 {
781 return crypto_aead_crt(tfm)->authsize;
782 }
783
784 static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
785 {
786 return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
787 }
788
789 static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
790 {
791 return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
792 }
793
794 static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
795 {
796 return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
797 }
798
799 static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
800 {
801 crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
802 }
803
804 static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
805 {
806 crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
807 }
808
809 static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
810 unsigned int keylen)
811 {
812 struct aead_tfm *crt = crypto_aead_crt(tfm);
813
814 return crt->setkey(crt->base, key, keylen);
815 }
816
817 int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);
818
819 static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
820 {
821 return __crypto_aead_cast(req->base.tfm);
822 }
823
824 static inline int crypto_aead_encrypt(struct aead_request *req)
825 {
826 return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
827 }
828
829 static inline int crypto_aead_decrypt(struct aead_request *req)
830 {
831 return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
832 }
833
834 static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
835 {
836 return crypto_aead_crt(tfm)->reqsize;
837 }
838
839 static inline void aead_request_set_tfm(struct aead_request *req,
840 struct crypto_aead *tfm)
841 {
842 req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
843 }
844
845 static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
846 gfp_t gfp)
847 {
848 struct aead_request *req;
849
850 req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);
851
852 if (likely(req))
853 aead_request_set_tfm(req, tfm);
854
855 return req;
856 }
857
858 static inline void aead_request_free(struct aead_request *req)
859 {
860 kzfree(req);
861 }
862
863 static inline void aead_request_set_callback(struct aead_request *req,
864 u32 flags,
865 crypto_completion_t complete,
866 void *data)
867 {
868 req->base.complete = complete;
869 req->base.data = data;
870 req->base.flags = flags;
871 }
872
873 static inline void aead_request_set_crypt(struct aead_request *req,
874 struct scatterlist *src,
875 struct scatterlist *dst,
876 unsigned int cryptlen, u8 *iv)
877 {
878 req->src = src;
879 req->dst = dst;
880 req->cryptlen = cryptlen;
881 req->iv = iv;
882 }
883
884 static inline void aead_request_set_assoc(struct aead_request *req,
885 struct scatterlist *assoc,
886 unsigned int assoclen)
887 {
888 req->assoc = assoc;
889 req->assoclen = assoclen;
890 }
891
892 static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
893 struct crypto_tfm *tfm)
894 {
895 return (struct crypto_blkcipher *)tfm;
896 }
897
898 static inline struct crypto_blkcipher *crypto_blkcipher_cast(
899 struct crypto_tfm *tfm)
900 {
901 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
902 return __crypto_blkcipher_cast(tfm);
903 }
904
905 static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
906 const char *alg_name, u32 type, u32 mask)
907 {
908 type &= ~CRYPTO_ALG_TYPE_MASK;
909 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
910 mask |= CRYPTO_ALG_TYPE_MASK;
911
912 return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
913 }
914
915 static inline struct crypto_tfm *crypto_blkcipher_tfm(
916 struct crypto_blkcipher *tfm)
917 {
918 return &tfm->base;
919 }
920
921 static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
922 {
923 crypto_free_tfm(crypto_blkcipher_tfm(tfm));
924 }
925
926 static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
927 {
928 type &= ~CRYPTO_ALG_TYPE_MASK;
929 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
930 mask |= CRYPTO_ALG_TYPE_MASK;
931
932 return crypto_has_alg(alg_name, type, mask);
933 }
934
935 static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
936 {
937 return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
938 }
939
940 static inline struct blkcipher_tfm *crypto_blkcipher_crt(
941 struct crypto_blkcipher *tfm)
942 {
943 return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
944 }
945
946 static inline struct blkcipher_alg *crypto_blkcipher_alg(
947 struct crypto_blkcipher *tfm)
948 {
949 return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
950 }
951
952 static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
953 {
954 return crypto_blkcipher_alg(tfm)->ivsize;
955 }
956
957 static inline unsigned int crypto_blkcipher_blocksize(
958 struct crypto_blkcipher *tfm)
959 {
960 return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
961 }
962
963 static inline unsigned int crypto_blkcipher_alignmask(
964 struct crypto_blkcipher *tfm)
965 {
966 return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
967 }
968
969 static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
970 {
971 return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
972 }
973
974 static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
975 u32 flags)
976 {
977 crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
978 }
979
980 static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
981 u32 flags)
982 {
983 crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
984 }
985
986 static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
987 const u8 *key, unsigned int keylen)
988 {
989 return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
990 key, keylen);
991 }
992
993 static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
994 struct scatterlist *dst,
995 struct scatterlist *src,
996 unsigned int nbytes)
997 {
998 desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
999 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
1000 }
1001
1002 static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
1003 struct scatterlist *dst,
1004 struct scatterlist *src,
1005 unsigned int nbytes)
1006 {
1007 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
1008 }
1009
1010 static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
1011 struct scatterlist *dst,
1012 struct scatterlist *src,
1013 unsigned int nbytes)
1014 {
1015 desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
1016 return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
1017 }
1018
1019 static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
1020 struct scatterlist *dst,
1021 struct scatterlist *src,
1022 unsigned int nbytes)
1023 {
1024 return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
1025 }
1026
1027 static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
1028 const u8 *src, unsigned int len)
1029 {
1030 memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
1031 }
1032
1033 static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
1034 u8 *dst, unsigned int len)
1035 {
1036 memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
1037 }
1038
1039 static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
1040 {
1041 return (struct crypto_cipher *)tfm;
1042 }
1043
1044 static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
1045 {
1046 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
1047 return __crypto_cipher_cast(tfm);
1048 }
1049
1050 static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
1051 u32 type, u32 mask)
1052 {
1053 type &= ~CRYPTO_ALG_TYPE_MASK;
1054 type |= CRYPTO_ALG_TYPE_CIPHER;
1055 mask |= CRYPTO_ALG_TYPE_MASK;
1056
1057 return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
1058 }
1059
1060 static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
1061 {
1062 return &tfm->base;
1063 }
1064
1065 static inline void crypto_free_cipher(struct crypto_cipher *tfm)
1066 {
1067 crypto_free_tfm(crypto_cipher_tfm(tfm));
1068 }
1069
1070 static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
1071 {
1072 type &= ~CRYPTO_ALG_TYPE_MASK;
1073 type |= CRYPTO_ALG_TYPE_CIPHER;
1074 mask |= CRYPTO_ALG_TYPE_MASK;
1075
1076 return crypto_has_alg(alg_name, type, mask);
1077 }
1078
1079 static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
1080 {
1081 return &crypto_cipher_tfm(tfm)->crt_cipher;
1082 }
1083
1084 static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
1085 {
1086 return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
1087 }
1088
1089 static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
1090 {
1091 return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
1092 }
1093
1094 static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
1095 {
1096 return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
1097 }
1098
1099 static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
1100 u32 flags)
1101 {
1102 crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
1103 }
1104
1105 static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
1106 u32 flags)
1107 {
1108 crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
1109 }
1110
1111 static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
1112 const u8 *key, unsigned int keylen)
1113 {
1114 return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
1115 key, keylen);
1116 }
1117
1118 static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
1119 u8 *dst, const u8 *src)
1120 {
1121 crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
1122 dst, src);
1123 }
1124
1125 static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
1126 u8 *dst, const u8 *src)
1127 {
1128 crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
1129 dst, src);
1130 }
1131
/* Reinterpret a generic tfm handle as a hash handle (no type check;
 * see crypto_hash_cast() for the checked form). */
static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
{
	struct crypto_hash *hash = (struct crypto_hash *)tfm;

	return hash;
}
1136
1137 static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
1138 {
1139 BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
1140 CRYPTO_ALG_TYPE_HASH_MASK);
1141 return __crypto_hash_cast(tfm);
1142 }
1143
1144 static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
1145 u32 type, u32 mask)
1146 {
1147 type &= ~CRYPTO_ALG_TYPE_MASK;
1148 mask &= ~CRYPTO_ALG_TYPE_MASK;
1149 type |= CRYPTO_ALG_TYPE_HASH;
1150 mask |= CRYPTO_ALG_TYPE_HASH_MASK;
1151
1152 return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
1153 }
1154
1155 static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
1156 {
1157 return &tfm->base;
1158 }
1159
1160 static inline void crypto_free_hash(struct crypto_hash *tfm)
1161 {
1162 crypto_free_tfm(crypto_hash_tfm(tfm));
1163 }
1164
1165 static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
1166 {
1167 type &= ~CRYPTO_ALG_TYPE_MASK;
1168 mask &= ~CRYPTO_ALG_TYPE_MASK;
1169 type |= CRYPTO_ALG_TYPE_HASH;
1170 mask |= CRYPTO_ALG_TYPE_HASH_MASK;
1171
1172 return crypto_has_alg(alg_name, type, mask);
1173 }
1174
1175 static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
1176 {
1177 return &crypto_hash_tfm(tfm)->crt_hash;
1178 }
1179
/* Block size, in bytes, of the hash's algorithm. */
static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
{
	struct crypto_tfm *base = crypto_hash_tfm(tfm);

	return crypto_tfm_alg_blocksize(base);
}
1184
/* Alignment mask required for buffers passed to this hash. */
static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
{
	struct crypto_tfm *base = crypto_hash_tfm(tfm);

	return crypto_tfm_alg_alignmask(base);
}
1189
1190 static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
1191 {
1192 return crypto_hash_crt(tfm)->digestsize;
1193 }
1194
1195 static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
1196 {
1197 return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
1198 }
1199
1200 static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
1201 {
1202 crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
1203 }
1204
1205 static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
1206 {
1207 crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
1208 }
1209
1210 static inline int crypto_hash_init(struct hash_desc *desc)
1211 {
1212 return crypto_hash_crt(desc->tfm)->init(desc);
1213 }
1214
1215 static inline int crypto_hash_update(struct hash_desc *desc,
1216 struct scatterlist *sg,
1217 unsigned int nbytes)
1218 {
1219 return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
1220 }
1221
1222 static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
1223 {
1224 return crypto_hash_crt(desc->tfm)->final(desc, out);
1225 }
1226
1227 static inline int crypto_hash_digest(struct hash_desc *desc,
1228 struct scatterlist *sg,
1229 unsigned int nbytes, u8 *out)
1230 {
1231 return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
1232 }
1233
1234 static inline int crypto_hash_setkey(struct crypto_hash *hash,
1235 const u8 *key, unsigned int keylen)
1236 {
1237 return crypto_hash_crt(hash)->setkey(hash, key, keylen);
1238 }
1239
/* Reinterpret a generic tfm handle as a compression handle (no type
 * check; see crypto_comp_cast() for the checked form). */
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	struct crypto_comp *comp = (struct crypto_comp *)tfm;

	return comp;
}
1244
1245 static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
1246 {
1247 BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
1248 CRYPTO_ALG_TYPE_MASK);
1249 return __crypto_comp_cast(tfm);
1250 }
1251
1252 static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
1253 u32 type, u32 mask)
1254 {
1255 type &= ~CRYPTO_ALG_TYPE_MASK;
1256 type |= CRYPTO_ALG_TYPE_COMPRESS;
1257 mask |= CRYPTO_ALG_TYPE_MASK;
1258
1259 return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
1260 }
1261
1262 static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
1263 {
1264 return &tfm->base;
1265 }
1266
1267 static inline void crypto_free_comp(struct crypto_comp *tfm)
1268 {
1269 crypto_free_tfm(crypto_comp_tfm(tfm));
1270 }
1271
1272 static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
1273 {
1274 type &= ~CRYPTO_ALG_TYPE_MASK;
1275 type |= CRYPTO_ALG_TYPE_COMPRESS;
1276 mask |= CRYPTO_ALG_TYPE_MASK;
1277
1278 return crypto_has_alg(alg_name, type, mask);
1279 }
1280
/* Algorithm name string of the compression transform. */
static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	struct crypto_tfm *base = crypto_comp_tfm(tfm);

	return crypto_tfm_alg_name(base);
}
1285
1286 static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
1287 {
1288 return &crypto_comp_tfm(tfm)->crt_compress;
1289 }
1290
1291 static inline int crypto_comp_compress(struct crypto_comp *tfm,
1292 const u8 *src, unsigned int slen,
1293 u8 *dst, unsigned int *dlen)
1294 {
1295 return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
1296 src, slen, dst, dlen);
1297 }
1298
1299 static inline int crypto_comp_decompress(struct crypto_comp *tfm,
1300 const u8 *src, unsigned int slen,
1301 u8 *dst, unsigned int *dlen)
1302 {
1303 return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
1304 src, slen, dst, dlen);
1305 }
1306
1307 #endif /* _LINUX_CRYPTO_H */
1308