From 7613636def82092a5c7b6322078a2af832410417 Mon Sep 17 00:00:00 2001
From: Herbert Xu <herbert@gondor.apana.org.au>
Date: Tue, 20 Nov 2007 17:26:06 +0800
Subject: [PATCH] [CRYPTO] api: Add crypto_inc and crypto_xor

With the addition of more stream ciphers we need to curb the
proliferation of ad-hoc xor functions.  This patch creates a generic
pair of functions, crypto_inc and crypto_xor, which do big-endian
increment and exclusive or, respectively.

For optimum performance, they both use u32 operations, so the
arguments must be aligned as u32 even though they are of type u8 *.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
---
 crypto/algapi.c         | 47 +++++++++++++++++++++++++++++++++++++++++
 include/crypto/algapi.h |  4 ++++
 2 files changed, 51 insertions(+)

diff --git a/crypto/algapi.c b/crypto/algapi.c
index 8383282de1dd..08eca6da1d53 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -605,6 +605,53 @@ int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
 }
 EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);
 
+static inline void crypto_inc_byte(u8 *a, unsigned int size)
+{
+	u8 *b = (a + size);
+	u8 c;
+
+	for (; size; size--) {
+		c = *--b + 1;
+		*b = c;
+		if (c)
+			break;
+	}
+}
+
+void crypto_inc(u8 *a, unsigned int size)
+{
+	__be32 *b = (__be32 *)(a + size);
+	u32 c;
+
+	for (; size >= 4; size -= 4) {
+		c = be32_to_cpu(*--b) + 1;
+		*b = cpu_to_be32(c);
+		if (c)
+			return;
+	}
+
+	crypto_inc_byte(a, size);
+}
+EXPORT_SYMBOL_GPL(crypto_inc);
+
+static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
+{
+	for (; size; size--)
+		*a++ ^= *b++;
+}
+
+void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
+{
+	u32 *a = (u32 *)dst;
+	u32 *b = (u32 *)src;
+
+	for (; size >= 4; size -= 4)
+		*a++ ^= *b++;
+
+	crypto_xor_byte((u8 *)a, (u8 *)b, size);
+}
+EXPORT_SYMBOL_GPL(crypto_xor);
+
 static int __init crypto_algapi_init(void)
 {
 	crypto_init_proc();
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index 88619f902c10..2cdb227fc39d 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -124,6 +124,10 @@ int crypto_enqueue_request(struct crypto_queue *queue,
 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
 int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
 
+/* These functions require the input/output to be aligned as u32. */
+void crypto_inc(u8 *a, unsigned int size);
+void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
+
 int blkcipher_walk_done(struct blkcipher_desc *desc,
 			struct blkcipher_walk *walk, int err);
 int blkcipher_walk_virt(struct blkcipher_desc *desc,
-- 
2.20.1
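
Editorial note, not part of the patch: for readers who want to test the
semantics of crypto_inc() and crypto_xor() without building a kernel, the
sketch below models them in plain C99 userspace code.  htonl()/ntohl()
stand in for the kernel's cpu_to_be32()/be32_to_cpu(), and the file name
ctr_demo.c, the model_* helper names, and the build line are made up for
this illustration.

/*
 * ctr_demo.c -- userspace model of the crypto_inc()/crypto_xor()
 * semantics from the patch above; NOT the kernel code itself.
 * Assumed build: cc -std=c99 -fno-strict-aliasing ctr_demo.c
 * (the u8* -> u32* casts mirror the kernel code, so strict
 * aliasing must be disabled).
 */
#include <arpa/inet.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static void model_inc(uint8_t *a, unsigned int size)
{
	uint32_t *b = (uint32_t *)(a + size);
	uint32_t c;

	/* Walk u32 words from the end of the buffer: in a big-endian
	 * counter the least significant word sits at the highest
	 * address. */
	for (; size >= 4; size -= 4) {
		c = ntohl(*--b) + 1;
		*b = htonl(c);
		if (c)		/* no carry out of this word: done */
			return;
	}

	/* Byte-at-a-time fallback for a head smaller than four bytes,
	 * mirroring crypto_inc_byte() in the patch. */
	for (uint8_t *p = a + size; size; size--)
		if (++*--p)
			break;
}

static void model_xor(uint8_t *dst, const uint8_t *src, unsigned int size)
{
	uint32_t *a = (uint32_t *)dst;
	const uint32_t *b = (const uint32_t *)src;
	uint8_t *pa;
	const uint8_t *pb;

	/* Word-wide xor for the bulk of the buffer... */
	for (; size >= 4; size -= 4)
		*a++ ^= *b++;

	/* ...and a byte tail, mirroring crypto_xor_byte(). */
	pa = (uint8_t *)a;
	pb = (const uint8_t *)b;
	for (; size; size--)
		*pa++ ^= *pb++;
}

int main(void)
{
	/* A 16-byte big-endian counter block with the low byte about to
	 * carry.  aligned(4) is a GCC/Clang extension used here to
	 * satisfy the u32 alignment requirement explicitly. */
	uint8_t ctr[16] __attribute__((aligned(4))) = { [15] = 0xff };
	uint8_t buf[16] __attribute__((aligned(4)));
	unsigned int i;

	model_inc(ctr, sizeof(ctr));	/* ...00ff -> ...0100 */
	for (i = 0; i < sizeof(ctr); i++)
		printf("%02x", ctr[i]);
	printf("\n");			/* carry into the next byte */

	memcpy(buf, ctr, sizeof(buf));
	model_xor(buf, ctr, sizeof(buf));	/* x ^ x == 0 */
	for (i = 0; i < sizeof(buf); i++)
		printf("%02x", buf[i]);
	printf("\n");			/* all zeroes */
	return 0;
}

The u32 loop in crypto_inc() runs backwards from the end of the buffer
because the least significant word of a big-endian counter is stored
last; the byte helpers only handle the head (crypto_inc) or tail
(crypto_xor) smaller than four bytes, which keeps the common 16-byte IV
case entirely on the word-wide fast path.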