41 files changed, 648 insertions, 394 deletions
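Editorial note: the change repeated throughout this patch is the same in every algorithm — cia_setkey, cia_encrypt, cia_decrypt and the dia_* digest hooks now take the struct crypto_tfm itself instead of a bare void *ctx, and each implementation recovers its private state with crypto_tfm_ctx(tfm). A minimal sketch of the new calling convention follows; example_ctx, example_setkey and example_encrypt are illustrative names only, not code taken from any file in this diff.

#include <linux/crypto.h>
#include <linux/string.h>
#include <linux/errno.h>

/* Illustrative context; real algorithms keep their key schedule here. */
struct example_ctx {
	u32 key[8];
	unsigned int key_len;
};

/* New-style setkey: the core passes the tfm and the algorithm derives
 * its private context from it instead of receiving a raw pointer. */
static int example_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			  unsigned int key_len, u32 *flags)
{
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);

	if (key_len > sizeof(ctx->key)) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	memcpy(ctx->key, in_key, key_len);
	ctx->key_len = key_len;
	return 0;
}

/* New-style block callback: same idea, the tfm comes first. */
static void example_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);
	int i;

	/* Placeholder transform only; a real cipher does its rounds here. */
	for (i = 0; i < 8; i++)
		dst[i] = src[i] ^ ((u8 *)ctx->key)[i];
}
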
diff --git a/arch/i386/crypto/aes-i586-asm.S b/arch/i386/crypto/aes-i586-asm.S index 911b15377f2..f942f0c8f63 100644 --- a/arch/i386/crypto/aes-i586-asm.S +++ b/arch/i386/crypto/aes-i586-asm.S @@ -36,22 +36,19 @@ .file "aes-i586-asm.S" .text -// aes_rval aes_enc_blk(const unsigned char in_blk[], unsigned char out_blk[], const aes_ctx cx[1])// -// aes_rval aes_dec_blk(const unsigned char in_blk[], unsigned char out_blk[], const aes_ctx cx[1])// - -#define tlen 1024 // length of each of 4 'xor' arrays (256 32-bit words) +#include <asm/asm-offsets.h> -// offsets to parameters with one register pushed onto stack - -#define in_blk 8 // input byte array address parameter -#define out_blk 12 // output byte array address parameter -#define ctx 16 // AES context structure +#define tlen 1024 // length of each of 4 'xor' arrays (256 32-bit words) -// offsets in context structure +/* offsets to parameters with one register pushed onto stack */ +#define tfm 8 +#define out_blk 12 +#define in_blk 16 -#define ekey 0 // encryption key schedule base address -#define nrnd 256 // number of rounds -#define dkey 260 // decryption key schedule base address +/* offsets in crypto_tfm structure */ +#define ekey (crypto_tfm_ctx_offset + 0) +#define nrnd (crypto_tfm_ctx_offset + 256) +#define dkey (crypto_tfm_ctx_offset + 260) // register mapping for encrypt and decrypt subroutines @@ -220,6 +217,7 @@ do_col (table, r5,r0,r1,r4, r2,r3); /* idx=r5 */ // AES (Rijndael) Encryption Subroutine +/* void aes_enc_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */ .global aes_enc_blk @@ -230,7 +228,7 @@ aes_enc_blk: push %ebp - mov ctx(%esp),%ebp // pointer to context + mov tfm(%esp),%ebp // CAUTION: the order and the values used in these assigns // rely on the register mappings @@ -295,6 +293,7 @@ aes_enc_blk: ret // AES (Rijndael) Decryption Subroutine +/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */ .global aes_dec_blk @@ -305,7 +304,7 @@ aes_enc_blk: aes_dec_blk: push %ebp - mov ctx(%esp),%ebp // pointer to context + mov tfm(%esp),%ebp // CAUTION: the order and the values used in these assigns // rely on the register mappings diff --git a/arch/i386/crypto/aes.c b/arch/i386/crypto/aes.c index a50397b1d5c..d3806daa3de 100644 --- a/arch/i386/crypto/aes.c +++ b/arch/i386/crypto/aes.c @@ -45,8 +45,8 @@ #include <linux/crypto.h> #include <linux/linkage.h> -asmlinkage void aes_enc_blk(const u8 *src, u8 *dst, void *ctx); -asmlinkage void aes_dec_blk(const u8 *src, u8 *dst, void *ctx); +asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src); +asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src); #define AES_MIN_KEY_SIZE 16 #define AES_MAX_KEY_SIZE 32 @@ -378,12 +378,12 @@ static void gen_tabs(void) k[8*(i)+11] = ss[3]; \ } -static int -aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags) +static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len, u32 *flags) { int i; u32 ss[8]; - struct aes_ctx *ctx = ctx_arg; + struct aes_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *key = (const __le32 *)in_key; /* encryption schedule */ @@ -464,16 +464,16 @@ aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags) return 0; } -static inline void aes_encrypt(void *ctx, u8 *dst, const u8 *src) +static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - aes_enc_blk(src, dst, ctx); + aes_enc_blk(tfm, dst, src); } -static inline void aes_decrypt(void *ctx, u8 *dst, const 
u8 *src) + +static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - aes_dec_blk(src, dst, ctx); + aes_dec_blk(tfm, dst, src); } - static struct crypto_alg aes_alg = { .cra_name = "aes", .cra_driver_name = "aes-i586", diff --git a/arch/i386/kernel/asm-offsets.c b/arch/i386/kernel/asm-offsets.c index 36d66e2077d..1c3a809e642 100644 --- a/arch/i386/kernel/asm-offsets.c +++ b/arch/i386/kernel/asm-offsets.c @@ -4,6 +4,7 @@ * to extract and format the required data. */ +#include <linux/crypto.h> #include <linux/sched.h> #include <linux/signal.h> #include <linux/personality.h> @@ -69,4 +70,6 @@ void foo(void) DEFINE(PAGE_SIZE_asm, PAGE_SIZE); DEFINE(VSYSCALL_BASE, __fix_to_virt(FIX_VSYSCALL)); + + OFFSET(crypto_tfm_ctx_offset, crypto_tfm, __crt_ctx); } diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c index c5ca2dc5d42..5713c7e5bd1 100644 --- a/arch/s390/crypto/aes_s390.c +++ b/arch/s390/crypto/aes_s390.c @@ -37,10 +37,10 @@ struct s390_aes_ctx { int key_len; }; -static int aes_set_key(void *ctx, const u8 *in_key, unsigned int key_len, - u32 *flags) +static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len, u32 *flags) { - struct s390_aes_ctx *sctx = ctx; + struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); switch (key_len) { case 16: @@ -70,9 +70,9 @@ fail: return -EINVAL; } -static void aes_encrypt(void *ctx, u8 *out, const u8 *in) +static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - const struct s390_aes_ctx *sctx = ctx; + const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); switch (sctx->key_len) { case 16: @@ -90,9 +90,9 @@ static void aes_encrypt(void *ctx, u8 *out, const u8 *in) } } -static void aes_decrypt(void *ctx, u8 *out, const u8 *in) +static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - const struct s390_aes_ctx *sctx = ctx; + const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); switch (sctx->key_len) { case 16: diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c index e3c37aa0a19..b3f7496a79b 100644 --- a/arch/s390/crypto/des_s390.c +++ b/arch/s390/crypto/des_s390.c @@ -44,10 +44,10 @@ struct crypt_s390_des3_192_ctx { u8 key[DES3_192_KEY_SIZE]; }; -static int des_setkey(void *ctx, const u8 *key, unsigned int keylen, - u32 *flags) +static int des_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen, u32 *flags) { - struct crypt_s390_des_ctx *dctx = ctx; + struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm); int ret; /* test if key is valid (not a weak key) */ @@ -57,16 +57,16 @@ static int des_setkey(void *ctx, const u8 *key, unsigned int keylen, return ret; } -static void des_encrypt(void *ctx, u8 *out, const u8 *in) +static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - struct crypt_s390_des_ctx *dctx = ctx; + struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm); crypt_s390_km(KM_DEA_ENCRYPT, dctx->key, out, in, DES_BLOCK_SIZE); } -static void des_decrypt(void *ctx, u8 *out, const u8 *in) +static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - struct crypt_s390_des_ctx *dctx = ctx; + struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm); crypt_s390_km(KM_DEA_DECRYPT, dctx->key, out, in, DES_BLOCK_SIZE); } @@ -166,11 +166,11 @@ static struct crypto_alg des_alg = { * Implementers MUST reject keys that exhibit this property. 
* */ -static int des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen, - u32 *flags) +static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen, u32 *flags) { int i, ret; - struct crypt_s390_des3_128_ctx *dctx = ctx; + struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm); const u8* temp_key = key; if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) { @@ -186,17 +186,17 @@ static int des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen, return 0; } -static void des3_128_encrypt(void *ctx, u8 *dst, const u8 *src) +static void des3_128_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - struct crypt_s390_des3_128_ctx *dctx = ctx; + struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm); crypt_s390_km(KM_TDEA_128_ENCRYPT, dctx->key, dst, (void*)src, DES3_128_BLOCK_SIZE); } -static void des3_128_decrypt(void *ctx, u8 *dst, const u8 *src) +static void des3_128_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - struct crypt_s390_des3_128_ctx *dctx = ctx; + struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm); crypt_s390_km(KM_TDEA_128_DECRYPT, dctx->key, dst, (void*)src, DES3_128_BLOCK_SIZE); @@ -302,11 +302,11 @@ static struct crypto_alg des3_128_alg = { * property. * */ -static int des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen, - u32 *flags) +static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen, u32 *flags) { int i, ret; - struct crypt_s390_des3_192_ctx *dctx = ctx; + struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm); const u8* temp_key = key; if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) && @@ -325,17 +325,17 @@ static int des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen, return 0; } -static void des3_192_encrypt(void *ctx, u8 *dst, const u8 *src) +static void des3_192_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - struct crypt_s390_des3_192_ctx *dctx = ctx; + struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm); crypt_s390_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src, DES3_192_BLOCK_SIZE); } -static void des3_192_decrypt(void *ctx, u8 *dst, const u8 *src) +static void des3_192_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - struct crypt_s390_des3_192_ctx *dctx = ctx; + struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm); crypt_s390_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src, DES3_192_BLOCK_SIZE); diff --git a/arch/s390/crypto/sha1_s390.c b/arch/s390/crypto/sha1_s390.c index 98c896b86dc..9d34a35b1aa 100644 --- a/arch/s390/crypto/sha1_s390.c +++ b/arch/s390/crypto/sha1_s390.c @@ -40,28 +40,29 @@ struct crypt_s390_sha1_ctx { u8 buffer[2 * SHA1_BLOCK_SIZE]; }; -static void -sha1_init(void *ctx) +static void sha1_init(struct crypto_tfm *tfm) { - static const struct crypt_s390_sha1_ctx initstate = { - .state = { - 0x67452301, - 0xEFCDAB89, - 0x98BADCFE, - 0x10325476, - 0xC3D2E1F0 - }, + struct crypt_s390_sha1_ctx *ctx = crypto_tfm_ctx(tfm); + static const u32 initstate[5] = { + 0x67452301, + 0xEFCDAB89, + 0x98BADCFE, + 0x10325476, + 0xC3D2E1F0 }; - memcpy(ctx, &initstate, sizeof(initstate)); + + ctx->count = 0; + memcpy(ctx->state, &initstate, sizeof(initstate)); + ctx->buf_len = 0; } -static void -sha1_update(void *ctx, const u8 *data, unsigned int len) +static void sha1_update(struct crypto_tfm *tfm, const u8 *data, + unsigned int len) { struct crypt_s390_sha1_ctx *sctx; long imd_len; - sctx = ctx; + sctx = crypto_tfm_ctx(tfm); sctx->count += len * 8; //message bit length //anything in 
buffer yet? -> must be completed @@ -110,10 +111,9 @@ pad_message(struct crypt_s390_sha1_ctx* sctx) } /* Add padding and return the message digest. */ -static void -sha1_final(void* ctx, u8 *out) +static void sha1_final(struct crypto_tfm *tfm, u8 *out) { - struct crypt_s390_sha1_ctx *sctx = ctx; + struct crypt_s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm); //must perform manual padding pad_message(sctx); diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c index 1ec5e92b345..f573df30f31 100644 --- a/arch/s390/crypto/sha256_s390.c +++ b/arch/s390/crypto/sha256_s390.c @@ -31,9 +31,9 @@ struct s390_sha256_ctx { u8 buf[2 * SHA256_BLOCK_SIZE]; }; -static void sha256_init(void *ctx) +static void sha256_init(struct crypto_tfm *tfm) { - struct s390_sha256_ctx *sctx = ctx; + struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm); sctx->state[0] = 0x6a09e667; sctx->state[1] = 0xbb67ae85; @@ -44,12 +44,12 @@ static void sha256_init(void *ctx) sctx->state[6] = 0x1f83d9ab; sctx->state[7] = 0x5be0cd19; sctx->count = 0; - memset(sctx->buf, 0, sizeof(sctx->buf)); } -static void sha256_update(void *ctx, const u8 *data, unsigned int len) +static void sha256_update(struct crypto_tfm *tfm, const u8 *data, + unsigned int len) { - struct s390_sha256_ctx *sctx = ctx; + struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm); unsigned int index; int ret; @@ -108,9 +108,9 @@ static void pad_message(struct s390_sha256_ctx* sctx) } /* Add padding and return the message digest */ -static void sha256_final(void* ctx, u8 *out) +static void sha256_final(struct crypto_tfm *tfm, u8 *out) { - struct s390_sha256_ctx *sctx = ctx; + struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm); /* must perform manual padding */ pad_message(sctx); diff --git a/arch/x86_64/crypto/aes-x86_64-asm.S b/arch/x86_64/crypto/aes-x86_64-asm.S index 483cbb23ab8..26b40de4d0b 100644 --- a/arch/x86_64/crypto/aes-x86_64-asm.S +++ b/arch/x86_64/crypto/aes-x86_64-asm.S @@ -15,6 +15,10 @@ .text +#include <asm/asm-offsets.h> + +#define BASE crypto_tfm_ctx_offset + #define R1 %rax #define R1E %eax #define R1X %ax @@ -46,19 +50,19 @@ #define R10 %r10 #define R11 %r11 -#define prologue(FUNC,BASE,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \ +#define prologue(FUNC,KEY,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \ .global FUNC; \ .type FUNC,@function; \ .align 8; \ FUNC: movq r1,r2; \ movq r3,r4; \ - leaq BASE+52(r8),r9; \ + leaq BASE+KEY+52(r8),r9; \ movq r10,r11; \ movl (r7),r5 ## E; \ movl 4(r7),r1 ## E; \ movl 8(r7),r6 ## E; \ movl 12(r7),r7 ## E; \ - movl (r8),r10 ## E; \ + movl BASE(r8),r10 ## E; \ xorl -48(r9),r5 ## E; \ xorl -44(r9),r1 ## E; \ xorl -40(r9),r6 ## E; \ @@ -128,8 +132,8 @@ FUNC: movq r1,r2; \ movl r3 ## E,r1 ## E; \ movl r4 ## E,r2 ## E; -#define entry(FUNC,BASE,B128,B192) \ - prologue(FUNC,BASE,B128,B192,R2,R8,R7,R9,R1,R3,R4,R6,R10,R5,R11) +#define entry(FUNC,KEY,B128,B192) \ + prologue(FUNC,KEY,B128,B192,R2,R8,R7,R9,R1,R3,R4,R6,R10,R5,R11) #define return epilogue(R8,R2,R9,R7,R5,R6,R3,R4,R11) @@ -147,9 +151,9 @@ FUNC: movq r1,r2; \ #define decrypt_final(TAB,OFFSET) \ round(TAB,OFFSET,R2,R1,R4,R3,R6,R5,R7,R10,R5,R6,R3,R4) -/* void aes_encrypt(void *ctx, u8 *out, const u8 *in) */ +/* void aes_enc_blk(stuct crypto_tfm *tfm, u8 *out, const u8 *in) */ - entry(aes_encrypt,0,enc128,enc192) + entry(aes_enc_blk,0,enc128,enc192) encrypt_round(aes_ft_tab,-96) encrypt_round(aes_ft_tab,-80) enc192: encrypt_round(aes_ft_tab,-64) @@ -166,9 +170,9 @@ enc128: encrypt_round(aes_ft_tab,-32) encrypt_final(aes_fl_tab,112) return -/* void 
aes_decrypt(void *ctx, u8 *out, const u8 *in) */ +/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in) */ - entry(aes_decrypt,240,dec128,dec192) + entry(aes_dec_blk,240,dec128,dec192) decrypt_round(aes_it_tab,-96) decrypt_round(aes_it_tab,-80) dec192: decrypt_round(aes_it_tab,-64) diff --git a/arch/x86_64/crypto/aes.c b/arch/x86_64/crypto/aes.c index 6f77e7700d3..68866fab37a 100644 --- a/arch/x86_64/crypto/aes.c +++ b/arch/x86_64/crypto/aes.c @@ -227,10 +227,10 @@ static void __init gen_tabs(void) t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \ } -static int aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, - u32 *flags) +static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len, u32 *flags) { - struct aes_ctx *ctx = ctx_arg; + struct aes_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *key = (const __le32 *)in_key; u32 i, j, t, u, v, w; @@ -283,8 +283,18 @@ static int aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, return 0; } -extern void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in); -extern void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in); +asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in); +asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in); + +static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + aes_enc_blk(tfm, dst, src); +} + +static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + aes_dec_blk(tfm, dst, src); +} static struct crypto_alg aes_alg = { .cra_name = "aes", diff --git a/arch/x86_64/kernel/asm-offsets.c b/arch/x86_64/kernel/asm-offsets.c index 38834bbbae1..96687e2beb2 100644 --- a/arch/x86_64/kernel/asm-offsets.c +++ b/arch/x86_64/kernel/asm-offsets.c @@ -4,6 +4,7 @@ * and format the required data. */ +#include <linux/crypto.h> #include <linux/sched.h> #include <linux/stddef.h> #include <linux/errno.h> @@ -68,5 +69,7 @@ int main(void) DEFINE(pbe_next, offsetof(struct pbe, next)); BLANK(); DEFINE(TSS_ist, offsetof(struct tss_struct, ist)); + BLANK(); + DEFINE(crypto_tfm_ctx_offset, offsetof(struct crypto_tfm, __crt_ctx)); return 0; } diff --git a/crypto/Kconfig b/crypto/Kconfig index c442f2e7ce4..ba133d55704 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -337,7 +337,7 @@ config CRYPTO_CRC32C config CRYPTO_TEST tristate "Testing module" - depends on CRYPTO + depends on CRYPTO && m help Quick & dirty crypto test module. 
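Editorial note: the asm-offsets.c hunks above are what let the hand-written i386 and x86_64 AES assembly locate the private context behind the tfm pointer it now receives as its first argument: the build emits offsetof(struct crypto_tfm, __crt_ctx) as the crypto_tfm_ctx_offset constant in asm/asm-offsets.h, and the .S files add that constant to the tfm base address. A rough C equivalent of that address arithmetic is sketched below; example_ctx_from_tfm is illustrative and not a function in this patch.

#include <linux/crypto.h>
#include <linux/stddef.h>

/* What the generated crypto_tfm_ctx_offset constant stands for. */
#define EXAMPLE_TFM_CTX_OFFSET offsetof(struct crypto_tfm, __crt_ctx)

/* The assembly computes the same address crypto_tfm_ctx() returns in C:
 * the tfm base pointer plus the offset of __crt_ctx. */
static void *example_ctx_from_tfm(struct crypto_tfm *tfm)
{
	return (char *)tfm + EXAMPLE_TFM_CTX_OFFSET;
}
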
diff --git a/crypto/aes.c b/crypto/aes.c index a5017292e06..a038711831e 100644 --- a/crypto/aes.c +++ b/crypto/aes.c @@ -248,10 +248,10 @@ gen_tabs (void) t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \ } -static int -aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags) +static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len, u32 *flags) { - struct aes_ctx *ctx = ctx_arg; + struct aes_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *key = (const __le32 *)in_key; u32 i, t, u, v, w; @@ -318,9 +318,9 @@ aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags) f_rl(bo, bi, 2, k); \ f_rl(bo, bi, 3, k) -static void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in) +static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - const struct aes_ctx *ctx = ctx_arg; + const struct aes_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *src = (const __le32 *)in; __le32 *dst = (__le32 *)out; u32 b0[4], b1[4]; @@ -373,9 +373,9 @@ static void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in) i_rl(bo, bi, 2, k); \ i_rl(bo, bi, 3, k) -static void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in) +static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - const struct aes_ctx *ctx = ctx_arg; + const struct aes_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *src = (const __le32 *)in; __le32 *dst = (__le32 *)out; u32 b0[4], b1[4]; diff --git a/crypto/anubis.c b/crypto/anubis.c index 2c796bdb91a..7e2e1a29800 100644 --- a/crypto/anubis.c +++ b/crypto/anubis.c @@ -460,16 +460,15 @@ static const u32 rc[] = { 0xf726ffedU, 0xe89d6f8eU, 0x19a0f089U, }; -static int anubis_setkey(void *ctx_arg, const u8 *in_key, +static int anubis_setkey(struct crypto_tfm *tfm, const u8 *in_key, unsigned int key_len, u32 *flags) { + struct anubis_ctx *ctx = crypto_tfm_ctx(tfm); const __be32 *key = (const __be32 *)in_key; int N, R, i, r; u32 kappa[ANUBIS_MAX_N]; u32 inter[ANUBIS_MAX_N]; - struct anubis_ctx *ctx = ctx_arg; - switch (key_len) { case 16: case 20: case 24: case 28: @@ -660,15 +659,15 @@ static void anubis_crypt(u32 roundKey[ANUBIS_MAX_ROUNDS + 1][4], dst[i] = cpu_to_be32(inter[i]); } -static void anubis_encrypt(void *ctx_arg, u8 *dst, const u8 *src) +static void anubis_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - struct anubis_ctx *ctx = ctx_arg; + struct anubis_ctx *ctx = crypto_tfm_ctx(tfm); anubis_crypt(ctx->E, dst, src, ctx->R); } -static void anubis_decrypt(void *ctx_arg, u8 *dst, const u8 *src) +static void anubis_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - struct anubis_ctx *ctx = ctx_arg; + struct anubis_ctx *ctx = crypto_tfm_ctx(tfm); anubis_crypt(ctx->D, dst, src, ctx->R); } diff --git a/crypto/api.c b/crypto/api.c index 80bba637fba..c11ec1fd4f1 100644 --- a/crypto/api.c +++ b/crypto/api.c @@ -188,13 +188,16 @@ struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags) if (crypto_init_flags(tfm, flags)) goto out_free_tfm; - if (crypto_init_ops(tfm)) { - crypto_exit_ops(tfm); + if (crypto_init_ops(tfm)) goto out_free_tfm; - } + + if (alg->cra_init && alg->cra_init(tfm)) + goto cra_init_failed; goto out; +cra_init_failed: + crypto_exit_ops(tfm); out_free_tfm: kfree(tfm); tfm = NULL; @@ -215,6 +218,8 @@ void crypto_free_tfm(struct crypto_tfm *tfm) alg = tfm->__crt_alg; size = sizeof(*tfm) + alg->cra_ctxsize; + if (alg->cra_exit) + alg->cra_exit(tfm); crypto_exit_ops(tfm); crypto_alg_put(alg); memset(tfm, 0, size); @@ -224,7 +229,7 @@ void crypto_free_tfm(struct crypto_tfm *tfm) 
static inline int crypto_set_driver_name(struct crypto_alg *alg) { static const char suffix[] = "-generic"; - char *driver_name = (char *)alg->cra_driver_name; + char *driver_name = alg->cra_driver_name; int len; if (*driver_name) @@ -262,13 +267,13 @@ int crypto_register_alg(struct crypto_alg *alg) down_write(&crypto_alg_sem); list_for_each_entry(q, &crypto_alg_list, cra_list) { - if (!strcmp(q->cra_driver_name, alg->cra_driver_name)) { + if (q == alg) { ret = -EEXIST; goto out; } } - list_add_tail(&alg->cra_list, &crypto_alg_list); + list_add(&alg->cra_list, &crypto_alg_list); out: up_write(&crypto_alg_sem); return ret; diff --git a/crypto/arc4.c b/crypto/arc4.c index 9efbcaae88a..5edc6a65b98 100644 --- a/crypto/arc4.c +++ b/crypto/arc4.c @@ -24,9 +24,10 @@ struct arc4_ctx { u8 x, y; }; -static int arc4_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags) +static int arc4_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len, u32 *flags) { - struct arc4_ctx *ctx = ctx_arg; + struct arc4_ctx *ctx = crypto_tfm_ctx(tfm); int i, j = 0, k = 0; ctx->x = 1; @@ -48,9 +49,9 @@ static int arc4_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u return 0; } -static void arc4_crypt(void *ctx_arg, u8 *out, const u8 *in) +static void arc4_crypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - struct arc4_ctx *ctx = ctx_arg; + struct arc4_ctx *ctx = crypto_tfm_ctx(tfm); u8 *const S = ctx->S; u8 x = ctx->x; diff --git a/crypto/blowfish.c b/crypto/blowfish.c index 7f710b201f2..490265f42b3 100644 --- a/crypto/blowfish.c +++ b/crypto/blowfish.c @@ -349,7 +349,7 @@ static void encrypt_block(struct bf_ctx *bctx, u32 *dst, u32 *src) dst[1] = yl; } -static void bf_encrypt(void *ctx, u8 *dst, const u8 *src) +static void bf_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { const __be32 *in_blk = (const __be32 *)src; __be32 *const out_blk = (__be32 *)dst; @@ -357,17 +357,18 @@ static void bf_encrypt(void *ctx, u8 *dst, const u8 *src) in32[0] = be32_to_cpu(in_blk[0]); in32[1] = be32_to_cpu(in_blk[1]); - encrypt_block(ctx, out32, in32); + encrypt_block(crypto_tfm_ctx(tfm), out32, in32); out_blk[0] = cpu_to_be32(out32[0]); out_blk[1] = cpu_to_be32(out32[1]); } -static void bf_decrypt(void *ctx, u8 *dst, const u8 *src) +static void bf_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { + struct bf_ctx *ctx = crypto_tfm_ctx(tfm); const __be32 *in_blk = (const __be32 *)src; __be32 *const out_blk = (__be32 *)dst; - const u32 *P = ((struct bf_ctx *)ctx)->p; - const u32 *S = ((struct bf_ctx *)ctx)->s; + const u32 *P = ctx->p; + const u32 *S = ctx->s; u32 yl = be32_to_cpu(in_blk[0]); u32 yr = be32_to_cpu(in_blk[1]); @@ -398,12 +399,14 @@ static void bf_decrypt(void *ctx, u8 *dst, const u8 *src) /* * Calculates the blowfish S and P boxes for encryption and decryption. 
*/ -static int bf_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags) +static int bf_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen, u32 *flags) { + struct bf_ctx *ctx = crypto_tfm_ctx(tfm); + u32 *P = ctx->p; + u32 *S = ctx->s; short i, j, count; u32 data[2], temp; - u32 *P = ((struct bf_ctx *)ctx)->p; - u32 *S = ((struct bf_ctx *)ctx)->s; /* Copy the initialization s-boxes */ for (i = 0, count = 0; i < 256; i++) diff --git a/crypto/cast5.c b/crypto/cast5.c index 8834c8580c0..08eef58c1d3 100644 --- a/crypto/cast5.c +++ b/crypto/cast5.c @@ -577,9 +577,9 @@ static const u32 sb8[256] = { (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]) ) -static void cast5_encrypt(void *ctx, u8 * outbuf, const u8 * inbuf) +static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) { - struct cast5_ctx *c = (struct cast5_ctx *) ctx; + struct cast5_ctx *c = crypto_tfm_ctx(tfm); const __be32 *src = (const __be32 *)inbuf; __be32 *dst = (__be32 *)outbuf; u32 l, r, t; @@ -642,9 +642,9 @@ static void cast5_encrypt(void *ctx, u8 * outbuf, const u8 * inbuf) dst[1] = cpu_to_be32(l); } -static void cast5_decrypt(void *ctx, u8 * outbuf, const u8 * inbuf) +static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) { - struct cast5_ctx *c = (struct cast5_ctx *) ctx; + struct cast5_ctx *c = crypto_tfm_ctx(tfm); const __be32 *src = (const __be32 *)inbuf; __be32 *dst = (__be32 *)outbuf; u32 l, r, t; @@ -769,15 +769,15 @@ static void key_schedule(u32 * x, u32 * z, u32 * k) } -static int -cast5_setkey(void *ctx, const u8 * key, unsigned key_len, u32 * flags) +static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned key_len, u32 *flags) { + struct cast5_ctx *c = crypto_tfm_ctx(tfm); int i; u32 x[4]; u32 z[4]; u32 k[16]; __be32 p_key[4]; - struct cast5_ctx *c = (struct cast5_ctx *) ctx; if (key_len < 5 || key_len > 16) { *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; diff --git a/crypto/cast6.c b/crypto/cast6.c index 9e28740ba77..08e33bfc3ad 100644 --- a/crypto/cast6.c +++ b/crypto/cast6.c @@ -381,13 +381,13 @@ static inline void W(u32 *key, unsigned int i) { key[7] ^= F2(key[0], Tr[i % 4][7], Tm[i][7]); } -static int -cast6_setkey(void *ctx, const u8 * in_key, unsigned key_len, u32 * flags) +static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key, + unsigned key_len, u32 *flags) { int i; u32 key[8]; __be32 p_key[8]; /* padded key */ - struct cast6_ctx *c = (struct cast6_ctx *) ctx; + struct cast6_ctx *c = crypto_tfm_ctx(tfm); if (key_len < 16 || key_len > 32 || key_len % 4 != 0) { *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; @@ -444,8 +444,9 @@ static inline void QBAR (u32 * block, u8 * Kr, u32 * Km) { block[2] ^= F1(block[3], Kr[0], Km[0]); } -static void cast6_encrypt (void * ctx, u8 * outbuf, const u8 * inbuf) { - struct cast6_ctx * c = (struct cast6_ctx *)ctx; +static void cast6_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) +{ + struct cast6_ctx *c = crypto_tfm_ctx(tfm); const __be32 *src = (const __be32 *)inbuf; __be32 *dst = (__be32 *)outbuf; u32 block[4]; @@ -476,8 +477,8 @@ static void cast6_encrypt (void * ctx, u8 * outbuf, const u8 * inbuf) { dst[3] = cpu_to_be32(block[3]); } -static void cast6_decrypt (void * ctx, u8 * outbuf, const u8 * inbuf) { - struct cast6_ctx * c = (struct cast6_ctx *)ctx; +static void cast6_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) { + struct cast6_ctx * c = crypto_tfm_ctx(tfm); const __be32 *src = (const __be32 *)inbuf; __be32 *dst = (__be32 *)outbuf; u32 
block[4]; diff --git a/crypto/cipher.c b/crypto/cipher.c index 65bcea0cd17..b899eb97abd 100644 --- a/crypto/cipher.c +++ b/crypto/cipher.c @@ -187,7 +187,7 @@ static unsigned int cbc_process_encrypt(const struct cipher_desc *desc, void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block; int bsize = crypto_tfm_alg_blocksize(tfm); - void (*fn)(void *, u8 *, const u8 *) = desc->crfn; + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn; u8 *iv = desc->info; unsigned int done = 0; @@ -195,7 +195,7 @@ static unsigned int cbc_process_encrypt(const struct cipher_desc *desc, do { xor(iv, src); - fn(crypto_tfm_ctx(tfm), dst, iv); + fn(tfm, dst, iv); memcpy(iv, dst, bsize); src += bsize; @@ -218,7 +218,7 @@ static unsigned int cbc_process_decrypt(const struct cipher_desc *desc, u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1); u8 **dst_p = src == dst ? &buf : &dst; - void (*fn)(void *, u8 *, const u8 *) = desc->crfn; + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn; u8 *iv = desc->info; unsigned int done = 0; @@ -227,7 +227,7 @@ static unsigned int cbc_process_decrypt(const struct cipher_desc *desc, do { u8 *tmp_dst = *dst_p; - fn(crypto_tfm_ctx(tfm), tmp_dst, src); + fn(tfm, tmp_dst, src); xor(tmp_dst, iv); memcpy(iv, src, bsize); if (tmp_dst != dst) @@ -245,13 +245,13 @@ static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst, { struct crypto_tfm *tfm = desc->tfm; int bsize = crypto_tfm_alg_blocksize(tfm); - void (*fn)(void *, u8 *, const u8 *) = desc->crfn; + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn; unsigned int done = 0; nbytes -= bsize; do { - fn(crypto_tfm_ctx(tfm), dst, src); + fn(tfm, dst, src); src += bsize; dst += bsize; @@ -268,7 +268,7 @@ static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; return -EINVAL; } else - return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen, + return cia->cia_setkey(tfm, key, keylen, &tfm->crt_flags); } diff --git a/crypto/compress.c b/crypto/compress.c index eb36d9364da..eca182aa338 100644 --- a/crypto/compress.c +++ b/crypto/compress.c @@ -22,8 +22,7 @@ static int crypto_compress(struct crypto_tfm *tfm, const u8 *src, unsigned int slen, u8 *dst, unsigned int *dlen) { - return tfm->__crt_alg->cra_compress.coa_compress(crypto_tfm_ctx(tfm), - src, slen, dst, + return tfm->__crt_alg->cra_compress.coa_compress(tfm, src, slen, dst, dlen); } @@ -31,8 +30,7 @@ static int crypto_decompress(struct crypto_tfm *tfm, const u8 *src, unsigned int slen, u8 *dst, unsigned int *dlen) { - return tfm->__crt_alg->cra_compress.coa_decompress(crypto_tfm_ctx(tfm), - src, slen, dst, + return tfm->__crt_alg->cra_compress.coa_decompress(tfm, src, slen, dst, dlen); } @@ -43,21 +41,14 @@ int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags) int crypto_init_compress_ops(struct crypto_tfm *tfm) { - int ret = 0; struct compress_tfm *ops = &tfm->crt_compress; - - ret = tfm->__crt_alg->cra_compress.coa_init(crypto_tfm_ctx(tfm)); - if (ret) - goto out; ops->cot_compress = crypto_compress; ops->cot_decompress = crypto_decompress; -out: - return ret; + return 0; } void crypto_exit_compress_ops(struct crypto_tfm *tfm) { - tfm->__crt_alg->cra_compress.coa_exit(crypto_tfm_ctx(tfm)); } diff --git a/crypto/crc32c.c b/crypto/crc32c.c index 953362423a5..f2660123aeb 100644 --- a/crypto/crc32c.c +++ b/crypto/crc32c.c @@ -31,9 +31,9 @@ struct chksum_ctx { * crc using table. 
*/ -static void chksum_init(void *ctx) +static void chksum_init(struct crypto_tfm *tfm) { - struct chksum_ctx *mctx = ctx; + struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); mctx->crc = ~(u32)0; /* common usage */ } @@ -43,10 +43,10 @@ static void chksum_init(void *ctx) * If your algorithm starts with ~0, then XOR with ~0 before you set * the seed. */ -static int chksum_setkey(void *ctx, const u8 *key, unsigned int keylen, - u32 *flags) +static int chksum_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen, u32 *flags) { - struct chksum_ctx *mctx = ctx; + struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); if (keylen != sizeof(mctx->crc)) { if (flags) @@ -57,9 +57,10 @@ static int chksum_setkey(void *ctx, const u8 *key, unsigned int keylen, return 0; } -static void chksum_update(void *ctx, const u8 *data, unsigned int length) +static void chksum_update(struct crypto_tfm *tfm, const u8 *data, + unsigned int length) { - struct chksum_ctx *mctx = ctx; + struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); u32 mcrc; mcrc = crc32c(mctx->crc, data, (size_t)length); @@ -67,9 +68,9 @@ static void chksum_update(void *ctx, const u8 *data, unsigned int length) mctx->crc = mcrc; } -static void chksum_final(void *ctx, u8 *out) +static void chksum_final(struct crypto_tfm *tfm, u8 *out) { - struct chksum_ctx *mctx = ctx; + struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); u32 mcrc = (mctx->crc ^ ~(u32)0); *(u32 *)out = __le32_to_cpu(mcrc); diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c index 3fcf6e887e8..a0d956b5294 100644 --- a/crypto/crypto_null.c +++ b/crypto/crypto_null.c @@ -27,8 +27,8 @@ #define NULL_BLOCK_SIZE 1 #define NULL_DIGEST_SIZE 0 -static int null_compress(void *ctx, const u8 *src, unsigned int slen, - u8 *dst, unsigned int *dlen) +static int null_compress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) { if (slen > *dlen) return -EINVAL; @@ -37,20 +37,21 @@ static int null_compress(void *ctx, const u8 *src, unsigned int slen, return 0; } -static void null_init(void *ctx) +static void null_init(struct crypto_tfm *tfm) { } -static void null_update(void *ctx, const u8 *data, unsigned int len) +static void null_update(struct crypto_tfm *tfm, const u8 *data, + unsigned int len) { } -static void null_final(void *ctx, u8 *out) +static void null_final(struct crypto_tfm *tfm, u8 *out) { } -static int null_setkey(void *ctx, const u8 *key, - unsigned int keylen, u32 *flags) +static int null_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen, u32 *flags) { return 0; } -static void null_crypt(void *ctx, u8 *dst, const u8 *src) +static void null_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { memcpy(dst, src, NULL_BLOCK_SIZE); } diff --git a/crypto/deflate.c b/crypto/deflate.c index f209368d62a..6588bbf82e9 100644 --- a/crypto/deflate.c +++ b/crypto/deflate.c @@ -102,8 +102,9 @@ static void deflate_decomp_exit(struct deflate_ctx *ctx) kfree(ctx->decomp_stream.workspace); } -static int deflate_init(void *ctx) +static int deflate_init(struct crypto_tfm *tfm) { + struct deflate_ctx *ctx = crypto_tfm_ctx(tfm); int ret; ret = deflate_comp_init(ctx); @@ -116,17 +117,19 @@ out: return ret; } -static void deflate_exit(void *ctx) +static void deflate_exit(struct crypto_tfm *tfm) { + struct deflate_ctx *ctx = crypto_tfm_ctx(tfm); + deflate_comp_exit(ctx); deflate_decomp_exit(ctx); } -static int deflate_compress(void *ctx, const u8 *src, unsigned int slen, - u8 *dst, unsigned int *dlen) +static int deflate_compress(struct crypto_tfm *tfm, 
const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) { int ret = 0; - struct deflate_ctx *dctx = ctx; + struct deflate_ctx *dctx = crypto_tfm_ctx(tfm); struct z_stream_s *stream = &dctx->comp_stream; ret = zlib_deflateReset(stream); @@ -151,12 +154,12 @@ out: return ret; } -static int deflate_decompress(void *ctx, const u8 *src, unsigned int slen, - u8 *dst, unsigned int *dlen) +static int deflate_decompress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) { int ret = 0; - struct deflate_ctx *dctx = ctx; + struct deflate_ctx *dctx = crypto_tfm_ctx(tfm); struct z_stream_s *stream = &dctx->decomp_stream; ret = zlib_inflateReset(stream); @@ -198,9 +201,9 @@ static struct crypto_alg alg = { .cra_ctxsize = sizeof(struct deflate_ctx), .cra_module = THIS_MODULE, .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_init = deflate_init, + .cra_exit = deflate_exit, .cra_u = { .compress = { - .coa_init = deflate_init, - .coa_exit = deflate_exit, .coa_compress = deflate_compress, .coa_decompress = deflate_decompress } } }; diff --git a/crypto/des.c b/crypto/des.c index 2d74cab40c3..a9d3c235a6a 100644 --- a/crypto/des.c +++ b/crypto/des.c @@ -783,9 +783,10 @@ static void dkey(u32 *pe, const u8 *k) } } -static int des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags) +static int des_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen, u32 *flags) { - struct des_ctx *dctx = ctx; + struct des_ctx *dctx = crypto_tfm_ctx(tfm); u32 tmp[DES_EXPKEY_WORDS]; int ret; @@ -803,9 +804,10 @@ static int des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags) return 0; } -static void des_encrypt(void *ctx, u8 *dst, const u8 *src) +static void des_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - const u32 *K = ((struct des_ctx *)ctx)->expkey; + struct des_ctx *ctx = crypto_tfm_ctx(tfm); + const u32 *K = ctx->expkey; const __le32 *s = (const __le32 *)src; __le32 *d = (__le32 *)dst; u32 L, R, A, B; @@ -825,9 +827,10 @@ static void des_encrypt(void *ctx, u8 *dst, const u8 *src) d[1] = cpu_to_le32(L); } -static void des_decrypt(void *ctx, u8 *dst, const u8 *src) +static void des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - const u32 *K = ((struct des_ctx *)ctx)->expkey + DES_EXPKEY_WORDS - 2; + struct des_ctx *ctx = crypto_tfm_ctx(tfm); + const u32 *K = ctx->expkey + DES_EXPKEY_WORDS - 2; const __le32 *s = (const __le32 *)src; __le32 *d = (__le32 *)dst; u32 L, R, A, B; @@ -860,11 +863,11 @@ static void des_decrypt(void *ctx, u8 *dst, const u8 *src) * property. 
* */ -static int des3_ede_setkey(void *ctx, const u8 *key, +static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen, u32 *flags) { const u32 *K = (const u32 *)key; - struct des3_ede_ctx *dctx = ctx; + struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm); u32 *expkey = dctx->expkey; if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) || @@ -881,9 +884,9 @@ static int des3_ede_setkey(void *ctx, const u8 *key, return 0; } -static void des3_ede_encrypt(void *ctx, u8 *dst, const u8 *src) +static void des3_ede_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - struct des3_ede_ctx *dctx = ctx; + struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm); const u32 *K = dctx->expkey; const __le32 *s = (const __le32 *)src; __le32 *d = (__le32 *)dst; @@ -912,9 +915,9 @@ static void des3_ede_encrypt(void *ctx, u8 *dst, const u8 *src) d[1] = cpu_to_le32(L); } -static void des3_ede_decrypt(void *ctx, u8 *dst, const u8 *src) +static void des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - struct des3_ede_ctx *dctx = ctx; + struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm); const u32 *K = dctx->expkey + DES3_EDE_EXPKEY_WORDS - 2; const __le32 *s = (const __le32 *)src; __le32 *d = (__le32 *)dst; diff --git a/crypto/digest.c b/crypto/digest.c index d9b6ac9dbf8..603006a7bef 100644 --- a/crypto/digest.c +++ b/crypto/digest.c @@ -20,13 +20,14 @@ static void init(struct crypto_tfm *tfm) { - tfm->__crt_alg->cra_digest.dia_init(crypto_tfm_ctx(tfm)); + tfm->__crt_alg->cra_digest.dia_init(tfm); } static void update(struct crypto_tfm *tfm, struct scatterlist *sg, unsigned int nsg) { unsigned int i; + unsigned int alignmask = crypto_tfm_alg_alignmask(tfm); for (i = 0; i < nsg; i++) { @@ -38,12 +39,22 @@ static void update(struct crypto_tfm *tfm, unsigned int bytes_from_page = min(l, ((unsigned int) (PAGE_SIZE)) - offset); - char *p = crypto_kmap(pg, 0) + offset; + char *src = crypto_kmap(pg, 0); + char *p = src + offset; - tfm->__crt_alg->cra_digest.dia_update - (crypto_tfm_ctx(tfm), p, - bytes_from_page); - crypto_kunmap(p, 0); + if (unlikely(offset & alignmask)) { + unsigned int bytes = + alignmask + 1 - (offset & alignmask); + bytes = min(bytes, bytes_from_page); + tfm->__crt_alg->cra_digest.dia_update(tfm, p, + bytes); + p += bytes; + bytes_from_page -= bytes; + l -= bytes; + } + tfm->__crt_alg->cra_digest.dia_update(tfm, p, + bytes_from_page); + crypto_kunmap(src, 0); crypto_yield(tfm); offset = 0; pg++; @@ -54,7 +65,15 @@ static void update(struct crypto_tfm *tfm, static void final(struct crypto_tfm *tfm, u8 *out) { - tfm->__crt_alg->cra_digest.dia_final(crypto_tfm_ctx(tfm), out); + unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); + if (unlikely((unsigned long)out & alignmask)) { + unsigned int size = crypto_tfm_alg_digestsize(tfm); + u8 buffer[size + alignmask]; + u8 *dst = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); + tfm->__crt_alg->cra_digest.dia_final(tfm, dst); + memcpy(out, dst, size); + } else + tfm->__crt_alg->cra_digest.dia_final(tfm, out); } static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) @@ -62,25 +81,15 @@ static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) u32 flags; if (tfm->__crt_alg->cra_digest.dia_setkey == NULL) return -ENOSYS; - return tfm->__crt_alg->cra_digest.dia_setkey(crypto_tfm_ctx(tfm), - key, keylen, &flags); + return tfm->__crt_alg->cra_digest.dia_setkey(tfm, key, keylen, &flags); } static void digest(struct crypto_tfm *tfm, struct scatterlist *sg, unsigned int nsg, u8 *out) { - 
unsigned int i; - - tfm->crt_digest.dit_init(tfm); - - for (i = 0; i < nsg; i++) { - char *p = crypto_kmap(sg[i].page, 0) + sg[i].offset; - tfm->__crt_alg->cra_digest.dia_update(crypto_tfm_ctx(tfm), - p, sg[i].length); - crypto_kunmap(p, 0); - crypto_yield(tfm); - } - crypto_digest_final(tfm, out); + init(tfm); + update(tfm, sg, nsg); + final(tfm, out); } int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags) diff --git a/crypto/khazad.c b/crypto/khazad.c index 807f2bf4ea2..d4c9d3657b3 100644 --- a/crypto/khazad.c +++ b/crypto/khazad.c @@ -754,11 +754,11 @@ static const u64 c[KHAZAD_ROUNDS + 1] = { 0xccc41d14c363da5dULL, 0x5fdc7dcd7f5a6c5cULL, 0xf726ffede89d6f8eULL }; -static int khazad_setkey(void *ctx_arg, const u8 *in_key, - unsigned int key_len, u32 *flags) +static int khazad_setkey(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len, u32 *flags) { - struct khazad_ctx *ctx = ctx_arg; - const __be64 *key = (const __be64 *)in_key; + struct khazad_ctx *ctx = crypto_tfm_ctx(tfm); + const __be32 *key = (const __be32 *)in_key; int r; const u64 *S = T7; u64 K2, K1; @@ -769,8 +769,9 @@ static int khazad_setkey(void *ctx_arg, const u8 *in_key, return -EINVAL; } - K2 = be64_to_cpu(key[0]); - K1 = be64_to_cpu(key[1]); + /* key is supposed to be 32-bit aligned */ + K2 = ((u64)be32_to_cpu(key[0]) << 32) | be32_to_cpu(key[1]); + K1 = ((u64)be32_to_cpu(key[2]) << 32) | be32_to_cpu(key[3]); /* setup the encrypt key */ for (r = 0; r <= KHAZAD_ROUNDS; r++) { @@ -840,15 +841,15 @@ static void khazad_crypt(const u64 roundKey[KHAZAD_ROUNDS + 1], *dst = cpu_to_be64(state); } -static void khazad_encrypt(void *ctx_arg, u8 *dst, const u8 *src) +static void khazad_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - struct khazad_ctx *ctx = ctx_arg; + struct khazad_ctx *ctx = crypto_tfm_ctx(tfm); khazad_crypt(ctx->E, dst, src); } -static void khazad_decrypt(void *ctx_arg, u8 *dst, const u8 *src) +static void khazad_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { - struct khazad_ctx *ctx = ctx_arg; + struct khazad_ctx *ctx = crypto_tfm_ctx(tfm); khazad_crypt(ctx->D, dst, src); } diff --git a/crypto/md4.c b/crypto/md4.c index a2d6df5c0f8..c1bc71bdc16 100644 --- a/crypto/md4.c +++ b/crypto/md4.c @@ -152,9 +152,9 @@ static inline void md4_transform_helper(struct md4_ctx *ctx) md4_transform(ctx->hash, ctx->block); } -static void md4_init(void *ctx) +static void md4_init(struct crypto_tfm *tfm) { - struct md4_ctx *mctx = ctx; + struct md4_ctx *mctx = crypto_tfm_ctx(tfm); mctx->hash[0] = 0x67452301; mctx->hash[1] = 0xefcdab89; @@ -163,9 +163,9 @@ static void md4_init(void *ctx) mctx->byte_count = 0; } -static void md4_update(void *ctx, const u8 *data, unsigned int len) +static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) { - struct md4_ctx *mctx = ctx; + struct md4_ctx *mctx = crypto_tfm_ctx(tfm); const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); mctx->byte_count += len; @@ -193,9 +193,9 @@ static void md4_update(void *ctx, const u8 *data, unsigned int len) memcpy(mctx->block, data, len); } -static void md4_final(void *ctx, u8 *out) +static void md4_final(struct crypto_tfm *tfm, u8 *out) { - struct md4_ctx *mctx = ctx; + struct md4_ctx *mctx = crypto_tfm_ctx(tfm); const unsigned int offset = mctx->byte_count & 0x3f; char *p = (char *)mctx->block + offset; int padding = 56 - (offset + 1); diff --git a/crypto/md5.c b/crypto/md5.c index 7f041aef5da..93d18e8b3d5 100644 --- a/crypto/md5.c +++ b/crypto/md5.c @@ -147,9 +147,9 @@ static 
inline void md5_transform_helper(struct md5_ctx *ctx) md5_transform(ctx->hash, ctx->block); } -static void md5_init(void *ctx) +static void md5_init(struct crypto_tfm *tfm) { - struct md5_ctx *mctx = ctx; + struct md5_ctx *mctx = crypto_tfm_ctx(tfm); mctx->hash[0] = 0x67452301; mctx->hash[1] = 0xefcdab89; @@ -158,9 +158,9 @@ static void md5_init(void *ctx) mctx->byte_count = 0; } -static void md5_update(void *ctx, const u8 *data, unsigned int len) +static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) { - struct md5_ctx *mctx = ctx; + struct md5_ctx *mctx = crypto_tfm_ctx(tfm); const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); mctx->byte_count += len; @@ -188,9 +188,9 @@ static void md5_update(void *ctx, const u8 *data, unsigned int len) memcpy(mctx->block, data, len); } -static void md5_final(void *ctx, u8 *out) +static void md5_final(struct crypto_tfm *tfm, u8 *out) { - struct md5_ctx *mctx = ctx; + struct md5_ctx *mctx = crypto_tfm_ctx(tfm); const unsigned int offset = mctx->byte_count & 0x3f; char *p = (char *)mctx->block + offset; int padding = 56 - (offset + 1); diff --git a/crypto/michael_mic.c b/crypto/michael_mic.c index 4f6ab23e14a..d061da21cfd 100644 --- a/crypto/michael_mic.c +++ b/crypto/michael_mic.c @@ -45,16 +45,17 @@ do { \ } while (0) -static void michael_init(void *ctx) +static void michael_init(struct crypto_tfm *tfm) { - struct michael_mic_ctx *mctx = ctx; + struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); mctx->pending_len = 0; } -static void michael_update(void *ctx, const u8 *data, unsigned int len) +static void michael_update(struct crypto_tfm *tfm, const u8 *data, + unsigned int len) { - struct michael_mic_ctx *mctx = ctx; + struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); const __le32 *src; if (mctx->pending_len) { @@ -90,9 +91,9 @@ static void michael_update(void *ctx, const u8 *data, unsigned int len) } -static void michael_final(void *ctx, u8 *out) +static void michael_final(struct crypto_tfm *tfm, u8 *out) { - struct michael_mic_ctx *mctx = ctx; + struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); u8 *data = mctx->pending; __le32 *dst = (__le32 *)out; @@ -121,10 +122,10 @@ static void michael_final(void *ctx, u8 *out) } -static int michael_setkey(void *ctx, const u8 *key, unsigned int keylen, - u32 *flags) +static int michael_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen, u32 *flags) { - struct michael_mic_ctx *mctx = ctx; + struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); const __le32 *data = (const __le32 *)key; if (keylen != 8) { @@ -145,6 +146,7 @@ static struct crypto_alg michael_mic_alg = { .cra_blocksize = 8, .cra_ctxsize = sizeof(struct michael_mic_ctx), .cra_module = THIS_MODULE, + .cra_alignmask = 3, .cra_list = LIST_HEAD_INIT(michael_mic_alg.cra_list), .cra_u = { .digest = { .dia_digestsize = 8, diff --git a/crypto/serpent.c b/crypto/serpent.c index e366406ab49..de60cdddbf4 100644 --- a/crypto/serpent.c +++ b/crypto/serpent.c @@ -215,9 +215,11 @@ struct serpent_ctx { }; -static int serpent_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags) +static int serpent_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen, u32 *flags) { - u32 *k = ((struct serpent_ctx *)ctx)->expkey; + struct serpent_ctx *ctx = crypto_tfm_ctx(tfm); + u32 *k = ctx->expkey; u8 *k8 = (u8 *)k; u32 r0,r1,r2,r3,r4; int i; @@ -365,10 +367,11 @@ static int serpent_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *fl return 0; } -static void serpent_encrypt(void *ctx, u8 
*dst, const u8 *src) +static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { + struct serpent_ctx *ctx = crypto_tfm_ctx(tfm); const u32 - *k = ((struct serpent_ctx *)ctx)->expkey, + *k = ctx->expkey, *s = (const u32 *)src; u32 *d = (u32 *)dst, r0, r1, r2, r3, r4; @@ -423,8 +426,9 @@ static void serpent_encrypt(void *ctx, u8 *dst, const u8 *src) d[3] = cpu_to_le32(r3); } -static void serpent_decrypt(void *ctx, u8 *dst, const u8 *src) +static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { + struct serpent_ctx *ctx = crypto_tfm_ctx(tfm); const u32 *k = ((struct serpent_ctx *)ctx)->expkey, *s = (const u32 *)src; @@ -492,7 +496,8 @@ static struct crypto_alg serpent_alg = { .cia_decrypt = serpent_decrypt } } }; -static int tnepres_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags) +static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen, u32 *flags) { u8 rev_key[SERPENT_MAX_KEY_SIZE]; int i; @@ -506,10 +511,10 @@ static int tnepres_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *fl for (i = 0; i < keylen; ++i) rev_key[keylen - i - 1] = key[i]; - return serpent_setkey(ctx, rev_key, keylen, flags); + return serpent_setkey(tfm, rev_key, keylen, flags); } -static void tnepres_encrypt(void *ctx, u8 *dst, const u8 *src) +static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { const u32 * const s = (const u32 * const)src; u32 * const d = (u32 * const)dst; @@ -521,7 +526,7 @@ static void tnepres_encrypt(void *ctx, u8 *dst, const u8 *src) rs[2] = swab32(s[1]); rs[3] = swab32(s[0]); - serpent_encrypt(ctx, (u8 *)rd, (u8 *)rs); + serpent_encrypt(tfm, (u8 *)rd, (u8 *)rs); d[0] = swab32(rd[3]); d[1] = swab32(rd[2]); @@ -529,7 +534,7 @@ static void tnepres_encrypt(void *ctx, u8 *dst, const u8 *src) d[3] = swab32(rd[0]); } -static void tnepres_decrypt(void *ctx, u8 *dst, const u8 *src) +static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) { const u32 * const s = (const u32 * const)src; u32 * const d = (u32 * const)dst; @@ -541,7 +546,7 @@ static void tnepres_decrypt(void *ctx, u8 *dst, const u8 *src) rs[2] = swab32(s[1]); rs[3] = swab32(s[0]); - serpent_decrypt(ctx, (u8 *)rd, (u8 *)rs); + serpent_decrypt(tfm, (u8 *)rd, (u8 *)rs); d[0] = swab32(rd[3]); d[1] = swab32(rd[2]); diff --git a/crypto/sha1.c b/crypto/sha1.c index 21571ed35b7..6c77b689f87 100644 --- a/crypto/sha1.c +++ b/crypto/sha1.c @@ -34,9 +34,9 @@ struct sha1_ctx { u8 buffer[64]; }; -static void sha1_init(void *ctx) +static void sha1_init(struct crypto_tfm *tfm) { - struct sha1_ctx *sctx = ctx; + struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); static const struct sha1_ctx initstate = { 0, { 0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0 }, @@ -46,9 +46,10 @@ static void sha1_init(void *ctx) *sctx = initstate; } -static void sha1_update(void *ctx, const u8 *data, unsigned int len) +static void sha1_update(struct crypto_tfm *tfm, const u8 *data, + unsigned int len) { - struct sha1_ctx *sctx = ctx; + struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); unsigned int partial, done; const u8 *src; @@ -80,9 +81,9 @@ static void sha1_update(void *ctx, const u8 *data, unsigned int len) /* Add padding and return the message digest. 
*/ -static void sha1_final(void* ctx, u8 *out) +static void sha1_final(struct crypto_tfm *tfm, u8 *out) { - struct sha1_ctx *sctx = ctx; + struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); __be32 *dst = (__be32 *)out; u32 i, index, padlen; __be64 bits; @@ -93,10 +94,10 @@ static void sha1_final(void* ctx, u8 *out) /* Pad out to 56 mod 64 */ index = sctx->count & 0x3f; padlen = (index < 56) ? (56 - index) : ((64+56) - index); - sha1_update(sctx, padding, padlen); + sha1_update(tfm, padding, padlen); /* Append length */ - sha1_update(sctx, (const u8 *)&bits, sizeof(bits)); + sha1_update(tfm, (const u8 *)&bits, sizeof(bits)); /* Store state in digest */ for (i = 0; i < 5; i++) @@ -112,6 +113,7 @@ static struct crypto_alg alg = { .cra_blocksize = SHA1_HMAC_BLOCK_SIZE, .cra_ctxsize = sizeof(struct sha1_ctx), .cra_module = THIS_MODULE, + .cra_alignmask = 3, .cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_u = { .digest = { .dia_digestsize = SHA1_DIGEST_SIZE, diff --git a/crypto/sha256.c b/crypto/sha256.c index 9d5ef674d6a..bc71d85a7d0 100644 --- a/crypto/sha256.c +++ b/crypto/sha256.c @@ -230,9 +230,9 @@ static void sha256_transform(u32 *state, const u8 *input) memset(W, 0, 64 * sizeof(u32)); } -static void sha256_init(void *ctx) +static void sha256_init(struct crypto_tfm *tfm) { - struct sha256_ctx *sctx = ctx; + struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); sctx->state[0] = H0; sctx->state[1] = H1; sctx->state[2] = H2; @@ -242,12 +242,12 @@ static void sha256_init(void *ctx) sctx->state[6] = H6; sctx->state[7] = H7; sctx->count[0] = sctx->count[1] = 0; - memset(sctx->buf, 0, sizeof(sctx->buf)); } -static void sha256_update(void *ctx, const u8 *data, unsigned int len) +static void sha256_update(struct crypto_tfm *tfm, const u8 *data, + unsigned int len) { - struct sha256_ctx *sctx = ctx; + struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); unsigned int i, index, part_len; /* Compute number of bytes mod 128 */ @@ -277,9 +277,9 @@ static void sha256_update(void *ctx, const u8 *data, unsigned int len) memcpy(&sctx->buf[index], &data[i], len-i); } -static void sha256_final(void* ctx, u8 *out) +static void sha256_final(struct crypto_tfm *tfm, u8 *out) { - struct sha256_ctx *sctx = ctx; + struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); __be32 *dst = (__be32 *)out; __be32 bits[2]; unsigned int index, pad_len; @@ -293,10 +293,10 @@ static void sha256_final(void* ctx, u8 *out) /* Pad out to 56 mod 64. */ index = (sctx->count[0] >> 3) & 0x3f; pad_len = (index < 56) ? 
(56 - index) : ((64+56) - index); - sha256_update(sctx, padding, pad_len); + sha256_update(tfm, padding, pad_len); /* Append length (before padding) */ - sha256_update(sctx, (const u8 *)bits, sizeof(bits)); + sha256_update(tfm, (const u8 *)bits, sizeof(bits)); /* Store state in digest */ for (i = 0; i < 8; i++) @@ -313,6 +313,7 @@ static struct crypto_alg alg = { .cra_blocksize = SHA256_HMAC_BLOCK_SIZE, .cra_ctxsize = sizeof(struct sha256_ctx), .cra_module = THIS_MODULE, + .cra_alignmask = 3, .cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_u = { .digest = { .dia_digestsize = SHA256_DIGEST_SIZE, diff --git a/crypto/sha512.c b/crypto/sha512.c index 3e6e9392310..2dfe7f170b4 100644 --- a/crypto/sha512.c +++ b/crypto/sha512.c @@ -161,9 +161,9 @@ sha512_transform(u64 *state, u64 *W, const u8 *input) } static void -sha512_init(void *ctx) +sha512_init(struct crypto_tfm *tfm) { - struct sha512_ctx *sctx = ctx; + struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); sctx->state[0] = H0; sctx->state[1] = H1; sctx->state[2] = H2; @@ -173,13 +173,12 @@ sha512_init(void *ctx) sctx->state[6] = H6; sctx->state[7] = H7; sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; - memset(sctx->buf, 0, sizeof(sctx->buf)); } static void -sha384_init(void *ctx) +sha384_init(struct crypto_tfm *tfm) { - struct sha512_ctx *sctx = ctx; + struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); sctx->state[0] = HP0; sctx->state[1] = HP1; sctx->state[2] = HP2; @@ -189,13 +188,12 @@ sha384_init(void *ctx) sctx->state[6] = HP6; sctx->state[7] = HP7; sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; - memset(sctx->buf, 0, sizeof(sctx->buf)); } static void -sha512_update(void *ctx, const u8 *data, unsigned int len) +sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) { - struct sha512_ctx *sctx = ctx; + struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); unsigned int i, index, part_len; @@ -233,9 +231,9 @@ sha512_update(void *ctx, const u8 *data, unsigned int len) } static void -sha512_final(void *ctx, u8 *hash) +sha512_final(struct crypto_tfm *tfm, u8 *hash) { - struct sha512_ctx *sctx = ctx; + struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); static u8 padding[128] = { 0x80, }; __be64 *dst = (__be64 *)hash; __be32 bits[4]; @@ -251,10 +249,10 @@ sha512_final(void *ctx, u8 *hash) /* Pad out to 112 mod 128. */ index = (sctx->count[0] >> 3) & 0x7f; pad_len = (index < 112) ? 
(112 - index) : ((128+112) - index); - sha512_update(sctx, padding, pad_len); + sha512_update(tfm, padding, pad_len); /* Append length (before padding) */ - sha512_update(sctx, (const u8 *)bits, sizeof(bits)); + sha512_update(tfm, (const u8 *)bits, sizeof(bits)); /* Store state in digest */ for (i = 0; i < 8; i++) @@ -264,12 +262,11 @@ sha512_final(void *ctx, u8 *hash) memset(sctx, 0, sizeof(struct sha512_ctx)); } -static void sha384_final(void *ctx, u8 *hash) +static void sha384_final(struct crypto_tfm *tfm, u8 *hash) { - struct sha512_ctx *sctx = ctx; u8 D[64]; - sha512_final(sctx, D); + sha512_final(tfm, D); memcpy(hash, D, 48); memset(D, 0, 64); @@ -281,6 +278,7 @@ static struct crypto_alg sha512 = { .cra_blocksize = SHA512_HMAC_BLOCK_SIZE, .cra_ctxsize = sizeof(struct sha512_ctx), .cra_module = THIS_MODULE, + .cra_alignmask = 3, .cra_list = LIST_HEAD_INIT(sha512.cra_list), .cra_u = { .digest = { .dia_digestsize = SHA512_DIGEST_SIZE, @@ -295,6 +293,7 @@ static struct crypto_alg sha384 = { .cra_flags = CRYPTO_ALG_TYPE_DIGEST, .cra_blocksize = SHA384_HMAC_BLOCK_SIZE, .cra_ctxsize = sizeof(struct sha512_ctx), + .cra_alignmask = 3, .cra_module = THIS_MODULE, .cra_list = LIST_HEAD_INIT(sha384.cra_list), .cra_u = { .digest = { diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 49e344f0080..e52f56c5bd5 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -570,6 +570,122 @@ out: crypto_free_tfm(tfm); } +static void test_digest_jiffies(struct crypto_tfm *tfm, char *p, int blen, + int plen, char *out, int sec) +{ + struct scatterlist sg[1]; + unsigned long start, end; + int bcount, pcount; + + for (start = jiffies, end = start + sec * HZ, bcount = 0; + time_before(jiffies, end); bcount++) { + crypto_digest_init(tfm); + for (pcount = 0; pcount < blen; pcount += plen) { + sg_set_buf(sg, p + pcount, plen); + crypto_digest_update(tfm, sg, 1); + } + /* we assume there is enough space in 'out' for the result */ + crypto_digest_final(tfm, out); + } + + printk("%6u opers/sec, %9lu bytes/sec\n", + bcount / sec, ((long)bcount * blen) / sec); + + return; +} + +static void test_digest_cycles(struct crypto_tfm *tfm, char *p, int blen, + int plen, char *out) +{ + struct scatterlist sg[1]; + unsigned long cycles = 0; + int i, pcount; + + local_bh_disable(); + local_irq_disable(); + + /* Warm-up run. */ + for (i = 0; i < 4; i++) { + crypto_digest_init(tfm); + for (pcount = 0; pcount < blen; pcount += plen) { + sg_set_buf(sg, p + pcount, plen); + crypto_digest_update(tfm, sg, 1); + } + crypto_digest_final(tfm, out); + } + + /* The real thing. 
*/ + for (i = 0; i < 8; i++) { + cycles_t start, end; + + crypto_digest_init(tfm); + + start = get_cycles(); + + for (pcount = 0; pcount < blen; pcount += plen) { + sg_set_buf(sg, p + pcount, plen); + crypto_digest_update(tfm, sg, 1); + } + crypto_digest_final(tfm, out); + + end = get_cycles(); + + cycles += end - start; + } + + local_irq_enable(); + local_bh_enable(); + + printk("%6lu cycles/operation, %4lu cycles/byte\n", + cycles / 8, cycles / (8 * blen)); + + return; +} + +static void test_digest_speed(char *algo, unsigned int sec, + struct digest_speed *speed) +{ + struct crypto_tfm *tfm; + char output[1024]; + int i; + + printk("\ntesting speed of %s\n", algo); + + tfm = crypto_alloc_tfm(algo, 0); + + if (tfm == NULL) { + printk("failed to load transform for %s\n", algo); + return; + } + + if (crypto_tfm_alg_digestsize(tfm) > sizeof(output)) { + printk("digestsize(%u) > outputbuffer(%zu)\n", + crypto_tfm_alg_digestsize(tfm), sizeof(output)); + goto out; + } + + for (i = 0; speed[i].blen != 0; i++) { + if (speed[i].blen > TVMEMSIZE) { + printk("template (%u) too big for tvmem (%u)\n", + speed[i].blen, TVMEMSIZE); + goto out; + } + + printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ", + i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen); + + memset(tvmem, 0xff, speed[i].blen); + + if (sec) + test_digest_jiffies(tfm, tvmem, speed[i].blen, speed[i].plen, output, sec); + else + test_digest_cycles(tfm, tvmem, speed[i].blen, speed[i].plen, output); + } + +out: + crypto_free_tfm(tfm); +} + static void test_deflate(void) { unsigned int i; @@ -1086,6 +1202,60 @@ static void do_test(void) des_speed_template); break; + case 300: + /* fall through */ + + case 301: + test_digest_speed("md4", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 302: + test_digest_speed("md5", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 303: + test_digest_speed("sha1", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 304: + test_digest_speed("sha256", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 305: + test_digest_speed("sha384", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 306: + test_digest_speed("sha512", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 307: + test_digest_speed("wp256", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 308: + test_digest_speed("wp384", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 309: + test_digest_speed("wp512", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 310: + test_digest_speed("tgr128", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 311: + test_digest_speed("tgr160", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 312: + test_digest_speed("tgr192", sec, generic_digest_speed_template); + if (mode > 300 && mode < 400) break; + + case 399: + break; + case 1000: test_available(); break; @@ -1113,7 +1283,14 @@ static int __init init(void) kfree(xbuf); kfree(tvmem); - return 0; + + /* We intentionally return -EAGAIN to prevent keeping + * the module loaded. It does all its work from init() + * and doesn't offer any runtime functionality + * => we don't need it in memory, do we?
+ * -- mludvig + */ + return -EAGAIN; } /* diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index 1f683ba794e..1fac5602f63 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h @@ -65,6 +65,11 @@ struct cipher_speed { unsigned int blen; }; +struct digest_speed { + unsigned int blen; /* buffer length */ + unsigned int plen; /* per-update length */ +}; + /* * MD4 test vectors from RFC1320 */ @@ -2975,4 +2980,35 @@ static struct cipher_speed des_speed_template[] = { { .klen = 0, .blen = 0, } }; +/* + * Digest speed tests + */ +static struct digest_speed generic_digest_speed_template[] = { + { .blen = 16, .plen = 16, }, + { .blen = 64, .plen = 16, }, + { .blen = 64, .plen = 64, }, + { .blen = 256, .plen = 16, }, + { .blen = 256, .plen = 64, }, + { .blen = 256, .plen = 256, }, + { .blen = 1024, .plen = 16, }, + { .blen = 1024, .plen = 256, }, + { .blen = 1024, .plen = 1024, }, + { .blen = 2048, .plen = 16, }, + { .blen = 2048, .plen = 256, }, + { .blen = 2048, .plen = 1024, }, + { .blen = 2048, .plen = 2048, }, + { .blen = 4096, .plen = 16, }, + { .blen = 4096, .plen = 256, }, + { .blen = 4096, .plen = 1024, }, + { .blen = 4096, .plen = 4096, }, + { .blen = 8192, .plen = 16, }, + { .blen = 8192, .plen = 256, }, + { .blen = 8192, .plen = 1024, }, + { .blen = 8192, .plen = 4096, }, + { .blen = 8192, .plen = 8192, }, + + /* End marker */ + { .blen = 0, .plen = 0, } +}; + #endif /* _CRYPTO_TCRYPT_H */ diff --git a/crypto/tea.c b/crypto/tea.c index a6a02b30e47..5367adc82fc 100644 --- a/crypto/tea.c +++ b/crypto/tea.c @@ -45,10 +45,10 @@ struct xtea_ctx { u32 KEY[4]; }; -static int tea_setkey(void *ctx_arg, const u8 *in_key, - unsigned int key_len, u32 *flags) -{ - struct tea_ctx *ctx = ctx_arg; +static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len, u32 *flags) +{ + struct tea_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *key = (const __le32 *)in_key; if (key_len != 16) @@ -66,12 +66,11 @@ static int tea_setkey(void *ctx_arg, const u8 *in_key, } -static void tea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) -{ +static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ u32 y, z, n, sum = 0; u32 k0, k1, k2, k3; - - struct tea_ctx *ctx = ctx_arg; + struct tea_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *in = (const __le32 *)src; __le32 *out = (__le32 *)dst; @@ -95,11 +94,11 @@ static void tea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) out[1] = cpu_to_le32(z); } -static void tea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) -{ +static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ u32 y, z, n, sum; u32 k0, k1, k2, k3; - struct tea_ctx *ctx = ctx_arg; + struct tea_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *in = (const __le32 *)src; __le32 *out = (__le32 *)dst; @@ -125,10 +124,10 @@ static void tea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) out[1] = cpu_to_le32(z); } -static int xtea_setkey(void *ctx_arg, const u8 *in_key, - unsigned int key_len, u32 *flags) -{ - struct xtea_ctx *ctx = ctx_arg; +static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len, u32 *flags) +{ + struct xtea_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *key = (const __le32 *)in_key; if (key_len != 16) @@ -146,12 +145,11 @@ static int xtea_setkey(void *ctx_arg, const u8 *in_key, } -static void xtea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) -{ +static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ u32 y, z, sum = 0; u32 limit = XTEA_DELTA * XTEA_ROUNDS; - - struct xtea_ctx *ctx = ctx_arg; + struct 
xtea_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *in = (const __le32 *)src; __le32 *out = (__le32 *)dst; @@ -168,10 +166,10 @@ static void xtea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) out[1] = cpu_to_le32(z); } -static void xtea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) -{ +static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ u32 y, z, sum; - struct tea_ctx *ctx = ctx_arg; + struct tea_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *in = (const __le32 *)src; __le32 *out = (__le32 *)dst; @@ -191,12 +189,11 @@ static void xtea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) } -static void xeta_encrypt(void *ctx_arg, u8 *dst, const u8 *src) -{ +static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ u32 y, z, sum = 0; u32 limit = XTEA_DELTA * XTEA_ROUNDS; - - struct xtea_ctx *ctx = ctx_arg; + struct xtea_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *in = (const __le32 *)src; __le32 *out = (__le32 *)dst; @@ -213,10 +210,10 @@ static void xeta_encrypt(void *ctx_arg, u8 *dst, const u8 *src) out[1] = cpu_to_le32(z); } -static void xeta_decrypt(void *ctx_arg, u8 *dst, const u8 *src) -{ +static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ u32 y, z, sum; - struct tea_ctx *ctx = ctx_arg; + struct tea_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *in = (const __le32 *)src; __le32 *out = (__le32 *)dst; diff --git a/crypto/tgr192.c b/crypto/tgr192.c index 2d8e44f6fbe..a0fadf3dd3e 100644 --- a/crypto/tgr192.c +++ b/crypto/tgr192.c @@ -496,11 +496,10 @@ static void tgr192_transform(struct tgr192_ctx *tctx, const u8 * data) tctx->c = c; } -static void tgr192_init(void *ctx) +static void tgr192_init(struct crypto_tfm *tfm) { - struct tgr192_ctx *tctx = ctx; + struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm); - memset (tctx->hash, 0, 64); tctx->a = 0x0123456789abcdefULL; tctx->b = 0xfedcba9876543210ULL; tctx->c = 0xf096a5b4c3b2e187ULL; @@ -511,9 +510,10 @@ static void tgr192_init(void *ctx) /* Update the message digest with the contents * of INBUF with length INLEN. 
*/ -static void tgr192_update(void *ctx, const u8 * inbuf, unsigned int len) +static void tgr192_update(struct crypto_tfm *tfm, const u8 *inbuf, + unsigned int len) { - struct tgr192_ctx *tctx = ctx; + struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm); if (tctx->count == 64) { /* flush the buffer */ tgr192_transform(tctx, tctx->hash); @@ -527,7 +527,7 @@ static void tgr192_update(void *ctx, const u8 * inbuf, unsigned int len) for (; len && tctx->count < 64; len--) { tctx->hash[tctx->count++] = *inbuf++; } - tgr192_update(tctx, NULL, 0); + tgr192_update(tfm, NULL, 0); if (!len) { return; } @@ -549,15 +549,15 @@ static void tgr192_update(void *ctx, const u8 * inbuf, unsigned int len) /* The routine terminates the computation */ -static void tgr192_final(void *ctx, u8 * out) +static void tgr192_final(struct crypto_tfm *tfm, u8 * out) { - struct tgr192_ctx *tctx = ctx; + struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm); __be64 *dst = (__be64 *)out; __be64 *be64p; __le32 *le32p; u32 t, msb, lsb; - tgr192_update(tctx, NULL, 0); /* flush */ ; + tgr192_update(tfm, NULL, 0); /* flush */ ; msb = 0; t = tctx->nblocks; @@ -585,7 +585,7 @@ static void tgr192_final(void *ctx, u8 * out) while (tctx->count < 64) { tctx->hash[tctx->count++] = 0; } - tgr192_update(tctx, NULL, 0); /* flush */ ; + tgr192_update(tfm, NULL, 0); /* flush */ ; memset(tctx->hash, 0, 56); /* fill next block with zeroes */ } /* append the 64 bit count */ @@ -601,22 +601,20 @@ static void tgr192_final(void *ctx, u8 * out) dst[2] = be64p[2] = cpu_to_be64(tctx->c); } -static void tgr160_final(void *ctx, u8 * out) +static void tgr160_final(struct crypto_tfm *tfm, u8 * out) { - struct tgr192_ctx *wctx = ctx; u8 D[64]; - tgr192_final(wctx, D); + tgr192_final(tfm, D); memcpy(out, D, TGR160_DIGEST_SIZE); memset(D, 0, TGR192_DIGEST_SIZE); } -static void tgr128_final(void *ctx, u8 * out) +static void tgr128_final(struct crypto_tfm *tfm, u8 * out) { - struct tgr192_ctx *wctx = ctx; u8 D[64]; - tgr192_final(wctx, D); + tgr192_final(tfm, D); memcpy(out, D, TGR128_DIGEST_SIZE); memset(D, 0, TGR192_DIGEST_SIZE); } @@ -627,6 +625,7 @@ static struct crypto_alg tgr192 = { .cra_blocksize = TGR192_BLOCK_SIZE, .cra_ctxsize = sizeof(struct tgr192_ctx), .cra_module = THIS_MODULE, + .cra_alignmask = 7, .cra_list = LIST_HEAD_INIT(tgr192.cra_list), .cra_u = {.digest = { .dia_digestsize = TGR192_DIGEST_SIZE, @@ -641,6 +640,7 @@ static struct crypto_alg tgr160 = { .cra_blocksize = TGR192_BLOCK_SIZE, .cra_ctxsize = sizeof(struct tgr192_ctx), .cra_module = THIS_MODULE, + .cra_alignmask = 7, .cra_list = LIST_HEAD_INIT(tgr160.cra_list), .cra_u = {.digest = { .dia_digestsize = TGR160_DIGEST_SIZE, @@ -655,6 +655,7 @@ static struct crypto_alg tgr128 = { .cra_blocksize = TGR192_BLOCK_SIZE, .cra_ctxsize = sizeof(struct tgr192_ctx), .cra_module = THIS_MODULE, + .cra_alignmask = 7, .cra_list = LIST_HEAD_INIT(tgr128.cra_list), .cra_u = {.digest = { .dia_digestsize = TGR128_DIGEST_SIZE, diff --git a/crypto/twofish.c b/crypto/twofish.c index ddfd5a3fcc5..ec2488242e2 100644 --- a/crypto/twofish.c +++ b/crypto/twofish.c @@ -643,11 +643,11 @@ struct twofish_ctx { }; /* Perform the key setup. */ -static int twofish_setkey(void *cx, const u8 *key, - unsigned int key_len, u32 *flags) +static int twofish_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int key_len, u32 *flags) { - struct twofish_ctx *ctx = cx; + struct twofish_ctx *ctx = crypto_tfm_ctx(tfm); int i, j, k; @@ -802,9 +802,9 @@ static int twofish_setkey(void *cx, const u8 *key, } /* Encrypt one block. 
in and out may be the same. */ -static void twofish_encrypt(void *cx, u8 *out, const u8 *in) +static void twofish_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - struct twofish_ctx *ctx = cx; + struct twofish_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *src = (const __le32 *)in; __le32 *dst = (__le32 *)out; @@ -839,9 +839,9 @@ static void twofish_encrypt(void *cx, u8 *out, const u8 *in) } /* Decrypt one block. in and out may be the same. */ -static void twofish_decrypt(void *cx, u8 *out, const u8 *in) +static void twofish_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - struct twofish_ctx *ctx = cx; + struct twofish_ctx *ctx = crypto_tfm_ctx(tfm); const __le32 *src = (const __le32 *)in; __le32 *dst = (__le32 *)out; diff --git a/crypto/wp512.c b/crypto/wp512.c index b226a126cfa..727d05a19ff 100644 --- a/crypto/wp512.c +++ b/crypto/wp512.c @@ -981,9 +981,9 @@ static void wp512_process_buffer(struct wp512_ctx *wctx) { } -static void wp512_init (void *ctx) { +static void wp512_init(struct crypto_tfm *tfm) { + struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); int i; - struct wp512_ctx *wctx = ctx; memset(wctx->bitLength, 0, 32); wctx->bufferBits = wctx->bufferPos = 0; @@ -993,10 +993,10 @@ static void wp512_init (void *ctx) { } } -static void wp512_update(void *ctx, const u8 *source, unsigned int len) +static void wp512_update(struct crypto_tfm *tfm, const u8 *source, + unsigned int len) { - - struct wp512_ctx *wctx = ctx; + struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); int sourcePos = 0; unsigned int bits_len = len * 8; // convert to number of bits int sourceGap = (8 - ((int)bits_len & 7)) & 7; @@ -1054,9 +1054,9 @@ static void wp512_update(void *ctx, const u8 *source, unsigned int len) } -static void wp512_final(void *ctx, u8 *out) +static void wp512_final(struct crypto_tfm *tfm, u8 *out) { - struct wp512_ctx *wctx = ctx; + struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); int i; u8 *buffer = wctx->buffer; u8 *bitLength = wctx->bitLength; @@ -1087,22 +1087,20 @@ static void wp512_final(void *ctx, u8 *out) wctx->bufferPos = bufferPos; } -static void wp384_final(void *ctx, u8 *out) +static void wp384_final(struct crypto_tfm *tfm, u8 *out) { - struct wp512_ctx *wctx = ctx; u8 D[64]; - wp512_final (wctx, D); + wp512_final(tfm, D); memcpy (out, D, WP384_DIGEST_SIZE); memset (D, 0, WP512_DIGEST_SIZE); } -static void wp256_final(void *ctx, u8 *out) +static void wp256_final(struct crypto_tfm *tfm, u8 *out) { - struct wp512_ctx *wctx = ctx; u8 D[64]; - wp512_final (wctx, D); + wp512_final(tfm, D); memcpy (out, D, WP256_DIGEST_SIZE); memset (D, 0, WP512_DIGEST_SIZE); } diff --git a/drivers/crypto/padlock-aes.c b/drivers/crypto/padlock-aes.c index 5158a9db4bc..17ee684144f 100644 --- a/drivers/crypto/padlock-aes.c +++ b/drivers/crypto/padlock-aes.c @@ -60,15 +60,14 @@ #define AES_EXTENDED_KEY_SIZE_B (AES_EXTENDED_KEY_SIZE * sizeof(uint32_t)) struct aes_ctx { - uint32_t e_data[AES_EXTENDED_KEY_SIZE]; - uint32_t d_data[AES_EXTENDED_KEY_SIZE]; struct { struct cword encrypt; struct cword decrypt; } cword; - uint32_t *E; - uint32_t *D; + u32 *D; int key_length; + u32 E[AES_EXTENDED_KEY_SIZE]; + u32 d_data[AES_EXTENDED_KEY_SIZE]; }; /* ====== Key management routines ====== */ @@ -282,19 +281,20 @@ aes_hw_extkey_available(uint8_t key_len) return 0; } -static inline struct aes_ctx *aes_ctx(void *ctx) +static inline struct aes_ctx *aes_ctx(struct crypto_tfm *tfm) { + unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm); unsigned long align = PADLOCK_ALIGNMENT; if (align <= 
crypto_tfm_ctx_alignment()) align = 1; - return (struct aes_ctx *)ALIGN((unsigned long)ctx, align); + return (struct aes_ctx *)ALIGN(addr, align); } -static int -aes_set_key(void *ctx_arg, const uint8_t *in_key, unsigned int key_len, uint32_t *flags) +static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len, u32 *flags) { - struct aes_ctx *ctx = aes_ctx(ctx_arg); + struct aes_ctx *ctx = aes_ctx(tfm); const __le32 *key = (const __le32 *)in_key; uint32_t i, t, u, v, w; uint32_t P[AES_EXTENDED_KEY_SIZE]; @@ -312,8 +312,7 @@ aes_set_key(void *ctx_arg, const uint8_t *in_key, unsigned int key_len, uint32_t * itself we must supply the plain key for both encryption * and decryption. */ - ctx->E = ctx->e_data; - ctx->D = ctx->e_data; + ctx->D = ctx->E; E_KEY[0] = le32_to_cpu(key[0]); E_KEY[1] = le32_to_cpu(key[1]); @@ -414,24 +413,22 @@ static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key, return iv; } -static void -aes_encrypt(void *ctx_arg, uint8_t *out, const uint8_t *in) +static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - struct aes_ctx *ctx = aes_ctx(ctx_arg); + struct aes_ctx *ctx = aes_ctx(tfm); padlock_xcrypt_ecb(in, out, ctx->E, &ctx->cword.encrypt, 1); } -static void -aes_decrypt(void *ctx_arg, uint8_t *out, const uint8_t *in) +static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { - struct aes_ctx *ctx = aes_ctx(ctx_arg); + struct aes_ctx *ctx = aes_ctx(tfm); padlock_xcrypt_ecb(in, out, ctx->D, &ctx->cword.decrypt, 1); } static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out, const u8 *in, unsigned int nbytes) { - struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm)); + struct aes_ctx *ctx = aes_ctx(desc->tfm); padlock_xcrypt_ecb(in, out, ctx->E, &ctx->cword.encrypt, nbytes / AES_BLOCK_SIZE); return nbytes & ~(AES_BLOCK_SIZE - 1); @@ -440,7 +437,7 @@ static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out, static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out, const u8 *in, unsigned int nbytes) { - struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm)); + struct aes_ctx *ctx = aes_ctx(desc->tfm); padlock_xcrypt_ecb(in, out, ctx->D, &ctx->cword.decrypt, nbytes / AES_BLOCK_SIZE); return nbytes & ~(AES_BLOCK_SIZE - 1); @@ -449,7 +446,7 @@ static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out, static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out, const u8 *in, unsigned int nbytes) { - struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm)); + struct aes_ctx *ctx = aes_ctx(desc->tfm); u8 *iv; iv = padlock_xcrypt_cbc(in, out, ctx->E, desc->info, @@ -462,7 +459,7 @@ static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out, static unsigned int aes_decrypt_cbc(const struct cipher_desc *desc, u8 *out, const u8 *in, unsigned int nbytes) { - struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm)); + struct aes_ctx *ctx = aes_ctx(desc->tfm); padlock_xcrypt_cbc(in, out, ctx->D, desc->info, &ctx->cword.decrypt, nbytes / AES_BLOCK_SIZE); return nbytes & ~(AES_BLOCK_SIZE - 1); diff --git a/include/linux/crypto.h b/include/linux/crypto.h index 5a0470e3611..7f946241b87 100644 --- a/include/linux/crypto.h +++ b/include/linux/crypto.h @@ -66,7 +66,7 @@ struct crypto_tfm; struct cipher_desc { struct crypto_tfm *tfm; - void (*crfn)(void *ctx, u8 *dst, const u8 *src); + void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); unsigned int (*prfn)(const struct cipher_desc *desc, u8 
*dst, const u8 *src, unsigned int nbytes); void *info; @@ -79,10 +79,10 @@ struct cipher_desc { struct cipher_alg { unsigned int cia_min_keysize; unsigned int cia_max_keysize; - int (*cia_setkey)(void *ctx, const u8 *key, + int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen, u32 *flags); - void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src); - void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src); + void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); + void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc, u8 *dst, const u8 *src, @@ -100,20 +100,19 @@ struct cipher_alg { struct digest_alg { unsigned int dia_digestsize; - void (*dia_init)(void *ctx); - void (*dia_update)(void *ctx, const u8 *data, unsigned int len); - void (*dia_final)(void *ctx, u8 *out); - int (*dia_setkey)(void *ctx, const u8 *key, + void (*dia_init)(struct crypto_tfm *tfm); + void (*dia_update)(struct crypto_tfm *tfm, const u8 *data, + unsigned int len); + void (*dia_final)(struct crypto_tfm *tfm, u8 *out); + int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen, u32 *flags); }; struct compress_alg { - int (*coa_init)(void *ctx); - void (*coa_exit)(void *ctx); - int (*coa_compress)(void *ctx, const u8 *src, unsigned int slen, - u8 *dst, unsigned int *dlen); - int (*coa_decompress)(void *ctx, const u8 *src, unsigned int slen, - u8 *dst, unsigned int *dlen); + int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen); + int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen); }; #define cra_cipher cra_u.cipher @@ -129,14 +128,17 @@ struct crypto_alg { int cra_priority; - const char cra_name[CRYPTO_MAX_ALG_NAME]; - const char cra_driver_name[CRYPTO_MAX_ALG_NAME]; + char cra_name[CRYPTO_MAX_ALG_NAME]; + char cra_driver_name[CRYPTO_MAX_ALG_NAME]; union { struct cipher_alg cipher; struct digest_alg digest; struct compress_alg compress; } cra_u; + + int (*cra_init)(struct crypto_tfm *tfm); + void (*cra_exit)(struct crypto_tfm *tfm); struct module *cra_module; };
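
The digest conversions above (sha256, sha512/sha384, tgr192, wp512) and the new dia_* prototypes in include/linux/crypto.h all follow one pattern: every hook now receives the struct crypto_tfm and pulls its private state out with crypto_tfm_ctx() instead of being handed a bare void *ctx. A minimal sketch of a digest written against the updated interface could look like the following; the algorithm name, context layout and "hash" math are invented purely to show the signatures, not taken from the patch.

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/string.h>
#include <asm/byteorder.h>

struct demo_ctx {
	u64 sum;			/* toy running state, not a real hash */
	u64 count;
};

static void demo_init(struct crypto_tfm *tfm)
{
	struct demo_ctx *dctx = crypto_tfm_ctx(tfm);	/* state lives behind the tfm now */

	dctx->sum = 0;
	dctx->count = 0;
}

static void demo_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
{
	struct demo_ctx *dctx = crypto_tfm_ctx(tfm);

	dctx->count += len;
	while (len--)
		dctx->sum += *data++;
}

static void demo_final(struct crypto_tfm *tfm, u8 *out)
{
	struct demo_ctx *dctx = crypto_tfm_ctx(tfm);
	__be64 digest = cpu_to_be64(dctx->sum);

	memcpy(out, &digest, sizeof(digest));
	memset(dctx, 0, sizeof(*dctx));		/* wipe state, as the real digests do */
}

static struct crypto_alg demo_alg = {
	.cra_name	= "demo",
	.cra_flags	= CRYPTO_ALG_TYPE_DIGEST,
	.cra_blocksize	= 64,
	.cra_ctxsize	= sizeof(struct demo_ctx),
	.cra_module	= THIS_MODULE,
	.cra_list	= LIST_HEAD_INIT(demo_alg.cra_list),
	.cra_u		= { .digest = {
		.dia_digestsize	= 8,
		.dia_init	= demo_init,
		.dia_update	= demo_update,
		.dia_final	= demo_final } }
};

/* registered and unregistered as usual with
 * crypto_register_alg(&demo_alg) / crypto_unregister_alg(&demo_alg) */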
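
The .cra_alignmask = 3 and = 7 entries added to the SHA-256/SHA-512 and Tiger registrations presumably go hand in hand with those transforms casting the caller's buffers to __be32/__be64: an alignmask of N tells the crypto layer the algorithm wants buffer addresses with (addr & N) == 0, i.e. 4-byte alignment for mask 3 and 8-byte alignment for mask 7, so the generic code can hand the algorithm suitably aligned data (copying through a bounce buffer when necessary, as far as I can tell). The helper below is invented, only to spell the invariant out:

/* Invented helper: what cra_alignmask promises the algorithm. */
static inline int satisfies_alignmask(const void *p, unsigned long alignmask)
{
	return ((unsigned long)p & alignmask) == 0;	/* mask 3 -> 4-byte, mask 7 -> 8-byte */
}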
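
The block-cipher side is converted the same way: cia_setkey(), cia_encrypt() and cia_decrypt() now take the tfm as their first argument (see the tea, twofish and padlock hunks). A stripped-down sketch in the style of tea_setkey()/tea_encrypt(), using an invented, non-cryptographic 64-bit "cipher" just to show the plumbing:

#include <linux/crypto.h>
#include <linux/errno.h>

struct toy_ctx {
	u32 k[4];
};

static int toy_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		      unsigned int key_len, u32 *flags)
{
	struct toy_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	if (key_len != 16) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;	/* same reporting as tea_setkey */
		return -EINVAL;
	}

	ctx->k[0] = le32_to_cpu(key[0]);
	ctx->k[1] = le32_to_cpu(key[1]);
	ctx->k[2] = le32_to_cpu(key[2]);
	ctx->k[3] = le32_to_cpu(key[3]);

	return 0;
}

static void toy_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct toy_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	/* not a real cipher: just XOR the 8-byte block with half the key */
	out[0] = cpu_to_le32(le32_to_cpu(in[0]) ^ ctx->k[0]);
	out[1] = cpu_to_le32(le32_to_cpu(in[1]) ^ ctx->k[1]);
}

/* toy_decrypt would be identical for an XOR "cipher"; a real implementation
 * plugs these into .cra_u.cipher as cia_setkey/cia_encrypt/cia_decrypt. */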
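
include/linux/crypto.h also drops coa_init/coa_exit from struct compress_alg and instead grows generic per-tfm constructor and destructor hooks, cra_init and cra_exit, on struct crypto_alg itself. A plausible use, with invented names and sizes, is an algorithm that needs a scratch buffer for the lifetime of the transform:

#include <linux/crypto.h>
#include <linux/vmalloc.h>
#include <linux/errno.h>

#define DEMO_SCRATCH_SIZE 4096		/* invented size, for illustration only */

struct scratch_ctx {
	void *workspace;
};

static int demo_cra_init(struct crypto_tfm *tfm)
{
	struct scratch_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->workspace = vmalloc(DEMO_SCRATCH_SIZE);
	if (!ctx->workspace)
		return -ENOMEM;		/* failing here should abort tfm allocation */
	return 0;
}

static void demo_cra_exit(struct crypto_tfm *tfm)
{
	struct scratch_ctx *ctx = crypto_tfm_ctx(tfm);

	vfree(ctx->workspace);
}

/* wired up in the crypto_alg declaration:
 *	.cra_init = demo_cra_init,
 *	.cra_exit = demo_cra_exit,
 */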
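
In padlock-aes.c the key schedules move to the end of struct aes_ctx and aes_ctx() now rounds the raw crypto_tfm_ctx() pointer up with ALIGN() before touching it, presumably so that the schedules read by the xcrypt instructions always sit on a PADLOCK_ALIGNMENT boundary (16 bytes in this driver, as far as I can tell) no matter where the tfm context happens to land; the crypto_tfm_ctx_alignment() check simply skips the rounding when the API's own context alignment is already sufficient. ALIGN(addr, a) rounds addr up to the next multiple of a for power-of-two a; a sketch of the arithmetic, with made-up addresses:

/* What ALIGN() expands to for a power-of-two alignment. */
static inline unsigned long round_up_to(unsigned long addr, unsigned long align)
{
	return (addr + align - 1) & ~(align - 1);
}

/*
 * round_up_to(0x1008, 16) == 0x1010   bumped to the next 16-byte boundary
 * round_up_to(0x1010, 16) == 0x1010   already aligned, unchanged
 *
 * This only works if the registration reserves enough context space for the
 * worst-case rounding, or if the API's guaranteed context alignment already
 * satisfies PADLOCK_ALIGNMENT (the crypto_tfm_ctx_alignment() case above).
 */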
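
Finally, on the tcrypt side: test_digest_speed() walks a template of { blen, plen } pairs until it reaches the { .blen = 0 } end marker, hashing a blen-byte buffer in plen-sized updates and reporting either operations and bytes per second (when a sec value is given, timed by jiffies) or cycles per operation and per byte (sec == 0, timed with get_cycles() over eight runs after a warm-up). Because init() now deliberately returns -EAGAIN, loading the module runs the selected tests and then fails the load on purpose, so nothing stays resident; something like "modprobe tcrypt mode=303 sec=5" would presumably time sha1 for five seconds per template entry, while mode=300 runs the whole digest series, mode and sec being the module parameters tcrypt already uses for the cipher speed tests. A smaller template in the same style as generic_digest_speed_template (hypothetical, not part of the patch) shows the shape:

/* Hypothetical quick-run template; only the end marker is mandatory
 * for test_digest_speed() to know where to stop. */
static struct digest_speed quick_digest_speed_template[] = {
	{ .blen = 1024, .plen = 64,   },	/* 16 updates of 64 bytes */
	{ .blen = 8192, .plen = 8192, },	/* one 8 KiB update */

	/* End marker */
	{ .blen = 0, .plen = 0, }
};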