Diffstat (limited to 'include/crypto')
-rw-r--r--   include/crypto/aead.h               | 105
-rw-r--r--   include/crypto/aes.h                |  31
-rw-r--r--   include/crypto/algapi.h             |  31
-rw-r--r--   include/crypto/authenc.h            |  27
-rw-r--r--   include/crypto/ctr.h                |  20
-rw-r--r--   include/crypto/des.h                |  19
-rw-r--r--   include/crypto/internal/aead.h      |  80
-rw-r--r--   include/crypto/internal/skcipher.h  | 110
-rw-r--r--   include/crypto/scatterwalk.h        | 119
-rw-r--r--   include/crypto/sha.h                |  12
-rw-r--r--   include/crypto/skcipher.h           | 110
11 files changed, 654 insertions, 10 deletions
diff --git a/include/crypto/aead.h b/include/crypto/aead.h
new file mode 100644
index 00000000000..0edf949f636
--- /dev/null
+++ b/include/crypto/aead.h
@@ -0,0 +1,105 @@
+/*
+ * AEAD: Authenticated Encryption with Associated Data
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_AEAD_H
+#define _CRYPTO_AEAD_H
+
+#include <linux/crypto.h>
+#include <linux/kernel.h>
+#include <linux/slab.h>
+
+/**
+ * struct aead_givcrypt_request - AEAD request with IV generation
+ * @seq: Sequence number for IV generation
+ * @giv: Space for generated IV
+ * @areq: The AEAD request itself
+ */
+struct aead_givcrypt_request {
+	u64 seq;
+	u8 *giv;
+
+	struct aead_request areq;
+};
+
+static inline struct crypto_aead *aead_givcrypt_reqtfm(
+	struct aead_givcrypt_request *req)
+{
+	return crypto_aead_reqtfm(&req->areq);
+}
+
+static inline int crypto_aead_givencrypt(struct aead_givcrypt_request *req)
+{
+	struct aead_tfm *crt = crypto_aead_crt(aead_givcrypt_reqtfm(req));
+	return crt->givencrypt(req);
+};
+
+static inline int crypto_aead_givdecrypt(struct aead_givcrypt_request *req)
+{
+	struct aead_tfm *crt = crypto_aead_crt(aead_givcrypt_reqtfm(req));
+	return crt->givdecrypt(req);
+};
+
+static inline void aead_givcrypt_set_tfm(struct aead_givcrypt_request *req,
+					 struct crypto_aead *tfm)
+{
+	req->areq.base.tfm = crypto_aead_tfm(tfm);
+}
+
+static inline struct aead_givcrypt_request *aead_givcrypt_alloc(
+	struct crypto_aead *tfm, gfp_t gfp)
+{
+	struct aead_givcrypt_request *req;
+
+	req = kmalloc(sizeof(struct aead_givcrypt_request) +
+		      crypto_aead_reqsize(tfm), gfp);
+
+	if (likely(req))
+		aead_givcrypt_set_tfm(req, tfm);
+
+	return req;
+}
+
+static inline void aead_givcrypt_free(struct aead_givcrypt_request *req)
+{
+	kfree(req);
+}
+
+static inline void aead_givcrypt_set_callback(
+	struct aead_givcrypt_request *req, u32 flags,
+	crypto_completion_t complete, void *data)
+{
+	aead_request_set_callback(&req->areq, flags, complete, data);
+}
+
+static inline void aead_givcrypt_set_crypt(struct aead_givcrypt_request *req,
+					   struct scatterlist *src,
+					   struct scatterlist *dst,
+					   unsigned int nbytes, void *iv)
+{
+	aead_request_set_crypt(&req->areq, src, dst, nbytes, iv);
+}
+
+static inline void aead_givcrypt_set_assoc(struct aead_givcrypt_request *req,
+					   struct scatterlist *assoc,
+					   unsigned int assoclen)
+{
+	aead_request_set_assoc(&req->areq, assoc, assoclen);
+}
+
+static inline void aead_givcrypt_set_giv(struct aead_givcrypt_request *req,
+					 u8 *giv, u64 seq)
+{
+	req->giv = giv;
+	req->seq = seq;
+}
+
+#endif	/* _CRYPTO_AEAD_H */
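
The helpers above give the whole request lifecycle: allocate, set callback/assoc/crypt/giv, fire, free. A minimal usage sketch, not part of the patch; the caller, its arguments and the assumption of a synchronous transform (so the request can be freed immediately) are hypothetical:

#include <crypto/aead.h>
#include <linux/scatterlist.h>

/* Hypothetical caller: encrypt one packet and have the IV generated for it. */
static int example_aead_givencrypt(struct crypto_aead *tfm,
				   struct scatterlist *assoc, unsigned int assoclen,
				   struct scatterlist *sg, unsigned int nbytes,
				   u8 *iv_out, u64 seqno)
{
	struct aead_givcrypt_request *req;
	int err;

	req = aead_givcrypt_alloc(tfm, GFP_ATOMIC);
	if (!req)
		return -ENOMEM;

	/* No completion callback: assumes a synchronous implementation.
	 * A real async caller would pass a callback and handle
	 * -EINPROGRESS/-EBUSY instead of freeing the request here. */
	aead_givcrypt_set_callback(req, 0, NULL, NULL);
	aead_givcrypt_set_assoc(req, assoc, assoclen);
	aead_givcrypt_set_crypt(req, sg, sg, nbytes, iv_out);
	aead_givcrypt_set_giv(req, iv_out, seqno);

	err = crypto_aead_givencrypt(req);

	aead_givcrypt_free(req);
	return err;
}
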
diff --git a/include/crypto/aes.h b/include/crypto/aes.h
new file mode 100644
index 00000000000..d480b76715a
--- /dev/null
+++ b/include/crypto/aes.h
@@ -0,0 +1,31 @@
+/*
+ * Common values for AES algorithms
+ */
+
+#ifndef _CRYPTO_AES_H
+#define _CRYPTO_AES_H
+
+#include <linux/types.h>
+#include <linux/crypto.h>
+
+#define AES_MIN_KEY_SIZE	16
+#define AES_MAX_KEY_SIZE	32
+#define AES_KEYSIZE_128		16
+#define AES_KEYSIZE_192		24
+#define AES_KEYSIZE_256		32
+#define AES_BLOCK_SIZE		16
+
+struct crypto_aes_ctx {
+	u32 key_length;
+	u32 key_enc[60];
+	u32 key_dec[60];
+};
+
+extern u32 crypto_ft_tab[4][256];
+extern u32 crypto_fl_tab[4][256];
+extern u32 crypto_it_tab[4][256];
+extern u32 crypto_il_tab[4][256];
+
+int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		unsigned int key_len);
+#endif
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index b9b05d399d2..60d06e784be 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -111,8 +111,15 @@ void crypto_drop_spawn(struct crypto_spawn *spawn);
 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
 				    u32 mask);
 
+static inline void crypto_set_spawn(struct crypto_spawn *spawn,
+				    struct crypto_instance *inst)
+{
+	spawn->inst = inst;
+}
+
 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
 int crypto_check_attr_type(struct rtattr **tb, u32 type);
+const char *crypto_attr_alg_name(struct rtattr *rta);
 struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask);
 int crypto_attr_u32(struct rtattr *rta, u32 *num);
 struct crypto_instance *crypto_alloc_instance(const char *name,
@@ -124,6 +131,10 @@ int crypto_enqueue_request(struct crypto_queue *queue,
 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
 int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
 
+/* These functions require the input/output to be aligned as u32. */
+void crypto_inc(u8 *a, unsigned int size);
+void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
+
 int blkcipher_walk_done(struct blkcipher_desc *desc,
 			struct blkcipher_walk *walk, int err);
 int blkcipher_walk_virt(struct blkcipher_desc *desc,
@@ -187,20 +198,11 @@ static inline struct crypto_instance *crypto_aead_alg_instance(
 	return crypto_tfm_alg_instance(&aead->base);
 }
 
-static inline struct crypto_ablkcipher *crypto_spawn_ablkcipher(
-	struct crypto_spawn *spawn)
-{
-	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-	u32 mask = CRYPTO_ALG_TYPE_MASK;
-
-	return __crypto_ablkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
-}
-
 static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
 	struct crypto_spawn *spawn)
 {
 	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-	u32 mask = CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
+	u32 mask = CRYPTO_ALG_TYPE_MASK;
 
 	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
 }
@@ -303,5 +305,14 @@ static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
 	return crypto_attr_alg(tb[1], type, mask);
 }
 
+/*
+ * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
+ * Otherwise returns zero.
+ */
+static inline int crypto_requires_sync(u32 type, u32 mask)
+{
+	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
+}
+
 #endif	/* _CRYPTO_ALGAPI_H */
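
The alignment comment is the only contract crypto_inc() and crypto_xor() document here: a big-endian counter increment and an in-place byte-wise XOR, done u32 at a time internally. For reference, a stand-alone user-space sketch of those semantics (it mirrors, but is not, the kernel implementation):

#include <stddef.h>
#include <stdint.h>

/* crypto_inc()-like: treat the buffer as one big-endian counter. */
static void example_inc(uint8_t *a, size_t size)
{
	while (size--)
		if (++a[size])		/* stop once a byte did not wrap to zero */
			break;
}

/* crypto_xor()-like: dst ^= src over 'size' bytes. */
static void example_xor(uint8_t *dst, const uint8_t *src, size_t size)
{
	while (size--)
		*dst++ ^= *src++;
}

The kernel versions operate on u32 words for speed, which is where the "input/output must be aligned as u32" requirement comes from.
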
diff --git a/include/crypto/authenc.h b/include/crypto/authenc.h
new file mode 100644
index 00000000000..e47b044929a
--- /dev/null
+++ b/include/crypto/authenc.h
@@ -0,0 +1,27 @@
+/*
+ * Authenc: Simple AEAD wrapper for IPsec
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+#ifndef _CRYPTO_AUTHENC_H
+#define _CRYPTO_AUTHENC_H
+
+#include <linux/types.h>
+
+enum {
+	CRYPTO_AUTHENC_KEYA_UNSPEC,
+	CRYPTO_AUTHENC_KEYA_PARAM,
+};
+
+struct crypto_authenc_key_param {
+	__be32 enckeylen;
+};
+
+#endif	/* _CRYPTO_AUTHENC_H */
+
diff --git a/include/crypto/ctr.h b/include/crypto/ctr.h
new file mode 100644
index 00000000000..4180fc080e3
--- /dev/null
+++ b/include/crypto/ctr.h
@@ -0,0 +1,20 @@
+/*
+ * CTR: Counter mode
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_CTR_H
+#define _CRYPTO_CTR_H
+
+#define CTR_RFC3686_NONCE_SIZE 4
+#define CTR_RFC3686_IV_SIZE 8
+#define CTR_RFC3686_BLOCK_SIZE 16
+
+#endif /* _CRYPTO_CTR_H */
diff --git a/include/crypto/des.h b/include/crypto/des.h
new file mode 100644
index 00000000000..2971c6304ad
--- /dev/null
+++ b/include/crypto/des.h
@@ -0,0 +1,19 @@
+/*
+ * DES & Triple DES EDE Cipher Algorithms.
+ */
+
+#ifndef __CRYPTO_DES_H
+#define __CRYPTO_DES_H
+
+#define DES_KEY_SIZE		8
+#define DES_EXPKEY_WORDS	32
+#define DES_BLOCK_SIZE		8
+
+#define DES3_EDE_KEY_SIZE	(3 * DES_KEY_SIZE)
+#define DES3_EDE_EXPKEY_WORDS	(3 * DES_EXPKEY_WORDS)
+#define DES3_EDE_BLOCK_SIZE	DES_BLOCK_SIZE
+
+
+extern unsigned long des_ekey(u32 *pe, const u8 *k);
+
+#endif /* __CRYPTO_DES_H */
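
The three CTR constants encode the RFC 3686 counter-block layout: a 4-byte nonce, an 8-byte per-packet IV, and a 4-byte big-endian block counter that starts at 1, for a 16-byte block. A stand-alone illustrative sketch (not kernel code) of assembling such a block:

#include <stdint.h>
#include <string.h>

#define CTR_RFC3686_NONCE_SIZE 4
#define CTR_RFC3686_IV_SIZE    8
#define CTR_RFC3686_BLOCK_SIZE 16

/* counter block = nonce (4) || per-packet IV (8) || counter (4, big endian) */
static void rfc3686_counter_block(uint8_t block[CTR_RFC3686_BLOCK_SIZE],
				  const uint8_t nonce[CTR_RFC3686_NONCE_SIZE],
				  const uint8_t iv[CTR_RFC3686_IV_SIZE])
{
	memcpy(block, nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(block + CTR_RFC3686_NONCE_SIZE, iv, CTR_RFC3686_IV_SIZE);
	block[12] = 0;
	block[13] = 0;
	block[14] = 0;
	block[15] = 1;		/* initial block counter per RFC 3686 */
}
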
diff --git a/include/crypto/internal/aead.h b/include/crypto/internal/aead.h
new file mode 100644
index 00000000000..d838c945575
--- /dev/null
+++ b/include/crypto/internal/aead.h
@@ -0,0 +1,80 @@
+/*
+ * AEAD: Authenticated Encryption with Associated Data
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_INTERNAL_AEAD_H
+#define _CRYPTO_INTERNAL_AEAD_H
+
+#include <crypto/aead.h>
+#include <crypto/algapi.h>
+#include <linux/types.h>
+
+struct rtattr;
+
+struct crypto_aead_spawn {
+	struct crypto_spawn base;
+};
+
+extern const struct crypto_type crypto_nivaead_type;
+
+static inline void crypto_set_aead_spawn(
+	struct crypto_aead_spawn *spawn, struct crypto_instance *inst)
+{
+	crypto_set_spawn(&spawn->base, inst);
+}
+
+int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
+		     u32 type, u32 mask);
+
+static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
+{
+	crypto_drop_spawn(&spawn->base);
+}
+
+static inline struct crypto_alg *crypto_aead_spawn_alg(
+	struct crypto_aead_spawn *spawn)
+{
+	return spawn->base.alg;
+}
+
+static inline struct crypto_aead *crypto_spawn_aead(
+	struct crypto_aead_spawn *spawn)
+{
+	return __crypto_aead_cast(
+		crypto_spawn_tfm(&spawn->base, CRYPTO_ALG_TYPE_AEAD,
+				 CRYPTO_ALG_TYPE_MASK));
+}
+
+struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
+					 struct rtattr **tb, u32 type,
+					 u32 mask);
+void aead_geniv_free(struct crypto_instance *inst);
+int aead_geniv_init(struct crypto_tfm *tfm);
+void aead_geniv_exit(struct crypto_tfm *tfm);
+
+static inline struct crypto_aead *aead_geniv_base(struct crypto_aead *geniv)
+{
+	return crypto_aead_crt(geniv)->base;
+}
+
+static inline void *aead_givcrypt_reqctx(struct aead_givcrypt_request *req)
+{
+	return aead_request_ctx(&req->areq);
+}
+
+static inline void aead_givcrypt_complete(struct aead_givcrypt_request *req,
+					  int err)
+{
+	aead_request_complete(&req->areq, err);
+}
+
+#endif	/* _CRYPTO_INTERNAL_AEAD_H */
+
diff --git a/include/crypto/internal/skcipher.h b/include/crypto/internal/skcipher.h
new file mode 100644
index 00000000000..2ba42cd7d6a
--- /dev/null
+++ b/include/crypto/internal/skcipher.h
@@ -0,0 +1,110 @@
+/*
+ * Symmetric key ciphers.
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
+#define _CRYPTO_INTERNAL_SKCIPHER_H
+
+#include <crypto/algapi.h>
+#include <crypto/skcipher.h>
+#include <linux/types.h>
+
+struct rtattr;
+
+struct crypto_skcipher_spawn {
+	struct crypto_spawn base;
+};
+
+extern const struct crypto_type crypto_givcipher_type;
+
+static inline void crypto_set_skcipher_spawn(
+	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
+{
+	crypto_set_spawn(&spawn->base, inst);
+}
+
+int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
+			 u32 type, u32 mask);
+
+static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
+{
+	crypto_drop_spawn(&spawn->base);
+}
+
+static inline struct crypto_alg *crypto_skcipher_spawn_alg(
+	struct crypto_skcipher_spawn *spawn)
+{
+	return spawn->base.alg;
+}
+
+static inline struct crypto_ablkcipher *crypto_spawn_skcipher(
+	struct crypto_skcipher_spawn *spawn)
+{
+	return __crypto_ablkcipher_cast(
+		crypto_spawn_tfm(&spawn->base, crypto_skcipher_type(0),
+				 crypto_skcipher_mask(0)));
+}
+
+int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req);
+int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req);
+const char *crypto_default_geniv(const struct crypto_alg *alg);
+
+struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
+					     struct rtattr **tb, u32 type,
+					     u32 mask);
+void skcipher_geniv_free(struct crypto_instance *inst);
+int skcipher_geniv_init(struct crypto_tfm *tfm);
+void skcipher_geniv_exit(struct crypto_tfm *tfm);
+
+static inline struct crypto_ablkcipher *skcipher_geniv_cipher(
+	struct crypto_ablkcipher *geniv)
+{
+	return crypto_ablkcipher_crt(geniv)->base;
+}
+
+static inline int skcipher_enqueue_givcrypt(
+	struct crypto_queue *queue, struct skcipher_givcrypt_request *request)
+{
+	return ablkcipher_enqueue_request(queue, &request->creq);
+}
+
+static inline struct skcipher_givcrypt_request *skcipher_dequeue_givcrypt(
+	struct crypto_queue *queue)
+{
+	return container_of(ablkcipher_dequeue_request(queue),
+			    struct skcipher_givcrypt_request, creq);
+}
+
+static inline void *skcipher_givcrypt_reqctx(
+	struct skcipher_givcrypt_request *req)
+{
+	return ablkcipher_request_ctx(&req->creq);
+}
+
+static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
+					       int err)
+{
+	req->base.complete(&req->base, err);
+}
+
+static inline void skcipher_givcrypt_complete(
+	struct skcipher_givcrypt_request *req, int err)
+{
+	ablkcipher_request_complete(&req->creq, err);
+}
+
+static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
+{
+	return req->base.flags;
+}
+
+#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */
+
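
A geniv template that defers requests could drive the queue helpers above roughly as follows. This is a hedged sketch only: the context structure, locking scheme and function names are hypothetical, not taken from this patch.

#include <crypto/internal/skcipher.h>
#include <linux/spinlock.h>

/* Hypothetical per-tfm context holding a backlog of givcrypt requests. */
struct example_geniv_ctx {
	spinlock_t lock;
	struct crypto_queue queue;
};

static int example_enqueue(struct example_geniv_ctx *ctx,
			   struct skcipher_givcrypt_request *req)
{
	int err;

	spin_lock_bh(&ctx->lock);
	err = skcipher_enqueue_givcrypt(&ctx->queue, req);
	spin_unlock_bh(&ctx->lock);

	return err;
}

static void example_drain_one(struct example_geniv_ctx *ctx)
{
	struct skcipher_givcrypt_request *req = NULL;

	spin_lock_bh(&ctx->lock);
	if (ctx->queue.qlen)
		req = skcipher_dequeue_givcrypt(&ctx->queue);
	spin_unlock_bh(&ctx->lock);

	if (req)
		/* Process the request, then report completion to its owner. */
		skcipher_givcrypt_complete(req, 0);
}
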
diff --git a/include/crypto/scatterwalk.h b/include/crypto/scatterwalk.h
new file mode 100644
index 00000000000..224658b8d80
--- /dev/null
+++ b/include/crypto/scatterwalk.h
@@ -0,0 +1,119 @@
+/*
+ * Cryptographic scatter and gather helpers.
+ *
+ * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
+ * Copyright (c) 2002 Adam J. Richter <adam@yggdrasil.com>
+ * Copyright (c) 2004 Jean-Luc Cooke <jlcooke@certainkey.com>
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_SCATTERWALK_H
+#define _CRYPTO_SCATTERWALK_H
+
+#include <asm/kmap_types.h>
+#include <crypto/algapi.h>
+#include <linux/hardirq.h>
+#include <linux/highmem.h>
+#include <linux/kernel.h>
+#include <linux/mm.h>
+#include <linux/scatterlist.h>
+#include <linux/sched.h>
+
+static inline enum km_type crypto_kmap_type(int out)
+{
+	enum km_type type;
+
+	if (in_softirq())
+		type = out * (KM_SOFTIRQ1 - KM_SOFTIRQ0) + KM_SOFTIRQ0;
+	else
+		type = out * (KM_USER1 - KM_USER0) + KM_USER0;
+
+	return type;
+}
+
+static inline void *crypto_kmap(struct page *page, int out)
+{
+	return kmap_atomic(page, crypto_kmap_type(out));
+}
+
+static inline void crypto_kunmap(void *vaddr, int out)
+{
+	kunmap_atomic(vaddr, crypto_kmap_type(out));
+}
+
+static inline void crypto_yield(u32 flags)
+{
+	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
+		cond_resched();
+}
+
+static inline void scatterwalk_sg_chain(struct scatterlist *sg1, int num,
+					struct scatterlist *sg2)
+{
+	sg_set_page(&sg1[num - 1], (void *)sg2, 0, 0);
+}
+
+static inline struct scatterlist *scatterwalk_sg_next(struct scatterlist *sg)
+{
+	return (++sg)->length ? sg : (void *)sg_page(sg);
+}
+
+static inline unsigned long scatterwalk_samebuf(struct scatter_walk *walk_in,
+						struct scatter_walk *walk_out)
+{
+	return !(((sg_page(walk_in->sg) - sg_page(walk_out->sg)) << PAGE_SHIFT) +
+		 (int)(walk_in->offset - walk_out->offset));
+}
+
+static inline unsigned int scatterwalk_pagelen(struct scatter_walk *walk)
+{
+	unsigned int len = walk->sg->offset + walk->sg->length - walk->offset;
+	unsigned int len_this_page = offset_in_page(~walk->offset) + 1;
+	return len_this_page > len ? len : len_this_page;
+}
+
+static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk,
+					     unsigned int nbytes)
+{
+	unsigned int len_this_page = scatterwalk_pagelen(walk);
+	return nbytes > len_this_page ? len_this_page : nbytes;
+}
+
+static inline void scatterwalk_advance(struct scatter_walk *walk,
+				       unsigned int nbytes)
+{
+	walk->offset += nbytes;
+}
+
+static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk,
+					       unsigned int alignmask)
+{
+	return !(walk->offset & alignmask);
+}
+
+static inline struct page *scatterwalk_page(struct scatter_walk *walk)
+{
+	return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
+}
+
+static inline void scatterwalk_unmap(void *vaddr, int out)
+{
+	crypto_kunmap(vaddr, out);
+}
+
+void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg);
+void scatterwalk_copychunks(void *buf, struct scatter_walk *walk,
+			    size_t nbytes, int out);
+void *scatterwalk_map(struct scatter_walk *walk, int out);
+void scatterwalk_done(struct scatter_walk *walk, int out, int more);
+
+void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
+			      unsigned int start, unsigned int nbytes, int out);
+
+#endif	/* _CRYPTO_SCATTERWALK_H */
diff --git a/include/crypto/sha.h b/include/crypto/sha.h
index 0686e1f7a24..c0ccc2b1a2d 100644
--- a/include/crypto/sha.h
+++ b/include/crypto/sha.h
@@ -8,6 +8,9 @@
 #define SHA1_DIGEST_SIZE	20
 #define SHA1_BLOCK_SIZE		64
 
+#define SHA224_DIGEST_SIZE	28
+#define SHA224_BLOCK_SIZE	64
+
 #define SHA256_DIGEST_SIZE	32
 #define SHA256_BLOCK_SIZE	64
 
@@ -23,6 +26,15 @@
 #define SHA1_H3		0x10325476UL
 #define SHA1_H4		0xc3d2e1f0UL
 
+#define SHA224_H0	0xc1059ed8UL
+#define SHA224_H1	0x367cd507UL
+#define SHA224_H2	0x3070dd17UL
+#define SHA224_H3	0xf70e5939UL
+#define SHA224_H4	0xffc00b31UL
+#define SHA224_H5	0x68581511UL
+#define SHA224_H6	0x64f98fa7UL
+#define SHA224_H7	0xbefa4fa4UL
+
 #define SHA256_H0	0x6a09e667UL
 #define SHA256_H1	0xbb67ae85UL
 #define SHA256_H2	0x3c6ef372UL
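
Of the scatterwalk prototypes, scatterwalk_map_and_copy() is the general "memcpy into or out of a scatterlist at an offset" helper. A hypothetical use (the function and its arguments are illustrative, not from this patch) is moving an authentication tag between a linear buffer and a message scatterlist:

#include <crypto/scatterwalk.h>

/* Hypothetical: read 'authsize' tag bytes that sit at the end of the
 * ciphertext described by 'sg', then write a recomputed tag back. */
static void example_tag_io(struct scatterlist *sg, unsigned int cryptlen,
			   u8 *tag, unsigned int authsize)
{
	/* out = 0: copy from the scatterlist into 'tag' */
	scatterwalk_map_and_copy(tag, sg, cryptlen - authsize, authsize, 0);

	/* ... verify or recompute the tag here ... */

	/* out = 1: copy from 'tag' back into the scatterlist */
	scatterwalk_map_and_copy(tag, sg, cryptlen - authsize, authsize, 1);
}
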
diff --git a/include/crypto/skcipher.h b/include/crypto/skcipher.h
new file mode 100644
index 00000000000..25fd6126522
--- /dev/null
+++ b/include/crypto/skcipher.h
@@ -0,0 +1,110 @@
+/*
+ * Symmetric key ciphers.
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_SKCIPHER_H
+#define _CRYPTO_SKCIPHER_H
+
+#include <linux/crypto.h>
+#include <linux/kernel.h>
+#include <linux/slab.h>
+
+/**
+ * struct skcipher_givcrypt_request - Crypto request with IV generation
+ * @seq: Sequence number for IV generation
+ * @giv: Space for generated IV
+ * @creq: The crypto request itself
+ */
+struct skcipher_givcrypt_request {
+	u64 seq;
+	u8 *giv;
+
+	struct ablkcipher_request creq;
+};
+
+static inline struct crypto_ablkcipher *skcipher_givcrypt_reqtfm(
+	struct skcipher_givcrypt_request *req)
+{
+	return crypto_ablkcipher_reqtfm(&req->creq);
+}
+
+static inline int crypto_skcipher_givencrypt(
+	struct skcipher_givcrypt_request *req)
+{
+	struct ablkcipher_tfm *crt =
+		crypto_ablkcipher_crt(skcipher_givcrypt_reqtfm(req));
+	return crt->givencrypt(req);
+};
+
+static inline int crypto_skcipher_givdecrypt(
+	struct skcipher_givcrypt_request *req)
+{
+	struct ablkcipher_tfm *crt =
+		crypto_ablkcipher_crt(skcipher_givcrypt_reqtfm(req));
+	return crt->givdecrypt(req);
+};
+
+static inline void skcipher_givcrypt_set_tfm(
+	struct skcipher_givcrypt_request *req, struct crypto_ablkcipher *tfm)
+{
+	req->creq.base.tfm = crypto_ablkcipher_tfm(tfm);
+}
+
+static inline struct skcipher_givcrypt_request *skcipher_givcrypt_cast(
+	struct crypto_async_request *req)
+{
+	return container_of(ablkcipher_request_cast(req),
+			    struct skcipher_givcrypt_request, creq);
+}
+
+static inline struct skcipher_givcrypt_request *skcipher_givcrypt_alloc(
+	struct crypto_ablkcipher *tfm, gfp_t gfp)
+{
+	struct skcipher_givcrypt_request *req;
+
+	req = kmalloc(sizeof(struct skcipher_givcrypt_request) +
+		      crypto_ablkcipher_reqsize(tfm), gfp);
+
+	if (likely(req))
+		skcipher_givcrypt_set_tfm(req, tfm);
+
+	return req;
+}
+
+static inline void skcipher_givcrypt_free(struct skcipher_givcrypt_request *req)
+{
+	kfree(req);
+}
+
+static inline void skcipher_givcrypt_set_callback(
+	struct skcipher_givcrypt_request *req, u32 flags,
+	crypto_completion_t complete, void *data)
+{
+	ablkcipher_request_set_callback(&req->creq, flags, complete, data);
+}
+
+static inline void skcipher_givcrypt_set_crypt(
+	struct skcipher_givcrypt_request *req,
+	struct scatterlist *src, struct scatterlist *dst,
+	unsigned int nbytes, void *iv)
+{
+	ablkcipher_request_set_crypt(&req->creq, src, dst, nbytes, iv);
+}
+
+static inline void skcipher_givcrypt_set_giv(
+	struct skcipher_givcrypt_request *req, u8 *giv, u64 seq)
+{
+	req->giv = giv;
+	req->seq = seq;
+}
+
+#endif	/* _CRYPTO_SKCIPHER_H */
+
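
As with the AEAD variant earlier, the intended sequence is allocate, set callback/crypt/giv, call crypto_skcipher_givencrypt(), free. A compressed sketch; the caller is hypothetical and a synchronous transform is assumed so the request can be freed in place:

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

static int example_skcipher_givencrypt(struct crypto_ablkcipher *tfm,
				       struct scatterlist *sg,
				       unsigned int nbytes,
				       u8 *iv_out, u64 seqno)
{
	struct skcipher_givcrypt_request *req;
	int err;

	req = skcipher_givcrypt_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	/* NULL callback: synchronous transform assumed (see lead-in). */
	skcipher_givcrypt_set_callback(req, 0, NULL, NULL);
	skcipher_givcrypt_set_crypt(req, sg, sg, nbytes, iv_out);
	skcipher_givcrypt_set_giv(req, iv_out, seqno);

	err = crypto_skcipher_givencrypt(req);

	skcipher_givcrypt_free(req);
	return err;
}
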