- Changed bare unsigned to unsigned int in the algorithms.
- Consistent use of u32 for flags throughout the API.
- Use of unsigned int rather than int for counting things which must
  be positive; also replaced size_t to keep the code simpler and reduce
  bloat on some archs.
- Got rid of some unneeded return statements.
- Const correctness (these conventions are illustrated in the sketch below).
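
Roughly, the conventions being applied look like this. This is a minimal
userspace sketch, not code from the patch itself; the names toy_setkey,
toy_encrypt, TOY_BLOCK_SIZE and TOY_FLAG_WEAK_KEY are made up for
illustration, and u32/u8 are typedef'd locally since the kernel types are
not available in userspace:

	#include <stdint.h>
	#include <string.h>

	typedef uint32_t u32;
	typedef uint8_t u8;

	#define TOY_BLOCK_SIZE    8U
	#define TOY_FLAG_WEAK_KEY 0x00000001U

	/* keylen is a byte count, so unsigned int; flags is a bitmask, so u32. */
	static int toy_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
	{
		if (keylen != TOY_BLOCK_SIZE) {
			*flags |= TOY_FLAG_WEAK_KEY;
			return -1;
		}
		memcpy(ctx, key, keylen);
		return 0;
	}

	/* src is read-only, so it is const; a void function needs no trailing return. */
	static void toy_encrypt(void *ctx, u8 *dst, const u8 *src)
	{
		unsigned int i;

		for (i = 0; i < TOY_BLOCK_SIZE; i++)
			dst[i] = src[i] ^ ((const u8 *)ctx)[i];
	}
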
__MOD_DEC_USE_COUNT(alg->cra_module);
}
-struct crypto_alg *crypto_alg_lookup(char *name)
+struct crypto_alg *crypto_alg_lookup(const char *name)
{
struct crypto_alg *q, *alg = NULL;
default:
BUG();
-
}
return -EINVAL;
}
}
-struct crypto_tfm *crypto_alloc_tfm(char *name, u32 flags)
+struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
{
struct crypto_tfm *tfm = NULL;
struct crypto_alg *alg;
}
if (crypto_alg_blocksize_check(alg)) {
- printk(KERN_WARNING "%s: blocksize %Zd exceeds max. "
- "size %d\n", __FUNCTION__, alg->cra_blocksize,
+ printk(KERN_WARNING "%s: blocksize %u exceeds max. "
+ "size %u\n", __FUNCTION__, alg->cra_blocksize,
CRYPTO_MAX_CIPHER_BLOCK_SIZE);
ret = -EINVAL;
}
seq_printf(m, "name : %s\n", alg->cra_name);
seq_printf(m, "module : %s\n", alg->cra_module ?
alg->cra_module->name : "[static]");
- seq_printf(m, "blocksize : %Zd\n", alg->cra_blocksize);
+ seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
case CRYPTO_ALG_TYPE_CIPHER:
- seq_printf(m, "keysize : %Zd\n", alg->cra_cipher.cia_keysize);
- seq_printf(m, "ivsize : %Zd\n", alg->cra_cipher.cia_ivsize);
+ seq_printf(m, "keysize : %u\n", alg->cra_cipher.cia_keysize);
+ seq_printf(m, "ivsize : %u\n", alg->cra_cipher.cia_ivsize);
break;
case CRYPTO_ALG_TYPE_DIGEST:
- seq_printf(m, "digestsize : %Zd\n",
+ seq_printf(m, "digestsize : %u\n",
alg->cra_digest.dia_digestsize);
break;
}
* A far more intelligent version of this is planned. For now, just
* try an exact match on the name of the algorithm.
*/
-void crypto_alg_autoload(char *name)
+void crypto_alg_autoload(const char *name)
{
request_module(name);
- return;
}
#include <asm/scatterlist.h>
#include "internal.h"
-typedef void (cryptfn_t)(void *, u8 *, u8 *);
+typedef void (cryptfn_t)(void *, u8 *, const u8 *);
typedef void (procfn_t)(struct crypto_tfm *, u8 *, cryptfn_t, int enc);
static inline void xor_64(u8 *a, const u8 *b)
((u32 *)a)[1] ^= ((u32 *)b)[1];
}
-static inline size_t sglen(struct scatterlist *sg, size_t nsg)
+static inline unsigned int sglen(struct scatterlist *sg, unsigned int nsg)
{
- int i;
- size_t n;
+ unsigned int i, n;
for (i = 0, n = 0; i < nsg; i++)
n += sg[i].length;
* Do not call this unless the total length of all of the fragments
* has been verified as a multiple of the block size.
*/
-static int copy_chunks(struct crypto_tfm *tfm, u8 *buf,
- struct scatterlist *sg, int sgidx,
- int rlen, int *last, int in)
+static unsigned int copy_chunks(struct crypto_tfm *tfm, u8 *buf,
+ struct scatterlist *sg, unsigned int sgidx,
+ unsigned int rlen, unsigned int *last, int in)
{
- int i, copied, coff, j, aligned;
- size_t bsize = crypto_tfm_alg_blocksize(tfm);
+ unsigned int i, copied, coff, j, aligned;
+ unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
for (i = sgidx, j = copied = 0, aligned = 0 ; copied < bsize; i++) {
- int len = sg[i].length;
- int clen;
+ unsigned int len = sg[i].length;
+ unsigned int clen;
char *p;
if (copied) {
coff = 0;
- clen = min_t(int, len, bsize - copied);
+ clen = min(len, bsize - copied);
if (len == bsize - copied)
aligned = 1; /* last + right aligned */
return i - sgidx - 2 + aligned;
}
-static inline int gather_chunks(struct crypto_tfm *tfm, u8 *buf,
- struct scatterlist *sg,
- int sgidx, int rlen, int *last)
+static inline unsigned int gather_chunks(struct crypto_tfm *tfm, u8 *buf,
+ struct scatterlist *sg,
+ unsigned int sgidx, unsigned int rlen,
+ unsigned int *last)
{
return copy_chunks(tfm, buf, sg, sgidx, rlen, last, 1);
}
-static inline int scatter_chunks(struct crypto_tfm *tfm, u8 *buf,
- struct scatterlist *sg,
- int sgidx, int rlen, int *last)
+static inline unsigned int scatter_chunks(struct crypto_tfm *tfm, u8 *buf,
+ struct scatterlist *sg,
+ unsigned int sgidx, unsigned int rlen,
+ unsigned int *last)
{
return copy_chunks(tfm, buf, sg, sgidx, rlen, last, 0);
}
* and the block offset (boff).
*/
static int crypt(struct crypto_tfm *tfm, struct scatterlist *sg,
- size_t nsg, cryptfn_t crfn, procfn_t prfn, int enc)
+ unsigned int nsg, cryptfn_t crfn, procfn_t prfn, int enc)
{
- int i, coff;
- size_t bsize = crypto_tfm_alg_blocksize(tfm);
+ unsigned int i, coff;
+ unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
u8 tmp[CRYPTO_MAX_CIPHER_BLOCK_SIZE];
if (sglen(sg, nsg) % bsize) {
}
for (i = 0, coff = 0; i < nsg; i++) {
- int n = 0, boff = 0;
- int len = sg[i].length - coff;
+ unsigned int n = 0, boff = 0;
+ unsigned int len = sg[i].length - coff;
char *p = crypto_kmap(sg[i].page) + sg[i].offset + coff;
while (len) {
fn(tfm->crt_ctx, block, block);
}
-static int setkey(struct crypto_tfm *tfm, const u8 *key, size_t keylen)
+static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
return tfm->__crt_alg->cra_cipher.cia_setkey(tfm->crt_ctx, key,
keylen, &tfm->crt_flags);
}
static int ecb_encrypt(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg)
+ struct scatterlist *sg, unsigned int nsg)
{
return crypt(tfm, sg, nsg,
tfm->__crt_alg->cra_cipher.cia_encrypt, ecb_process, 1);
}
static int ecb_decrypt(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg)
+ struct scatterlist *sg, unsigned int nsg)
{
return crypt(tfm, sg, nsg,
tfm->__crt_alg->cra_cipher.cia_decrypt, ecb_process, 1);
}
static int cbc_encrypt(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg)
+ struct scatterlist *sg, unsigned int nsg)
{
return crypt(tfm, sg, nsg,
tfm->__crt_alg->cra_cipher.cia_encrypt, cbc_process, 1);
}
static int cbc_decrypt(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg)
+ struct scatterlist *sg, unsigned int nsg)
{
return crypt(tfm, sg, nsg,
tfm->__crt_alg->cra_cipher.cia_decrypt, cbc_process, 0);
}
-static int nocrypt(struct crypto_tfm *tfm, struct scatterlist *sg, size_t nsg)
+static int nocrypt(struct crypto_tfm *tfm,
+ struct scatterlist *sg, unsigned int nsg)
{
return -ENOSYS;
}
* lossless Quadruple ROT13 compression.
*/
static void crypto_compress(struct crypto_tfm *tfm)
-{
- return;
-}
+{ }
static void crypto_decompress(struct crypto_tfm *tfm)
-{
- return;
-}
+{ }
int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags)
{
};
-static void des_small_fips_encrypt(u32 *expkey, u8 *dst, u8 *src)
+static void des_small_fips_encrypt(u32 *expkey, u8 *dst, const u8 *src)
{
u32 x, y, z;
dst[6] = y;
y >>= 8;
dst[7] = y;
- return;
}
-static void des_small_fips_decrypt(u32 *expkey, u8 *dst, u8 *src)
+static void des_small_fips_decrypt(u32 *expkey, u8 *dst, const u8 *src)
{
u32 x, y, z;
dst[6] = y;
y >>= 8;
dst[7] = y;
- return;
}
/*
* RFC2451: Weak key checks SHOULD be performed.
*/
-static int setkey(u32 *expkey, const u8 *key, size_t keylen, int *flags)
+static int setkey(u32 *expkey, const u8 *key, unsigned int keylen, u32 *flags)
{
const u8 *k;
u8 *b0, *b1;
return 0;
}
-static int des_setkey(void *ctx, const u8 *key, size_t keylen, int *flags)
+static int des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
{
return setkey(((struct des_ctx *)ctx)->expkey, key, keylen, flags);
}
-static void des_encrypt(void *ctx, u8 *dst, u8 *src)
+static void des_encrypt(void *ctx, u8 *dst, const u8 *src)
{
des_small_fips_encrypt(((struct des_ctx *)ctx)->expkey, dst, src);
}
-static void des_decrypt(void *ctx, u8 *dst, u8 *src)
+static void des_decrypt(void *ctx, u8 *dst, const u8 *src)
{
des_small_fips_decrypt(((struct des_ctx *)ctx)->expkey, dst, src);
}
* property.
*
*/
-static int des3_ede_setkey(void *ctx, const u8 *key, size_t keylen, int *flags)
+static int des3_ede_setkey(void *ctx, const u8 *key,
+ unsigned int keylen, u32 *flags)
{
- int i, off;
+ unsigned int i, off;
struct des3_ede_ctx *dctx = ctx;
if (keylen != DES3_EDE_KEY_SIZE) {
return 0;
}
-static void des3_ede_encrypt(void *ctx, u8 *dst, u8 *src)
+static void des3_ede_encrypt(void *ctx, u8 *dst, const u8 *src)
{
struct des3_ede_ctx *dctx = ctx;
des_small_fips_encrypt(dctx->expkey, dst, src);
des_small_fips_decrypt(&dctx->expkey[DES_EXPKEY_WORDS], dst, dst);
des_small_fips_encrypt(&dctx->expkey[DES_EXPKEY_WORDS * 2], dst, dst);
-
- return;
}
-static void des3_ede_decrypt(void *ctx, u8 *dst, u8 *src)
+static void des3_ede_decrypt(void *ctx, u8 *dst, const u8 *src)
{
struct des3_ede_ctx *dctx = ctx;
des_small_fips_decrypt(&dctx->expkey[DES_EXPKEY_WORDS * 2], dst, src);
des_small_fips_encrypt(&dctx->expkey[DES_EXPKEY_WORDS], dst, dst);
des_small_fips_decrypt(dctx->expkey, dst, dst);
-
- return;
}
static struct crypto_alg des_alg = {
static void init(struct crypto_tfm *tfm)
{
tfm->__crt_alg->cra_digest.dia_init(tfm->crt_ctx);
- return;
}
-static void update(struct crypto_tfm *tfm, struct scatterlist *sg, size_t nsg)
+static void update(struct crypto_tfm *tfm,
+ struct scatterlist *sg, unsigned int nsg)
{
- int i;
+ unsigned int i;
for (i = 0; i < nsg; i++) {
char *p = crypto_kmap(sg[i].page) + sg[i].offset;
crypto_kunmap(p);
crypto_yield(tfm);
}
- return;
}
static void final(struct crypto_tfm *tfm, u8 *out)
{
tfm->__crt_alg->cra_digest.dia_final(tfm->crt_ctx, out);
- return;
}
static void digest(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg, u8 *out)
+ struct scatterlist *sg, unsigned int nsg, u8 *out)
{
- int i;
+ unsigned int i;
tfm->crt_digest.dit_init(tfm);
crypto_yield(tfm);
}
crypto_digest_final(tfm, out);
- return;
}
-static void hmac(struct crypto_tfm *tfm, u8 *key, size_t keylen,
- struct scatterlist *sg, size_t nsg, u8 *out)
+static void hmac(struct crypto_tfm *tfm, u8 *key, unsigned int keylen,
+ struct scatterlist *sg, unsigned int nsg, u8 *out)
{
- int i;
+ unsigned int i;
struct scatterlist tmp;
char ipad[crypto_tfm_alg_blocksize(tfm) + 1];
char opad[crypto_tfm_alg_blocksize(tfm) + 1];
crypto_digest_update(tfm, &tmp, 1);
crypto_digest_final(tfm, out);
- return;
}
int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags)
}
#ifdef CONFIG_KMOD
-void crypto_alg_autoload(char *name);
+void crypto_alg_autoload(const char *name);
#endif
int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags);
u64 byte_count;
};
-static u32 lshift(u32 x, int s)
+static u32 lshift(u32 x, unsigned int s)
{
x &= 0xFFFFFFFF;
return ((x << s) & 0xFFFFFFFF) | (x >> (32 - s));
#define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s))
/* XXX: this stuff can be optimized */
-static inline void le32_to_cpu_array(u32 *buf, unsigned words)
+static inline void le32_to_cpu_array(u32 *buf, unsigned int words)
{
while (words--) {
__le32_to_cpus(buf);
}
}
-static inline void cpu_to_le32_array(u32 *buf, unsigned words)
+static inline void cpu_to_le32_array(u32 *buf, unsigned int words)
{
while (words--) {
__cpu_to_le32s(buf);
mctx->byte_count = 0;
}
-static void md4_update(void *ctx, const u8 *data, size_t len)
+static void md4_update(void *ctx, const u8 *data, unsigned int len)
{
struct md4_ctx *mctx = ctx;
const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
static void md4_final(void *ctx, u8 *out)
{
struct md4_ctx *mctx = ctx;
- const int offset = mctx->byte_count & 0x3f;
+ const unsigned int offset = mctx->byte_count & 0x3f;
char *p = (char *)mctx->block + offset;
int padding = 56 - (offset + 1);
}
/* XXX: this stuff can be optimized */
-static inline void le32_to_cpu_array(u32 *buf, unsigned words)
+static inline void le32_to_cpu_array(u32 *buf, unsigned int words)
{
while (words--) {
__le32_to_cpus(buf);
}
}
-static inline void cpu_to_le32_array(u32 *buf, unsigned words)
+static inline void cpu_to_le32_array(u32 *buf, unsigned int words)
{
while (words--) {
__cpu_to_le32s(buf);
mctx->byte_count = 0;
}
-static void md5_update(void *ctx, const u8 *data, size_t len)
+static void md5_update(void *ctx, const u8 *data, unsigned int len)
{
struct md5_ctx *mctx = ctx;
const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
static void md5_final(void *ctx, u8 *out)
{
struct md5_ctx *mctx = ctx;
- const int offset = mctx->byte_count & 0x3f;
+ const unsigned int offset = mctx->byte_count & 0x3f;
char *p = (char *)mctx->block + offset;
int padding = 56 - (offset + 1);
*sctx = initstate;
}
-static void sha1_update(void *ctx, const u8 *data, size_t len)
+static void sha1_update(void *ctx, const u8 *data, unsigned int len)
{
struct sha1_ctx *sctx = ctx;
- unsigned i, j;
+ unsigned int i, j;
j = (sctx->count >> 3) & 0x3f;
sctx->count += len << 3;
static char *tvmem;
static void
-hexdump(unsigned char *buf, size_t len)
+hexdump(unsigned char *buf, unsigned int len)
{
while (len--)
printk("%02x", *buf++);
test_md5(void)
{
char *p;
- int i;
+ unsigned int i;
struct scatterlist sg[2];
char result[128];
struct crypto_tfm *tfm;
struct md5_testvec *md5_tv;
struct hmac_md5_testvec *hmac_md5_tv;
- size_t tsize;
+ unsigned int tsize;
printk("\ntesting md5\n");
tsize = sizeof (md5_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
}
for (i = 0; i < MD5_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
memset(result, 0, sizeof (result));
p = md5_tv[i].plaintext;
tsize = sizeof (hmac_md5_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
hmac_md5_tv = (void *) tvmem;
for (i = 0; i < HMAC_MD5_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
memset(result, 0, sizeof (result));
p = hmac_md5_tv[i].plaintext;
test_md4(void)
{
char *p;
- int i;
+ unsigned int i;
struct scatterlist sg[1];
char result[128];
struct crypto_tfm *tfm;
struct md4_testvec *md4_tv;
- size_t tsize;
+ unsigned int tsize;
printk("\ntesting md4\n");
tsize = sizeof (md4_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
}
for (i = 0; i < MD4_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
memset(result, 0, sizeof (result));
p = md4_tv[i].plaintext;
test_sha1(void)
{
char *p;
- int i;
+ unsigned int i;
struct crypto_tfm *tfm;
struct sha1_testvec *sha1_tv;
struct hmac_sha1_testvec *hmac_sha1_tv;
struct scatterlist sg[2];
- size_t tsize;
+ unsigned int tsize;
char result[SHA1_DIGEST_SIZE];
printk("\ntesting sha1\n");
tsize = sizeof (sha1_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
}
for (i = 0; i < SHA1_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
memset(result, 0, sizeof (result));
p = sha1_tv[i].plaintext;
tsize = sizeof (hmac_sha1_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
hmac_sha1_tv = (void *) tvmem;
for (i = 0; i < HMAC_SHA1_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
memset(result, 0, sizeof (result));
p = hmac_sha1_tv[i].plaintext;
void
test_des(void)
{
- int ret, i, len;
- size_t tsize;
+ unsigned int ret, i, len;
+ unsigned int tsize;
char *p, *q;
struct crypto_tfm *tfm;
char *key;
tsize = sizeof (des_enc_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
}
for (i = 0; i < DES_ENC_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
key = des_tv[i].key;
tfm->crt_flags |= CRYPTO_TFM_REQ_WEAK_KEY;
tsize = sizeof (des_dec_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
des_tv = (void *) tvmem;
for (i = 0; i < DES_DEC_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
key = des_tv[i].key;
tsize = sizeof (des_cbc_enc_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
}
for (i = 0; i < DES_CBC_ENC_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
key = des_tv[i].key;
tsize = sizeof (des_cbc_dec_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
printk("\ntesting des cbc decryption\n");
for (i = 0; i < DES_CBC_DEC_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
tfm->crt_flags = 0;
key = des_tv[i].key;
out:
crypto_free_tfm(tfm);
- return;
}
void
test_des3_ede(void)
{
- int ret, i, len;
- size_t tsize;
+ unsigned int ret, i, len;
+ unsigned int tsize;
char *p, *q;
struct crypto_tfm *tfm;
char *key;
tsize = sizeof (des3_ede_enc_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
}
for (i = 0; i < DES3_EDE_ENC_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
key = des_tv[i].key;
ret = crypto_cipher_setkey(tfm, key, 24);
tsize = sizeof (des3_ede_dec_tv_template);
if (tsize > TVMEMSIZE) {
- printk("template (%Zd) too big for tvmem (%d)\n", tsize,
+ printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
return;
}
des_tv = (void *) tvmem;
for (i = 0; i < DES3_EDE_DEC_TEST_VECTORS; i++) {
- printk("test %d:\n", i + 1);
+ printk("test %u:\n", i + 1);
key = des_tv[i].key;
ret = crypto_cipher_setkey(tfm, key, 24);
out:
crypto_free_tfm(tfm);
- return;
}
static void
#define DES3_EDE_DEC_TEST_VECTORS 3
struct des_tv {
- int len;
+ unsigned int len;
int fail;
char key[24];
char iv[8];
* via crypto_register_alg() and crypto_unregister_alg().
*/
struct cipher_alg {
- size_t cia_keysize;
- size_t cia_ivsize;
- int (*cia_setkey)(void *ctx, const u8 *key, size_t keylen, int *flags);
- void (*cia_encrypt)(void *ctx, u8 *dst, u8 *src);
- void (*cia_decrypt)(void *ctx, u8 *dst, u8 *src);
+ unsigned int cia_keysize;
+ unsigned int cia_ivsize;
+ int (*cia_setkey)(void *ctx, const u8 *key,
+ unsigned int keylen, u32 *flags);
+ void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src);
+ void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src);
};
struct digest_alg {
- size_t dia_digestsize;
+ unsigned int dia_digestsize;
void (*dia_init)(void *ctx);
- void (*dia_update)(void *ctx, const u8 *data, size_t len);
+ void (*dia_update)(void *ctx, const u8 *data, unsigned int len);
void (*dia_final)(void *ctx, u8 *out);
};
struct crypto_alg {
struct list_head cra_list;
- int cra_flags;
- size_t cra_blocksize;
- size_t cra_ctxsize;
- char cra_name[CRYPTO_MAX_ALG_NAME];
+ u32 cra_flags;
+ unsigned int cra_blocksize;
+ unsigned int cra_ctxsize;
+ const char cra_name[CRYPTO_MAX_ALG_NAME];
union {
struct cipher_alg cipher;
struct cipher_tfm {
void *cit_iv;
u32 cit_mode;
- int (*cit_setkey)(struct crypto_tfm *tfm, const u8 *key, size_t keylen);
+ int (*cit_setkey)(struct crypto_tfm *tfm,
+ const u8 *key, unsigned int keylen);
int (*cit_encrypt)(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg);
+ struct scatterlist *sg, unsigned int nsg);
int (*cit_decrypt)(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg);
+ struct scatterlist *sg, unsigned int nsg);
};
struct digest_tfm {
void (*dit_init)(struct crypto_tfm *tfm);
void (*dit_update)(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg);
+ struct scatterlist *sg, unsigned int nsg);
void (*dit_final)(struct crypto_tfm *tfm, u8 *out);
void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
- size_t nsg, u8 *out);
- void (*dit_hmac)(struct crypto_tfm *tfm, u8 *key, size_t keylen,
- struct scatterlist *sg, size_t nsg, u8 *out);
+ unsigned int nsg, u8 *out);
+ void (*dit_hmac)(struct crypto_tfm *tfm, u8 *key,
+ unsigned int keylen, struct scatterlist *sg,
+ unsigned int nsg, u8 *out);
};
struct compress_tfm {
struct crypto_tfm {
void *crt_ctx;
- int crt_flags;
+ u32 crt_flags;
union {
struct cipher_tfm cipher;
* crypto_free_tfm() frees up the transform and any associated resources,
* then drops the refcount on the associated algorithm.
*/
-struct crypto_tfm *crypto_alloc_tfm(char *alg_name, u32 tfm_flags);
+struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
void crypto_free_tfm(struct crypto_tfm *tfm);
/*
* Transform helpers which query the underlying algorithm.
*/
-static inline char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
+static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
return tfm->__crt_alg->cra_name;
}
return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}
-static inline size_t crypto_tfm_alg_keysize(struct crypto_tfm *tfm)
+static inline unsigned int crypto_tfm_alg_keysize(struct crypto_tfm *tfm)
{
return tfm->__crt_alg->cra_cipher.cia_keysize;
}
-static inline size_t crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
+static inline unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
{
return tfm->__crt_alg->cra_cipher.cia_ivsize;
}
-static inline size_t crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
+static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
return tfm->__crt_alg->cra_blocksize;
}
-static inline size_t crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
+static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
{
return tfm->__crt_alg->cra_digest.dia_digestsize;
}
}
static inline void crypto_digest_update(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg)
+ struct scatterlist *sg,
+ unsigned int nsg)
{
BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
tfm->crt_digest.dit_update(tfm, sg, nsg);
static inline void crypto_digest_digest(struct crypto_tfm *tfm,
struct scatterlist *sg,
- size_t nsg, u8 *out)
+ unsigned int nsg, u8 *out)
{
BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
tfm->crt_digest.dit_digest(tfm, sg, nsg, out);
}
-
-static inline void crypto_digest_hmac(struct crypto_tfm *tfm, u8 *key,
- size_t keylen, struct scatterlist *sg,
- size_t nsg, u8 *out)
+
+static inline void crypto_digest_hmac(struct crypto_tfm *tfm,
+ u8 *key, unsigned int keylen,
+ struct scatterlist *sg,
+ unsigned int nsg, u8 *out)
{
BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
}
static inline int crypto_cipher_setkey(struct crypto_tfm *tfm,
- const u8 *key, size_t keylen)
+ const u8 *key, unsigned int keylen)
{
BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
return tfm->crt_cipher.cit_setkey(tfm, key, keylen);
}
static inline int crypto_cipher_encrypt(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg)
+ struct scatterlist *sg,
+ unsigned int nsg)
{
BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
return tfm->crt_cipher.cit_encrypt(tfm, sg, nsg);
}
static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
- struct scatterlist *sg, size_t nsg)
+ struct scatterlist *sg,
+ unsigned int nsg)
{
BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
return tfm->crt_cipher.cit_decrypt(tfm, sg, nsg);
}
static inline void crypto_cipher_set_iv(struct crypto_tfm *tfm,
- u8 *src, size_t len)
+ const u8 *src, unsigned int len)
{
BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
memcpy(tfm->crt_cipher.cit_iv, src, len);
}
static inline void crypto_cipher_get_iv(struct crypto_tfm *tfm,
- u8 *dst, size_t len)
+ u8 *dst, unsigned int len)
{
BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
memcpy(dst, tfm->crt_cipher.cit_iv, len);