Commit 332a3392 authored by Linus Torvalds's avatar Linus Torvalds
Browse files

Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (102 commits)
  crypto: sha-s390 - Fix warnings in import function
  crypto: vmac - New hash algorithm for intel_txt support
  crypto: api - Do not displace newly registered algorithms
  crypto: ansi_cprng - Fix module initialization
  crypto: xcbc - Fix alignment calculation of xcbc_tfm_ctx
  crypto: fips - Depend on ansi_cprng
  crypto: blkcipher - Do not use eseqiv on stream ciphers
  crypto: ctr - Use chainiv on raw counter mode
  Revert crypto: fips - Select CPRNG
  crypto: rng - Fix typo
  crypto: talitos - add support for 36 bit addressing
  crypto: talitos - align locks on cache lines
  crypto: talitos - simplify hmac data size calculation
  crypto: mv_cesa - Add support for Orion5X crypto engine
  crypto: cryptd - Add support to access underlaying shash
  crypto: gcm - Use GHASH digest algorithm
  crypto: ghash - Add GHASH digest algorithm for GCM
  crypto: authenc - Convert to ahash
  crypto: api - Fix aligned ctx helper
  crypto: hmac - Prehash ipad/opad
  ...
parents a9c86d42 81bd5f6c
...@@ -250,8 +250,9 @@ static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key, ...@@ -250,8 +250,9 @@ static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key,
const u8 *temp_key = key; const u8 *temp_key = key;
u32 *flags = &tfm->crt_flags; u32 *flags = &tfm->crt_flags;
if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) { if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE)) &&
*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED; (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
*flags |= CRYPTO_TFM_RES_WEAK_KEY;
return -EINVAL; return -EINVAL;
} }
for (i = 0; i < 2; i++, temp_key += DES_KEY_SIZE) { for (i = 0; i < 2; i++, temp_key += DES_KEY_SIZE) {
...@@ -411,9 +412,9 @@ static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key, ...@@ -411,9 +412,9 @@ static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) && if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2], memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
DES_KEY_SIZE))) { DES_KEY_SIZE)) &&
(*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED; *flags |= CRYPTO_TFM_RES_WEAK_KEY;
return -EINVAL; return -EINVAL;
} }
for (i = 0; i < 3; i++, temp_key += DES_KEY_SIZE) { for (i = 0; i < 3; i++, temp_key += DES_KEY_SIZE) {
......
...@@ -46,12 +46,38 @@ static int sha1_init(struct shash_desc *desc) ...@@ -46,12 +46,38 @@ static int sha1_init(struct shash_desc *desc)
return 0; return 0;
} }
static int sha1_export(struct shash_desc *desc, void *out)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
struct sha1_state *octx = out;
octx->count = sctx->count;
memcpy(octx->state, sctx->state, sizeof(octx->state));
memcpy(octx->buffer, sctx->buf, sizeof(octx->buffer));
return 0;
}
static int sha1_import(struct shash_desc *desc, const void *in)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
const struct sha1_state *ictx = in;
sctx->count = ictx->count;
memcpy(sctx->state, ictx->state, sizeof(ictx->state));
memcpy(sctx->buf, ictx->buffer, sizeof(ictx->buffer));
sctx->func = KIMD_SHA_1;
return 0;
}
static struct shash_alg alg = { static struct shash_alg alg = {
.digestsize = SHA1_DIGEST_SIZE, .digestsize = SHA1_DIGEST_SIZE,
.init = sha1_init, .init = sha1_init,
.update = s390_sha_update, .update = s390_sha_update,
.final = s390_sha_final, .final = s390_sha_final,
.export = sha1_export,
.import = sha1_import,
.descsize = sizeof(struct s390_sha_ctx), .descsize = sizeof(struct s390_sha_ctx),
.statesize = sizeof(struct sha1_state),
.base = { .base = {
.cra_name = "sha1", .cra_name = "sha1",
.cra_driver_name= "sha1-s390", .cra_driver_name= "sha1-s390",
......
...@@ -42,12 +42,38 @@ static int sha256_init(struct shash_desc *desc) ...@@ -42,12 +42,38 @@ static int sha256_init(struct shash_desc *desc)
return 0; return 0;
} }
static int sha256_export(struct shash_desc *desc, void *out)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
struct sha256_state *octx = out;
octx->count = sctx->count;
memcpy(octx->state, sctx->state, sizeof(octx->state));
memcpy(octx->buf, sctx->buf, sizeof(octx->buf));
return 0;
}
static int sha256_import(struct shash_desc *desc, const void *in)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
const struct sha256_state *ictx = in;
sctx->count = ictx->count;
memcpy(sctx->state, ictx->state, sizeof(ictx->state));
memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
sctx->func = KIMD_SHA_256;
return 0;
}
static struct shash_alg alg = { static struct shash_alg alg = {
.digestsize = SHA256_DIGEST_SIZE, .digestsize = SHA256_DIGEST_SIZE,
.init = sha256_init, .init = sha256_init,
.update = s390_sha_update, .update = s390_sha_update,
.final = s390_sha_final, .final = s390_sha_final,
.export = sha256_export,
.import = sha256_import,
.descsize = sizeof(struct s390_sha_ctx), .descsize = sizeof(struct s390_sha_ctx),
.statesize = sizeof(struct sha256_state),
.base = { .base = {
.cra_name = "sha256", .cra_name = "sha256",
.cra_driver_name= "sha256-s390", .cra_driver_name= "sha256-s390",
......
...@@ -13,7 +13,10 @@ ...@@ -13,7 +13,10 @@
* *
*/ */
#include <crypto/internal/hash.h> #include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/errno.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h> #include <linux/module.h>
#include "sha.h" #include "sha.h"
...@@ -37,12 +40,42 @@ static int sha512_init(struct shash_desc *desc) ...@@ -37,12 +40,42 @@ static int sha512_init(struct shash_desc *desc)
return 0; return 0;
} }
static int sha512_export(struct shash_desc *desc, void *out)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
struct sha512_state *octx = out;
octx->count[0] = sctx->count;
octx->count[1] = 0;
memcpy(octx->state, sctx->state, sizeof(octx->state));
memcpy(octx->buf, sctx->buf, sizeof(octx->buf));
return 0;
}
static int sha512_import(struct shash_desc *desc, const void *in)
{
struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
const struct sha512_state *ictx = in;
if (unlikely(ictx->count[1]))
return -ERANGE;
sctx->count = ictx->count[0];
memcpy(sctx->state, ictx->state, sizeof(ictx->state));
memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
sctx->func = KIMD_SHA_512;
return 0;
}
static struct shash_alg sha512_alg = { static struct shash_alg sha512_alg = {
.digestsize = SHA512_DIGEST_SIZE, .digestsize = SHA512_DIGEST_SIZE,
.init = sha512_init, .init = sha512_init,
.update = s390_sha_update, .update = s390_sha_update,
.final = s390_sha_final, .final = s390_sha_final,
.export = sha512_export,
.import = sha512_import,
.descsize = sizeof(struct s390_sha_ctx), .descsize = sizeof(struct s390_sha_ctx),
.statesize = sizeof(struct sha512_state),
.base = { .base = {
.cra_name = "sha512", .cra_name = "sha512",
.cra_driver_name= "sha512-s390", .cra_driver_name= "sha512-s390",
...@@ -78,7 +111,10 @@ static struct shash_alg sha384_alg = { ...@@ -78,7 +111,10 @@ static struct shash_alg sha384_alg = {
.init = sha384_init, .init = sha384_init,
.update = s390_sha_update, .update = s390_sha_update,
.final = s390_sha_final, .final = s390_sha_final,
.export = sha512_export,
.import = sha512_import,
.descsize = sizeof(struct s390_sha_ctx), .descsize = sizeof(struct s390_sha_ctx),
.statesize = sizeof(struct sha512_state),
.base = { .base = {
.cra_name = "sha384", .cra_name = "sha384",
.cra_driver_name= "sha384-s390", .cra_driver_name= "sha384-s390",
......
...@@ -636,7 +636,7 @@ static int __init aesni_init(void) ...@@ -636,7 +636,7 @@ static int __init aesni_init(void)
int err; int err;
if (!cpu_has_aes) { if (!cpu_has_aes) {
printk(KERN_ERR "Intel AES-NI instructions are not detected.\n"); printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
return -ENODEV; return -ENODEV;
} }
if ((err = crypto_register_alg(&aesni_alg))) if ((err = crypto_register_alg(&aesni_alg)))
......
...@@ -23,11 +23,13 @@ comment "Crypto core or helper" ...@@ -23,11 +23,13 @@ comment "Crypto core or helper"
config CRYPTO_FIPS config CRYPTO_FIPS
bool "FIPS 200 compliance" bool "FIPS 200 compliance"
depends on CRYPTO_ANSI_CPRNG
help help
This options enables the fips boot option which is This options enables the fips boot option which is
required if you want to system to operate in a FIPS 200 required if you want to system to operate in a FIPS 200
certification. You should say no unless you know what certification. You should say no unless you know what
this is. this is. Note that CRYPTO_ANSI_CPRNG is requred if this
option is selected
config CRYPTO_ALGAPI config CRYPTO_ALGAPI
tristate tristate
...@@ -156,7 +158,7 @@ config CRYPTO_GCM ...@@ -156,7 +158,7 @@ config CRYPTO_GCM
tristate "GCM/GMAC support" tristate "GCM/GMAC support"
select CRYPTO_CTR select CRYPTO_CTR
select CRYPTO_AEAD select CRYPTO_AEAD
select CRYPTO_GF128MUL select CRYPTO_GHASH
help help
Support for Galois/Counter Mode (GCM) and Galois Message Support for Galois/Counter Mode (GCM) and Galois Message
Authentication Code (GMAC). Required for IPSec. Authentication Code (GMAC). Required for IPSec.
...@@ -267,6 +269,18 @@ config CRYPTO_XCBC ...@@ -267,6 +269,18 @@ config CRYPTO_XCBC
http://csrc.nist.gov/encryption/modes/proposedmodes/ http://csrc.nist.gov/encryption/modes/proposedmodes/
xcbc-mac/xcbc-mac-spec.pdf xcbc-mac/xcbc-mac-spec.pdf
config CRYPTO_VMAC
tristate "VMAC support"
depends on EXPERIMENTAL
select CRYPTO_HASH
select CRYPTO_MANAGER
help
VMAC is a message authentication algorithm designed for
very high speed on 64-bit architectures.
See also:
<http://fastcrypto.org/vmac>
comment "Digest" comment "Digest"
config CRYPTO_CRC32C config CRYPTO_CRC32C
...@@ -289,6 +303,13 @@ config CRYPTO_CRC32C_INTEL ...@@ -289,6 +303,13 @@ config CRYPTO_CRC32C_INTEL
gain performance compared with software implementation. gain performance compared with software implementation.
Module will be crc32c-intel. Module will be crc32c-intel.
config CRYPTO_GHASH
tristate "GHASH digest algorithm"
select CRYPTO_SHASH
select CRYPTO_GF128MUL
help
GHASH is message digest algorithm for GCM (Galois/Counter Mode).
config CRYPTO_MD4 config CRYPTO_MD4
tristate "MD4 digest algorithm" tristate "MD4 digest algorithm"
select CRYPTO_HASH select CRYPTO_HASH
...@@ -780,13 +801,14 @@ comment "Random Number Generation" ...@@ -780,13 +801,14 @@ comment "Random Number Generation"
config CRYPTO_ANSI_CPRNG config CRYPTO_ANSI_CPRNG
tristate "Pseudo Random Number Generation for Cryptographic modules" tristate "Pseudo Random Number Generation for Cryptographic modules"
default m
select CRYPTO_AES select CRYPTO_AES
select CRYPTO_RNG select CRYPTO_RNG
select CRYPTO_FIPS
help help
This option enables the generic pseudo random number generator This option enables the generic pseudo random number generator
for cryptographic modules. Uses the Algorithm specified in for cryptographic modules. Uses the Algorithm specified in
ANSI X9.31 A.2.4 ANSI X9.31 A.2.4. Not this option must be enabled if CRYPTO_FIPS
is selected
source "drivers/crypto/Kconfig" source "drivers/crypto/Kconfig"
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
# #
obj-$(CONFIG_CRYPTO) += crypto.o obj-$(CONFIG_CRYPTO) += crypto.o
crypto-objs := api.o cipher.o digest.o compress.o crypto-objs := api.o cipher.o compress.o
obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o
...@@ -22,7 +22,6 @@ obj-$(CONFIG_CRYPTO_BLKCIPHER2) += chainiv.o ...@@ -22,7 +22,6 @@ obj-$(CONFIG_CRYPTO_BLKCIPHER2) += chainiv.o
obj-$(CONFIG_CRYPTO_BLKCIPHER2) += eseqiv.o obj-$(CONFIG_CRYPTO_BLKCIPHER2) += eseqiv.o
obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
crypto_hash-objs := hash.o
crypto_hash-objs += ahash.o crypto_hash-objs += ahash.o
crypto_hash-objs += shash.o crypto_hash-objs += shash.o
obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o
...@@ -33,6 +32,7 @@ cryptomgr-objs := algboss.o testmgr.o ...@@ -33,6 +32,7 @@ cryptomgr-objs := algboss.o testmgr.o
obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o
obj-$(CONFIG_CRYPTO_HMAC) += hmac.o obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
obj-$(CONFIG_CRYPTO_VMAC) += vmac.o
obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o
obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o
obj-$(CONFIG_CRYPTO_MD4) += md4.o obj-$(CONFIG_CRYPTO_MD4) += md4.o
...@@ -83,6 +83,7 @@ obj-$(CONFIG_CRYPTO_RNG2) += rng.o ...@@ -83,6 +83,7 @@ obj-$(CONFIG_CRYPTO_RNG2) += rng.o
obj-$(CONFIG_CRYPTO_RNG2) += krng.o obj-$(CONFIG_CRYPTO_RNG2) += krng.o
obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o
obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o
obj-$(CONFIG_CRYPTO_GHASH) += ghash-generic.o
# #
# generic algorithms and the async_tx api # generic algorithms and the async_tx api
......
...@@ -14,6 +14,7 @@ ...@@ -14,6 +14,7 @@
*/ */
#include <crypto/internal/skcipher.h> #include <crypto/internal/skcipher.h>
#include <linux/cpumask.h>
#include <linux/err.h> #include <linux/err.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/kernel.h> #include <linux/kernel.h>
...@@ -25,6 +26,8 @@ ...@@ -25,6 +26,8 @@
#include "internal.h" #include "internal.h"
static const char *skcipher_default_geniv __read_mostly;
static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key, static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key,
unsigned int keylen) unsigned int keylen)
{ {
...@@ -180,7 +183,14 @@ EXPORT_SYMBOL_GPL(crypto_givcipher_type); ...@@ -180,7 +183,14 @@ EXPORT_SYMBOL_GPL(crypto_givcipher_type);
const char *crypto_default_geniv(const struct crypto_alg *alg) const char *crypto_default_geniv(const struct crypto_alg *alg)
{ {
return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv"; if (((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
alg->cra_ablkcipher.ivsize) !=
alg->cra_blocksize)
return "chainiv";
return alg->cra_flags & CRYPTO_ALG_ASYNC ?
"eseqiv" : skcipher_default_geniv;
} }
static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask) static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
...@@ -201,8 +211,9 @@ static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask) ...@@ -201,8 +211,9 @@ static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
int err; int err;
larval = crypto_larval_lookup(alg->cra_driver_name, larval = crypto_larval_lookup(alg->cra_driver_name,
(type & ~CRYPTO_ALG_TYPE_MASK) |
CRYPTO_ALG_TYPE_GIVCIPHER, CRYPTO_ALG_TYPE_GIVCIPHER,
CRYPTO_ALG_TYPE_MASK); mask | CRYPTO_ALG_TYPE_MASK);
err = PTR_ERR(larval); err = PTR_ERR(larval);
if (IS_ERR(larval)) if (IS_ERR(larval))
goto out; goto out;
...@@ -360,3 +371,17 @@ struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name, ...@@ -360,3 +371,17 @@ struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
return ERR_PTR(err); return ERR_PTR(err);
} }
EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher); EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);
static int __init skcipher_module_init(void)
{
skcipher_default_geniv = num_possible_cpus() > 1 ?
"eseqiv" : "chainiv";
return 0;
}
static void skcipher_module_exit(void)
{
}
module_init(skcipher_module_init);
module_exit(skcipher_module_exit);
...@@ -1174,7 +1174,7 @@ EXPORT_SYMBOL_GPL(crypto_il_tab); ...@@ -1174,7 +1174,7 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
ctx->key_enc[6 * i + 11] = t; \ ctx->key_enc[6 * i + 11] = t; \
} while (0) } while (0)
#define loop8(i) do { \ #define loop8tophalf(i) do { \
t = ror32(t, 8); \ t = ror32(t, 8); \
t = ls_box(t) ^ rco_tab[i]; \ t = ls_box(t) ^ rco_tab[i]; \
t ^= ctx->key_enc[8 * i]; \ t ^= ctx->key_enc[8 * i]; \
...@@ -1185,6 +1185,10 @@ EXPORT_SYMBOL_GPL(crypto_il_tab); ...@@ -1185,6 +1185,10 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
ctx->key_enc[8 * i + 10] = t; \ ctx->key_enc[8 * i + 10] = t; \
t ^= ctx->key_enc[8 * i + 3]; \ t ^= ctx->key_enc[8 * i + 3]; \
ctx->key_enc[8 * i + 11] = t; \ ctx->key_enc[8 * i + 11] = t; \
} while (0)
#define loop8(i) do { \
loop8tophalf(i); \
t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \ t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \
ctx->key_enc[8 * i + 12] = t; \ ctx->key_enc[8 * i + 12] = t; \
t ^= ctx->key_enc[8 * i + 5]; \ t ^= ctx->key_enc[8 * i + 5]; \
...@@ -1245,8 +1249,9 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key, ...@@ -1245,8 +1249,9 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
ctx->key_enc[5] = le32_to_cpu(key[5]); ctx->key_enc[5] = le32_to_cpu(key[5]);
ctx->key_enc[6] = le32_to_cpu(key[6]); ctx->key_enc[6] = le32_to_cpu(key[6]);
t = ctx->key_enc[7] = le32_to_cpu(key[7]); t = ctx->key_enc[7] = le32_to_cpu(key[7]);
for (i = 0; i < 7; ++i) for (i = 0; i < 6; ++i)
loop8(i); loop8(i);
loop8tophalf(i);
break; break;
} }
......
...@@ -24,6 +24,19 @@ ...@@ -24,6 +24,19 @@
#include "internal.h" #include "internal.h"
struct ahash_request_priv {
crypto_completion_t complete;
void *data;
u8 *result;
void *ubuf[] CRYPTO_MINALIGN_ATTR;
};
static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
{
return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
halg);
}
static int hash_walk_next(struct crypto_hash_walk *walk) static int hash_walk_next(struct crypto_hash_walk *walk)
{ {
unsigned int alignmask = walk->alignmask; unsigned int alignmask = walk->alignmask;
...@@ -132,36 +145,34 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc, ...@@ -132,36 +145,34 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key, static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
unsigned int keylen) unsigned int keylen)
{ {
struct ahash_alg *ahash = crypto_ahash_alg(tfm);
unsigned long alignmask = crypto_ahash_alignmask(tfm); unsigned long alignmask = crypto_ahash_alignmask(tfm);
int ret; int ret;
u8 *buffer, *alignbuffer; u8 *buffer, *alignbuffer;
unsigned long absize; unsigned long absize;
absize = keylen + alignmask; absize = keylen + alignmask;
buffer = kmalloc(absize, GFP_ATOMIC); buffer = kmalloc(absize, GFP_KERNEL);
if (!buffer) if (!buffer)
return -ENOMEM; return -ENOMEM;
alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
memcpy(alignbuffer, key, keylen); memcpy(alignbuffer, key, keylen);
ret = ahash->setkey(tfm, alignbuffer, keylen); ret = tfm->setkey(tfm, alignbuffer, keylen);
memset(alignbuffer, 0, keylen); kzfree(buffer);
kfree(buffer);
return ret; return ret;
} }
static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key, int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
unsigned int keylen) unsigned int keylen)
{ {
struct ahash_alg *ahash = crypto_ahash_alg(tfm);
unsigned long alignmask = crypto_ahash_alignmask(tfm); unsigned long alignmask = crypto_ahash_alignmask(tfm);
if ((unsigned long)key & alignmask) if ((unsigned long)key & alignmask)
return ahash_setkey_unaligned(tfm, key, keylen); return ahash_setkey_unaligned(tfm, key, keylen);
return ahash->setkey(tfm, key, keylen); return tfm->setkey(tfm, key, keylen);
} }
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
unsigned int keylen) unsigned int keylen)
...@@ -169,44 +180,221 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, ...@@ -169,44 +180,221 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
return -ENOSYS; return -ENOSYS;
} }
int crypto_ahash_import(struct ahash_request *req, const u8 *in) static inline unsigned int ahash_align_buffer_size(unsigned len,
unsigned long mask)
{
return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
}
static void ahash_op_unaligned_finish(struct ahash_request *req, int err)
{
struct ahash_request_priv *priv = req->priv;
if (err == -EINPROGRESS)
return;
if (!err)
memcpy(priv->result, req->result,
crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
kzfree(priv);
}
static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
{
struct ahash_request *areq = req->data;
struct ahash_request_priv *priv = areq->priv;
crypto_completion_t complete = priv->complete;
void *data = priv->data;
ahash_op_unaligned_finish(areq, err);
complete(data, err);
}
static int ahash_op_unaligned(struct ahash_request *req,
int (*op)(struct ahash_request *))
{ {
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
struct ahash_alg *alg = crypto_ahash_alg(tfm); unsigned long alignmask = crypto_ahash_alignmask(tfm);
unsigned int ds = crypto_ahash_digestsize(tfm);
struct ahash_request_priv *priv;
int err;
priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
GFP_KERNEL : GFP_ATOMIC);
if (!priv)
return -ENOMEM;
memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm)); priv->result = req->result;
priv->complete = req->base.complete;
priv->data = req->base.data;
if (alg->reinit) req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
alg->reinit(req); req->base.complete = ahash_op_unaligned_done;
req->base.data = req;
req->priv = priv;
return 0; err = op(req);
ahash_op_unaligned_finish(req, err);
return err;
} }
EXPORT_SYMBOL_GPL(crypto_ahash_import);
static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type, static int crypto_ahash_op(struct ahash_request *req,
u32 mask) int (*op)(struct ahash_request *))
{ {
return alg->cra_ctxsize; struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
unsigned long alignmask = crypto_ahash_alignmask(tfm);
if ((unsigned long)req->result & alignmask)
return ahash_op_unaligned(req, op);
return op(req);
} }
static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask) int crypto_ahash_final(struct ahash_request *req)
{ {
struct ahash_alg *alg = &tfm->__crt_alg->cra_ahash; return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
struct ahash_tfm *crt = &tfm->crt_ahash; }
EXPORT_SYMBOL_GPL(crypto_ahash_final);
if (alg->digestsize > PAGE_SIZE / 8) int crypto_ahash_finup(struct ahash_request *req)
return -EINVAL; {
return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);
int crypto_ahash_digest(struct ahash_request *req)
{
return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);
static void ahash_def_finup_finish2(struct ahash_request *req, int err)
{
struct ahash_request_priv *priv = req->priv;
if (err == -EINPROGRESS)
return;
if (!err)
memcpy(priv->result, req->result,
crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
crt->init = alg->init; kzfree(priv);
crt->update = alg->update; }
crt->final = alg->final;
crt->digest = alg->digest; static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey; {
crt->digestsize = alg->digestsize; struct ahash_request *areq = req->data;
struct ahash_request_priv *priv = areq->priv;
crypto_completion_t complete = priv->complete;
void *data = priv->data;
ahash_def_finup_finish2(areq, err);
complete(data, err);
}
static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
if (err)
goto out;
req->base.complete = ahash_def_finup_done2;
req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
err = crypto_ahash_reqtfm(req)->final(req);
out:
ahash_def_finup_finish2(req, err);
return err;
}
static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
{
struct ahash_request *areq = req->data;
struct ahash_request_priv *priv = areq->priv;
crypto_completion_t complete = priv->complete;
void *data = priv->data;
err = ahash_def_finup_finish1(areq, err);
complete(data, err);
}
static int ahash_def_finup(struct ahash_request *req)
{
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
unsigned long alignmask = crypto_ahash_alignmask(tfm);
unsigned int ds = crypto_ahash_digestsize(tfm);
struct ahash_request_priv *priv;
priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
GFP_KERNEL : GFP_ATOMIC);
if (!priv)
return -ENOMEM;
priv->result = req->result;
priv->complete = req->base.complete;
priv->data = req->base.data;
req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
req->base.complete = ahash_def_finup_done1;
req->base.data = req;
req->priv = priv;
return ahash_def_finup_finish1(req, tfm->update(req));
}
static int ahash_no_export(struct ahash_request *req, void *out)
{
return -ENOSYS;
}
static int ahash_no_import(struct ahash_request *req, const void *in)
{
return -ENOSYS;
}
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
struct ahash_alg *alg = crypto_ahash_alg(hash);
hash->setkey = ahash_nosetkey;
hash->export = ahash_no_export;
hash->import = ahash_no_import;
if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
return crypto_init_shash_ops_async(tfm);
hash->init = alg->init;
hash->update = alg->update;
hash->final = alg->final;
hash->finup = alg->finup ?: ahash_def_finup;
hash->digest = alg->digest;
if (alg->setkey)
hash->setkey = alg->setkey;
if (alg->export)
hash->export = alg->export;
if (alg->import)
hash->import = alg->import;
return 0; return 0;
} }
static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
if (alg->cra_type == &crypto_ahash_type)
return alg->cra_ctxsize;
return sizeof(struct crypto_shash *);
}
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused)); __attribute__ ((unused));
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
...@@ -215,17 +403,101 @@ static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) ...@@ -215,17 +403,101 @@ static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ? seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
"yes" : "no"); "yes" : "no");
seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
seq_printf(m, "digestsize : %u\n", alg->cra_ahash.digestsize); seq_printf(m, "digestsize : %u\n",
__crypto_hash_alg_common(alg)->digestsize);
} }
const struct crypto_type crypto_ahash_type = { const struct crypto_type crypto_ahash_type = {
.ctxsize = crypto_ahash_ctxsize, .extsize = crypto_ahash_extsize,
.init = crypto_init_ahash_ops, .init_tfm = crypto_ahash_init_tfm,
#ifdef CONFIG_PROC_FS #ifdef CONFIG_PROC_FS
.show = crypto_ahash_show, .show = crypto_ahash_show,
#endif #endif
.maskclear = ~CRYPTO_ALG_TYPE_MASK,
.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
.type = CRYPTO_ALG_TYPE_AHASH,
.tfmsize = offsetof(struct crypto_ahash, base),
}; };
EXPORT_SYMBOL_GPL(crypto_ahash_type); EXPORT_SYMBOL_GPL(crypto_ahash_type);
struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
u32 mask)
{
return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);
static int ahash_prepare_alg(struct ahash_alg *alg)
{
struct crypto_alg *base = &alg->halg.base;
if (alg->halg.digestsize > PAGE_SIZE / 8 ||
alg->halg.statesize > PAGE_SIZE / 8)
return -EINVAL;
base->cra_type = &crypto_ahash_type;
base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;
return 0;
}
int crypto_register_ahash(struct ahash_alg *alg)
{
struct crypto_alg *base = &alg->halg.base;
int err;
err = ahash_prepare_alg(alg);
if (err)
return err;
return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);
int crypto_unregister_ahash(struct ahash_alg *alg)
{
return crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);
int ahash_register_instance(struct crypto_template *tmpl,
struct ahash_instance *inst)
{
int err;
err = ahash_prepare_alg(&inst->alg);
if (err)
return err;
return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);
void ahash_free_instance(struct crypto_instance *inst)
{
crypto_drop_spawn(crypto_instance_ctx(inst));
kfree(ahash_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_free_instance);
int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn,
struct hash_alg_common *alg,
struct crypto_instance *inst)
{
return crypto_init_spawn2(&spawn->base, &alg->base, inst,
&crypto_ahash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn);
struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
struct crypto_alg *alg;
alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask);
return IS_ERR(alg) ? ERR_CAST(alg) : __crypto_hash_alg_common(alg);
}
EXPORT_SYMBOL_GPL(ahash_attr_alg);
MODULE_LICENSE("GPL"); MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type"); MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
...@@ -81,16 +81,35 @@ static void crypto_destroy_instance(struct crypto_alg *alg) ...@@ -81,16 +81,35 @@ static void crypto_destroy_instance(struct crypto_alg *alg)
crypto_tmpl_put(tmpl); crypto_tmpl_put(tmpl);
} }
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
struct list_head *stack,
struct list_head *top,
struct list_head *secondary_spawns)
{
struct crypto_spawn *spawn, *n;
if (list_empty(stack))
return NULL;
spawn = list_first_entry(stack, struct crypto_spawn, list);
n = list_entry(spawn->list.next, struct crypto_spawn, list);
if (spawn->alg && &n->list != stack && !n->alg)
n->alg = (n->list.next == stack) ? alg :
&list_entry(n->list.next, struct crypto_spawn,
list)->inst->alg;
list_move(&spawn->list, secondary_spawns);
return &n->list == stack ? top : &n->inst->alg.cra_users;
}
static void crypto_remove_spawn(struct crypto_spawn *spawn, static void crypto_remove_spawn(struct crypto_spawn *spawn,
struct list_head *list, struct list_head *list)
struct list_head *secondary_spawns)
{ {
struct crypto_instance *inst = spawn->inst; struct crypto_instance *inst = spawn->inst;
struct crypto_template *tmpl = inst->tmpl; struct crypto_template *tmpl = inst->tmpl;
list_del_init(&spawn->list);
spawn->alg = NULL;
if (crypto_is_dead(&inst->alg)) if (crypto_is_dead(&inst->alg))
return; return;
...@@ -106,25 +125,55 @@ static void crypto_remove_spawn(struct crypto_spawn *spawn, ...@@ -106,25 +125,55 @@ static void crypto_remove_spawn(struct crypto_spawn *spawn,
hlist_del(&inst->list); hlist_del(&inst->list);
inst->alg.cra_destroy = crypto_destroy_instance; inst->alg.cra_destroy = crypto_destroy_instance;
list_splice(&inst->alg.cra_users, secondary_spawns); BUG_ON(!list_empty(&inst->alg.cra_users));
} }
static void crypto_remove_spawns(struct list_head *spawns, static void crypto_remove_spawns(struct crypto_alg *alg,
struct list_head *list, u32 new_type) struct list_head *list,
struct crypto_alg *nalg)
{ {
u32 new_type = (nalg ?: alg)->cra_flags;
struct crypto_spawn *spawn, *n; struct crypto_spawn *spawn, *n;
LIST_HEAD(secondary_spawns); LIST_HEAD(secondary_spawns);
struct list_head *spawns;
LIST_HEAD(stack);
LIST_HEAD(top);
spawns = &alg->cra_users;
list_for_each_entry_safe(spawn, n, spawns, list) { list_for_each_entry_safe(spawn, n, spawns, list) {
if ((spawn->alg->cra_flags ^ new_type) & spawn->mask) if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
continue; continue;
crypto_remove_spawn(spawn, list, &secondary_spawns); list_move(&spawn->list, &top);
} }
while (!list_empty(&secondary_spawns)) { spawns = &top;
list_for_each_entry_safe(spawn, n, &secondary_spawns, list) do {
crypto_remove_spawn(spawn, list, &secondary_spawns); while (!list_empty(spawns)) {
struct crypto_instance *inst;
spawn = list_first_entry(spawns, struct crypto_spawn,
list);
inst = spawn->inst;
BUG_ON(&inst->alg == alg);
list_move(&spawn->list, &stack);
if (&inst->alg == nalg)
break;
spawn->alg = NULL;
spawns = &inst->alg.cra_users;
}
} while ((spawns = crypto_more_spawns(alg, &stack, &top,
&secondary_spawns)));
list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
if (spawn->alg)
list_move(&spawn->list, &spawn->alg->cra_users);
else
crypto_remove_spawn(spawn, list);
} }
} }
...@@ -258,7 +307,7 @@ void crypto_alg_tested(const char *name, int err) ...@@ -258,7 +307,7 @@ void crypto_alg_tested(const char *name, int err)
q->cra_priority > alg->cra_priority) q->cra_priority > alg->cra_priority)
continue; continue;
crypto_remove_spawns(&q->cra_users, &list, alg->cra_flags); crypto_remove_spawns(q, &list, alg);
} }
complete: complete:
...@@ -330,7 +379,7 @@ static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list) ...@@ -330,7 +379,7 @@ static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg); crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
list_del_init(&alg->cra_list); list_del_init(&alg->cra_list);
crypto_remove_spawns(&alg->cra_users, list, alg->cra_flags); crypto_remove_spawns(alg, list, NULL);
return 0; return 0;
} }
...@@ -488,20 +537,38 @@ int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg, ...@@ -488,20 +537,38 @@ int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
} }
EXPORT_SYMBOL_GPL(crypto_init_spawn); EXPORT_SYMBOL_GPL(crypto_init_spawn);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
struct crypto_instance *inst,
const struct crypto_type *frontend)
{
int err = -EINVAL;
if (frontend && (alg->cra_flags ^ frontend->type) & frontend->maskset)
goto out;
spawn->frontend = frontend;
err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);
out:
return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);
void crypto_drop_spawn(struct crypto_spawn *spawn) void crypto_drop_spawn(struct crypto_spawn *spawn)
{ {
if (!spawn->alg)
return;
down_write(&crypto_alg_sem); down_write(&crypto_alg_sem);
list_del(&spawn->list); list_del(&spawn->list);
up_write(&crypto_alg_sem); up_write(&crypto_alg_sem);
} }
EXPORT_SYMBOL_GPL(crypto_drop_spawn); EXPORT_SYMBOL_GPL(crypto_drop_spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
u32 mask)
{ {
struct crypto_alg *alg; struct crypto_alg *alg;
struct crypto_alg *alg2; struct crypto_alg *alg2;
struct crypto_tfm *tfm;
down_read(&crypto_alg_sem); down_read(&crypto_alg_sem);
alg = spawn->alg; alg = spawn->alg;
...@@ -516,6 +583,19 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, ...@@ -516,6 +583,19 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
return ERR_PTR(-EAGAIN); return ERR_PTR(-EAGAIN);
} }
return alg;
}
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
u32 mask)
{
struct crypto_alg *alg;
struct crypto_tfm *tfm;
alg = crypto_spawn_alg(spawn);
if (IS_ERR(alg))
return ERR_CAST(alg);
tfm = ERR_PTR(-EINVAL); tfm = ERR_PTR(-EINVAL);
if (unlikely((alg->cra_flags ^ type) & mask)) if (unlikely((alg->cra_flags ^ type) & mask))
goto out_put_alg; goto out_put_alg;
...@@ -532,6 +612,27 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, ...@@ -532,6 +612,27 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
} }
EXPORT_SYMBOL_GPL(crypto_spawn_tfm); EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
struct crypto_alg *alg;
struct crypto_tfm *tfm;
alg = crypto_spawn_alg(spawn);
if (IS_ERR(alg))
return ERR_CAST(alg);
tfm = crypto_create_tfm(alg, spawn->frontend);
if (IS_ERR(tfm))
goto out_put_alg;
return tfm;
out_put_alg:
crypto_mod_put(alg);
return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
int crypto_register_notifier(struct notifier_block *nb) int crypto_register_notifier(struct notifier_block *nb)
{ {
return blocking_notifier_chain_register(&crypto_chain, nb); return blocking_notifier_chain_register(&crypto_chain, nb);
...@@ -595,7 +696,9 @@ const char *crypto_attr_alg_name(struct rtattr *rta) ...@@ -595,7 +696,9 @@ const char *crypto_attr_alg_name(struct rtattr *rta)
} }
EXPORT_SYMBOL_GPL(crypto_attr_alg_name); EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask) struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
const struct crypto_type *frontend,
u32 type, u32 mask)
{ {
const char *name; const char *name;
int err; int err;
...@@ -605,9 +708,9 @@ struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask) ...@@ -605,9 +708,9 @@ struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
if (IS_ERR(name)) if (IS_ERR(name))
return ERR_PTR(err); return ERR_PTR(err);
return crypto_alg_mod_lookup(name, type, mask); return crypto_find_alg(name, frontend, type, mask);
} }
EXPORT_SYMBOL_GPL(crypto_attr_alg); EXPORT_SYMBOL_GPL(crypto_attr_alg2);
int crypto_attr_u32(struct rtattr *rta, u32 *num) int crypto_attr_u32(struct rtattr *rta, u32 *num)
{ {
...@@ -627,17 +730,20 @@ int crypto_attr_u32(struct rtattr *rta, u32 *num) ...@@ -627,17 +730,20 @@ int crypto_attr_u32(struct rtattr *rta, u32 *num)
} }
EXPORT_SYMBOL_GPL(crypto_attr_u32); EXPORT_SYMBOL_GPL(crypto_attr_u32);
struct crypto_instance *crypto_alloc_instance(const char *name, void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
struct crypto_alg *alg) unsigned int head)
{ {
struct crypto_instance *inst; struct crypto_instance *inst;
struct crypto_spawn *spawn; char *p;
int err; int err;
inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
if (!inst) GFP_KERNEL);
if (!p)
return ERR_PTR(-ENOMEM); return ERR_PTR(-ENOMEM);
inst = (void *)(p + head);
err = -ENAMETOOLONG; err = -ENAMETOOLONG;
if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name, if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
alg->cra_name) >= CRYPTO_MAX_ALG_NAME) alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
...@@ -647,6 +753,25 @@ struct crypto_instance *crypto_alloc_instance(const char *name, ...@@ -647,6 +753,25 @@ struct crypto_instance *crypto_alloc_instance(const char *name,
name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
goto err_free_inst; goto err_free_inst;
return p;
err_free_inst:
kfree(p);
return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance2);
struct crypto_instance *crypto_alloc_instance(const char *name,
struct crypto_alg *alg)
{
struct crypto_instance *inst;
struct crypto_spawn *spawn;
int err;
inst = crypto_alloc_instance2(name, alg, 0);
if (IS_ERR(inst))
goto out;
spawn = crypto_instance_ctx(inst); spawn = crypto_instance_ctx(inst);
err = crypto_init_spawn(spawn, alg, inst, err = crypto_init_spawn(spawn, alg, inst,
CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
...@@ -658,7 +783,10 @@ struct crypto_instance *crypto_alloc_instance(const char *name, ...@@ -658,7 +783,10 @@ struct crypto_instance *crypto_alloc_instance(const char *name,
err_free_inst: err_free_inst:
kfree(inst); kfree(inst);
return ERR_PTR(err); inst = ERR_PTR(err);
out:
return inst;
} }
EXPORT_SYMBOL_GPL(crypto_alloc_instance); EXPORT_SYMBOL_GPL(crypto_alloc_instance);
......
...@@ -68,6 +68,11 @@ static int cryptomgr_probe(void *data) ...@@ -68,6 +68,11 @@ static int cryptomgr_probe(void *data)
goto err; goto err;
do { do {
if (tmpl->create) {
err = tmpl->create(tmpl, param->tb);
continue;
}
inst = tmpl->alloc(param->tb); inst = tmpl->alloc(param->tb);
if (IS_ERR(inst)) if (IS_ERR(inst))
err = PTR_ERR(inst); err = PTR_ERR(inst);
......
...@@ -187,7 +187,6 @@ static int _get_more_prng_bytes(struct prng_context *ctx) ...@@ -187,7 +187,6 @@ static int _get_more_prng_bytes(struct prng_context *ctx)
/* Our exported functions */ /* Our exported functions */
static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx) static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
{ {
unsigned long flags;
unsigned char *ptr = buf; unsigned char *ptr = buf;
unsigned int byte_count = (unsigned int)nbytes; unsigned int byte_count = (unsigned int)nbytes;
int err; int err;
...@@ -196,7 +195,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx) ...@@ -196,7 +195,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
if (nbytes < 0) if (nbytes < 0)
return -EINVAL; return -EINVAL;
spin_lock_irqsave(&ctx->prng_lock, flags); spin_lock_bh(&ctx->prng_lock);
err = -EINVAL; err = -EINVAL;
if (ctx->flags & PRNG_NEED_RESET) if (ctx->flags & PRNG_NEED_RESET)
...@@ -268,7 +267,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx) ...@@ -268,7 +267,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
goto remainder; goto remainder;
done: done:
spin_unlock_irqrestore(&ctx->prng_lock, flags); spin_unlock_bh(&ctx->prng_lock);
dbgprint(KERN_CRIT "returning %d from get_prng_bytes in context %p\n", dbgprint(KERN_CRIT "returning %d from get_prng_bytes in context %p\n",
err, ctx); err, ctx);
return err; return err;
...@@ -284,10 +283,9 @@ static int reset_prng_context(struct prng_context *ctx, ...@@ -284,10 +283,9 @@ static int reset_prng_context(struct prng_context *ctx,
unsigned char *V, unsigned char *DT) unsigned char *V, unsigned char *DT)
{ {
int ret; int ret;
int rc = -EINVAL;
unsigned char *prng_key; unsigned char *prng_key;
spin_lock(&ctx->prng_lock); spin_lock_bh(&ctx->prng_lock);
ctx->flags |= PRNG_NEED_RESET; ctx->flags |= PRNG_NEED_RESET;
prng_key = (key != NULL) ? key : (unsigned char *)DEFAULT_PRNG_KEY; prng_key = (key != NULL) ? key : (unsigned char *)DEFAULT_PRNG_KEY;
...@@ -308,34 +306,20 @@ static int reset_prng_context(struct prng_context *ctx, ...@@ -308,34 +306,20 @@ static int reset_prng_context(struct prng_context *ctx,
memset(ctx->rand_data, 0, DEFAULT_BLK_SZ); memset(ctx->rand_data, 0, DEFAULT_BLK_SZ);
memset(ctx->last_rand_data, 0, DEFAULT_BLK_SZ); memset(ctx->last_rand_data, 0, DEFAULT_BLK_SZ);
if (ctx->tfm)
crypto_free_cipher(ctx->tfm);
ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
if (IS_ERR(ctx->tfm)) {
dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
ctx);
ctx->tfm = NULL;
goto out;
}
ctx->rand_data_valid = DEFAULT_BLK_SZ; ctx->rand_data_valid = DEFAULT_BLK_SZ;
ret = crypto_cipher_setkey(ctx->tfm, prng_key, klen); ret = crypto_cipher_setkey(ctx->tfm, prng_key, klen);
if (ret) { if (ret) {
dbgprint(KERN_CRIT "PRNG: setkey() failed flags=%x\n", dbgprint(KERN_CRIT "PRNG: setkey() failed flags=%x\n",
crypto_cipher_get_flags(ctx->tfm)); crypto_cipher_get_flags(ctx->tfm));
crypto_free_cipher(ctx->tfm);
goto out; goto out;
} }
rc = 0; ret = 0;
ctx->flags &= ~PRNG_NEED_RESET; ctx->flags &= ~PRNG_NEED_RESET;
out: out:
spin_unlock(&ctx->prng_lock); spin_unlock_bh(&ctx->prng_lock);
return ret;
return rc;
} }
static int cprng_init(struct crypto_tfm *tfm) static int cprng_init(struct crypto_tfm *tfm)
...@@ -343,6 +327,12 @@ static int cprng_init(struct crypto_tfm *tfm) ...@@ -343,6 +327,12 @@ static int cprng_init(struct crypto_tfm *tfm)
struct prng_context *ctx = crypto_tfm_ctx(tfm); struct prng_context *ctx = crypto_tfm_ctx(tfm);
spin_lock_init(&ctx->prng_lock); spin_lock_init(&ctx->prng_lock);
ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
if (IS_ERR(ctx->tfm)) {
dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
ctx);
return PTR_ERR(ctx->tfm);
}
if (reset_prng_context(ctx, NULL, DEFAULT_PRNG_KSZ, NULL, NULL) < 0) if (reset_prng_context(ctx, NULL, DEFAULT_PRNG_KSZ, NULL, NULL) < 0)
return -EINVAL; return -EINVAL;
...@@ -418,17 +408,10 @@ static struct crypto_alg rng_alg = { ...@@ -418,17 +408,10 @@ static struct crypto_alg rng_alg = {
/* Module initalization */ /* Module initalization */
static int __init prng_mod_init(void) static int __init prng_mod_init(void)
{ {
int ret = 0;
if (fips_enabled) if (fips_enabled)
rng_alg.cra_priority += 200; rng_alg.cra_priority += 200;
ret = crypto_register_alg(&rng_alg); return crypto_register_alg(&rng_alg);
if (ret)
goto out;
out:
return 0;
} }
static void __exit prng_mod_fini(void) static void __exit prng_mod_fini(void)
......
...@@ -285,13 +285,6 @@ static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask) ...@@ -285,13 +285,6 @@ static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
switch (crypto_tfm_alg_type(tfm)) { switch (crypto_tfm_alg_type(tfm)) {
case CRYPTO_ALG_TYPE_CIPHER: case CRYPTO_ALG_TYPE_CIPHER:
return crypto_init_cipher_ops(tfm); return crypto_init_cipher_ops(tfm);
case CRYPTO_ALG_TYPE_DIGEST:
if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) !=
CRYPTO_ALG_TYPE_HASH_MASK)
return crypto_init_digest_ops_async(tfm);
else
return crypto_init_digest_ops(tfm);
case CRYPTO_ALG_TYPE_COMPRESS: case CRYPTO_ALG_TYPE_COMPRESS:
return crypto_init_compress_ops(tfm); return crypto_init_compress_ops(tfm);
...@@ -318,11 +311,7 @@ static void crypto_exit_ops(struct crypto_tfm *tfm) ...@@ -318,11 +311,7 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
case CRYPTO_ALG_TYPE_CIPHER: case CRYPTO_ALG_TYPE_CIPHER:
crypto_exit_cipher_ops(tfm); crypto_exit_cipher_ops(tfm);
break; break;
case CRYPTO_ALG_TYPE_DIGEST:
crypto_exit_digest_ops(tfm);
break;
case CRYPTO_ALG_TYPE_COMPRESS: case CRYPTO_ALG_TYPE_COMPRESS:
crypto_exit_compress_ops(tfm); crypto_exit_compress_ops(tfm);
break; break;
...@@ -349,11 +338,7 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask) ...@@ -349,11 +338,7 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
case CRYPTO_ALG_TYPE_CIPHER: case CRYPTO_ALG_TYPE_CIPHER:
len += crypto_cipher_ctxsize(alg); len += crypto_cipher_ctxsize(alg);
break; break;
case CRYPTO_ALG_TYPE_DIGEST:
len += crypto_digest_ctxsize(alg);
break;
case CRYPTO_ALG_TYPE_COMPRESS: case CRYPTO_ALG_TYPE_COMPRESS:
len += crypto_compress_ctxsize(alg); len += crypto_compress_ctxsize(alg);
break; break;
...@@ -472,7 +457,7 @@ void *crypto_create_tfm(struct crypto_alg *alg, ...@@ -472,7 +457,7 @@ void *crypto_create_tfm(struct crypto_alg *alg,
int err = -ENOMEM; int err = -ENOMEM;
tfmsize = frontend->tfmsize; tfmsize = frontend->tfmsize;
total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend); total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);
mem = kzalloc(total, GFP_KERNEL); mem = kzalloc(total, GFP_KERNEL);
if (mem == NULL) if (mem == NULL)
...@@ -481,7 +466,7 @@ void *crypto_create_tfm(struct crypto_alg *alg, ...@@ -481,7 +466,7 @@ void *crypto_create_tfm(struct crypto_alg *alg,
tfm = (struct crypto_tfm *)(mem + tfmsize); tfm = (struct crypto_tfm *)(mem + tfmsize);
tfm->__crt_alg = alg; tfm->__crt_alg = alg;
err = frontend->init_tfm(tfm, frontend); err = frontend->init_tfm(tfm);
if (err) if (err)
goto out_free_tfm; goto out_free_tfm;
...@@ -503,6 +488,27 @@ void *crypto_create_tfm(struct crypto_alg *alg, ...@@ -503,6 +488,27 @@ void *crypto_create_tfm(struct crypto_alg *alg,
} }
EXPORT_SYMBOL_GPL(crypto_create_tfm); EXPORT_SYMBOL_GPL(crypto_create_tfm);
struct crypto_alg *crypto_find_alg(const char *alg_name,
const struct crypto_type *frontend,
u32 type, u32 mask)
{
struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
crypto_alg_mod_lookup;
if (frontend) {
type &= frontend->maskclear;
mask &= frontend->maskclear;
type |= frontend->type;
mask |= frontend->maskset;
if (frontend->lookup)
lookup = frontend->lookup;
}
return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);
/* /*
* crypto_alloc_tfm - Locate algorithm and allocate transform * crypto_alloc_tfm - Locate algorithm and allocate transform
* @alg_name: Name of algorithm * @alg_name: Name of algorithm
...@@ -526,21 +532,13 @@ EXPORT_SYMBOL_GPL(crypto_create_tfm); ...@@ -526,21 +532,13 @@ EXPORT_SYMBOL_GPL(crypto_create_tfm);
void *crypto_alloc_tfm(const char *alg_name, void *crypto_alloc_tfm(const char *alg_name,
const struct crypto_type *frontend, u32 type, u32 mask) const struct crypto_type *frontend, u32 type, u32 mask)
{ {
struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
void *tfm; void *tfm;
int err; int err;
type &= frontend->maskclear;
mask &= frontend->maskclear;
type |= frontend->type;
mask |= frontend->maskset;
lookup = frontend->lookup ?: crypto_alg_mod_lookup;
for (;;) { for (;;) {
struct crypto_alg *alg; struct crypto_alg *alg;
alg = lookup(alg_name, type, mask); alg = crypto_find_alg(alg_name, frontend, type, mask);
if (IS_ERR(alg)) { if (IS_ERR(alg)) {
err = PTR_ERR(alg); err = PTR_ERR(alg);
goto err; goto err;
......
...@@ -23,24 +23,36 @@ ...@@ -23,24 +23,36 @@
#include <linux/slab.h> #include <linux/slab.h>
#include <linux/spinlock.h> #include <linux/spinlock.h>
typedef u8 *(*authenc_ahash_t)(struct aead_request *req, unsigned int flags);
struct authenc_instance_ctx { struct authenc_instance_ctx {
struct crypto_spawn auth; struct crypto_ahash_spawn auth;
struct crypto_skcipher_spawn enc; struct crypto_skcipher_spawn enc;
}; };
struct crypto_authenc_ctx { struct crypto_authenc_ctx {
spinlock_t auth_lock; unsigned int reqoff;
struct crypto_hash *auth; struct crypto_ahash *auth;
struct crypto_ablkcipher *enc; struct crypto_ablkcipher *enc;
}; };
struct authenc_request_ctx {
unsigned int cryptlen;
struct scatterlist *sg;
struct scatterlist asg[2];
struct scatterlist cipher[2];
crypto_completion_t complete;
crypto_completion_t update_complete;
char tail[];
};
static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key, static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
unsigned int keylen) unsigned int keylen)
{ {
unsigned int authkeylen; unsigned int authkeylen;
unsigned int enckeylen; unsigned int enckeylen;
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
struct crypto_hash *auth = ctx->auth; struct crypto_ahash *auth = ctx->auth;
struct crypto_ablkcipher *enc = ctx->enc; struct crypto_ablkcipher *enc = ctx->enc;
struct rtattr *rta = (void *)key; struct rtattr *rta = (void *)key;
struct crypto_authenc_key_param *param; struct crypto_authenc_key_param *param;
...@@ -64,11 +76,11 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key, ...@@ -64,11 +76,11 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
authkeylen = keylen - enckeylen; authkeylen = keylen - enckeylen;
crypto_hash_clear_flags(auth, CRYPTO_TFM_REQ_MASK); crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
crypto_hash_set_flags(auth, crypto_aead_get_flags(authenc) & crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc) &
CRYPTO_TFM_REQ_MASK); CRYPTO_TFM_REQ_MASK);
err = crypto_hash_setkey(auth, key, authkeylen); err = crypto_ahash_setkey(auth, key, authkeylen);
crypto_aead_set_flags(authenc, crypto_hash_get_flags(auth) & crypto_aead_set_flags(authenc, crypto_ahash_get_flags(auth) &
CRYPTO_TFM_RES_MASK); CRYPTO_TFM_RES_MASK);
if (err) if (err)
...@@ -103,40 +115,198 @@ static void authenc_chain(struct scatterlist *head, struct scatterlist *sg, ...@@ -103,40 +115,198 @@ static void authenc_chain(struct scatterlist *head, struct scatterlist *sg,
sg_mark_end(head); sg_mark_end(head);
} }
static u8 *crypto_authenc_hash(struct aead_request *req, unsigned int flags, static void authenc_geniv_ahash_update_done(struct crypto_async_request *areq,
struct scatterlist *cipher, int err)
unsigned int cryptlen) {
struct aead_request *req = areq->data;
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
if (err)
goto out;
ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result,
areq_ctx->cryptlen);
ahash_request_set_callback(ahreq, aead_request_flags(req) &
CRYPTO_TFM_REQ_MAY_SLEEP,
areq_ctx->complete, req);
err = crypto_ahash_finup(ahreq);
if (err)
goto out;
scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
areq_ctx->cryptlen,
crypto_aead_authsize(authenc), 1);
out:
aead_request_complete(req, err);
}
static void authenc_geniv_ahash_done(struct crypto_async_request *areq, int err)
{
struct aead_request *req = areq->data;
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
if (err)
goto out;
scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
areq_ctx->cryptlen,
crypto_aead_authsize(authenc), 1);
out:
aead_request_complete(req, err);
}
static void authenc_verify_ahash_update_done(struct crypto_async_request *areq,
int err)
{ {
u8 *ihash;
unsigned int authsize;
struct ablkcipher_request *abreq;
struct aead_request *req = areq->data;
struct crypto_aead *authenc = crypto_aead_reqtfm(req); struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
struct crypto_hash *auth = ctx->auth; struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct hash_desc desc = { struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
.tfm = auth,
.flags = aead_request_flags(req) & flags, if (err)
}; goto out;
u8 *hash = aead_request_ctx(req);
ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result,
areq_ctx->cryptlen);
ahash_request_set_callback(ahreq, aead_request_flags(req) &
CRYPTO_TFM_REQ_MAY_SLEEP,
areq_ctx->complete, req);
err = crypto_ahash_finup(ahreq);
if (err)
goto out;
authsize = crypto_aead_authsize(authenc);
ihash = ahreq->result + authsize;
scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
authsize, 0);
err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG: 0;
if (err)
goto out;
abreq = aead_request_ctx(req);
ablkcipher_request_set_tfm(abreq, ctx->enc);
ablkcipher_request_set_callback(abreq, aead_request_flags(req),
req->base.complete, req->base.data);
ablkcipher_request_set_crypt(abreq, req->src, req->dst,
req->cryptlen, req->iv);
err = crypto_ablkcipher_decrypt(abreq);
out:
aead_request_complete(req, err);
}
static void authenc_verify_ahash_done(struct crypto_async_request *areq,
int err)
{
u8 *ihash;
unsigned int authsize;
struct ablkcipher_request *abreq;
struct aead_request *req = areq->data;
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
if (err)
goto out;
authsize = crypto_aead_authsize(authenc);
ihash = ahreq->result + authsize;
scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
authsize, 0);
err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG: 0;
if (err)
goto out;
abreq = aead_request_ctx(req);
ablkcipher_request_set_tfm(abreq, ctx->enc);
ablkcipher_request_set_callback(abreq, aead_request_flags(req),
req->base.complete, req->base.data);
ablkcipher_request_set_crypt(abreq, req->src, req->dst,
req->cryptlen, req->iv);
err = crypto_ablkcipher_decrypt(abreq);
out:
aead_request_complete(req, err);
}
static u8 *crypto_authenc_ahash_fb(struct aead_request *req, unsigned int flags)
{
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
struct crypto_ahash *auth = ctx->auth;
struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
u8 *hash = areq_ctx->tail;
int err; int err;
hash = (u8 *)ALIGN((unsigned long)hash + crypto_hash_alignmask(auth), hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth),
crypto_hash_alignmask(auth) + 1); crypto_ahash_alignmask(auth) + 1);
ahash_request_set_tfm(ahreq, auth);
spin_lock_bh(&ctx->auth_lock); err = crypto_ahash_init(ahreq);
err = crypto_hash_init(&desc);
if (err) if (err)
goto auth_unlock; return ERR_PTR(err);
ahash_request_set_crypt(ahreq, req->assoc, hash, req->assoclen);
ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
areq_ctx->update_complete, req);
err = crypto_hash_update(&desc, req->assoc, req->assoclen); err = crypto_ahash_update(ahreq);
if (err) if (err)
goto auth_unlock; return ERR_PTR(err);
ahash_request_set_crypt(ahreq, areq_ctx->sg, hash,
areq_ctx->cryptlen);
ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
areq_ctx->complete, req);
err = crypto_hash_update(&desc, cipher, cryptlen); err = crypto_ahash_finup(ahreq);
if (err) if (err)
goto auth_unlock; return ERR_PTR(err);
err = crypto_hash_final(&desc, hash); return hash;
auth_unlock: }
spin_unlock_bh(&ctx->auth_lock);
static u8 *crypto_authenc_ahash(struct aead_request *req, unsigned int flags)
{
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
struct crypto_ahash *auth = ctx->auth;
struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
u8 *hash = areq_ctx->tail;
int err;
hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth),
crypto_ahash_alignmask(auth) + 1);
ahash_request_set_tfm(ahreq, auth);
ahash_request_set_crypt(ahreq, areq_ctx->sg, hash,
areq_ctx->cryptlen);
ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
areq_ctx->complete, req);
err = crypto_ahash_digest(ahreq);
if (err) if (err)
return ERR_PTR(err); return ERR_PTR(err);
...@@ -147,11 +317,15 @@ static int crypto_authenc_genicv(struct aead_request *req, u8 *iv, ...@@ -147,11 +317,15 @@ static int crypto_authenc_genicv(struct aead_request *req, u8 *iv,
unsigned int flags) unsigned int flags)
{ {
struct crypto_aead *authenc = crypto_aead_reqtfm(req); struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct scatterlist *dst = req->dst; struct scatterlist *dst = req->dst;
struct scatterlist cipher[2]; struct scatterlist *assoc = req->assoc;
struct page *dstp; struct scatterlist *cipher = areq_ctx->cipher;
struct scatterlist *asg = areq_ctx->asg;
unsigned int ivsize = crypto_aead_ivsize(authenc); unsigned int ivsize = crypto_aead_ivsize(authenc);
unsigned int cryptlen; unsigned int cryptlen = req->cryptlen;
authenc_ahash_t authenc_ahash_fn = crypto_authenc_ahash_fb;
struct page *dstp;
u8 *vdst; u8 *vdst;
u8 *hash; u8 *hash;
...@@ -163,10 +337,25 @@ static int crypto_authenc_genicv(struct aead_request *req, u8 *iv, ...@@ -163,10 +337,25 @@ static int crypto_authenc_genicv(struct aead_request *req, u8 *iv,
sg_set_buf(cipher, iv, ivsize); sg_set_buf(cipher, iv, ivsize);
authenc_chain(cipher, dst, vdst == iv + ivsize); authenc_chain(cipher, dst, vdst == iv + ivsize);
dst = cipher; dst = cipher;
cryptlen += ivsize;
} }
cryptlen = req->cryptlen + ivsize; if (sg_is_last(assoc)) {
hash = crypto_authenc_hash(req, flags, dst, cryptlen); authenc_ahash_fn = crypto_authenc_ahash;
sg_init_table(asg, 2);
sg_set_page(asg, sg_page(assoc), assoc->length, assoc->offset);
authenc_chain(asg, dst, 0);
dst = asg;
cryptlen += req->assoclen;
}
areq_ctx->cryptlen = cryptlen;
areq_ctx->sg = dst;
areq_ctx->complete = authenc_geniv_ahash_done;
areq_ctx->update_complete = authenc_geniv_ahash_update_done;
hash = authenc_ahash_fn(req, flags);
if (IS_ERR(hash)) if (IS_ERR(hash))
return PTR_ERR(hash); return PTR_ERR(hash);
...@@ -256,22 +445,25 @@ static int crypto_authenc_givencrypt(struct aead_givcrypt_request *req) ...@@ -256,22 +445,25 @@ static int crypto_authenc_givencrypt(struct aead_givcrypt_request *req)
} }
static int crypto_authenc_verify(struct aead_request *req, static int crypto_authenc_verify(struct aead_request *req,
struct scatterlist *cipher, authenc_ahash_t authenc_ahash_fn)
unsigned int cryptlen)
{ {
struct crypto_aead *authenc = crypto_aead_reqtfm(req); struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
u8 *ohash; u8 *ohash;
u8 *ihash; u8 *ihash;
unsigned int authsize; unsigned int authsize;
ohash = crypto_authenc_hash(req, CRYPTO_TFM_REQ_MAY_SLEEP, cipher, areq_ctx->complete = authenc_verify_ahash_done;
cryptlen); areq_ctx->complete = authenc_verify_ahash_update_done;
ohash = authenc_ahash_fn(req, CRYPTO_TFM_REQ_MAY_SLEEP);
if (IS_ERR(ohash)) if (IS_ERR(ohash))
return PTR_ERR(ohash); return PTR_ERR(ohash);
authsize = crypto_aead_authsize(authenc); authsize = crypto_aead_authsize(authenc);
ihash = ohash + authsize; ihash = ohash + authsize;
scatterwalk_map_and_copy(ihash, cipher, cryptlen, authsize, 0); scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
authsize, 0);
return memcmp(ihash, ohash, authsize) ? -EBADMSG: 0; return memcmp(ihash, ohash, authsize) ? -EBADMSG: 0;
} }
...@@ -279,10 +471,14 @@ static int crypto_authenc_iverify(struct aead_request *req, u8 *iv, ...@@ -279,10 +471,14 @@ static int crypto_authenc_iverify(struct aead_request *req, u8 *iv,
unsigned int cryptlen) unsigned int cryptlen)
{ {
struct crypto_aead *authenc = crypto_aead_reqtfm(req); struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct scatterlist *src = req->src; struct scatterlist *src = req->src;
struct scatterlist cipher[2]; struct scatterlist *assoc = req->assoc;
struct page *srcp; struct scatterlist *cipher = areq_ctx->cipher;
struct scatterlist *asg = areq_ctx->asg;
unsigned int ivsize = crypto_aead_ivsize(authenc); unsigned int ivsize = crypto_aead_ivsize(authenc);
authenc_ahash_t authenc_ahash_fn = crypto_authenc_ahash_fb;
struct page *srcp;
u8 *vsrc; u8 *vsrc;
srcp = sg_page(src); srcp = sg_page(src);
...@@ -293,9 +489,22 @@ static int crypto_authenc_iverify(struct aead_request *req, u8 *iv, ...@@ -293,9 +489,22 @@ static int crypto_authenc_iverify(struct aead_request *req, u8 *iv,
sg_set_buf(cipher, iv, ivsize); sg_set_buf(cipher, iv, ivsize);
authenc_chain(cipher, src, vsrc == iv + ivsize); authenc_chain(cipher, src, vsrc == iv + ivsize);
src = cipher; src = cipher;
cryptlen += ivsize;
}
if (sg_is_last(assoc)) {
authenc_ahash_fn = crypto_authenc_ahash;
sg_init_table(asg, 2);
sg_set_page(asg, sg_page(assoc), assoc->length, assoc->offset);
authenc_chain(asg, src, 0);
src = asg;
cryptlen += req->assoclen;
} }
return crypto_authenc_verify(req, src, cryptlen + ivsize); areq_ctx->cryptlen = cryptlen;
areq_ctx->sg = src;
return crypto_authenc_verify(req, authenc_ahash_fn);
} }
static int crypto_authenc_decrypt(struct aead_request *req) static int crypto_authenc_decrypt(struct aead_request *req)
...@@ -326,38 +535,41 @@ static int crypto_authenc_decrypt(struct aead_request *req) ...@@ -326,38 +535,41 @@ static int crypto_authenc_decrypt(struct aead_request *req)
static int crypto_authenc_init_tfm(struct crypto_tfm *tfm) static int crypto_authenc_init_tfm(struct crypto_tfm *tfm)
{ {
struct crypto_instance *inst = (void *)tfm->__crt_alg; struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
struct authenc_instance_ctx *ictx = crypto_instance_ctx(inst); struct authenc_instance_ctx *ictx = crypto_instance_ctx(inst);
struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm); struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm);
struct crypto_hash *auth; struct crypto_ahash *auth;
struct crypto_ablkcipher *enc; struct crypto_ablkcipher *enc;
int err; int err;
auth = crypto_spawn_hash(&ictx->auth); auth = crypto_spawn_ahash(&ictx->auth);
if (IS_ERR(auth)) if (IS_ERR(auth))
return PTR_ERR(auth); return PTR_ERR(auth);
ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth) +
crypto_ahash_alignmask(auth),
crypto_ahash_alignmask(auth) + 1);
enc = crypto_spawn_skcipher(&ictx->enc); enc = crypto_spawn_skcipher(&ictx->enc);
err = PTR_ERR(enc); err = PTR_ERR(enc);
if (IS_ERR(enc)) if (IS_ERR(enc))
goto err_free_hash; goto err_free_ahash;
ctx->auth = auth; ctx->auth = auth;
ctx->enc = enc; ctx->enc = enc;
tfm->crt_aead.reqsize = max_t(unsigned int, tfm->crt_aead.reqsize = max_t(unsigned int,
(crypto_hash_alignmask(auth) & crypto_ahash_reqsize(auth) + ctx->reqoff +
~(crypto_tfm_ctx_alignment() - 1)) + sizeof(struct authenc_request_ctx) +
crypto_hash_digestsize(auth) * 2, sizeof(struct ahash_request),
sizeof(struct skcipher_givcrypt_request) + sizeof(struct skcipher_givcrypt_request) +
crypto_ablkcipher_reqsize(enc) + crypto_ablkcipher_reqsize(enc) +
crypto_ablkcipher_ivsize(enc)); crypto_ablkcipher_ivsize(enc));
spin_lock_init(&ctx->auth_lock);
return 0; return 0;
err_free_hash: err_free_ahash:
crypto_free_hash(auth); crypto_free_ahash(auth);
return err; return err;
} }
...@@ -365,7 +577,7 @@ static void crypto_authenc_exit_tfm(struct crypto_tfm *tfm) ...@@ -365,7 +577,7 @@ static void crypto_authenc_exit_tfm(struct crypto_tfm *tfm)
{ {
struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm); struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm);
crypto_free_hash(ctx->auth); crypto_free_ahash(ctx->auth);
crypto_free_ablkcipher(ctx->enc); crypto_free_ablkcipher(ctx->enc);
} }
...@@ -373,7 +585,8 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb) ...@@ -373,7 +585,8 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
{ {
struct crypto_attr_type *algt; struct crypto_attr_type *algt;
struct crypto_instance *inst; struct crypto_instance *inst;
struct crypto_alg *auth; struct hash_alg_common *auth;
struct crypto_alg *auth_base;
struct crypto_alg *enc; struct crypto_alg *enc;
struct authenc_instance_ctx *ctx; struct authenc_instance_ctx *ctx;
const char *enc_name; const char *enc_name;
...@@ -387,11 +600,13 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb) ...@@ -387,11 +600,13 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return ERR_PTR(-EINVAL); return ERR_PTR(-EINVAL);
auth = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH, auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
CRYPTO_ALG_TYPE_HASH_MASK); CRYPTO_ALG_TYPE_AHASH_MASK);
if (IS_ERR(auth)) if (IS_ERR(auth))
return ERR_PTR(PTR_ERR(auth)); return ERR_PTR(PTR_ERR(auth));
auth_base = &auth->base;
enc_name = crypto_attr_alg_name(tb[2]); enc_name = crypto_attr_alg_name(tb[2]);
err = PTR_ERR(enc_name); err = PTR_ERR(enc_name);
if (IS_ERR(enc_name)) if (IS_ERR(enc_name))
...@@ -404,7 +619,7 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb) ...@@ -404,7 +619,7 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
ctx = crypto_instance_ctx(inst); ctx = crypto_instance_ctx(inst);
err = crypto_init_spawn(&ctx->auth, auth, inst, CRYPTO_ALG_TYPE_MASK); err = crypto_init_ahash_spawn(&ctx->auth, auth, inst);
if (err) if (err)
goto err_free_inst; goto err_free_inst;
...@@ -419,28 +634,25 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb) ...@@ -419,28 +634,25 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
err = -ENAMETOOLONG; err = -ENAMETOOLONG;
if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
"authenc(%s,%s)", auth->cra_name, enc->cra_name) >= "authenc(%s,%s)", auth_base->cra_name, enc->cra_name) >=
CRYPTO_MAX_ALG_NAME) CRYPTO_MAX_ALG_NAME)
goto err_drop_enc; goto err_drop_enc;
if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
"authenc(%s,%s)", auth->cra_driver_name, "authenc(%s,%s)", auth_base->cra_driver_name,
enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
goto err_drop_enc; goto err_drop_enc;
inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC; inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC;
inst->alg.cra_priority = enc->cra_priority * 10 + auth->cra_priority; inst->alg.cra_priority = enc->cra_priority *
10 + auth_base->cra_priority;
inst->alg.cra_blocksize = enc->cra_blocksize; inst->alg.cra_blocksize = enc->cra_blocksize;
inst->alg.cra_alignmask = auth->cra_alignmask | enc->cra_alignmask; inst->alg.cra_alignmask = auth_base->cra_alignmask | enc->cra_alignmask;
inst->alg.cra_type = &crypto_aead_type; inst->alg.cra_type = &crypto_aead_type;
inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize; inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ? inst->alg.cra_aead.maxauthsize = auth->digestsize;
auth->cra_hash.digestsize :
auth->cra_type ?
__crypto_shash_alg(auth)->digestsize :
auth->cra_digest.dia_digestsize;
inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx); inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx);
...@@ -453,13 +665,13 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb) ...@@ -453,13 +665,13 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
inst->alg.cra_aead.givencrypt = crypto_authenc_givencrypt; inst->alg.cra_aead.givencrypt = crypto_authenc_givencrypt;
out: out:
crypto_mod_put(auth); crypto_mod_put(auth_base);
return inst; return inst;
err_drop_enc: err_drop_enc:
crypto_drop_skcipher(&ctx->enc); crypto_drop_skcipher(&ctx->enc);
err_drop_auth: err_drop_auth:
crypto_drop_spawn(&ctx->auth); crypto_drop_ahash(&ctx->auth);
err_free_inst: err_free_inst:
kfree(inst); kfree(inst);
out_put_auth: out_put_auth:
...@@ -472,7 +684,7 @@ static void crypto_authenc_free(struct crypto_instance *inst) ...@@ -472,7 +684,7 @@ static void crypto_authenc_free(struct crypto_instance *inst)
struct authenc_instance_ctx *ctx = crypto_instance_ctx(inst); struct authenc_instance_ctx *ctx = crypto_instance_ctx(inst);
crypto_drop_skcipher(&ctx->enc); crypto_drop_skcipher(&ctx->enc);
crypto_drop_spawn(&ctx->auth); crypto_drop_ahash(&ctx->auth);
kfree(inst); kfree(inst);
} }
......
...@@ -39,6 +39,11 @@ struct cryptd_instance_ctx { ...@@ -39,6 +39,11 @@ struct cryptd_instance_ctx {
struct cryptd_queue *queue; struct cryptd_queue *queue;
}; };
struct hashd_instance_ctx {
struct crypto_shash_spawn spawn;
struct cryptd_queue *queue;
};
struct cryptd_blkcipher_ctx { struct cryptd_blkcipher_ctx {
struct crypto_blkcipher *child; struct crypto_blkcipher *child;
}; };
...@@ -48,11 +53,12 @@ struct cryptd_blkcipher_request_ctx { ...@@ -48,11 +53,12 @@ struct cryptd_blkcipher_request_ctx {
}; };
struct cryptd_hash_ctx { struct cryptd_hash_ctx {
struct crypto_hash *child; struct crypto_shash *child;
}; };
struct cryptd_hash_request_ctx { struct cryptd_hash_request_ctx {
crypto_completion_t complete; crypto_completion_t complete;
struct shash_desc desc;
}; };
static void cryptd_queue_worker(struct work_struct *work); static void cryptd_queue_worker(struct work_struct *work);
...@@ -249,32 +255,24 @@ static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm) ...@@ -249,32 +255,24 @@ static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
crypto_free_blkcipher(ctx->child); crypto_free_blkcipher(ctx->child);
} }
static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg, static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
struct cryptd_queue *queue) unsigned int tail)
{ {
char *p;
struct crypto_instance *inst; struct crypto_instance *inst;
struct cryptd_instance_ctx *ctx;
int err; int err;
inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
if (!inst) { if (!p)
inst = ERR_PTR(-ENOMEM); return ERR_PTR(-ENOMEM);
goto out;
} inst = (void *)(p + head);
err = -ENAMETOOLONG; err = -ENAMETOOLONG;
if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
"cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
goto out_free_inst; goto out_free_inst;
ctx = crypto_instance_ctx(inst);
err = crypto_init_spawn(&ctx->spawn, alg, inst,
CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
if (err)
goto out_free_inst;
ctx->queue = queue;
memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
inst->alg.cra_priority = alg->cra_priority + 50; inst->alg.cra_priority = alg->cra_priority + 50;
...@@ -282,29 +280,41 @@ static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg, ...@@ -282,29 +280,41 @@ static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
inst->alg.cra_alignmask = alg->cra_alignmask; inst->alg.cra_alignmask = alg->cra_alignmask;
out: out:
return inst; return p;
out_free_inst: out_free_inst:
kfree(inst); kfree(p);
inst = ERR_PTR(err); p = ERR_PTR(err);
goto out; goto out;
} }
static struct crypto_instance *cryptd_alloc_blkcipher( static int cryptd_create_blkcipher(struct crypto_template *tmpl,
struct rtattr **tb, struct cryptd_queue *queue) struct rtattr **tb,
struct cryptd_queue *queue)
{ {
struct cryptd_instance_ctx *ctx;
struct crypto_instance *inst; struct crypto_instance *inst;
struct crypto_alg *alg; struct crypto_alg *alg;
int err;
alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER, alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
CRYPTO_ALG_TYPE_MASK); CRYPTO_ALG_TYPE_MASK);
if (IS_ERR(alg)) if (IS_ERR(alg))
return ERR_CAST(alg); return PTR_ERR(alg);
inst = cryptd_alloc_instance(alg, queue); inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
err = PTR_ERR(inst);
if (IS_ERR(inst)) if (IS_ERR(inst))
goto out_put_alg; goto out_put_alg;
ctx = crypto_instance_ctx(inst);
ctx->queue = queue;
err = crypto_init_spawn(&ctx->spawn, alg, inst,
CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
if (err)
goto out_free_inst;
inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC; inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
inst->alg.cra_type = &crypto_ablkcipher_type; inst->alg.cra_type = &crypto_ablkcipher_type;
...@@ -323,26 +333,34 @@ static struct crypto_instance *cryptd_alloc_blkcipher( ...@@ -323,26 +333,34 @@ static struct crypto_instance *cryptd_alloc_blkcipher(
inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue; inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue; inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;
err = crypto_register_instance(tmpl, inst);
if (err) {
crypto_drop_spawn(&ctx->spawn);
out_free_inst:
kfree(inst);
}
out_put_alg: out_put_alg:
crypto_mod_put(alg); crypto_mod_put(alg);
return inst; return err;
} }
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm) static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{ {
struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst); struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
struct crypto_spawn *spawn = &ictx->spawn; struct crypto_shash_spawn *spawn = &ictx->spawn;
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm); struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
struct crypto_hash *cipher; struct crypto_shash *hash;
cipher = crypto_spawn_hash(spawn); hash = crypto_spawn_shash(spawn);
if (IS_ERR(cipher)) if (IS_ERR(hash))
return PTR_ERR(cipher); return PTR_ERR(hash);
ctx->child = cipher; ctx->child = hash;
tfm->crt_ahash.reqsize = crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
sizeof(struct cryptd_hash_request_ctx); sizeof(struct cryptd_hash_request_ctx) +
crypto_shash_descsize(hash));
return 0; return 0;
} }
...@@ -350,22 +368,22 @@ static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm) ...@@ -350,22 +368,22 @@ static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{ {
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm); struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
crypto_free_hash(ctx->child); crypto_free_shash(ctx->child);
} }
static int cryptd_hash_setkey(struct crypto_ahash *parent, static int cryptd_hash_setkey(struct crypto_ahash *parent,
const u8 *key, unsigned int keylen) const u8 *key, unsigned int keylen)
{ {
struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent); struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
struct crypto_hash *child = ctx->child; struct crypto_shash *child = ctx->child;
int err; int err;
crypto_hash_clear_flags(child, CRYPTO_TFM_REQ_MASK); crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_hash_set_flags(child, crypto_ahash_get_flags(parent) & crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
CRYPTO_TFM_REQ_MASK); CRYPTO_TFM_REQ_MASK);
err = crypto_hash_setkey(child, key, keylen); err = crypto_shash_setkey(child, key, keylen);
crypto_ahash_set_flags(parent, crypto_hash_get_flags(child) & crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
CRYPTO_TFM_RES_MASK); CRYPTO_TFM_RES_MASK);
return err; return err;
} }
...@@ -385,21 +403,19 @@ static int cryptd_hash_enqueue(struct ahash_request *req, ...@@ -385,21 +403,19 @@ static int cryptd_hash_enqueue(struct ahash_request *req,
static void cryptd_hash_init(struct crypto_async_request *req_async, int err) static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{ {
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
struct crypto_hash *child = ctx->child; struct crypto_shash *child = ctx->child;
struct ahash_request *req = ahash_request_cast(req_async); struct ahash_request *req = ahash_request_cast(req_async);
struct cryptd_hash_request_ctx *rctx; struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
struct hash_desc desc; struct shash_desc *desc = &rctx->desc;
rctx = ahash_request_ctx(req);
if (unlikely(err == -EINPROGRESS)) if (unlikely(err == -EINPROGRESS))
goto out; goto out;
desc.tfm = child; desc->tfm = child;
desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP; desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
err = crypto_hash_crt(child)->init(&desc); err = crypto_shash_init(desc);
req->base.complete = rctx->complete; req->base.complete = rctx->complete;
...@@ -416,23 +432,15 @@ static int cryptd_hash_init_enqueue(struct ahash_request *req) ...@@ -416,23 +432,15 @@ static int cryptd_hash_init_enqueue(struct ahash_request *req)
static void cryptd_hash_update(struct crypto_async_request *req_async, int err) static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{ {
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); struct ahash_request *req = ahash_request_cast(req_async);
struct crypto_hash *child = ctx->child;
struct ahash_request *req = ahash_request_cast(req_async);
struct cryptd_hash_request_ctx *rctx; struct cryptd_hash_request_ctx *rctx;
struct hash_desc desc;
rctx = ahash_request_ctx(req); rctx = ahash_request_ctx(req);
if (unlikely(err == -EINPROGRESS)) if (unlikely(err == -EINPROGRESS))
goto out; goto out;
desc.tfm = child; err = shash_ahash_update(req, &rctx->desc);
desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
err = crypto_hash_crt(child)->update(&desc,
req->src,
req->nbytes);
req->base.complete = rctx->complete; req->base.complete = rctx->complete;
...@@ -449,21 +457,13 @@ static int cryptd_hash_update_enqueue(struct ahash_request *req) ...@@ -449,21 +457,13 @@ static int cryptd_hash_update_enqueue(struct ahash_request *req)
static void cryptd_hash_final(struct crypto_async_request *req_async, int err) static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{ {
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); struct ahash_request *req = ahash_request_cast(req_async);
struct crypto_hash *child = ctx->child; struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
struct ahash_request *req = ahash_request_cast(req_async);
struct cryptd_hash_request_ctx *rctx;
struct hash_desc desc;
rctx = ahash_request_ctx(req);
if (unlikely(err == -EINPROGRESS)) if (unlikely(err == -EINPROGRESS))
goto out; goto out;
desc.tfm = child; err = crypto_shash_final(&rctx->desc, req->result);
desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
err = crypto_hash_crt(child)->final(&desc, req->result);
req->base.complete = rctx->complete; req->base.complete = rctx->complete;
...@@ -478,26 +478,44 @@ static int cryptd_hash_final_enqueue(struct ahash_request *req) ...@@ -478,26 +478,44 @@ static int cryptd_hash_final_enqueue(struct ahash_request *req)
return cryptd_hash_enqueue(req, cryptd_hash_final); return cryptd_hash_enqueue(req, cryptd_hash_final);
} }
static void cryptd_hash_digest(struct crypto_async_request *req_async, int err) static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{ {
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); struct ahash_request *req = ahash_request_cast(req_async);
struct crypto_hash *child = ctx->child; struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
struct ahash_request *req = ahash_request_cast(req_async);
struct cryptd_hash_request_ctx *rctx;
struct hash_desc desc;
rctx = ahash_request_ctx(req); if (unlikely(err == -EINPROGRESS))
goto out;
err = shash_ahash_finup(req, &rctx->desc);
req->base.complete = rctx->complete;
out:
local_bh_disable();
rctx->complete(&req->base, err);
local_bh_enable();
}
static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
return cryptd_hash_enqueue(req, cryptd_hash_finup);
}
static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
struct crypto_shash *child = ctx->child;
struct ahash_request *req = ahash_request_cast(req_async);
struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
struct shash_desc *desc = &rctx->desc;
if (unlikely(err == -EINPROGRESS)) if (unlikely(err == -EINPROGRESS))
goto out; goto out;
desc.tfm = child; desc->tfm = child;
desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP; desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
err = crypto_hash_crt(child)->digest(&desc, err = shash_ahash_digest(req, desc);
req->src,
req->nbytes,
req->result);
req->base.complete = rctx->complete; req->base.complete = rctx->complete;
...@@ -512,64 +530,108 @@ static int cryptd_hash_digest_enqueue(struct ahash_request *req) ...@@ -512,64 +530,108 @@ static int cryptd_hash_digest_enqueue(struct ahash_request *req)
return cryptd_hash_enqueue(req, cryptd_hash_digest); return cryptd_hash_enqueue(req, cryptd_hash_digest);
} }
static struct crypto_instance *cryptd_alloc_hash( static int cryptd_hash_export(struct ahash_request *req, void *out)
struct rtattr **tb, struct cryptd_queue *queue)
{ {
struct crypto_instance *inst; struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
return crypto_shash_export(&rctx->desc, out);
}
static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
return crypto_shash_import(&rctx->desc, in);
}
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
struct cryptd_queue *queue)
{
struct hashd_instance_ctx *ctx;
struct ahash_instance *inst;
struct shash_alg *salg;
struct crypto_alg *alg; struct crypto_alg *alg;
int err;
alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_HASH, salg = shash_attr_alg(tb[1], 0, 0);
CRYPTO_ALG_TYPE_HASH_MASK); if (IS_ERR(salg))
if (IS_ERR(alg)) return PTR_ERR(salg);
return ERR_PTR(PTR_ERR(alg));
inst = cryptd_alloc_instance(alg, queue); alg = &salg->base;
inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
sizeof(*ctx));
err = PTR_ERR(inst);
if (IS_ERR(inst)) if (IS_ERR(inst))
goto out_put_alg; goto out_put_alg;
inst->alg.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC; ctx = ahash_instance_ctx(inst);
inst->alg.cra_type = &crypto_ahash_type; ctx->queue = queue;
inst->alg.cra_ahash.digestsize = alg->cra_hash.digestsize; err = crypto_init_shash_spawn(&ctx->spawn, salg,
inst->alg.cra_ctxsize = sizeof(struct cryptd_hash_ctx); ahash_crypto_instance(inst));
if (err)
goto out_free_inst;
inst->alg.cra_init = cryptd_hash_init_tfm; inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC;
inst->alg.cra_exit = cryptd_hash_exit_tfm;
inst->alg.cra_ahash.init = cryptd_hash_init_enqueue; inst->alg.halg.digestsize = salg->digestsize;
inst->alg.cra_ahash.update = cryptd_hash_update_enqueue; inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);
inst->alg.cra_ahash.final = cryptd_hash_final_enqueue;
inst->alg.cra_ahash.setkey = cryptd_hash_setkey; inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
inst->alg.cra_ahash.digest = cryptd_hash_digest_enqueue; inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;
inst->alg.init = cryptd_hash_init_enqueue;
inst->alg.update = cryptd_hash_update_enqueue;
inst->alg.final = cryptd_hash_final_enqueue;
inst->alg.finup = cryptd_hash_finup_enqueue;
inst->alg.export = cryptd_hash_export;
inst->alg.import = cryptd_hash_import;
inst->alg.setkey = cryptd_hash_setkey;
inst->alg.digest = cryptd_hash_digest_enqueue;
err = ahash_register_instance(tmpl, inst);
if (err) {
crypto_drop_shash(&ctx->spawn);
out_free_inst:
kfree(inst);
}
out_put_alg: out_put_alg:
crypto_mod_put(alg); crypto_mod_put(alg);
return inst; return err;
} }
static struct cryptd_queue queue; static struct cryptd_queue queue;
static struct crypto_instance *cryptd_alloc(struct rtattr **tb) static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{ {
struct crypto_attr_type *algt; struct crypto_attr_type *algt;
algt = crypto_get_attr_type(tb); algt = crypto_get_attr_type(tb);
if (IS_ERR(algt)) if (IS_ERR(algt))
return ERR_CAST(algt); return PTR_ERR(algt);
switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) { switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
case CRYPTO_ALG_TYPE_BLKCIPHER: case CRYPTO_ALG_TYPE_BLKCIPHER:
return cryptd_alloc_blkcipher(tb, &queue); return cryptd_create_blkcipher(tmpl, tb, &queue);
case CRYPTO_ALG_TYPE_DIGEST: case CRYPTO_ALG_TYPE_DIGEST:
return cryptd_alloc_hash(tb, &queue); return cryptd_create_hash(tmpl, tb, &queue);
} }
return ERR_PTR(-EINVAL); return -EINVAL;
} }
static void cryptd_free(struct crypto_instance *inst) static void cryptd_free(struct crypto_instance *inst)
{ {
struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst); struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
case CRYPTO_ALG_TYPE_AHASH:
crypto_drop_shash(&hctx->spawn);
kfree(ahash_instance(inst));
return;
}
crypto_drop_spawn(&ctx->spawn); crypto_drop_spawn(&ctx->spawn);
kfree(inst); kfree(inst);
...@@ -577,7 +639,7 @@ static void cryptd_free(struct crypto_instance *inst) ...@@ -577,7 +639,7 @@ static void cryptd_free(struct crypto_instance *inst)
static struct crypto_template cryptd_tmpl = { static struct crypto_template cryptd_tmpl = {
.name = "cryptd", .name = "cryptd",
.alloc = cryptd_alloc, .create = cryptd_create,
.free = cryptd_free, .free = cryptd_free,
.module = THIS_MODULE, .module = THIS_MODULE,
}; };
...@@ -620,6 +682,41 @@ void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm) ...@@ -620,6 +682,41 @@ void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
} }
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher); EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
u32 type, u32 mask)
{
char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
struct crypto_ahash *tfm;
if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
"cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
return ERR_PTR(-EINVAL);
tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
if (IS_ERR(tfm))
return ERR_CAST(tfm);
if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
crypto_free_ahash(tfm);
return ERR_PTR(-EINVAL);
}
return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);
void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
static int __init cryptd_init(void) static int __init cryptd_init(void)
{ {
int err; int err;
......
...@@ -219,6 +219,8 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb) ...@@ -219,6 +219,8 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt; inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt; inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;
inst->alg.cra_blkcipher.geniv = "chainiv";
out: out:
crypto_mod_put(alg); crypto_mod_put(alg);
return inst; return inst;
......
This diff is collapsed.
/*
* GHASH: digest algorithm for GCM (Galois/Counter Mode).
*
* Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
* Copyright (c) 2009 Intel Corp.
* Author: Huang Ying <ying.huang@intel.com>
*
* The algorithm implementation is copied from gcm.c.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*/
#include <crypto/algapi.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/hash.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
/* GHASH operates on 128-bit blocks and produces a 128-bit digest. */
#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

/* Per-transform (per-key) state. */
struct ghash_ctx {
	/* 4k multiplication table for the hash subkey H; allocated by
	 * ghash_setkey(), NULL until a key has been set. */
	struct gf128mul_4k *gf128;
};

/* Per-request state, kept in the shash descriptor. */
struct ghash_desc_ctx {
	u8 buffer[GHASH_BLOCK_SIZE];	/* running state XOR buffered input */
	u32 bytes;			/* bytes still missing from a partial block */
};
static int ghash_init(struct shash_desc *desc)
{
struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
memset(dctx, 0, sizeof(*dctx));
return 0;
}
static int ghash_setkey(struct crypto_shash *tfm,
const u8 *key, unsigned int keylen)
{
struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
if (keylen != GHASH_BLOCK_SIZE) {
crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
return -EINVAL;
}
if (ctx->gf128)
gf128mul_free_4k(ctx->gf128);
ctx->gf128 = gf128mul_init_4k_lle((be128 *)key);
if (!ctx->gf128)
return -ENOMEM;
return 0;
}
static int ghash_update(struct shash_desc *desc,
const u8 *src, unsigned int srclen)
{
struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
u8 *dst = dctx->buffer;
if (dctx->bytes) {
int n = min(srclen, dctx->bytes);
u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
dctx->bytes -= n;
srclen -= n;
while (n--)
*pos++ ^= *src++;
if (!dctx->bytes)
gf128mul_4k_lle((be128 *)dst, ctx->gf128);
}
while (srclen >= GHASH_BLOCK_SIZE) {
crypto_xor(dst, src, GHASH_BLOCK_SIZE);
gf128mul_4k_lle((be128 *)dst, ctx->gf128);
src += GHASH_BLOCK_SIZE;
srclen -= GHASH_BLOCK_SIZE;
}
if (srclen) {
dctx->bytes = GHASH_BLOCK_SIZE - srclen;
while (srclen--)
*dst++ ^= *src++;
}
return 0;
}
/*
 * ghash_flush - fold any buffered partial block into the digest state.
 *
 * GHASH zero-pads a trailing partial block.  The state buffer already
 * holds previous-state-XOR-data for the bytes received, and since XOR
 * with zero is the identity, the untouched tail bytes are already
 * correct as-is (the old explicit "*tmp++ ^= 0" padding loop was a
 * no-op and has been removed).  Only the GF(2^128) multiply by H is
 * needed before resetting the partial-block counter.
 */
static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
{
	u8 *dst = dctx->buffer;

	if (dctx->bytes)
		gf128mul_4k_lle((be128 *)dst, ctx->gf128);

	dctx->bytes = 0;
}
/*
 * ghash_final - flush remaining input and emit the 16-byte digest.
 *
 * @dst must have room for GHASH_DIGEST_SIZE bytes.
 *
 * Returns 0, or -ENOKEY if no key has been set (ghash_flush() would
 * otherwise multiply with a NULL table and oops).
 */
static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *buf = dctx->buffer;

	if (!ctx->gf128)
		return -ENOKEY;

	ghash_flush(ctx, dctx);
	memcpy(dst, buf, GHASH_BLOCK_SIZE);

	return 0;
}
static void ghash_exit_tfm(struct crypto_tfm *tfm)
{
struct ghash_ctx *ctx = crypto_tfm_ctx(tfm);
if (ctx->gf128)
gf128mul_free_4k(ctx->gf128);
}
/* Generic table-driven GHASH, registered as a synchronous hash (shash). */
static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.final		= ghash_final,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "ghash",
		.cra_driver_name	= "ghash-generic",
		/* Low priority so accelerated implementations win. */
		.cra_priority		= 100,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct ghash_ctx),
		.cra_module		= THIS_MODULE,
		.cra_list		= LIST_HEAD_INIT(ghash_alg.base.cra_list),
		.cra_exit		= ghash_exit_tfm,
	},
};
/* Module entry point: register the "ghash" algorithm with the crypto core. */
static int __init ghash_mod_init(void)
{
	int err;

	err = crypto_register_shash(&ghash_alg);

	return err;
}
/* Module exit point: unregister the "ghash" algorithm. */
static void __exit ghash_mod_exit(void)
{
	crypto_unregister_shash(&ghash_alg);
}
module_init(ghash_mod_init);
module_exit(ghash_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH Message Digest Algorithm");
/* Allow auto-loading when an algorithm named "ghash" is requested. */
MODULE_ALIAS("ghash");
...@@ -27,7 +27,7 @@ ...@@ -27,7 +27,7 @@
#include <linux/string.h> #include <linux/string.h>
struct hmac_ctx { struct hmac_ctx {
struct crypto_hash *child; struct crypto_shash *hash;
}; };
static inline void *align_ptr(void *p, unsigned int align) static inline void *align_ptr(void *p, unsigned int align)
...@@ -35,65 +35,45 @@ static inline void *align_ptr(void *p, unsigned int align) ...@@ -35,65 +35,45 @@ static inline void *align_ptr(void *p, unsigned int align)
return (void *)ALIGN((unsigned long)p, align); return (void *)ALIGN((unsigned long)p, align);
} }
static inline struct hmac_ctx *hmac_ctx(struct crypto_hash *tfm) static inline struct hmac_ctx *hmac_ctx(struct crypto_shash *tfm)
{ {
return align_ptr(crypto_hash_ctx_aligned(tfm) + return align_ptr(crypto_shash_ctx_aligned(tfm) +
crypto_hash_blocksize(tfm) * 2 + crypto_shash_statesize(tfm) * 2,
crypto_hash_digestsize(tfm), sizeof(void *)); crypto_tfm_ctx_alignment());
} }
static int hmac_setkey(struct crypto_hash *parent, static int hmac_setkey(struct crypto_shash *parent,
const u8 *inkey, unsigned int keylen) const u8 *inkey, unsigned int keylen)
{ {
int bs = crypto_hash_blocksize(parent); int bs = crypto_shash_blocksize(parent);
int ds = crypto_hash_digestsize(parent); int ds = crypto_shash_digestsize(parent);
char *ipad = crypto_hash_ctx_aligned(parent); int ss = crypto_shash_statesize(parent);
char *opad = ipad + bs; char *ipad = crypto_shash_ctx_aligned(parent);
char *digest = opad + bs; char *opad = ipad + ss;
struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *)); struct hmac_ctx *ctx = align_ptr(opad + ss,
struct crypto_hash *tfm = ctx->child; crypto_tfm_ctx_alignment());
struct crypto_shash *hash = ctx->hash;
struct {
struct shash_desc shash;
char ctx[crypto_shash_descsize(hash)];
} desc;
unsigned int i; unsigned int i;
desc.shash.tfm = hash;
desc.shash.flags = crypto_shash_get_flags(parent) &
CRYPTO_TFM_REQ_MAY_SLEEP;
if (keylen > bs) { if (keylen > bs) {
struct hash_desc desc;
struct scatterlist tmp;
int tmplen;
int err; int err;
desc.tfm = tfm; err = crypto_shash_digest(&desc.shash, inkey, keylen, ipad);
desc.flags = crypto_hash_get_flags(parent);
desc.flags &= CRYPTO_TFM_REQ_MAY_SLEEP;
err = crypto_hash_init(&desc);
if (err) if (err)
return err; return err;
tmplen = bs * 2 + ds;
sg_init_one(&tmp, ipad, tmplen);
for (; keylen > tmplen; inkey += tmplen, keylen -= tmplen) {
memcpy(ipad, inkey, tmplen);
err = crypto_hash_update(&desc, &tmp, tmplen);
if (err)
return err;
}
if (keylen) {
memcpy(ipad, inkey, keylen);
err = crypto_hash_update(&desc, &tmp, keylen);
if (err)
return err;
}
err = crypto_hash_final(&desc, digest);
if (err)
return err;
inkey = digest;
keylen = ds; keylen = ds;
} } else
memcpy(ipad, inkey, keylen);
memcpy(ipad, inkey, keylen);
memset(ipad + keylen, 0, bs - keylen); memset(ipad + keylen, 0, bs - keylen);
memcpy(opad, ipad, bs); memcpy(opad, ipad, bs);
...@@ -102,184 +82,178 @@ static int hmac_setkey(struct crypto_hash *parent, ...@@ -102,184 +82,178 @@ static int hmac_setkey(struct crypto_hash *parent,
opad[i] ^= 0x5c; opad[i] ^= 0x5c;
} }
return 0; return crypto_shash_init(&desc.shash) ?:
crypto_shash_update(&desc.shash, ipad, bs) ?:
crypto_shash_export(&desc.shash, ipad) ?:
crypto_shash_init(&desc.shash) ?:
crypto_shash_update(&desc.shash, opad, bs) ?:
crypto_shash_export(&desc.shash, opad);
} }
static int hmac_init(struct hash_desc *pdesc) static int hmac_export(struct shash_desc *pdesc, void *out)
{ {
struct crypto_hash *parent = pdesc->tfm; struct shash_desc *desc = shash_desc_ctx(pdesc);
int bs = crypto_hash_blocksize(parent);
int ds = crypto_hash_digestsize(parent);
char *ipad = crypto_hash_ctx_aligned(parent);
struct hmac_ctx *ctx = align_ptr(ipad + bs * 2 + ds, sizeof(void *));
struct hash_desc desc;
struct scatterlist tmp;
int err;
desc.tfm = ctx->child; desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
sg_init_one(&tmp, ipad, bs);
err = crypto_hash_init(&desc); return crypto_shash_export(desc, out);
if (unlikely(err))
return err;
return crypto_hash_update(&desc, &tmp, bs);
} }
static int hmac_update(struct hash_desc *pdesc, static int hmac_import(struct shash_desc *pdesc, const void *in)
struct scatterlist *sg, unsigned int nbytes)
{ {
struct shash_desc *desc = shash_desc_ctx(pdesc);
struct hmac_ctx *ctx = hmac_ctx(pdesc->tfm); struct hmac_ctx *ctx = hmac_ctx(pdesc->tfm);
struct hash_desc desc;
desc.tfm = ctx->child; desc->tfm = ctx->hash;
desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP; desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
return crypto_hash_update(&desc, sg, nbytes); return crypto_shash_import(desc, in);
} }
static int hmac_final(struct hash_desc *pdesc, u8 *out) static int hmac_init(struct shash_desc *pdesc)
{ {
struct crypto_hash *parent = pdesc->tfm; return hmac_import(pdesc, crypto_shash_ctx_aligned(pdesc->tfm));
int bs = crypto_hash_blocksize(parent); }
int ds = crypto_hash_digestsize(parent);
char *opad = crypto_hash_ctx_aligned(parent) + bs;
char *digest = opad + bs;
struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *));
struct hash_desc desc;
struct scatterlist tmp;
int err;
desc.tfm = ctx->child; static int hmac_update(struct shash_desc *pdesc,
desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP; const u8 *data, unsigned int nbytes)
sg_init_one(&tmp, opad, bs + ds); {
struct shash_desc *desc = shash_desc_ctx(pdesc);
err = crypto_hash_final(&desc, digest); desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
if (unlikely(err))
return err;
return crypto_hash_digest(&desc, &tmp, bs + ds, out); return crypto_shash_update(desc, data, nbytes);
} }
static int hmac_digest(struct hash_desc *pdesc, struct scatterlist *sg, static int hmac_final(struct shash_desc *pdesc, u8 *out)
unsigned int nbytes, u8 *out)
{ {
struct crypto_hash *parent = pdesc->tfm; struct crypto_shash *parent = pdesc->tfm;
int bs = crypto_hash_blocksize(parent); int ds = crypto_shash_digestsize(parent);
int ds = crypto_hash_digestsize(parent); int ss = crypto_shash_statesize(parent);
char *ipad = crypto_hash_ctx_aligned(parent); char *opad = crypto_shash_ctx_aligned(parent) + ss;
char *opad = ipad + bs; struct shash_desc *desc = shash_desc_ctx(pdesc);
char *digest = opad + bs;
struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *));
struct hash_desc desc;
struct scatterlist sg1[2];
struct scatterlist sg2[1];
int err;
desc.tfm = ctx->child; desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
sg_init_table(sg1, 2); return crypto_shash_final(desc, out) ?:
sg_set_buf(sg1, ipad, bs); crypto_shash_import(desc, opad) ?:
scatterwalk_sg_chain(sg1, 2, sg); crypto_shash_finup(desc, out, ds, out);
}
sg_init_table(sg2, 1); static int hmac_finup(struct shash_desc *pdesc, const u8 *data,
sg_set_buf(sg2, opad, bs + ds); unsigned int nbytes, u8 *out)
{
err = crypto_hash_digest(&desc, sg1, nbytes + bs, digest); struct crypto_shash *parent = pdesc->tfm;
if (unlikely(err)) int ds = crypto_shash_digestsize(parent);
return err; int ss = crypto_shash_statesize(parent);
char *opad = crypto_shash_ctx_aligned(parent) + ss;
struct shash_desc *desc = shash_desc_ctx(pdesc);
return crypto_hash_digest(&desc, sg2, bs + ds, out); desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
return crypto_shash_finup(desc, data, nbytes, out) ?:
crypto_shash_import(desc, opad) ?:
crypto_shash_finup(desc, out, ds, out);
} }
static int hmac_init_tfm(struct crypto_tfm *tfm) static int hmac_init_tfm(struct crypto_tfm *tfm)
{ {
struct crypto_hash *hash; struct crypto_shash *parent = __crypto_shash_cast(tfm);
struct crypto_shash *hash;
struct crypto_instance *inst = (void *)tfm->__crt_alg; struct crypto_instance *inst = (void *)tfm->__crt_alg;
struct crypto_spawn *spawn = crypto_instance_ctx(inst); struct crypto_shash_spawn *spawn = crypto_instance_ctx(inst);
struct hmac_ctx *ctx = hmac_ctx(__crypto_hash_cast(tfm)); struct hmac_ctx *ctx = hmac_ctx(parent);
hash = crypto_spawn_hash(spawn); hash = crypto_spawn_shash(spawn);
if (IS_ERR(hash)) if (IS_ERR(hash))
return PTR_ERR(hash); return PTR_ERR(hash);
ctx->child = hash; parent->descsize = sizeof(struct shash_desc) +
crypto_shash_descsize(hash);
ctx->hash = hash;
return 0; return 0;
} }
static void hmac_exit_tfm(struct crypto_tfm *tfm) static void hmac_exit_tfm(struct crypto_tfm *tfm)
{ {
struct hmac_ctx *ctx = hmac_ctx(__crypto_hash_cast(tfm)); struct hmac_ctx *ctx = hmac_ctx(__crypto_shash_cast(tfm));
crypto_free_hash(ctx->child); crypto_free_shash(ctx->hash);
} }
static void hmac_free(struct crypto_instance *inst) static int hmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{ {
crypto_drop_spawn(crypto_instance_ctx(inst)); struct shash_instance *inst;
kfree(inst);
}
static struct crypto_instance *hmac_alloc(struct rtattr **tb)
{
struct crypto_instance *inst;
struct crypto_alg *alg; struct crypto_alg *alg;
struct shash_alg *salg;
int err; int err;
int ds; int ds;
int ss;
err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_HASH); err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
if (err) if (err)
return ERR_PTR(err); return err;
alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_HASH, salg = shash_attr_alg(tb[1], 0, 0);
CRYPTO_ALG_TYPE_HASH_MASK); if (IS_ERR(salg))
if (IS_ERR(alg)) return PTR_ERR(salg);
return ERR_CAST(alg);
err = -EINVAL;
inst = ERR_PTR(-EINVAL); ds = salg->digestsize;
ds = alg->cra_type == &crypto_hash_type ? ss = salg->statesize;
alg->cra_hash.digestsize : alg = &salg->base;
alg->cra_type ? if (ds > alg->cra_blocksize ||
__crypto_shash_alg(alg)->digestsize : ss < alg->cra_blocksize)
alg->cra_digest.dia_digestsize;
if (ds > alg->cra_blocksize)
goto out_put_alg; goto out_put_alg;
inst = crypto_alloc_instance("hmac", alg); inst = shash_alloc_instance("hmac", alg);
err = PTR_ERR(inst);
if (IS_ERR(inst)) if (IS_ERR(inst))
goto out_put_alg; goto out_put_alg;
inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH; err = crypto_init_shash_spawn(shash_instance_ctx(inst), salg,
inst->alg.cra_priority = alg->cra_priority; shash_crypto_instance(inst));
inst->alg.cra_blocksize = alg->cra_blocksize; if (err)
inst->alg.cra_alignmask = alg->cra_alignmask; goto out_free_inst;
inst->alg.cra_type = &crypto_hash_type;
inst->alg.base.cra_priority = alg->cra_priority;
inst->alg.cra_hash.digestsize = ds; inst->alg.base.cra_blocksize = alg->cra_blocksize;
inst->alg.base.cra_alignmask = alg->cra_alignmask;
inst->alg.cra_ctxsize = sizeof(struct hmac_ctx) +
ALIGN(inst->alg.cra_blocksize * 2 + ds, ss = ALIGN(ss, alg->cra_alignmask + 1);
sizeof(void *)); inst->alg.digestsize = ds;
inst->alg.statesize = ss;
inst->alg.cra_init = hmac_init_tfm;
inst->alg.cra_exit = hmac_exit_tfm; inst->alg.base.cra_ctxsize = sizeof(struct hmac_ctx) +
ALIGN(ss * 2, crypto_tfm_ctx_alignment());
inst->alg.cra_hash.init = hmac_init;
inst->alg.cra_hash.update = hmac_update; inst->alg.base.cra_init = hmac_init_tfm;
inst->alg.cra_hash.final = hmac_final; inst->alg.base.cra_exit = hmac_exit_tfm;
inst->alg.cra_hash.digest = hmac_digest;
inst->alg.cra_hash.setkey = hmac_setkey; inst->alg.init = hmac_init;
inst->alg.update = hmac_update;
inst->alg.final = hmac_final;
inst->alg.finup = hmac_finup;
inst->alg.export = hmac_export;
inst->alg.import = hmac_import;
inst->alg.setkey = hmac_setkey;
err = shash_register_instance(tmpl, inst);
if (err) {
out_free_inst:
shash_free_instance(shash_crypto_instance(inst));
}
out_put_alg: out_put_alg:
crypto_mod_put(alg); crypto_mod_put(alg);
return inst; return err;
} }
static struct crypto_template hmac_tmpl = { static struct crypto_template hmac_tmpl = {
.name = "hmac", .name = "hmac",
.alloc = hmac_alloc, .create = hmac_create,
.free = hmac_free, .free = shash_free_instance,
.module = THIS_MODULE, .module = THIS_MODULE,
}; };
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment