crypto: sha256 - support arch-optimized lib and expose through shash

As has been done for various other algorithms, rework the design of the
SHA-256 library to support arch-optimized implementations, and make
crypto/sha256.c expose both generic and arch-optimized shash algorithms
that wrap the library functions.

This allows users of the SHA-256 library functions to take advantage of
the arch-optimized code, and this makes it much simpler to integrate
SHA-256 for each architecture.

Note that sha256_base.h is not used in the new design.  It will be
removed once all the architecture-specific code has been updated.

Move the generic block function into its own module to avoid a circular
dependency from libsha256.ko => sha256-$ARCH.ko => libsha256.ko.

Signed-off-by: Eric Biggers <ebiggers@google.com>

Add export and import functions to maintain existing export format.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
This commit is contained in:
Eric Biggers
2025-04-28 10:00:26 -07:00
committed by Herbert Xu
parent 10a6d72ea3
commit 950e5c8411
11 changed files with 529 additions and 227 deletions

View File

@@ -982,6 +982,7 @@ config CRYPTO_SHA256
tristate "SHA-224 and SHA-256"
select CRYPTO_HASH
select CRYPTO_LIB_SHA256
select CRYPTO_LIB_SHA256_GENERIC
help
SHA-224 and SHA-256 secure hash algorithms (FIPS 180, ISO/IEC 10118-3)

View File

@@ -76,7 +76,8 @@ obj-$(CONFIG_CRYPTO_MD4) += md4.o
obj-$(CONFIG_CRYPTO_MD5) += md5.o
obj-$(CONFIG_CRYPTO_RMD160) += rmd160.o
obj-$(CONFIG_CRYPTO_SHA1) += sha1_generic.o
obj-$(CONFIG_CRYPTO_SHA256) += sha256_generic.o
obj-$(CONFIG_CRYPTO_SHA256) += sha256.o
CFLAGS_sha256.o += -DARCH=$(ARCH)
obj-$(CONFIG_CRYPTO_SHA512) += sha512_generic.o
obj-$(CONFIG_CRYPTO_SHA3) += sha3_generic.o
obj-$(CONFIG_CRYPTO_SM3_GENERIC) += sm3_generic.o

243
crypto/sha256.c Normal file
View File

@@ -0,0 +1,243 @@
// SPDX-License-Identifier: GPL-2.0-or-later
/*
* Crypto API wrapper for the SHA-256 and SHA-224 library functions
*
* Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
* Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
* Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
* SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
*/
#include <crypto/internal/hash.h>
#include <crypto/internal/sha2.h>
#include <linux/kernel.h>
#include <linux/module.h>
/* SHA-224 digest of the zero-length message, for callers that short-circuit
 * empty input. */
const u8 sha224_zero_message_hash[SHA224_DIGEST_SIZE] = {
0xd1, 0x4a, 0x02, 0x8c, 0x2a, 0x3a, 0x2b, 0xc9, 0x47,
0x61, 0x02, 0xbb, 0x28, 0x82, 0x34, 0xc4, 0x15, 0xa2,
0xb0, 0x1f, 0x82, 0x8e, 0xa6, 0x2a, 0xc5, 0xb3, 0xe4,
0x2f
};
EXPORT_SYMBOL_GPL(sha224_zero_message_hash);
/* SHA-256 digest of the zero-length message, for callers that short-circuit
 * empty input. */
const u8 sha256_zero_message_hash[SHA256_DIGEST_SIZE] = {
0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14,
0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24,
0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c,
0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55
};
EXPORT_SYMBOL_GPL(sha256_zero_message_hash);
/* shash .init: start a fresh SHA-256 computation in the descriptor context. */
static int crypto_sha256_init(struct shash_desc *desc)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha256_init(state);
	return 0;
}
/* shash .update backed by the portable-C (generic) SHA-256 library code. */
static int crypto_sha256_update_generic(struct shash_desc *desc, const u8 *data,
					unsigned int len)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha256_update_generic(state, data, len);
	return 0;
}
/* shash .update backed by the (possibly arch-optimized) library entry point. */
static int crypto_sha256_update_arch(struct shash_desc *desc, const u8 *data,
				     unsigned int len)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha256_update(state, data, len);
	return 0;
}
/* shash .final using the generic library code; writes the digest to @out. */
static int crypto_sha256_final_generic(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha256_final_generic(state, out);
	return 0;
}
/* shash .final using the library entry point; writes the digest to @out. */
static int crypto_sha256_final_arch(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha256_final(state, out);
	return 0;
}
/* shash .finup (absorb trailing data, then finalize), generic library code. */
static int crypto_sha256_finup_generic(struct shash_desc *desc, const u8 *data,
				       unsigned int len, u8 *out)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha256_update_generic(state, data, len);
	sha256_final_generic(state, out);
	return 0;
}
/* shash .finup (absorb trailing data, then finalize), library entry points. */
static int crypto_sha256_finup_arch(struct shash_desc *desc, const u8 *data,
				    unsigned int len, u8 *out)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha256_update(state, data, len);
	sha256_final(state, out);
	return 0;
}
/* shash .digest: one-shot init + update + final with the generic code. */
static int crypto_sha256_digest_generic(struct shash_desc *desc, const u8 *data,
					unsigned int len, u8 *out)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha256_init(state);
	sha256_update_generic(state, data, len);
	sha256_final_generic(state, out);
	return 0;
}
/*
 * shash .digest: one-shot hash via the sha256() library entry point.
 * No descriptor state is touched, so @desc is unused here.
 */
static int crypto_sha256_digest_arch(struct shash_desc *desc, const u8 *data,
unsigned int len, u8 *out)
{
sha256(data, len, out);
return 0;
}
/* shash .init: start a fresh SHA-224 computation in the descriptor context. */
static int crypto_sha224_init(struct shash_desc *desc)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha224_init(state);
	return 0;
}
/* shash .final for SHA-224, generic library code. */
static int crypto_sha224_final_generic(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha224_final_generic(state, out);
	return 0;
}
/* shash .final for SHA-224, library entry point. */
static int crypto_sha224_final_arch(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *state = shash_desc_ctx(desc);

	sha224_final(state, out);
	return 0;
}
/*
 * Import state in the preexisting export format: a raw struct sha256_state
 * whose count field excludes the partial final block, followed by one byte
 * giving the partial-block length.  Restore the full byte count by adding
 * that byte back.  Mirrors crypto_sha256_export_lib().
 */
static int crypto_sha256_import_lib(struct shash_desc *desc, const void *in)
{
struct sha256_state *sctx = shash_desc_ctx(desc);
const u8 *p = in;
memcpy(sctx, p, sizeof(*sctx));
p += sizeof(*sctx);
/* add the partial-block length back into the total count */
sctx->count += *p;
return 0;
}
/*
 * Export state in the preexisting export format (see the statesize fields):
 * a copy of struct sha256_state with count rounded down to a block boundary,
 * followed by one byte holding the partial-block length.  Works on a local
 * copy so the live descriptor state is not modified.
 */
static int crypto_sha256_export_lib(struct shash_desc *desc, void *out)
{
struct sha256_state *sctx0 = shash_desc_ctx(desc);
struct sha256_state sctx = *sctx0;
unsigned int partial;
u8 *p = out;
partial = sctx.count % SHA256_BLOCK_SIZE;
/* exported count covers only whole blocks; partial goes in the trailer */
sctx.count -= partial;
memcpy(p, &sctx, sizeof(sctx));
p += sizeof(sctx);
*p = partial;
return 0;
}
/*
 * Algorithm table: the first half are the generic (priority 100) variants,
 * the second half the arch (priority 300) variants.  The arch entries are
 * intended to be registered only when sha256_is_arch_optimized() — see
 * crypto_sha256_mod_init(), which relies on this generic/arch split.
 * All entries share the library import/export format helpers so the
 * preexisting export format is maintained.
 */
static struct shash_alg algs[] = {
{
/* sha256, generic C implementation */
.base.cra_name = "sha256",
.base.cra_driver_name = "sha256-generic",
.base.cra_priority = 100,
.base.cra_blocksize = SHA256_BLOCK_SIZE,
.base.cra_module = THIS_MODULE,
.digestsize = SHA256_DIGEST_SIZE,
.init = crypto_sha256_init,
.update = crypto_sha256_update_generic,
.final = crypto_sha256_final_generic,
.finup = crypto_sha256_finup_generic,
.digest = crypto_sha256_digest_generic,
.descsize = sizeof(struct sha256_state),
.statesize = sizeof(struct crypto_sha256_state) +
SHA256_BLOCK_SIZE + 1,
.import = crypto_sha256_import_lib,
.export = crypto_sha256_export_lib,
},
{
/* sha224, generic C implementation (shares the sha256 update path) */
.base.cra_name = "sha224",
.base.cra_driver_name = "sha224-generic",
.base.cra_priority = 100,
.base.cra_blocksize = SHA224_BLOCK_SIZE,
.base.cra_module = THIS_MODULE,
.digestsize = SHA224_DIGEST_SIZE,
.init = crypto_sha224_init,
.update = crypto_sha256_update_generic,
.final = crypto_sha224_final_generic,
.descsize = sizeof(struct sha256_state),
.statesize = sizeof(struct crypto_sha256_state) +
SHA256_BLOCK_SIZE + 1,
.import = crypto_sha256_import_lib,
.export = crypto_sha256_export_lib,
},
{
/* sha256, arch-optimized library implementation */
.base.cra_name = "sha256",
.base.cra_driver_name = "sha256-" __stringify(ARCH),
.base.cra_priority = 300,
.base.cra_blocksize = SHA256_BLOCK_SIZE,
.base.cra_module = THIS_MODULE,
.digestsize = SHA256_DIGEST_SIZE,
.init = crypto_sha256_init,
.update = crypto_sha256_update_arch,
.final = crypto_sha256_final_arch,
.finup = crypto_sha256_finup_arch,
.digest = crypto_sha256_digest_arch,
.descsize = sizeof(struct sha256_state),
.statesize = sizeof(struct crypto_sha256_state) +
SHA256_BLOCK_SIZE + 1,
.import = crypto_sha256_import_lib,
.export = crypto_sha256_export_lib,
},
{
/* sha224, arch-optimized library implementation */
.base.cra_name = "sha224",
.base.cra_driver_name = "sha224-" __stringify(ARCH),
.base.cra_priority = 300,
.base.cra_blocksize = SHA224_BLOCK_SIZE,
.base.cra_module = THIS_MODULE,
.digestsize = SHA224_DIGEST_SIZE,
.init = crypto_sha224_init,
.update = crypto_sha256_update_arch,
.final = crypto_sha224_final_arch,
.descsize = sizeof(struct sha256_state),
.statesize = sizeof(struct crypto_sha256_state) +
SHA256_BLOCK_SIZE + 1,
.import = crypto_sha256_import_lib,
.export = crypto_sha256_export_lib,
},
};
/* Number of algorithms actually registered; mod_exit must match this. */
static unsigned int num_algs;

static int __init crypto_sha256_mod_init(void)
{
	/* register the arch flavours only if they differ from generic */
	num_algs = ARRAY_SIZE(algs);
	BUILD_BUG_ON(ARRAY_SIZE(algs) % 2 != 0);
	if (!sha256_is_arch_optimized())
		num_algs /= 2;
	/*
	 * Bug fix: register num_algs entries, not ARRAY_SIZE(algs).
	 * Registering the full array ignored the computed count, so the
	 * arch variants were registered even without arch optimization,
	 * and mod_exit (which unregisters only num_algs) would leak the
	 * remaining registrations.
	 */
	return crypto_register_shashes(algs, num_algs);
}
subsys_initcall(crypto_sha256_mod_init);
/* Unregister exactly the num_algs entries that crypto_sha256_mod_init()
 * intended to register (generic only, or generic + arch). */
static void __exit crypto_sha256_mod_exit(void)
{
crypto_unregister_shashes(algs, num_algs);
}
module_exit(crypto_sha256_mod_exit);
/* Module metadata and crypto aliases for both generic and arch drivers. */
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Crypto API wrapper for the SHA-256 and SHA-224 library functions");
MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-generic");
MODULE_ALIAS_CRYPTO("sha256-" __stringify(ARCH));
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-generic");
MODULE_ALIAS_CRYPTO("sha224-" __stringify(ARCH));

View File

@@ -1,102 +0,0 @@
// SPDX-License-Identifier: GPL-2.0-or-later
/*
* Crypto API wrapper for the generic SHA256 code from lib/crypto/sha256.c
*
* Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
* Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
* Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
* SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
*/
#include <crypto/internal/hash.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/kernel.h>
#include <linux/module.h>
/* (removed file) SHA-224 digest of the empty message; moved to crypto/sha256.c. */
const u8 sha224_zero_message_hash[SHA224_DIGEST_SIZE] = {
0xd1, 0x4a, 0x02, 0x8c, 0x2a, 0x3a, 0x2b, 0xc9, 0x47,
0x61, 0x02, 0xbb, 0x28, 0x82, 0x34, 0xc4, 0x15, 0xa2,
0xb0, 0x1f, 0x82, 0x8e, 0xa6, 0x2a, 0xc5, 0xb3, 0xe4,
0x2f
};
EXPORT_SYMBOL_GPL(sha224_zero_message_hash);
/* (removed file) SHA-256 digest of the empty message; moved to crypto/sha256.c. */
const u8 sha256_zero_message_hash[SHA256_DIGEST_SIZE] = {
0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14,
0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24,
0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c,
0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55
};
EXPORT_SYMBOL_GPL(sha256_zero_message_hash);
/* Adapter with the block-function signature expected by the
 * sha256_base_do_* helpers; forwards to the library transform. */
static void sha256_block(struct crypto_sha256_state *sctx, const u8 *input,
int blocks)
{
sha256_transform_blocks(sctx, input, blocks);
}
/* shash .update: buffer/absorb whole blocks via the sha256_base helper. */
static int crypto_sha256_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
return sha256_base_do_update_blocks(desc, data, len, sha256_block);
}
/* shash .finup: absorb trailing data, pad, and emit the digest. */
static int crypto_sha256_finup(struct shash_desc *desc, const u8 *data,
unsigned int len, u8 *hash)
{
sha256_base_do_finup(desc, data, len, sha256_block);
return sha256_base_finish(desc, hash);
}
/* Generic sha256/sha224 shash algorithms built on the sha256_base helpers;
 * superseded by the table in crypto/sha256.c. */
static struct shash_alg sha256_algs[2] = { {
.digestsize = SHA256_DIGEST_SIZE,
.init = sha256_base_init,
.update = crypto_sha256_update,
.finup = crypto_sha256_finup,
.descsize = sizeof(struct crypto_sha256_state),
.base = {
.cra_name = "sha256",
.cra_driver_name= "sha256-generic",
.cra_priority = 100,
.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
CRYPTO_AHASH_ALG_FINUP_MAX,
.cra_blocksize = SHA256_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
}, {
.digestsize = SHA224_DIGEST_SIZE,
.init = sha224_base_init,
.update = crypto_sha256_update,
.finup = crypto_sha256_finup,
.descsize = sizeof(struct crypto_sha256_state),
.base = {
.cra_name = "sha224",
.cra_driver_name= "sha224-generic",
.cra_priority = 100,
.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
CRYPTO_AHASH_ALG_FINUP_MAX,
.cra_blocksize = SHA224_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
} };
/* Register both generic algorithms on module load. */
static int __init sha256_generic_mod_init(void)
{
return crypto_register_shashes(sha256_algs, ARRAY_SIZE(sha256_algs));
}
/* Unregister both generic algorithms on module unload. */
static void __exit sha256_generic_mod_fini(void)
{
crypto_unregister_shashes(sha256_algs, ARRAY_SIZE(sha256_algs));
}
/* Module hookup and crypto aliases for the old generic-only driver. */
subsys_initcall(sha256_generic_mod_init);
module_exit(sha256_generic_mod_fini);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-generic");
MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-generic");