lib/crypto: s390/ghash: Migrate optimized code into library

Remove the "ghash-s390" crypto_shash algorithm, and replace it with an
implementation of ghash_blocks_arch() for the GHASH library.

This enables CPACF-optimized GHASH in the GHASH library.  It also
greatly reduces the amount of s390-specific glue code that is needed,
and it fixes the issue where this GHASH optimization was previously
disabled by default.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20260319061723.1140720-14-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
This commit is contained in:
Eric Biggers 2026-03-18 23:17:14 -07:00
parent af413d71f0
commit efd1d2c8f3
8 changed files with 57 additions and 158 deletions

View File

@ -809,7 +809,6 @@ CONFIG_CRYPTO_USER_API_HASH=m
CONFIG_CRYPTO_USER_API_SKCIPHER=m
CONFIG_CRYPTO_USER_API_RNG=m
CONFIG_CRYPTO_USER_API_AEAD=m
CONFIG_CRYPTO_GHASH_S390=m
CONFIG_CRYPTO_AES_S390=m
CONFIG_CRYPTO_DES_S390=m
CONFIG_CRYPTO_HMAC_S390=m

View File

@ -794,7 +794,6 @@ CONFIG_CRYPTO_USER_API_HASH=m
CONFIG_CRYPTO_USER_API_SKCIPHER=m
CONFIG_CRYPTO_USER_API_RNG=m
CONFIG_CRYPTO_USER_API_AEAD=m
CONFIG_CRYPTO_GHASH_S390=m
CONFIG_CRYPTO_AES_S390=m
CONFIG_CRYPTO_DES_S390=m
CONFIG_CRYPTO_HMAC_S390=m

View File

@ -2,16 +2,6 @@
menu "Accelerated Cryptographic Algorithms for CPU (s390)"
config CRYPTO_GHASH_S390
tristate "Hash functions: GHASH"
select CRYPTO_HASH
help
GCM GHASH hash function (NIST SP800-38D)
Architecture: s390
It is available as of z196.
config CRYPTO_AES_S390
tristate "Ciphers: AES, modes: ECB, CBC, CTR, XTS, GCM"
select CRYPTO_SKCIPHER

View File

@ -7,7 +7,6 @@ obj-$(CONFIG_CRYPTO_DES_S390) += des_s390.o
obj-$(CONFIG_CRYPTO_AES_S390) += aes_s390.o
obj-$(CONFIG_CRYPTO_PAES_S390) += paes_s390.o
obj-$(CONFIG_S390_PRNG) += prng.o
obj-$(CONFIG_CRYPTO_GHASH_S390) += ghash_s390.o
obj-$(CONFIG_CRYPTO_HMAC_S390) += hmac_s390.o
obj-$(CONFIG_CRYPTO_PHMAC_S390) += phmac_s390.o
obj-y += arch_random.o

View File

@ -1,144 +0,0 @@
// SPDX-License-Identifier: GPL-2.0
/*
* Cryptographic API.
*
* s390 implementation of the GHASH algorithm for GCM (Galois/Counter Mode).
*
* Copyright IBM Corp. 2011
* Author(s): Gerald Schaefer <gerald.schaefer@de.ibm.com>
*/
#include <asm/cpacf.h>
#include <crypto/ghash.h>
#include <crypto/internal/hash.h>
#include <linux/cpufeature.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
/* Transform (tfm) context: the 16-byte GHASH hash key H set via setkey(). */
struct s390_ghash_ctx {
	u8 key[GHASH_BLOCK_SIZE];
};

/*
 * Per-request state.  This layout is passed directly to the CPACF KIMD
 * instruction (see ghash_update()), which expects the 16-byte chaining
 * value (ICV/accumulator) immediately followed by the 16-byte hash key.
 */
struct s390_ghash_desc_ctx {
	u8 icv[GHASH_BLOCK_SIZE];
	u8 key[GHASH_BLOCK_SIZE];
};
/*
 * Start a new GHASH computation: zero the accumulator and copy the
 * hash key from the tfm context into the per-request CPACF parameter
 * block.  Always returns 0.
 */
static int ghash_init(struct shash_desc *desc)
{
	struct s390_ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	struct s390_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memset(dctx, 0, sizeof(*dctx));
	memcpy(dctx->key, ctx->key, GHASH_BLOCK_SIZE);

	return 0;
}
/*
 * Install the GHASH hash key H.  GHASH keys are exactly one block
 * (16 bytes); any other length is rejected with -EINVAL.
 */
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *key, unsigned int keylen)
{
	struct s390_ghash_ctx *ctx = crypto_shash_ctx(tfm);

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	memcpy(ctx->key, key, GHASH_BLOCK_SIZE);

	return 0;
}
/*
 * Absorb all complete blocks of @src via the CPACF KIMD-GHASH function,
 * which updates dctx->icv in place (dctx doubles as the CPACF parameter
 * block).  Returns the number of leftover bytes (< GHASH_BLOCK_SIZE)
 * for the shash core to buffer until more data or finup arrives.
 */
static int ghash_update(struct shash_desc *desc,
			const u8 *src, unsigned int srclen)
{
	struct s390_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	unsigned int n;

	/* Round down to a whole number of 16-byte blocks. */
	n = srclen & ~(GHASH_BLOCK_SIZE - 1);
	cpacf_kimd(CPACF_KIMD_GHASH, dctx, src, n);

	return srclen - n;
}
/*
 * Absorb a final partial block, if any, by zero-padding it to a full
 * block first.  The stack copy of the data is wiped with
 * memzero_explicit() so no input bytes linger on the stack.
 */
static void ghash_flush(struct s390_ghash_desc_ctx *dctx, const u8 *src,
			unsigned int len)
{
	if (len) {
		u8 buf[GHASH_BLOCK_SIZE] = {};

		memcpy(buf, src, len);
		cpacf_kimd(CPACF_KIMD_GHASH, dctx, buf, GHASH_BLOCK_SIZE);
		memzero_explicit(buf, sizeof(buf));
	}
}
/*
 * Finalize: absorb the trailing partial block (if any) and copy the
 * 16-byte accumulator out as the digest.  Always returns 0.
 */
static int ghash_finup(struct shash_desc *desc, const u8 *src,
		       unsigned int len, u8 *dst)
{
	struct s390_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	ghash_flush(dctx, src, len);
	memcpy(dst, dctx->icv, GHASH_BLOCK_SIZE);

	return 0;
}
/*
 * Export the intermediate state.  Only the accumulator is written out;
 * the key is not part of the exported state (import re-derives it from
 * the tfm context).
 */
static int ghash_export(struct shash_desc *desc, void *out)
{
	struct s390_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memcpy(out, dctx->icv, GHASH_DIGEST_SIZE);

	return 0;
}
/*
 * Import previously exported state: restore the accumulator from @in
 * and reload the hash key from the tfm context, since ghash_export()
 * does not include the key in the exported state.
 */
static int ghash_import(struct shash_desc *desc, const void *in)
{
	struct s390_ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	struct s390_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memcpy(dctx->icv, in, GHASH_DIGEST_SIZE);
	memcpy(dctx->key, ctx->key, GHASH_BLOCK_SIZE);

	return 0;
}
/*
 * shash algorithm descriptor for the CPACF-accelerated "ghash".
 * Priority 300 so it is preferred over the generic implementation.
 */
static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.finup		= ghash_finup,
	.setkey		= ghash_setkey,
	.export		= ghash_export,
	.import		= ghash_import,
	/*
	 * Exported state is sized by the generic ghash_desc_ctx so it
	 * stays interchangeable with other "ghash" drivers; the private
	 * descsize is the CPACF parameter block.
	 * NOTE(review): only GHASH_DIGEST_SIZE bytes are actually
	 * written by ghash_export() — confirm statesize convention.
	 */
	.statesize	= sizeof(struct ghash_desc_ctx),
	.descsize	= sizeof(struct s390_ghash_desc_ctx),
	.base		= {
		.cra_name	 = "ghash",
		.cra_driver_name = "ghash-s390",
		.cra_priority	 = 300,
		.cra_flags	 = CRYPTO_AHASH_ALG_BLOCK_ONLY,
		.cra_blocksize	 = GHASH_BLOCK_SIZE,
		.cra_ctxsize	 = sizeof(struct s390_ghash_ctx),
		.cra_module	 = THIS_MODULE,
	},
};
/*
 * Module init: register the shash only if this machine's CPACF
 * implements the KIMD-GHASH function; otherwise bail with -ENODEV.
 */
static int __init ghash_mod_init(void)
{
	if (!cpacf_query_func(CPACF_KIMD, CPACF_KIMD_GHASH))
		return -ENODEV;

	return crypto_register_shash(&ghash_alg);
}
/* Module exit: unregister the algorithm registered in ghash_mod_init(). */
static void __exit ghash_mod_exit(void)
{
	crypto_unregister_shash(&ghash_alg);
}
/* Auto-load only on CPUs with the MSA facility; "ghash" alias lets the
 * crypto API resolve this module by algorithm name. */
module_cpu_feature_match(S390_CPU_FEATURE_MSA, ghash_mod_init);
module_exit(ghash_mod_exit);

MODULE_ALIAS_CRYPTO("ghash");
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH hash function, s390 implementation");

View File

@ -44,7 +44,8 @@ struct ghash_key {
#if defined(CONFIG_CRYPTO_LIB_GF128HASH_ARCH) && defined(CONFIG_PPC64)
/** @htable: GHASH key format used by the POWER8 assembly code */
u64 htable[4][2];
#elif defined(CONFIG_CRYPTO_LIB_GF128HASH_ARCH) && defined(CONFIG_RISCV)
#elif defined(CONFIG_CRYPTO_LIB_GF128HASH_ARCH) && \
(defined(CONFIG_RISCV) || defined(CONFIG_S390))
/** @h_raw: The hash key H, in GHASH format */
u8 h_raw[GHASH_BLOCK_SIZE];
#endif

View File

@ -124,6 +124,7 @@ config CRYPTO_LIB_GF128HASH_ARCH
default y if PPC64 && VSX
default y if RISCV && 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
default y if S390
default y if X86_64
config CRYPTO_LIB_MD5

View File

@ -0,0 +1,54 @@
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
* GHASH optimized using the CP Assist for Cryptographic Functions (CPACF)
*
* Copyright 2026 Google LLC
*/
#include <asm/cpacf.h>
#include <linux/cpufeature.h>
/* True iff the CPU's CPACF implements KIMD-GHASH (set once at init). */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_cpacf_ghash);

/*
 * Expand the raw 16-byte GHASH key into both forms the library may use:
 * POLYVAL form for the generic fallback path, and the unmodified GHASH
 * form that the CPACF instruction consumes.
 */
#define ghash_preparekey_arch ghash_preparekey_arch
static void ghash_preparekey_arch(struct ghash_key *key,
				  const u8 raw_key[GHASH_BLOCK_SIZE])
{
	/* Save key in POLYVAL format for fallback */
	ghash_key_to_polyval(raw_key, &key->h);

	/* Save key in GHASH format for CPACF_KIMD_GHASH */
	memcpy(key->h_raw, raw_key, GHASH_BLOCK_SIZE);
}
/*
 * Fold @nblocks 16-byte blocks of @data into the accumulator @acc.
 * Uses the CPACF KIMD-GHASH instruction when available (probed at
 * module init); otherwise falls back to the generic implementation.
 * The library keeps the accumulator in POLYVAL convention, so it is
 * converted to GHASH convention around the CPACF call and back.
 */
#define ghash_blocks_arch ghash_blocks_arch
static void ghash_blocks_arch(struct polyval_elem *acc,
			      const struct ghash_key *key,
			      const u8 *data, size_t nblocks)
{
	if (static_branch_likely(&have_cpacf_ghash)) {
		/*
		 * CPACF_KIMD_GHASH requires the accumulator and key in a single
		 * buffer, each using the GHASH convention.
		 */
		u8 ctx[2][GHASH_BLOCK_SIZE] __aligned(8);

		polyval_acc_to_ghash(acc, ctx[0]);
		memcpy(ctx[1], key->h_raw, GHASH_BLOCK_SIZE);
		cpacf_kimd(CPACF_KIMD_GHASH, ctx, data,
			   nblocks * GHASH_BLOCK_SIZE);
		ghash_acc_to_polyval(ctx[0], acc);
		/* Wipe the stack copy of the key and chaining value. */
		memzero_explicit(ctx, sizeof(ctx));
	} else {
		ghash_blocks_generic(acc, &key->h, data, nblocks);
	}
}
/*
 * One-time init: enable the CPACF fast path only when the MSA facility
 * is present and CPACF actually implements the KIMD-GHASH function.
 */
#define gf128hash_mod_init_arch gf128hash_mod_init_arch
static void gf128hash_mod_init_arch(void)
{
	if (cpu_have_feature(S390_CPU_FEATURE_MSA) &&
	    cpacf_query_func(CPACF_KIMD, CPACF_KIMD_GHASH))
		static_branch_enable(&have_cpacf_ghash);