crypto: arm64/ghash - Use API partial block handling

Use the Crypto API partial block handling.

Also remove the unnecessary SIMD fallback path.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Author: Herbert Xu
Date: 2025-04-18 10:58:54 +08:00
parent 08bcb69135
commit 9a7c987fb9
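
For context on the conversion: with the Crypto API's partial block handling, the driver's update() only ever consumes whole blocks and returns the number of trailing bytes it did not process, and the API core, rather than the driver, buffers that tail and hands it to finup(). The stand-alone sketch below models that division of labour; toy_update(), toy_finup() and the XOR stand-in for the GHASH block function are invented for illustration and are not part of this patch or of the kernel API.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLK 16

/* Driver-style update: consume whole blocks only, return the leftover count. */
static unsigned int toy_update(uint8_t digest[BLK], const uint8_t *src,
			       unsigned int len)
{
	while (len >= BLK) {
		for (int i = 0; i < BLK; i++)
			digest[i] ^= src[i];	/* stand-in for the GHASH block function */
		src += BLK;
		len -= BLK;
	}
	return len;	/* bytes the caller (the API core) must buffer */
}

/* Driver-style finup: zero-pad at most one final partial block, emit the digest. */
static void toy_finup(uint8_t digest[BLK], const uint8_t *src,
		      unsigned int len, uint8_t out[BLK])
{
	if (len) {
		uint8_t buf[BLK] = { 0 };

		memcpy(buf, src, len);
		toy_update(digest, buf, BLK);
	}
	memcpy(out, digest, BLK);
}

int main(void)
{
	uint8_t digest[BLK] = { 0 }, out[BLK], msg[37];
	unsigned int rem;

	memset(msg, 0xab, sizeof(msg));

	/* The caller plays the role of the API core: feed the bulk data,
	 * then hand whatever update() did not consume to finup(). */
	rem = toy_update(digest, msg, sizeof(msg));
	toy_finup(digest, msg + sizeof(msg) - rem, rem, out);

	for (int i = 0; i < BLK; i++)
		printf("%02x", out[i]);
	printf("\n");
	return 0;
}

This is why ghash_update() in the diff below can drop the buf/count bookkeeping and simply return the leftover length, while ghash_finup() only ever has to zero-pad a single final block.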


@@ -6,30 +6,27 @@
*/
#include <asm/neon.h>
#include <asm/simd.h>
#include <linux/unaligned.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/algapi.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/ghash.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/unaligned.h>
MODULE_DESCRIPTION("GHASH and AES-GCM using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");
#define GHASH_BLOCK_SIZE 16
#define GHASH_DIGEST_SIZE 16
#define RFC4106_NONCE_SIZE 4
struct ghash_key {
@@ -37,10 +34,8 @@ struct ghash_key {
u64 h[][2];
};
struct ghash_desc_ctx {
struct arm_ghash_desc_ctx {
u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
u8 buf[GHASH_BLOCK_SIZE];
u32 count;
};
struct gcm_aes_ctx {
@@ -65,36 +60,12 @@ asmlinkage int pmull_gcm_decrypt(int bytes, u8 dst[], const u8 src[],
static int ghash_init(struct shash_desc *desc)
{
struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc);
*ctx = (struct ghash_desc_ctx){};
*ctx = (struct arm_ghash_desc_ctx){};
return 0;
}
static void ghash_do_update(int blocks, u64 dg[], const char *src,
struct ghash_key *key, const char *head)
{
be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };
do {
const u8 *in = src;
if (head) {
in = head;
blocks++;
head = NULL;
} else {
src += GHASH_BLOCK_SIZE;
}
crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
gf128mul_lle(&dst, &key->k);
} while (--blocks);
dg[0] = be64_to_cpu(dst.b);
dg[1] = be64_to_cpu(dst.a);
}
static __always_inline
void ghash_do_simd_update(int blocks, u64 dg[], const char *src,
struct ghash_key *key, const char *head,
@@ -103,13 +74,9 @@ void ghash_do_simd_update(int blocks, u64 dg[], const char *src,
u64 const h[][2],
const char *head))
{
if (likely(crypto_simd_usable())) {
kernel_neon_begin();
simd_update(blocks, dg, src, key->h, head);
kernel_neon_end();
} else {
ghash_do_update(blocks, dg, src, key, head);
}
}
/* avoid hogging the CPU for too long */
@@ -118,63 +85,61 @@ void ghash_do_simd_update(int blocks, u64 dg[], const char *src,
static int ghash_update(struct shash_desc *desc, const u8 *src,
unsigned int len)
{
struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
ctx->count += len;
if ((partial + len) >= GHASH_BLOCK_SIZE) {
struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc);
struct ghash_key *key = crypto_shash_ctx(desc->tfm);
int blocks;
if (partial) {
int p = GHASH_BLOCK_SIZE - partial;
memcpy(ctx->buf + partial, src, p);
src += p;
len -= p;
}
blocks = len / GHASH_BLOCK_SIZE;
len %= GHASH_BLOCK_SIZE;
len -= blocks * GHASH_BLOCK_SIZE;
do {
int chunk = min(blocks, MAX_BLOCKS);
ghash_do_simd_update(chunk, ctx->digest, src, key,
partial ? ctx->buf : NULL,
ghash_do_simd_update(chunk, ctx->digest, src, key, NULL,
pmull_ghash_update_p8);
blocks -= chunk;
src += chunk * GHASH_BLOCK_SIZE;
partial = 0;
} while (unlikely(blocks > 0));
}
if (len)
memcpy(ctx->buf + partial, src, len);
return 0;
return len;
}
static int ghash_final(struct shash_desc *desc, u8 *dst)
static int ghash_export(struct shash_desc *desc, void *out)
{
struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc);
u8 *dst = out;
if (partial) {
struct ghash_key *key = crypto_shash_ctx(desc->tfm);
memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
ghash_do_simd_update(1, ctx->digest, ctx->buf, key, NULL,
pmull_ghash_update_p8);
}
put_unaligned_be64(ctx->digest[1], dst);
put_unaligned_be64(ctx->digest[0], dst + 8);
memzero_explicit(ctx, sizeof(*ctx));
return 0;
}
static int ghash_import(struct shash_desc *desc, const void *in)
{
struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc);
const u8 *src = in;
ctx->digest[1] = get_unaligned_be64(src);
ctx->digest[0] = get_unaligned_be64(src + 8);
return 0;
}
static int ghash_finup(struct shash_desc *desc, const u8 *src,
unsigned int len, u8 *dst)
{
struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc);
struct ghash_key *key = crypto_shash_ctx(desc->tfm);
if (len) {
u8 buf[GHASH_BLOCK_SIZE] = {};
memcpy(buf, src, len);
ghash_do_simd_update(1, ctx->digest, buf, key, NULL,
pmull_ghash_update_p8);
memzero_explicit(buf, sizeof(buf));
}
return ghash_export(desc, dst);
}
static void ghash_reflect(u64 h[], const be128 *k)
{
u64 carry = be64_to_cpu(k->a) & BIT(63) ? 1 : 0;
@@ -205,6 +170,7 @@ static struct shash_alg ghash_alg = {
.base.cra_name = "ghash",
.base.cra_driver_name = "ghash-neon",
.base.cra_priority = 150,
.base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY,
.base.cra_blocksize = GHASH_BLOCK_SIZE,
.base.cra_ctxsize = sizeof(struct ghash_key) + sizeof(u64[2]),
.base.cra_module = THIS_MODULE,
@@ -212,9 +178,12 @@ static struct shash_alg ghash_alg = {
.digestsize = GHASH_DIGEST_SIZE,
.init = ghash_init,
.update = ghash_update,
.final = ghash_final,
.finup = ghash_finup,
.setkey = ghash_setkey,
.descsize = sizeof(struct ghash_desc_ctx),
.export = ghash_export,
.import = ghash_import,
.descsize = sizeof(struct arm_ghash_desc_ctx),
.statesize = sizeof(struct ghash_desc_ctx),
};
static int num_rounds(struct crypto_aes_ctx *ctx)