get:
Show a patch.

patch:
Partially update a patch.

put:
Update a patch.

GET /api/1.0/patches/2197960/?format=api
HTTP 200 OK
Allow: GET, PUT, PATCH, HEAD, OPTIONS
Content-Type: application/json
Vary: Accept

{
    "id": 2197960,
    "url": "http://patchwork.ozlabs.org/api/1.0/patches/2197960/?format=api",
    "project": {
        "id": 12,
        "url": "http://patchwork.ozlabs.org/api/1.0/projects/12/?format=api",
        "name": "Linux CIFS Client",
        "link_name": "linux-cifs-client",
        "list_id": "linux-cifs.vger.kernel.org",
        "list_email": "linux-cifs@vger.kernel.org",
        "web_url": "",
        "scm_url": "",
        "webscm_url": ""
    },
    "msgid": "<20260218213501.136844-6-ebiggers@kernel.org>",
    "date": "2026-02-18T21:34:51",
    "name": "[05/15] lib/crypto: arm64/aes: Migrate optimized CBC-based MACs into library",
    "commit_ref": null,
    "pull_url": null,
    "state": "new",
    "archived": false,
    "hash": "36967c4587732cdc58aaf3be5029ea813ec9a480",
    "submitter": {
        "id": 74690,
        "url": "http://patchwork.ozlabs.org/api/1.0/people/74690/?format=api",
        "name": "Eric Biggers",
        "email": "ebiggers@kernel.org"
    },
    "delegate": null,
    "mbox": "http://patchwork.ozlabs.org/project/linux-cifs-client/patch/20260218213501.136844-6-ebiggers@kernel.org/mbox/",
    "series": [
        {
            "id": 492621,
            "url": "http://patchwork.ozlabs.org/api/1.0/series/492621/?format=api",
            "date": "2026-02-18T21:34:46",
            "name": "AES-CMAC library",
            "version": 1,
            "mbox": "http://patchwork.ozlabs.org/series/492621/mbox/"
        }
    ],
    "check": "pending",
    "checks": "http://patchwork.ozlabs.org/api/patches/2197960/checks/",
    "tags": {},
    "headers": {
        "Return-Path": "\n <linux-cifs+bounces-9446-incoming=patchwork.ozlabs.org@vger.kernel.org>",
        "X-Original-To": [
            "incoming@patchwork.ozlabs.org",
            "linux-cifs@vger.kernel.org"
        ],
        "Delivered-To": "patchwork-incoming@legolas.ozlabs.org",
        "Authentication-Results": [
            "legolas.ozlabs.org;\n\tdkim=pass (2048-bit key;\n unprotected) header.d=kernel.org header.i=@kernel.org header.a=rsa-sha256\n header.s=k20201202 header.b=MbJq6E7d;\n\tdkim-atps=neutral",
            "legolas.ozlabs.org;\n spf=pass (sender SPF authorized) smtp.mailfrom=vger.kernel.org\n (client-ip=172.234.253.10; helo=sea.lore.kernel.org;\n envelope-from=linux-cifs+bounces-9446-incoming=patchwork.ozlabs.org@vger.kernel.org;\n receiver=patchwork.ozlabs.org)",
            "smtp.subspace.kernel.org;\n\tdkim=pass (2048-bit key) header.d=kernel.org header.i=@kernel.org\n header.b=\"MbJq6E7d\"",
            "smtp.subspace.kernel.org;\n arc=none smtp.client-ip=10.30.226.201"
        ],
        "Received": [
            "from sea.lore.kernel.org (sea.lore.kernel.org [172.234.253.10])\n\t(using TLSv1.3 with cipher TLS_AES_256_GCM_SHA384 (256/256 bits)\n\t key-exchange x25519)\n\t(No client certificate requested)\n\tby legolas.ozlabs.org (Postfix) with ESMTPS id 4fGVJl2hJ9z1xvS\n\tfor <incoming@patchwork.ozlabs.org>; Thu, 19 Feb 2026 08:39:19 +1100 (AEDT)",
            "from smtp.subspace.kernel.org (conduit.subspace.kernel.org\n [100.90.174.1])\n\tby sea.lore.kernel.org (Postfix) with ESMTP id 44060306A508\n\tfor <incoming@patchwork.ozlabs.org>; Wed, 18 Feb 2026 21:37:04 +0000 (UTC)",
            "from localhost.localdomain (localhost.localdomain [127.0.0.1])\n\tby smtp.subspace.kernel.org (Postfix) with ESMTP id 56629335562;\n\tWed, 18 Feb 2026 21:36:53 +0000 (UTC)",
            "from smtp.kernel.org (aws-us-west-2-korg-mail-1.web.codeaurora.org\n [10.30.226.201])\n\t(using TLSv1.2 with cipher ECDHE-RSA-AES256-GCM-SHA384 (256/256 bits))\n\t(No client certificate requested)\n\tby smtp.subspace.kernel.org (Postfix) with ESMTPS id 28B642ECEA3;\n\tWed, 18 Feb 2026 21:36:53 +0000 (UTC)",
            "by smtp.kernel.org (Postfix) with ESMTPSA id 6D0F2C19425;\n\tWed, 18 Feb 2026 21:36:52 +0000 (UTC)"
        ],
        "ARC-Seal": "i=1; a=rsa-sha256; d=subspace.kernel.org; s=arc-20240116;\n\tt=1771450613; cv=none;\n b=WgtBDN0aWJzAauBwygJTD2fDvjlXn7PCPpNOwyWsTArrYy3rHBOQNlNex3H3beyYUot8xz3bhzH0NrmhwOFD4Z1OW0vS8Kgicezo+nKtj72+xEMrt9TfAmCpQXkPq1QOwFa/Rvs2T19DUlzzhOQaxASmVSdFEP1Zeil+ptjCHRs=",
        "ARC-Message-Signature": "i=1; a=rsa-sha256; d=subspace.kernel.org;\n\ts=arc-20240116; t=1771450613; c=relaxed/simple;\n\tbh=0LeGm98KqVExci2vvu6SjzoBtiGH1qcdbzcO5wJ5lqM=;\n\th=From:To:Cc:Subject:Date:Message-ID:In-Reply-To:References:\n\t MIME-Version;\n b=fYQ074nZyze9fFZGhyosLz7IbSoyKRJ8NZfv3pX1y7n1joWjZRlXhNFcCcJXmqnixQyqXgSD1sPxlJ3iuRVzmh+TC5xyEasbFuzhwWR74a2wRwAvpRxZ8iaiN2175DhYFvGqgB4LKmq9z/2SNaW50o3b1YrgoapUwMY8lG9W6j8=",
        "ARC-Authentication-Results": "i=1; smtp.subspace.kernel.org;\n dkim=pass (2048-bit key) header.d=kernel.org header.i=@kernel.org\n header.b=MbJq6E7d; arc=none smtp.client-ip=10.30.226.201",
        "DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/simple; d=kernel.org;\n\ts=k20201202; t=1771450612;\n\tbh=0LeGm98KqVExci2vvu6SjzoBtiGH1qcdbzcO5wJ5lqM=;\n\th=From:To:Cc:Subject:Date:In-Reply-To:References:From;\n\tb=MbJq6E7dirP6hem2BPoJf7hEAikTjbr3hVlsE85ume5etX0r62FaVHsSVLkjOmejW\n\t ePbJxSQYQzLHqpMTY3wp2DNbv2cuwgGCFEbvTT+i5OAfd6zfTcnTRZIBthd++Hxg4f\n\t v118WKqVPhyvA3RuXsBfd70pUtebHZTiHBS+7n8yQzaViumKZ0AtyN5WYOLJreV+p8\n\t SRiVGLkS48KvrIiocik5O6ADvhtnqLVAvUqhc/gsSn7+5km9lnxDE9YBnnfV0fSW4J\n\t ktxlyRbeEbtF7EwfFFFspdwYGJLJgYeLXKIpQT/F2aMwZV3byLAKL1LChfMaHbuxd0\n\t ghbVX/hL6hFfQ==",
        "From": "Eric Biggers <ebiggers@kernel.org>",
        "To": "linux-crypto@vger.kernel.org",
        "Cc": "linux-kernel@vger.kernel.org,\n\tArd Biesheuvel <ardb@kernel.org>,\n\t\"Jason A . Donenfeld\" <Jason@zx2c4.com>,\n\tHerbert Xu <herbert@gondor.apana.org.au>,\n\tlinux-arm-kernel@lists.infradead.org,\n\tlinux-cifs@vger.kernel.org,\n\tlinux-wireless@vger.kernel.org,\n\tEric Biggers <ebiggers@kernel.org>",
        "Subject": "[PATCH 05/15] lib/crypto: arm64/aes: Migrate optimized CBC-based MACs\n into library",
        "Date": "Wed, 18 Feb 2026 13:34:51 -0800",
        "Message-ID": "<20260218213501.136844-6-ebiggers@kernel.org>",
        "X-Mailer": "git-send-email 2.53.0",
        "In-Reply-To": "<20260218213501.136844-1-ebiggers@kernel.org>",
        "References": "<20260218213501.136844-1-ebiggers@kernel.org>",
        "Precedence": "bulk",
        "X-Mailing-List": "linux-cifs@vger.kernel.org",
        "List-Id": "<linux-cifs.vger.kernel.org>",
        "List-Subscribe": "<mailto:linux-cifs+subscribe@vger.kernel.org>",
        "List-Unsubscribe": "<mailto:linux-cifs+unsubscribe@vger.kernel.org>",
        "MIME-Version": "1.0",
        "Content-Transfer-Encoding": "8bit"
    },
    "content": "Instead of exposing the arm64-optimized CMAC, XCBC-MAC, and CBC-MAC code\nvia arm64-specific crypto_shash algorithms, instead just implement the\naes_cbcmac_blocks_arch() library function.  This is much simpler, it\nmakes the corresponding library functions be arm64-optimized, and it\nfixes the longstanding issue where this optimized code was disabled by\ndefault.  The corresponding algorithms still remain available through\ncrypto_shash, but individual architectures no longer need to handle it.\n\nNote that to be compatible with the library using 'size_t' lengths, the\ntype of the return value and 'blocks' parameter to the assembly\nfunctions had to be changed to 'size_t', and the assembly code had to be\nupdated accordingly to use the corresponding 64-bit registers.\n\nSigned-off-by: Eric Biggers <ebiggers@kernel.org>\n---\n arch/arm64/crypto/Kconfig    |   2 +-\n arch/arm64/crypto/aes-glue.c | 213 +----------------------------------\n include/crypto/aes.h         |   9 +-\n lib/crypto/arm64/aes-modes.S |  19 ++--\n lib/crypto/arm64/aes.h       |  48 +++++++-\n 5 files changed, 61 insertions(+), 230 deletions(-)",
    "diff": "diff --git a/arch/arm64/crypto/Kconfig b/arch/arm64/crypto/Kconfig\nindex 81ed892b3b72..82794afaffc9 100644\n--- a/arch/arm64/crypto/Kconfig\n+++ b/arch/arm64/crypto/Kconfig\n@@ -142,11 +142,11 @@ config CRYPTO_AES_ARM64_CE_CCM\n \ttristate \"AEAD cipher: AES in CCM mode (ARMv8 Crypto Extensions)\"\n \tdepends on KERNEL_MODE_NEON\n \tselect CRYPTO_ALGAPI\n \tselect CRYPTO_AES_ARM64_CE_BLK\n \tselect CRYPTO_AEAD\n-\tselect CRYPTO_LIB_AES\n+\tselect CRYPTO_LIB_AES_CBC_MACS\n \thelp\n \t  AEAD cipher: AES cipher algorithms (FIPS-197) with\n \t  CCM (Counter with Cipher Block Chaining-Message Authentication Code)\n \t  authenticated encryption mode (NIST SP800-38C)\n \ndiff --git a/arch/arm64/crypto/aes-glue.c b/arch/arm64/crypto/aes-glue.c\nindex fd7c3a560a71..e1b88f7e2d39 100644\n--- a/arch/arm64/crypto/aes-glue.c\n+++ b/arch/arm64/crypto/aes-glue.c\n@@ -35,11 +35,10 @@\n #define aes_essiv_cbc_decrypt\tce_aes_essiv_cbc_decrypt\n #define aes_ctr_encrypt\t\tce_aes_ctr_encrypt\n #define aes_xctr_encrypt\tce_aes_xctr_encrypt\n #define aes_xts_encrypt\t\tce_aes_xts_encrypt\n #define aes_xts_decrypt\t\tce_aes_xts_decrypt\n-#define aes_mac_update\t\tce_aes_mac_update\n MODULE_DESCRIPTION(\"AES-ECB/CBC/CTR/XTS/XCTR using ARMv8 Crypto Extensions\");\n #else\n #define MODE\t\t\t\"neon\"\n #define PRIO\t\t\t200\n #define aes_ecb_encrypt\t\tneon_aes_ecb_encrypt\n@@ -52,11 +51,10 @@ MODULE_DESCRIPTION(\"AES-ECB/CBC/CTR/XTS/XCTR using ARMv8 Crypto Extensions\");\n #define aes_essiv_cbc_decrypt\tneon_aes_essiv_cbc_decrypt\n #define aes_ctr_encrypt\t\tneon_aes_ctr_encrypt\n #define aes_xctr_encrypt\tneon_aes_xctr_encrypt\n #define aes_xts_encrypt\t\tneon_aes_xts_encrypt\n #define aes_xts_decrypt\t\tneon_aes_xts_decrypt\n-#define aes_mac_update\t\tneon_aes_mac_update\n MODULE_DESCRIPTION(\"AES-ECB/CBC/CTR/XTS/XCTR using ARMv8 NEON\");\n #endif\n #if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)\n MODULE_ALIAS_CRYPTO(\"ecb(aes)\");\n 
MODULE_ALIAS_CRYPTO(\"cbc(aes)\");\n@@ -64,13 +62,10 @@ MODULE_ALIAS_CRYPTO(\"ctr(aes)\");\n MODULE_ALIAS_CRYPTO(\"xts(aes)\");\n MODULE_ALIAS_CRYPTO(\"xctr(aes)\");\n #endif\n MODULE_ALIAS_CRYPTO(\"cts(cbc(aes))\");\n MODULE_ALIAS_CRYPTO(\"essiv(cbc(aes),sha256)\");\n-MODULE_ALIAS_CRYPTO(\"cmac(aes)\");\n-MODULE_ALIAS_CRYPTO(\"xcbc(aes)\");\n-MODULE_ALIAS_CRYPTO(\"cbcmac(aes)\");\n \n MODULE_AUTHOR(\"Ard Biesheuvel <ard.biesheuvel@linaro.org>\");\n MODULE_IMPORT_NS(\"CRYPTO_INTERNAL\");\n MODULE_LICENSE(\"GPL v2\");\n \n@@ -82,19 +77,10 @@ struct crypto_aes_xts_ctx {\n struct crypto_aes_essiv_cbc_ctx {\n \tstruct crypto_aes_ctx key1;\n \tstruct crypto_aes_ctx __aligned(8) key2;\n };\n \n-struct mac_tfm_ctx {\n-\tstruct crypto_aes_ctx key;\n-\tu8 __aligned(8) consts[];\n-};\n-\n-struct mac_desc_ctx {\n-\tu8 dg[AES_BLOCK_SIZE];\n-};\n-\n static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,\n \t\t\t       unsigned int key_len)\n {\n \tstruct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);\n \n@@ -721,215 +707,18 @@ static struct skcipher_alg aes_algs[] = { {\n \t.setkey\t\t= essiv_cbc_set_key,\n \t.encrypt\t= essiv_cbc_encrypt,\n \t.decrypt\t= essiv_cbc_decrypt,\n } };\n \n-static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,\n-\t\t\t unsigned int key_len)\n-{\n-\tstruct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);\n-\n-\treturn aes_expandkey(&ctx->key, in_key, key_len);\n-}\n-\n-static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)\n-{\n-\tu64 a = be64_to_cpu(x->a);\n-\tu64 b = be64_to_cpu(x->b);\n-\n-\ty->a = cpu_to_be64((a << 1) | (b >> 63));\n-\ty->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 
0x87 : 0));\n-}\n-\n-static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,\n-\t\t       unsigned int key_len)\n-{\n-\tstruct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);\n-\tbe128 *consts = (be128 *)ctx->consts;\n-\tint rounds = 6 + key_len / 4;\n-\tint err;\n-\n-\terr = cbcmac_setkey(tfm, in_key, key_len);\n-\tif (err)\n-\t\treturn err;\n-\n-\t/* encrypt the zero vector */\n-\tscoped_ksimd()\n-\t\taes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){},\n-\t\t\t\tctx->key.key_enc, rounds, 1);\n-\n-\tcmac_gf128_mul_by_x(consts, consts);\n-\tcmac_gf128_mul_by_x(consts + 1, consts);\n-\n-\treturn 0;\n-}\n-\n-static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,\n-\t\t       unsigned int key_len)\n-{\n-\tstatic u8 const ks[3][AES_BLOCK_SIZE] = {\n-\t\t{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },\n-\t\t{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },\n-\t\t{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },\n-\t};\n-\n-\tstruct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);\n-\tint rounds = 6 + key_len / 4;\n-\tu8 key[AES_BLOCK_SIZE];\n-\tint err;\n-\n-\terr = cbcmac_setkey(tfm, in_key, key_len);\n-\tif (err)\n-\t\treturn err;\n-\n-\tscoped_ksimd() {\n-\t\taes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1);\n-\t\taes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2);\n-\t}\n-\n-\treturn cbcmac_setkey(tfm, key, sizeof(key));\n-}\n-\n-static int mac_init(struct shash_desc *desc)\n-{\n-\tstruct mac_desc_ctx *ctx = shash_desc_ctx(desc);\n-\n-\tmemset(ctx->dg, 0, AES_BLOCK_SIZE);\n-\treturn 0;\n-}\n-\n-static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,\n-\t\t\t  u8 dg[], int enc_before)\n-{\n-\tint rounds = 6 + ctx->key_length / 4;\n-\tint rem;\n-\n-\tdo {\n-\t\tscoped_ksimd()\n-\t\t\trem = aes_mac_update(in, ctx->key_enc, rounds, blocks,\n-\t\t\t\t\t     dg, enc_before, !enc_before);\n-\t\tin += (blocks - rem) * AES_BLOCK_SIZE;\n-\t\tblocks = rem;\n-\t} while (blocks);\n-}\n-\n-static int mac_update(struct shash_desc *desc, const u8 *p, 
unsigned int len)\n-{\n-\tstruct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);\n-\tstruct mac_desc_ctx *ctx = shash_desc_ctx(desc);\n-\tint blocks = len / AES_BLOCK_SIZE;\n-\n-\tlen %= AES_BLOCK_SIZE;\n-\tmac_do_update(&tctx->key, p, blocks, ctx->dg, 0);\n-\treturn len;\n-}\n-\n-static int cbcmac_finup(struct shash_desc *desc, const u8 *src,\n-\t\t\tunsigned int len, u8 *out)\n-{\n-\tstruct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);\n-\tstruct mac_desc_ctx *ctx = shash_desc_ctx(desc);\n-\n-\tif (len) {\n-\t\tcrypto_xor(ctx->dg, src, len);\n-\t\tmac_do_update(&tctx->key, NULL, 0, ctx->dg, 1);\n-\t}\n-\tmemcpy(out, ctx->dg, AES_BLOCK_SIZE);\n-\treturn 0;\n-}\n-\n-static int cmac_finup(struct shash_desc *desc, const u8 *src, unsigned int len,\n-\t\t      u8 *out)\n-{\n-\tstruct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);\n-\tstruct mac_desc_ctx *ctx = shash_desc_ctx(desc);\n-\tu8 *consts = tctx->consts;\n-\n-\tcrypto_xor(ctx->dg, src, len);\n-\tif (len != AES_BLOCK_SIZE) {\n-\t\tctx->dg[len] ^= 0x80;\n-\t\tconsts += AES_BLOCK_SIZE;\n-\t}\n-\tmac_do_update(&tctx->key, consts, 1, ctx->dg, 0);\n-\tmemcpy(out, ctx->dg, AES_BLOCK_SIZE);\n-\treturn 0;\n-}\n-\n-static struct shash_alg mac_algs[] = { {\n-\t.base.cra_name\t\t= \"cmac(aes)\",\n-\t.base.cra_driver_name\t= \"cmac-aes-\" MODE,\n-\t.base.cra_priority\t= PRIO,\n-\t.base.cra_flags\t\t= CRYPTO_AHASH_ALG_BLOCK_ONLY |\n-\t\t\t\t  CRYPTO_AHASH_ALG_FINAL_NONZERO,\n-\t.base.cra_blocksize\t= AES_BLOCK_SIZE,\n-\t.base.cra_ctxsize\t= sizeof(struct mac_tfm_ctx) +\n-\t\t\t\t  2 * AES_BLOCK_SIZE,\n-\t.base.cra_module\t= THIS_MODULE,\n-\n-\t.digestsize\t\t= AES_BLOCK_SIZE,\n-\t.init\t\t\t= mac_init,\n-\t.update\t\t\t= mac_update,\n-\t.finup\t\t\t= cmac_finup,\n-\t.setkey\t\t\t= cmac_setkey,\n-\t.descsize\t\t= sizeof(struct mac_desc_ctx),\n-}, {\n-\t.base.cra_name\t\t= \"xcbc(aes)\",\n-\t.base.cra_driver_name\t= \"xcbc-aes-\" MODE,\n-\t.base.cra_priority\t= PRIO,\n-\t.base.cra_flags\t\t= CRYPTO_AHASH_ALG_BLOCK_ONLY 
|\n-\t\t\t\t  CRYPTO_AHASH_ALG_FINAL_NONZERO,\n-\t.base.cra_blocksize\t= AES_BLOCK_SIZE,\n-\t.base.cra_ctxsize\t= sizeof(struct mac_tfm_ctx) +\n-\t\t\t\t  2 * AES_BLOCK_SIZE,\n-\t.base.cra_module\t= THIS_MODULE,\n-\n-\t.digestsize\t\t= AES_BLOCK_SIZE,\n-\t.init\t\t\t= mac_init,\n-\t.update\t\t\t= mac_update,\n-\t.finup\t\t\t= cmac_finup,\n-\t.setkey\t\t\t= xcbc_setkey,\n-\t.descsize\t\t= sizeof(struct mac_desc_ctx),\n-}, {\n-\t.base.cra_name\t\t= \"cbcmac(aes)\",\n-\t.base.cra_driver_name\t= \"cbcmac-aes-\" MODE,\n-\t.base.cra_priority\t= PRIO,\n-\t.base.cra_flags\t\t= CRYPTO_AHASH_ALG_BLOCK_ONLY,\n-\t.base.cra_blocksize\t= AES_BLOCK_SIZE,\n-\t.base.cra_ctxsize\t= sizeof(struct mac_tfm_ctx),\n-\t.base.cra_module\t= THIS_MODULE,\n-\n-\t.digestsize\t\t= AES_BLOCK_SIZE,\n-\t.init\t\t\t= mac_init,\n-\t.update\t\t\t= mac_update,\n-\t.finup\t\t\t= cbcmac_finup,\n-\t.setkey\t\t\t= cbcmac_setkey,\n-\t.descsize\t\t= sizeof(struct mac_desc_ctx),\n-} };\n-\n static void aes_exit(void)\n {\n-\tcrypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));\n \tcrypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));\n }\n \n static int __init aes_init(void)\n {\n-\tint err;\n-\n-\terr = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));\n-\tif (err)\n-\t\treturn err;\n-\n-\terr = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));\n-\tif (err)\n-\t\tgoto unregister_ciphers;\n-\n-\treturn 0;\n-\n-unregister_ciphers:\n-\tcrypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));\n-\treturn err;\n+\treturn crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));\n }\n \n #ifdef USE_V8_CRYPTO_EXTENSIONS\n module_cpu_feature_match(AES, aes_init);\n #else\ndiff --git a/include/crypto/aes.h b/include/crypto/aes.h\nindex 91bf4667d3e9..3feb4105c2a2 100644\n--- a/include/crypto/aes.h\n+++ b/include/crypto/aes.h\n@@ -198,13 +198,10 @@ asmlinkage void neon_aes_essiv_cbc_encrypt(u8 out[], u8 const in[],\n \t\t\t\t\t   u32 const rk2[]);\n asmlinkage void 
neon_aes_essiv_cbc_decrypt(u8 out[], u8 const in[],\n \t\t\t\t\t   u32 const rk1[], int rounds,\n \t\t\t\t\t   int blocks, u8 iv[],\n \t\t\t\t\t   u32 const rk2[]);\n-asmlinkage int neon_aes_mac_update(u8 const in[], u32 const rk[], int rounds,\n-\t\t\t\t   int blocks, u8 dg[], int enc_before,\n-\t\t\t\t   int enc_after);\n \n asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],\n \t\t\t\t   int rounds, int blocks);\n asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],\n \t\t\t\t   int rounds, int blocks);\n@@ -231,13 +228,13 @@ asmlinkage void ce_aes_essiv_cbc_encrypt(u8 out[], u8 const in[],\n \t\t\t\t\t u32 const rk1[], int rounds,\n \t\t\t\t\t int blocks, u8 iv[], u32 const rk2[]);\n asmlinkage void ce_aes_essiv_cbc_decrypt(u8 out[], u8 const in[],\n \t\t\t\t\t u32 const rk1[], int rounds,\n \t\t\t\t\t int blocks, u8 iv[], u32 const rk2[]);\n-asmlinkage int ce_aes_mac_update(u8 const in[], u32 const rk[], int rounds,\n-\t\t\t\t int blocks, u8 dg[], int enc_before,\n-\t\t\t\t int enc_after);\n+asmlinkage size_t ce_aes_mac_update(u8 const in[], u32 const rk[], int rounds,\n+\t\t\t\t    size_t blocks, u8 dg[], int enc_before,\n+\t\t\t\t    int enc_after);\n #elif defined(CONFIG_PPC)\n void ppc_expand_key_128(u32 *key_enc, const u8 *key);\n void ppc_expand_key_192(u32 *key_enc, const u8 *key);\n void ppc_expand_key_256(u32 *key_enc, const u8 *key);\n void ppc_generate_decrypt_key(u32 *key_dec, u32 *key_enc, unsigned int key_len);\ndiff --git a/lib/crypto/arm64/aes-modes.S b/lib/crypto/arm64/aes-modes.S\nindex e793478f37c1..fb1332108986 100644\n--- a/lib/crypto/arm64/aes-modes.S\n+++ b/lib/crypto/arm64/aes-modes.S\n@@ -813,54 +813,57 @@ AES_FUNC_START(aes_xts_decrypt)\n \tst1\t\t{v2.16b}, [x4]\t\t\t/* overlapping stores */\n \tmov\t\tw4, wzr\n \tb\t\t.Lxtsdecctsout\n AES_FUNC_END(aes_xts_decrypt)\n \n+#if IS_ENABLED(CONFIG_CRYPTO_LIB_AES_CBC_MACS)\n \t/*\n-\t * aes_mac_update(u8 const in[], u32 const rk[], int 
rounds,\n-\t *\t\t  int blocks, u8 dg[], int enc_before, int enc_after)\n+\t * size_t aes_mac_update(u8 const in[], u32 const rk[], int rounds,\n+\t *\t\t\t size_t blocks, u8 dg[], int enc_before,\n+\t *\t\t\t int enc_after);\n \t */\n AES_FUNC_START(aes_mac_update)\n \tld1\t\t{v0.16b}, [x4]\t\t\t/* get dg */\n \tenc_prepare\tw2, x1, x7\n \tcbz\t\tw5, .Lmacloop4x\n \n \tencrypt_block\tv0, w2, x1, x7, w8\n \n .Lmacloop4x:\n-\tsubs\t\tw3, w3, #4\n+\tsubs\t\tx3, x3, #4\n \tbmi\t\t.Lmac1x\n \tld1\t\t{v1.16b-v4.16b}, [x0], #64\t/* get next pt block */\n \teor\t\tv0.16b, v0.16b, v1.16b\t\t/* ..and xor with dg */\n \tencrypt_block\tv0, w2, x1, x7, w8\n \teor\t\tv0.16b, v0.16b, v2.16b\n \tencrypt_block\tv0, w2, x1, x7, w8\n \teor\t\tv0.16b, v0.16b, v3.16b\n \tencrypt_block\tv0, w2, x1, x7, w8\n \teor\t\tv0.16b, v0.16b, v4.16b\n-\tcmp\t\tw3, wzr\n+\tcmp\t\tx3, xzr\n \tcsinv\t\tw5, w6, wzr, eq\n \tcbz\t\tw5, .Lmacout\n \tencrypt_block\tv0, w2, x1, x7, w8\n \tst1\t\t{v0.16b}, [x4]\t\t\t/* return dg */\n \tcond_yield\t.Lmacout, x7, x8\n \tb\t\t.Lmacloop4x\n .Lmac1x:\n-\tadd\t\tw3, w3, #4\n+\tadd\t\tx3, x3, #4\n .Lmacloop:\n-\tcbz\t\tw3, .Lmacout\n+\tcbz\t\tx3, .Lmacout\n \tld1\t\t{v1.16b}, [x0], #16\t\t/* get next pt block */\n \teor\t\tv0.16b, v0.16b, v1.16b\t\t/* ..and xor with dg */\n \n-\tsubs\t\tw3, w3, #1\n+\tsubs\t\tx3, x3, #1\n \tcsinv\t\tw5, w6, wzr, eq\n \tcbz\t\tw5, .Lmacout\n \n .Lmacenc:\n \tencrypt_block\tv0, w2, x1, x7, w8\n \tb\t\t.Lmacloop\n \n .Lmacout:\n \tst1\t\t{v0.16b}, [x4]\t\t\t/* return dg */\n-\tmov\t\tw0, w3\n+\tmov\t\tx0, x3\n \tret\n AES_FUNC_END(aes_mac_update)\n+#endif /* CONFIG_CRYPTO_LIB_AES_CBC_MACS */\ndiff --git a/lib/crypto/arm64/aes.h b/lib/crypto/arm64/aes.h\nindex 69f465c668f0..78e7b4e5f120 100644\n--- a/lib/crypto/arm64/aes.h\n+++ b/lib/crypto/arm64/aes.h\n@@ -9,10 +9,11 @@\n #include <asm/neon.h>\n #include <asm/simd.h>\n #include <linux/unaligned.h>\n #include <linux/cpufeature.h>\n \n+static __ro_after_init 
DEFINE_STATIC_KEY_FALSE(have_neon);\n static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_aes);\n \n struct aes_block {\n \tu8 b[AES_BLOCK_SIZE];\n };\n@@ -26,10 +27,13 @@ asmlinkage void __aes_ce_encrypt(const u32 rk[], u8 out[AES_BLOCK_SIZE],\n asmlinkage void __aes_ce_decrypt(const u32 inv_rk[], u8 out[AES_BLOCK_SIZE],\n \t\t\t\t const u8 in[AES_BLOCK_SIZE], int rounds);\n asmlinkage u32 __aes_ce_sub(u32 l);\n asmlinkage void __aes_ce_invert(struct aes_block *out,\n \t\t\t\tconst struct aes_block *in);\n+asmlinkage size_t neon_aes_mac_update(u8 const in[], u32 const rk[], int rounds,\n+\t\t\t\t      size_t blocks, u8 dg[], int enc_before,\n+\t\t\t\t      int enc_after);\n \n /*\n  * Expand an AES key using the crypto extensions if supported and usable or\n  * generic code otherwise.  The expanded key format is compatible between the\n  * two cases.  The outputs are @rndkeys (required) and @inv_rndkeys (optional).\n@@ -137,11 +141,10 @@ EXPORT_SYMBOL_NS_GPL(neon_aes_ctr_encrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(neon_aes_xctr_encrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(neon_aes_xts_encrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(neon_aes_xts_decrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(neon_aes_essiv_cbc_encrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(neon_aes_essiv_cbc_decrypt, \"CRYPTO_INTERNAL\");\n-EXPORT_SYMBOL_NS_GPL(neon_aes_mac_update, \"CRYPTO_INTERNAL\");\n \n EXPORT_SYMBOL_NS_GPL(ce_aes_ecb_encrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(ce_aes_ecb_decrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(ce_aes_cbc_encrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(ce_aes_cbc_decrypt, \"CRYPTO_INTERNAL\");\n@@ -151,10 +154,12 @@ EXPORT_SYMBOL_NS_GPL(ce_aes_ctr_encrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(ce_aes_xctr_encrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(ce_aes_xts_encrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(ce_aes_xts_decrypt, \"CRYPTO_INTERNAL\");\n 
EXPORT_SYMBOL_NS_GPL(ce_aes_essiv_cbc_encrypt, \"CRYPTO_INTERNAL\");\n EXPORT_SYMBOL_NS_GPL(ce_aes_essiv_cbc_decrypt, \"CRYPTO_INTERNAL\");\n+#endif\n+#if IS_MODULE(CONFIG_CRYPTO_AES_ARM64_CE_CCM)\n EXPORT_SYMBOL_NS_GPL(ce_aes_mac_update, \"CRYPTO_INTERNAL\");\n #endif\n \n static void aes_encrypt_arch(const struct aes_enckey *key,\n \t\t\t     u8 out[AES_BLOCK_SIZE],\n@@ -182,13 +187,50 @@ static void aes_decrypt_arch(const struct aes_key *key,\n \t\t__aes_arm64_decrypt(key->inv_k.inv_rndkeys, out, in,\n \t\t\t\t    key->nrounds);\n \t}\n }\n \n+#if IS_ENABLED(CONFIG_CRYPTO_LIB_AES_CBC_MACS)\n+#define aes_cbcmac_blocks_arch aes_cbcmac_blocks_arch\n+static bool aes_cbcmac_blocks_arch(u8 h[AES_BLOCK_SIZE],\n+\t\t\t\t   const struct aes_enckey *key, const u8 *data,\n+\t\t\t\t   size_t nblocks, bool enc_before,\n+\t\t\t\t   bool enc_after)\n+{\n+\tif (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&\n+\t    static_branch_likely(&have_neon) && likely(may_use_simd())) {\n+\t\tdo {\n+\t\t\tsize_t rem;\n+\n+\t\t\tscoped_ksimd() {\n+\t\t\t\tif (static_branch_likely(&have_aes))\n+\t\t\t\t\trem = ce_aes_mac_update(\n+\t\t\t\t\t\tdata, key->k.rndkeys,\n+\t\t\t\t\t\tkey->nrounds, nblocks, h,\n+\t\t\t\t\t\tenc_before, enc_after);\n+\t\t\t\telse\n+\t\t\t\t\trem = neon_aes_mac_update(\n+\t\t\t\t\t\tdata, key->k.rndkeys,\n+\t\t\t\t\t\tkey->nrounds, nblocks, h,\n+\t\t\t\t\t\tenc_before, enc_after);\n+\t\t\t}\n+\t\t\tdata += (nblocks - rem) * AES_BLOCK_SIZE;\n+\t\t\tnblocks = rem;\n+\t\t\tenc_before = false;\n+\t\t} while (nblocks);\n+\t\treturn true;\n+\t}\n+\treturn false;\n+}\n+#endif /* CONFIG_CRYPTO_LIB_AES_CBC_MACS */\n+\n #ifdef CONFIG_KERNEL_MODE_NEON\n #define aes_mod_init_arch aes_mod_init_arch\n static void aes_mod_init_arch(void)\n {\n-\tif (cpu_have_named_feature(AES))\n-\t\tstatic_branch_enable(&have_aes);\n+\tif (cpu_have_named_feature(ASIMD)) {\n+\t\tstatic_branch_enable(&have_neon);\n+\t\tif (cpu_have_named_feature(AES))\n+\t\t\tstatic_branch_enable(&have_aes);\n+\t}\n }\n 
#endif /* CONFIG_KERNEL_MODE_NEON */\n",
    "prefixes": [
        "05/15"
    ]
}