Patch Detail

get:
Show a patch.

patch:
Partially update a patch.

put:
Update a patch.
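Any HTTP client can drive these endpoints. The following is a minimal sketch in Python using the requests library: the GET needs no authentication, while the PATCH assumes a Patchwork API token (sent as an "Authorization: Token ..." header, per Patchwork's token-authentication scheme) belonging to a maintainer of the project; TOKEN and the "accepted" state are illustrative placeholders.

    import requests

    BASE = "http://patchwork.ozlabs.org/api"
    TOKEN = None  # set to a real Patchwork API token to try the PATCH call

    # GET: show a patch. Read access needs no authentication.
    resp = requests.get(f"{BASE}/patches/2183222/")
    resp.raise_for_status()
    patch = resp.json()
    print(patch["name"], "->", patch["state"])

    # PATCH: partially update a patch, e.g. move it to another state.
    # Assumes a token with maintainer rights on the project.
    if TOKEN:
        resp = requests.patch(
            f"{BASE}/patches/2183222/",
            headers={"Authorization": f"Token {TOKEN}"},
            json={"state": "accepted"},
        )
        resp.raise_for_status()

The sample request below shows the same GET in full, followed by its unmodified JSON response.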
GET /api/patches/2183222/?format=api
{ "id": 2183222, "url": "http://patchwork.ozlabs.org/api/patches/2183222/?format=api", "web_url": "http://patchwork.ozlabs.org/project/linuxppc-dev/patch/20260112192035.10427-12-ebiggers@kernel.org/", "project": { "id": 2, "url": "http://patchwork.ozlabs.org/api/projects/2/?format=api", "name": "Linux PPC development", "link_name": "linuxppc-dev", "list_id": "linuxppc-dev.lists.ozlabs.org", "list_email": "linuxppc-dev@lists.ozlabs.org", "web_url": "https://github.com/linuxppc/wiki/wiki", "scm_url": "https://git.kernel.org/pub/scm/linux/kernel/git/powerpc/linux.git", "webscm_url": "https://git.kernel.org/pub/scm/linux/kernel/git/powerpc/linux.git/", "list_archive_url": "https://lore.kernel.org/linuxppc-dev/", "list_archive_url_format": "https://lore.kernel.org/linuxppc-dev/{}/", "commit_url_format": "https://git.kernel.org/pub/scm/linux/kernel/git/powerpc/linux.git/commit/?id={}" }, "msgid": "<20260112192035.10427-12-ebiggers@kernel.org>", "list_archive_url": "https://lore.kernel.org/linuxppc-dev/20260112192035.10427-12-ebiggers@kernel.org/", "date": "2026-01-12T19:20:09", "name": "[v2,11/35] lib/crypto: arm64/aes: Migrate optimized code into library", "commit_ref": null, "pull_url": null, "state": "handled-elsewhere", "archived": false, "hash": "0aa882ec6cc59fc69ab675c52e581017ed2a4bac", "submitter": { "id": 74690, "url": "http://patchwork.ozlabs.org/api/people/74690/?format=api", "name": "Eric Biggers", "email": "ebiggers@kernel.org" }, "delegate": null, "mbox": "http://patchwork.ozlabs.org/project/linuxppc-dev/patch/20260112192035.10427-12-ebiggers@kernel.org/mbox/", "series": [ { "id": 488089, "url": "http://patchwork.ozlabs.org/api/series/488089/?format=api", "web_url": "http://patchwork.ozlabs.org/project/linuxppc-dev/list/?series=488089", "date": "2026-01-12T19:19:58", "name": "AES library improvements", "version": 2, "mbox": "http://patchwork.ozlabs.org/series/488089/mbox/" } ], "comments": "http://patchwork.ozlabs.org/api/patches/2183222/comments/", "check": "pending", "checks": "http://patchwork.ozlabs.org/api/patches/2183222/checks/", "tags": {}, "related": [], "headers": { "Return-Path": "\n <linuxppc-dev+bounces-15581-incoming=patchwork.ozlabs.org@lists.ozlabs.org>", "X-Original-To": [ "incoming@patchwork.ozlabs.org", "linuxppc-dev@lists.ozlabs.org" ], "Delivered-To": "patchwork-incoming@legolas.ozlabs.org", "Authentication-Results": [ "legolas.ozlabs.org;\n\tdkim=pass (2048-bit key;\n unprotected) header.d=kernel.org header.i=@kernel.org header.a=rsa-sha256\n header.s=k20201202 header.b=tBGhe1lj;\n\tdkim-atps=neutral", "legolas.ozlabs.org;\n spf=pass (sender SPF authorized) smtp.mailfrom=lists.ozlabs.org\n (client-ip=2404:9400:21b9:f100::1; helo=lists.ozlabs.org;\n envelope-from=linuxppc-dev+bounces-15581-incoming=patchwork.ozlabs.org@lists.ozlabs.org;\n receiver=patchwork.ozlabs.org)", "lists.ozlabs.org;\n arc=none smtp.remote-ip=172.234.252.31", "lists.ozlabs.org;\n dmarc=pass (p=quarantine dis=none) header.from=kernel.org", "lists.ozlabs.org;\n\tdkim=pass (2048-bit key;\n unprotected) header.d=kernel.org header.i=@kernel.org header.a=rsa-sha256\n header.s=k20201202 header.b=tBGhe1lj;\n\tdkim-atps=neutral", "lists.ozlabs.org;\n spf=pass (sender SPF authorized) smtp.mailfrom=kernel.org\n (client-ip=172.234.252.31; helo=sea.source.kernel.org;\n envelope-from=ebiggers@kernel.org; receiver=lists.ozlabs.org)" ], "Received": [ "from lists.ozlabs.org (lists.ozlabs.org\n [IPv6:2404:9400:21b9:f100::1])\n\t(using TLSv1.3 with cipher TLS_AES_256_GCM_SHA384 (256/256 bits)\n\t 
key-exchange x25519)\n\t(No client certificate requested)\n\tby legolas.ozlabs.org (Postfix) with ESMTPS id 4dqj7D3wHVz1xpY\n\tfor <incoming@patchwork.ozlabs.org>; Tue, 13 Jan 2026 06:27:04 +1100 (AEDT)", "from boromir.ozlabs.org (localhost [127.0.0.1])\n\tby lists.ozlabs.org (Postfix) with ESMTP id 4dqj3c5CXwz3c1g;\n\tTue, 13 Jan 2026 06:23:56 +1100 (AEDT)", "from sea.source.kernel.org (sea.source.kernel.org [172.234.252.31])\n\t(using TLSv1.3 with cipher TLS_AES_256_GCM_SHA384 (256/256 bits)\n\t key-exchange x25519)\n\t(No client certificate requested)\n\tby lists.ozlabs.org (Postfix) with ESMTPS id 4dqj3b49Rsz3bx1\n\tfor <linuxppc-dev@lists.ozlabs.org>; Tue, 13 Jan 2026 06:23:55 +1100 (AEDT)", "from smtp.kernel.org (transwarp.subspace.kernel.org [100.75.92.58])\n\tby sea.source.kernel.org (Postfix) with ESMTP id F26EE443FC;\n\tMon, 12 Jan 2026 19:23:23 +0000 (UTC)", "by smtp.kernel.org (Postfix) with ESMTPSA id 6CE6AC19425;\n\tMon, 12 Jan 2026 19:23:23 +0000 (UTC)" ], "ARC-Seal": "i=1; a=rsa-sha256; d=lists.ozlabs.org; s=201707; t=1768245836;\n\tcv=none;\n b=in6vm/uiBYezbvroRPp7jYbXFHwXV8P9MQAvKYpKxoSUJxzPx8K6UW+iBT+9Zl2G832jAh1DiYRwZlZa5w+RoY/hHE07erKILaMtAoY356GVGTygHlWQlikcGzZH7bNpoB6dq0vz5J8nZeNd63dID0KsORhmS5BT2Q/4kWQLrtWP1fw6NXj8edWUz8pu53ck8LmkVfZoYw1nQtDfsBNOEXAFUaTsBPhlJx5aUmsmZ1kTVtYyaryinHZYjS1Ar+g7mKFjQ4S9Ve8fIAd7mGcIEdR1xO3UJm5aoNC6vcItwboEmF+ikbbrYy2DrGpe8ENSLWgogjr65ls09RlibibGUg==", "ARC-Message-Signature": "i=1; a=rsa-sha256; d=lists.ozlabs.org; s=201707;\n\tt=1768245836; c=relaxed/relaxed;\n\tbh=712mb/9c+K9R8KQ0BWdB5TadCzlE7NUJr9Sg7OGVMi0=;\n\th=From:To:Cc:Subject:Date:Message-ID:In-Reply-To:References:\n\t MIME-Version;\n b=Himx3OLh3snXivHubEWyw4scOAm6Xokh51OcKrHW59qqWE9r8yFUKd6N8cKJa/RxMn/T2X+0WzjjP6wb3z/U9uJqtvgBQHixGvK+d+7sxCCjCwuGAR5/SmIvnG636yJGRI2SeyimzUmr3P5AEPj+X38Er1vSpsD6R0GsOya6oPAOPVEPxg8NqpgOcc7jAOWLYM+SgZa5O4frpaEjK9rePjmAX6VBadc/rqODkwHw7Dr7wCAr+JCIjrjIAXMFZfz/jAaE0w5w8G/ilMOWFo4zZUULhkUurvmUgtFa3qf14kIeUIMxjqZZGUshS1AD343lVZ4BCg+kPcsuGxqrJ2pkNQ==", "ARC-Authentication-Results": "i=1; lists.ozlabs.org;\n dmarc=pass (p=quarantine dis=none) header.from=kernel.org;\n dkim=pass (2048-bit key;\n unprotected) header.d=kernel.org header.i=@kernel.org header.a=rsa-sha256\n header.s=k20201202 header.b=tBGhe1lj; dkim-atps=neutral;\n spf=pass (client-ip=172.234.252.31; helo=sea.source.kernel.org;\n envelope-from=ebiggers@kernel.org;\n receiver=lists.ozlabs.org) smtp.mailfrom=kernel.org", "DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/simple; d=kernel.org;\n\ts=k20201202; t=1768245803;\n\tbh=9CJBW3Yw+l4NE2IObqGq5P1JjpXcxrdOguxNy6LSrR8=;\n\th=From:To:Cc:Subject:Date:In-Reply-To:References:From;\n\tb=tBGhe1ljAN+c+Er3sKN/zTwRVF7hb9QqDW8m8K7K3ARDGvH6dgN10gvZHj6WzRkOt\n\t LY0I+kt94+INQwIrUcS7Kvw3KlV0wqAHr3HxHGLBJFwxfNJQjNWCATNuXyIFhB4Bhq\n\t DzLMUXcxhkE3u4tlIRJ+JOwTa9ZKgBxkSzW9+dMe6FTMT95gPJfBcd+DotZolpCWOW\n\t UZq9XCjpGr+8PrV3aGXtXq+nFA3WqsHTp5qdfQirlxGeEFgz0jPt4wljfCmKUQ0+IL\n\t N3/RPB0j0120qz+KlkYKSfdOA623XyhsiFKMUZdsWPGa0o9G3pW868Z+jvRwLRF1rC\n\t TzQY9qvPmNkSg==", "From": "Eric Biggers <ebiggers@kernel.org>", "To": "linux-crypto@vger.kernel.org", "Cc": "linux-kernel@vger.kernel.org,\n\tArd Biesheuvel <ardb@kernel.org>,\n\t\"Jason A . 
Donenfeld\" <Jason@zx2c4.com>,\n\tHerbert Xu <herbert@gondor.apana.org.au>,\n\tlinux-arm-kernel@lists.infradead.org,\n\tlinuxppc-dev@lists.ozlabs.org,\n\tlinux-riscv@lists.infradead.org,\n\tlinux-s390@vger.kernel.org,\n\tsparclinux@vger.kernel.org,\n\tx86@kernel.org,\n\tHolger Dengler <dengler@linux.ibm.com>,\n\tHarald Freudenberger <freude@linux.ibm.com>,\n\tEric Biggers <ebiggers@kernel.org>", "Subject": "[PATCH v2 11/35] lib/crypto: arm64/aes: Migrate optimized code into\n library", "Date": "Mon, 12 Jan 2026 11:20:09 -0800", "Message-ID": "<20260112192035.10427-12-ebiggers@kernel.org>", "X-Mailer": "git-send-email 2.52.0", "In-Reply-To": "<20260112192035.10427-1-ebiggers@kernel.org>", "References": "<20260112192035.10427-1-ebiggers@kernel.org>", "X-Mailing-List": "linuxppc-dev@lists.ozlabs.org", "List-Id": "<linuxppc-dev.lists.ozlabs.org>", "List-Help": "<mailto:linuxppc-dev+help@lists.ozlabs.org>", "List-Owner": "<mailto:linuxppc-dev+owner@lists.ozlabs.org>", "List-Post": "<mailto:linuxppc-dev@lists.ozlabs.org>", "List-Archive": "<https://lore.kernel.org/linuxppc-dev/>,\n <https://lists.ozlabs.org/pipermail/linuxppc-dev/>", "List-Subscribe": "<mailto:linuxppc-dev+subscribe@lists.ozlabs.org>,\n <mailto:linuxppc-dev+subscribe-digest@lists.ozlabs.org>,\n <mailto:linuxppc-dev+subscribe-nomail@lists.ozlabs.org>", "List-Unsubscribe": "<mailto:linuxppc-dev+unsubscribe@lists.ozlabs.org>", "Precedence": "list", "MIME-Version": "1.0", "Content-Transfer-Encoding": "8bit", "X-Spam-Status": "No, score=-0.2 required=3.0 tests=DKIMWL_WL_HIGH,DKIM_SIGNED,\n\tDKIM_VALID,DKIM_VALID_AU,DKIM_VALID_EF,SPF_HELO_NONE,SPF_PASS\n\tautolearn=disabled version=4.0.1 OzLabs 8", "X-Spam-Checker-Version": "SpamAssassin 4.0.1 (2024-03-25) on lists.ozlabs.org" }, "content": "Move the ARM64 optimized AES key expansion and single-block AES\nen/decryption code into lib/crypto/, wire it up to the AES library API,\nand remove the superseded crypto_cipher algorithms.\n\nThe result is that both the AES library and crypto_cipher APIs are now\noptimized for ARM64, whereas previously only crypto_cipher was (and the\noptimizations weren't enabled by default, which this fixes as well).\n\nNote: to see the diff from arch/arm64/crypto/aes-ce-glue.c to\nlib/crypto/arm64/aes.h, view this commit with 'git show -M10'.\n\nAcked-by: Ard Biesheuvel <ardb@kernel.org>\nSigned-off-by: Eric Biggers <ebiggers@kernel.org>\n---\n arch/arm64/crypto/Kconfig | 26 +--\n arch/arm64/crypto/Makefile | 6 -\n arch/arm64/crypto/aes-ce-ccm-glue.c | 2 -\n arch/arm64/crypto/aes-ce-glue.c | 178 ------------------\n arch/arm64/crypto/aes-ce-setkey.h | 6 -\n arch/arm64/crypto/aes-cipher-glue.c | 71 -------\n arch/arm64/crypto/aes-glue.c | 2 -\n include/crypto/aes.h | 10 +\n lib/crypto/Kconfig | 1 +\n lib/crypto/Makefile | 5 +\n .../crypto => lib/crypto/arm64}/aes-ce-core.S | 0\n .../crypto/arm64}/aes-cipher-core.S | 0\n lib/crypto/arm64/aes.h | 164 ++++++++++++++++\n 13 files changed, 181 insertions(+), 290 deletions(-)\n delete mode 100644 arch/arm64/crypto/aes-ce-glue.c\n delete mode 100644 arch/arm64/crypto/aes-ce-setkey.h\n delete mode 100644 arch/arm64/crypto/aes-cipher-glue.c\n rename {arch/arm64/crypto => lib/crypto/arm64}/aes-ce-core.S (100%)\n rename {arch/arm64/crypto => lib/crypto/arm64}/aes-cipher-core.S (100%)\n create mode 100644 lib/crypto/arm64/aes.h", "diff": "diff --git a/arch/arm64/crypto/Kconfig b/arch/arm64/crypto/Kconfig\nindex 4453dff8f0c1..81ed892b3b72 100644\n--- a/arch/arm64/crypto/Kconfig\n+++ b/arch/arm64/crypto/Kconfig\n@@ -35,38 
+35,15 @@ config CRYPTO_SM3_ARM64_CE\n \t SM3 (ShangMi 3) secure hash function (OSCCA GM/T 0004-2012)\n \n \t Architecture: arm64 using:\n \t - ARMv8.2 Crypto Extensions\n \n-config CRYPTO_AES_ARM64\n-\ttristate \"Ciphers: AES, modes: ECB, CBC, CTR, CTS, XCTR, XTS\"\n-\tselect CRYPTO_AES\n-\thelp\n-\t Block ciphers: AES cipher algorithms (FIPS-197)\n-\t Length-preserving ciphers: AES with ECB, CBC, CTR, CTS,\n-\t XCTR, and XTS modes\n-\t AEAD cipher: AES with CBC, ESSIV, and SHA-256\n-\t for fscrypt and dm-crypt\n-\n-\t Architecture: arm64\n-\n-config CRYPTO_AES_ARM64_CE\n-\ttristate \"Ciphers: AES (ARMv8 Crypto Extensions)\"\n-\tdepends on KERNEL_MODE_NEON\n-\tselect CRYPTO_ALGAPI\n-\tselect CRYPTO_LIB_AES\n-\thelp\n-\t Block ciphers: AES cipher algorithms (FIPS-197)\n-\n-\t Architecture: arm64 using:\n-\t - ARMv8 Crypto Extensions\n-\n config CRYPTO_AES_ARM64_CE_BLK\n \ttristate \"Ciphers: AES, modes: ECB/CBC/CTR/XTS (ARMv8 Crypto Extensions)\"\n \tdepends on KERNEL_MODE_NEON\n \tselect CRYPTO_SKCIPHER\n-\tselect CRYPTO_AES_ARM64_CE\n+\tselect CRYPTO_LIB_AES\n \tselect CRYPTO_LIB_SHA256\n \thelp\n \t Length-preserving ciphers: AES cipher algorithms (FIPS-197)\n \t with block cipher modes:\n \t - ECB (Electronic Codebook) mode (NIST SP800-38A)\n@@ -163,11 +140,10 @@ config CRYPTO_SM4_ARM64_NEON_BLK\n \n config CRYPTO_AES_ARM64_CE_CCM\n \ttristate \"AEAD cipher: AES in CCM mode (ARMv8 Crypto Extensions)\"\n \tdepends on KERNEL_MODE_NEON\n \tselect CRYPTO_ALGAPI\n-\tselect CRYPTO_AES_ARM64_CE\n \tselect CRYPTO_AES_ARM64_CE_BLK\n \tselect CRYPTO_AEAD\n \tselect CRYPTO_LIB_AES\n \thelp\n \t AEAD cipher: AES cipher algorithms (FIPS-197) with\ndiff --git a/arch/arm64/crypto/Makefile b/arch/arm64/crypto/Makefile\nindex 3ab4b58e5c4c..3574e917bc37 100644\n--- a/arch/arm64/crypto/Makefile\n+++ b/arch/arm64/crypto/Makefile\n@@ -27,22 +27,16 @@ obj-$(CONFIG_CRYPTO_SM4_ARM64_NEON_BLK) += sm4-neon.o\n sm4-neon-y := sm4-neon-glue.o sm4-neon-core.o\n \n obj-$(CONFIG_CRYPTO_GHASH_ARM64_CE) += ghash-ce.o\n ghash-ce-y := ghash-ce-glue.o ghash-ce-core.o\n \n-obj-$(CONFIG_CRYPTO_AES_ARM64_CE) += aes-ce-cipher.o\n-aes-ce-cipher-y := aes-ce-core.o aes-ce-glue.o\n-\n obj-$(CONFIG_CRYPTO_AES_ARM64_CE_CCM) += aes-ce-ccm.o\n aes-ce-ccm-y := aes-ce-ccm-glue.o aes-ce-ccm-core.o\n \n obj-$(CONFIG_CRYPTO_AES_ARM64_CE_BLK) += aes-ce-blk.o\n aes-ce-blk-y := aes-glue-ce.o aes-ce.o\n \n obj-$(CONFIG_CRYPTO_AES_ARM64_NEON_BLK) += aes-neon-blk.o\n aes-neon-blk-y := aes-glue-neon.o aes-neon.o\n \n-obj-$(CONFIG_CRYPTO_AES_ARM64) += aes-arm64.o\n-aes-arm64-y := aes-cipher-core.o aes-cipher-glue.o\n-\n obj-$(CONFIG_CRYPTO_AES_ARM64_BS) += aes-neon-bs.o\n aes-neon-bs-y := aes-neonbs-core.o aes-neonbs-glue.o\ndiff --git a/arch/arm64/crypto/aes-ce-ccm-glue.c b/arch/arm64/crypto/aes-ce-ccm-glue.c\nindex c4fd648471f1..db371ac051fc 100644\n--- a/arch/arm64/crypto/aes-ce-ccm-glue.c\n+++ b/arch/arm64/crypto/aes-ce-ccm-glue.c\n@@ -15,12 +15,10 @@\n #include <crypto/internal/skcipher.h>\n #include <linux/module.h>\n \n #include <asm/simd.h>\n \n-#include \"aes-ce-setkey.h\"\n-\n MODULE_IMPORT_NS(\"CRYPTO_INTERNAL\");\n \n static int num_rounds(struct crypto_aes_ctx *ctx)\n {\n \t/*\ndiff --git a/arch/arm64/crypto/aes-ce-glue.c b/arch/arm64/crypto/aes-ce-glue.c\ndeleted file mode 100644\nindex a4dad370991d..000000000000\n--- a/arch/arm64/crypto/aes-ce-glue.c\n+++ /dev/null\n@@ -1,178 +0,0 @@\n-// SPDX-License-Identifier: GPL-2.0-only\n-/*\n- * aes-ce-cipher.c - core AES cipher using ARMv8 Crypto Extensions\n- *\n- * Copyright (C) 2013 - 
2017 Linaro Ltd <ard.biesheuvel@linaro.org>\n- */\n-\n-#include <asm/neon.h>\n-#include <asm/simd.h>\n-#include <linux/unaligned.h>\n-#include <crypto/aes.h>\n-#include <crypto/algapi.h>\n-#include <crypto/internal/simd.h>\n-#include <linux/cpufeature.h>\n-#include <linux/module.h>\n-\n-#include \"aes-ce-setkey.h\"\n-\n-MODULE_DESCRIPTION(\"Synchronous AES cipher using ARMv8 Crypto Extensions\");\n-MODULE_AUTHOR(\"Ard Biesheuvel <ard.biesheuvel@linaro.org>\");\n-MODULE_LICENSE(\"GPL v2\");\n-\n-struct aes_block {\n-\tu8 b[AES_BLOCK_SIZE];\n-};\n-\n-asmlinkage void __aes_ce_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);\n-asmlinkage void __aes_ce_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);\n-\n-asmlinkage u32 __aes_ce_sub(u32 l);\n-asmlinkage void __aes_ce_invert(struct aes_block *out,\n-\t\t\t\tconst struct aes_block *in);\n-\n-static int num_rounds(struct crypto_aes_ctx *ctx)\n-{\n-\t/*\n-\t * # of rounds specified by AES:\n-\t * 128 bit key\t\t10 rounds\n-\t * 192 bit key\t\t12 rounds\n-\t * 256 bit key\t\t14 rounds\n-\t * => n byte key\t=> 6 + (n/4) rounds\n-\t */\n-\treturn 6 + ctx->key_length / 4;\n-}\n-\n-static void aes_cipher_encrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])\n-{\n-\tstruct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);\n-\n-\tif (!crypto_simd_usable()) {\n-\t\taes_encrypt(ctx, dst, src);\n-\t\treturn;\n-\t}\n-\n-\tscoped_ksimd()\n-\t\t__aes_ce_encrypt(ctx->key_enc, dst, src, num_rounds(ctx));\n-}\n-\n-static void aes_cipher_decrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])\n-{\n-\tstruct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);\n-\n-\tif (!crypto_simd_usable()) {\n-\t\taes_decrypt(ctx, dst, src);\n-\t\treturn;\n-\t}\n-\n-\tscoped_ksimd()\n-\t\t__aes_ce_decrypt(ctx->key_dec, dst, src, num_rounds(ctx));\n-}\n-\n-int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,\n-\t\t unsigned int key_len)\n-{\n-\t/*\n-\t * The AES key schedule round constants\n-\t */\n-\tstatic u8 const rcon[] = {\n-\t\t0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,\n-\t};\n-\n-\tu32 kwords = key_len / sizeof(u32);\n-\tstruct aes_block *key_enc, *key_dec;\n-\tint i, j;\n-\n-\tif (key_len != AES_KEYSIZE_128 &&\n-\t key_len != AES_KEYSIZE_192 &&\n-\t key_len != AES_KEYSIZE_256)\n-\t\treturn -EINVAL;\n-\n-\tctx->key_length = key_len;\n-\tfor (i = 0; i < kwords; i++)\n-\t\tctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));\n-\n-\tscoped_ksimd() {\n-\t\tfor (i = 0; i < sizeof(rcon); i++) {\n-\t\t\tu32 *rki = ctx->key_enc + (i * kwords);\n-\t\t\tu32 *rko = rki + kwords;\n-\n-\t\t\trko[0] = ror32(__aes_ce_sub(rki[kwords - 1]), 8) ^\n-\t\t\t\t rcon[i] ^ rki[0];\n-\t\t\trko[1] = rko[0] ^ rki[1];\n-\t\t\trko[2] = rko[1] ^ rki[2];\n-\t\t\trko[3] = rko[2] ^ rki[3];\n-\n-\t\t\tif (key_len == AES_KEYSIZE_192) {\n-\t\t\t\tif (i >= 7)\n-\t\t\t\t\tbreak;\n-\t\t\t\trko[4] = rko[3] ^ rki[4];\n-\t\t\t\trko[5] = rko[4] ^ rki[5];\n-\t\t\t} else if (key_len == AES_KEYSIZE_256) {\n-\t\t\t\tif (i >= 6)\n-\t\t\t\t\tbreak;\n-\t\t\t\trko[4] = __aes_ce_sub(rko[3]) ^ rki[4];\n-\t\t\t\trko[5] = rko[4] ^ rki[5];\n-\t\t\t\trko[6] = rko[5] ^ rki[6];\n-\t\t\t\trko[7] = rko[6] ^ rki[7];\n-\t\t\t}\n-\t\t}\n-\n-\t\t/*\n-\t\t * Generate the decryption keys for the Equivalent Inverse\n-\t\t * Cipher. 
This involves reversing the order of the round\n-\t\t * keys, and applying the Inverse Mix Columns transformation on\n-\t\t * all but the first and the last one.\n-\t\t */\n-\t\tkey_enc = (struct aes_block *)ctx->key_enc;\n-\t\tkey_dec = (struct aes_block *)ctx->key_dec;\n-\t\tj = num_rounds(ctx);\n-\n-\t\tkey_dec[0] = key_enc[j];\n-\t\tfor (i = 1, j--; j > 0; i++, j--)\n-\t\t\t__aes_ce_invert(key_dec + i, key_enc + j);\n-\t\tkey_dec[i] = key_enc[0];\n-\t}\n-\n-\treturn 0;\n-}\n-EXPORT_SYMBOL(ce_aes_expandkey);\n-\n-int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,\n-\t\t unsigned int key_len)\n-{\n-\tstruct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);\n-\n-\treturn ce_aes_expandkey(ctx, in_key, key_len);\n-}\n-EXPORT_SYMBOL(ce_aes_setkey);\n-\n-static struct crypto_alg aes_alg = {\n-\t.cra_name\t\t= \"aes\",\n-\t.cra_driver_name\t= \"aes-ce\",\n-\t.cra_priority\t\t= 250,\n-\t.cra_flags\t\t= CRYPTO_ALG_TYPE_CIPHER,\n-\t.cra_blocksize\t\t= AES_BLOCK_SIZE,\n-\t.cra_ctxsize\t\t= sizeof(struct crypto_aes_ctx),\n-\t.cra_module\t\t= THIS_MODULE,\n-\t.cra_cipher = {\n-\t\t.cia_min_keysize\t= AES_MIN_KEY_SIZE,\n-\t\t.cia_max_keysize\t= AES_MAX_KEY_SIZE,\n-\t\t.cia_setkey\t\t= ce_aes_setkey,\n-\t\t.cia_encrypt\t\t= aes_cipher_encrypt,\n-\t\t.cia_decrypt\t\t= aes_cipher_decrypt\n-\t}\n-};\n-\n-static int __init aes_mod_init(void)\n-{\n-\treturn crypto_register_alg(&aes_alg);\n-}\n-\n-static void __exit aes_mod_exit(void)\n-{\n-\tcrypto_unregister_alg(&aes_alg);\n-}\n-\n-module_cpu_feature_match(AES, aes_mod_init);\n-module_exit(aes_mod_exit);\ndiff --git a/arch/arm64/crypto/aes-ce-setkey.h b/arch/arm64/crypto/aes-ce-setkey.h\ndeleted file mode 100644\nindex fd9ecf07d88c..000000000000\n--- a/arch/arm64/crypto/aes-ce-setkey.h\n+++ /dev/null\n@@ -1,6 +0,0 @@\n-/* SPDX-License-Identifier: GPL-2.0 */\n-\n-int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,\n-\t\t unsigned int key_len);\n-int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,\n-\t\t unsigned int key_len);\ndiff --git a/arch/arm64/crypto/aes-cipher-glue.c b/arch/arm64/crypto/aes-cipher-glue.c\ndeleted file mode 100644\nindex 9b27cbac278b..000000000000\n--- a/arch/arm64/crypto/aes-cipher-glue.c\n+++ /dev/null\n@@ -1,71 +0,0 @@\n-// SPDX-License-Identifier: GPL-2.0-only\n-/*\n- * Scalar AES core transform\n- *\n- * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>\n- */\n-\n-#include <crypto/aes.h>\n-#include <crypto/algapi.h>\n-#include <linux/module.h>\n-\n-asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);\n-asmlinkage void __aes_arm64_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);\n-\n-static int aes_arm64_setkey(struct crypto_tfm *tfm, const u8 *in_key,\n-\t\t\t unsigned int key_len)\n-{\n-\tstruct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);\n-\n-\treturn aes_expandkey(ctx, in_key, key_len);\n-}\n-\n-static void aes_arm64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)\n-{\n-\tstruct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);\n-\tint rounds = 6 + ctx->key_length / 4;\n-\n-\t__aes_arm64_encrypt(ctx->key_enc, out, in, rounds);\n-}\n-\n-static void aes_arm64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)\n-{\n-\tstruct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);\n-\tint rounds = 6 + ctx->key_length / 4;\n-\n-\t__aes_arm64_decrypt(ctx->key_dec, out, in, rounds);\n-}\n-\n-static struct crypto_alg aes_alg = {\n-\t.cra_name\t\t\t= \"aes\",\n-\t.cra_driver_name\t\t= \"aes-arm64\",\n-\t.cra_priority\t\t\t= 200,\n-\t.cra_flags\t\t\t= 
CRYPTO_ALG_TYPE_CIPHER,\n-\t.cra_blocksize\t\t\t= AES_BLOCK_SIZE,\n-\t.cra_ctxsize\t\t\t= sizeof(struct crypto_aes_ctx),\n-\t.cra_module\t\t\t= THIS_MODULE,\n-\n-\t.cra_cipher.cia_min_keysize\t= AES_MIN_KEY_SIZE,\n-\t.cra_cipher.cia_max_keysize\t= AES_MAX_KEY_SIZE,\n-\t.cra_cipher.cia_setkey\t\t= aes_arm64_setkey,\n-\t.cra_cipher.cia_encrypt\t\t= aes_arm64_encrypt,\n-\t.cra_cipher.cia_decrypt\t\t= aes_arm64_decrypt\n-};\n-\n-static int __init aes_init(void)\n-{\n-\treturn crypto_register_alg(&aes_alg);\n-}\n-\n-static void __exit aes_fini(void)\n-{\n-\tcrypto_unregister_alg(&aes_alg);\n-}\n-\n-module_init(aes_init);\n-module_exit(aes_fini);\n-\n-MODULE_DESCRIPTION(\"Scalar AES cipher for arm64\");\n-MODULE_AUTHOR(\"Ard Biesheuvel <ard.biesheuvel@linaro.org>\");\n-MODULE_LICENSE(\"GPL v2\");\n-MODULE_ALIAS_CRYPTO(\"aes\");\ndiff --git a/arch/arm64/crypto/aes-glue.c b/arch/arm64/crypto/aes-glue.c\nindex c51d4487e9e9..92f43e1cd097 100644\n--- a/arch/arm64/crypto/aes-glue.c\n+++ b/arch/arm64/crypto/aes-glue.c\n@@ -19,12 +19,10 @@\n #include <linux/string.h>\n \n #include <asm/hwcap.h>\n #include <asm/simd.h>\n \n-#include \"aes-ce-setkey.h\"\n-\n #ifdef USE_V8_CRYPTO_EXTENSIONS\n #define MODE\t\t\t\"ce\"\n #define PRIO\t\t\t300\n #define aes_expandkey\t\tce_aes_expandkey\n #define aes_ecb_encrypt\t\tce_aes_ecb_encrypt\ndiff --git a/include/crypto/aes.h b/include/crypto/aes.h\nindex 66421795cdab..18af1acbde58 100644\n--- a/include/crypto/aes.h\n+++ b/include/crypto/aes.h\n@@ -114,10 +114,20 @@ static inline int aes_check_keylen(size_t keylen)\n * for the initial combination, the second slot for the first round and so on.\n */\n int aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,\n \t\t unsigned int key_len);\n \n+/*\n+ * The following functions are temporarily exported for use by the AES mode\n+ * implementations in arch/$(SRCARCH)/crypto/. These exports will go away when\n+ * that code is migrated into lib/crypto/.\n+ */\n+#ifdef CONFIG_ARM64\n+int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,\n+\t\t unsigned int key_len);\n+#endif\n+\n /**\n * aes_preparekey() - Prepare an AES key for encryption and decryption\n * @key: (output) The key structure to initialize\n * @in_key: The raw AES key\n * @key_len: Length of the raw key in bytes. 
Should be either AES_KEYSIZE_128,\ndiff --git a/lib/crypto/Kconfig b/lib/crypto/Kconfig\nindex 60420b421e04..ead47b2a7db6 100644\n--- a/lib/crypto/Kconfig\n+++ b/lib/crypto/Kconfig\n@@ -13,10 +13,11 @@ config CRYPTO_LIB_AES\n \n config CRYPTO_LIB_AES_ARCH\n \tbool\n \tdepends on CRYPTO_LIB_AES && !UML && !KMSAN\n \tdefault y if ARM\n+\tdefault y if ARM64\n \n config CRYPTO_LIB_AESCFB\n \ttristate\n \tselect CRYPTO_LIB_AES\n \tselect CRYPTO_LIB_UTILS\ndiff --git a/lib/crypto/Makefile b/lib/crypto/Makefile\nindex 2f6b0f59eb1b..1b690c63fafb 100644\n--- a/lib/crypto/Makefile\n+++ b/lib/crypto/Makefile\n@@ -22,10 +22,15 @@ libaes-y := aes.o\n ifeq ($(CONFIG_CRYPTO_LIB_AES_ARCH),y)\n CFLAGS_aes.o += -I$(src)/$(SRCARCH)\n \n libaes-$(CONFIG_ARM) += arm/aes-cipher-core.o\n \n+ifeq ($(CONFIG_ARM64),y)\n+libaes-y += arm64/aes-cipher-core.o\n+libaes-$(CONFIG_KERNEL_MODE_NEON) += arm64/aes-ce-core.o\n+endif\n+\n endif # CONFIG_CRYPTO_LIB_AES_ARCH\n \n ################################################################################\n \n obj-$(CONFIG_CRYPTO_LIB_AESCFB)\t\t\t+= libaescfb.o\ndiff --git a/arch/arm64/crypto/aes-ce-core.S b/lib/crypto/arm64/aes-ce-core.S\nsimilarity index 100%\nrename from arch/arm64/crypto/aes-ce-core.S\nrename to lib/crypto/arm64/aes-ce-core.S\ndiff --git a/arch/arm64/crypto/aes-cipher-core.S b/lib/crypto/arm64/aes-cipher-core.S\nsimilarity index 100%\nrename from arch/arm64/crypto/aes-cipher-core.S\nrename to lib/crypto/arm64/aes-cipher-core.S\ndiff --git a/lib/crypto/arm64/aes.h b/lib/crypto/arm64/aes.h\nnew file mode 100644\nindex 000000000000..63eea6271ef9\n--- /dev/null\n+++ b/lib/crypto/arm64/aes.h\n@@ -0,0 +1,164 @@\n+/* SPDX-License-Identifier: GPL-2.0-only */\n+/*\n+ * AES block cipher, optimized for ARM64\n+ *\n+ * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>\n+ * Copyright 2026 Google LLC\n+ */\n+\n+#include <asm/neon.h>\n+#include <asm/simd.h>\n+#include <linux/unaligned.h>\n+#include <linux/cpufeature.h>\n+\n+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_aes);\n+\n+struct aes_block {\n+\tu8 b[AES_BLOCK_SIZE];\n+};\n+\n+asmlinkage void __aes_arm64_encrypt(const u32 rk[], u8 out[AES_BLOCK_SIZE],\n+\t\t\t\t const u8 in[AES_BLOCK_SIZE], int rounds);\n+asmlinkage void __aes_arm64_decrypt(const u32 inv_rk[], u8 out[AES_BLOCK_SIZE],\n+\t\t\t\t const u8 in[AES_BLOCK_SIZE], int rounds);\n+asmlinkage void __aes_ce_encrypt(const u32 rk[], u8 out[AES_BLOCK_SIZE],\n+\t\t\t\t const u8 in[AES_BLOCK_SIZE], int rounds);\n+asmlinkage void __aes_ce_decrypt(const u32 inv_rk[], u8 out[AES_BLOCK_SIZE],\n+\t\t\t\t const u8 in[AES_BLOCK_SIZE], int rounds);\n+asmlinkage u32 __aes_ce_sub(u32 l);\n+asmlinkage void __aes_ce_invert(struct aes_block *out,\n+\t\t\t\tconst struct aes_block *in);\n+\n+/*\n+ * Expand an AES key using the crypto extensions if supported and usable or\n+ * generic code otherwise. The expanded key format is compatible between the\n+ * two cases. 
The outputs are @rndkeys (required) and @inv_rndkeys (optional).\n+ */\n+static void aes_expandkey_arm64(u32 rndkeys[], u32 *inv_rndkeys,\n+\t\t\t\tconst u8 *in_key, int key_len, int nrounds)\n+{\n+\t/*\n+\t * The AES key schedule round constants\n+\t */\n+\tstatic u8 const rcon[] = {\n+\t\t0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,\n+\t};\n+\n+\tu32 kwords = key_len / sizeof(u32);\n+\tstruct aes_block *key_enc, *key_dec;\n+\tint i, j;\n+\n+\tif (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) ||\n+\t !static_branch_likely(&have_aes) || unlikely(!may_use_simd())) {\n+\t\taes_expandkey_generic(rndkeys, inv_rndkeys, in_key, key_len);\n+\t\treturn;\n+\t}\n+\n+\tfor (i = 0; i < kwords; i++)\n+\t\trndkeys[i] = get_unaligned_le32(&in_key[i * sizeof(u32)]);\n+\n+\tscoped_ksimd() {\n+\t\tfor (i = 0; i < sizeof(rcon); i++) {\n+\t\t\tu32 *rki = &rndkeys[i * kwords];\n+\t\t\tu32 *rko = rki + kwords;\n+\n+\t\t\trko[0] = ror32(__aes_ce_sub(rki[kwords - 1]), 8) ^\n+\t\t\t\t rcon[i] ^ rki[0];\n+\t\t\trko[1] = rko[0] ^ rki[1];\n+\t\t\trko[2] = rko[1] ^ rki[2];\n+\t\t\trko[3] = rko[2] ^ rki[3];\n+\n+\t\t\tif (key_len == AES_KEYSIZE_192) {\n+\t\t\t\tif (i >= 7)\n+\t\t\t\t\tbreak;\n+\t\t\t\trko[4] = rko[3] ^ rki[4];\n+\t\t\t\trko[5] = rko[4] ^ rki[5];\n+\t\t\t} else if (key_len == AES_KEYSIZE_256) {\n+\t\t\t\tif (i >= 6)\n+\t\t\t\t\tbreak;\n+\t\t\t\trko[4] = __aes_ce_sub(rko[3]) ^ rki[4];\n+\t\t\t\trko[5] = rko[4] ^ rki[5];\n+\t\t\t\trko[6] = rko[5] ^ rki[6];\n+\t\t\t\trko[7] = rko[6] ^ rki[7];\n+\t\t\t}\n+\t\t}\n+\n+\t\t/*\n+\t\t * Generate the decryption keys for the Equivalent Inverse\n+\t\t * Cipher. This involves reversing the order of the round\n+\t\t * keys, and applying the Inverse Mix Columns transformation on\n+\t\t * all but the first and the last one.\n+\t\t */\n+\t\tif (inv_rndkeys) {\n+\t\t\tkey_enc = (struct aes_block *)rndkeys;\n+\t\t\tkey_dec = (struct aes_block *)inv_rndkeys;\n+\t\t\tj = nrounds;\n+\n+\t\t\tkey_dec[0] = key_enc[j];\n+\t\t\tfor (i = 1, j--; j > 0; i++, j--)\n+\t\t\t\t__aes_ce_invert(key_dec + i, key_enc + j);\n+\t\t\tkey_dec[i] = key_enc[0];\n+\t\t}\n+\t}\n+}\n+\n+static void aes_preparekey_arch(union aes_enckey_arch *k,\n+\t\t\t\tunion aes_invkey_arch *inv_k,\n+\t\t\t\tconst u8 *in_key, int key_len, int nrounds)\n+{\n+\taes_expandkey_arm64(k->rndkeys, inv_k ? 
inv_k->inv_rndkeys : NULL,\n+\t\t\t in_key, key_len, nrounds);\n+}\n+\n+/*\n+ * This is here temporarily until the remaining AES mode implementations are\n+ * migrated from arch/arm64/crypto/ to lib/crypto/arm64/.\n+ */\n+int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,\n+\t\t unsigned int key_len)\n+{\n+\tif (aes_check_keylen(key_len) != 0)\n+\t\treturn -EINVAL;\n+\tctx->key_length = key_len;\n+\taes_expandkey_arm64(ctx->key_enc, ctx->key_dec, in_key, key_len,\n+\t\t\t 6 + key_len / 4);\n+\treturn 0;\n+}\n+EXPORT_SYMBOL(ce_aes_expandkey);\n+\n+static void aes_encrypt_arch(const struct aes_enckey *key,\n+\t\t\t u8 out[AES_BLOCK_SIZE],\n+\t\t\t const u8 in[AES_BLOCK_SIZE])\n+{\n+\tif (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&\n+\t static_branch_likely(&have_aes) && likely(may_use_simd())) {\n+\t\tscoped_ksimd()\n+\t\t\t__aes_ce_encrypt(key->k.rndkeys, out, in, key->nrounds);\n+\t} else {\n+\t\t__aes_arm64_encrypt(key->k.rndkeys, out, in, key->nrounds);\n+\t}\n+}\n+\n+static void aes_decrypt_arch(const struct aes_key *key,\n+\t\t\t u8 out[AES_BLOCK_SIZE],\n+\t\t\t const u8 in[AES_BLOCK_SIZE])\n+{\n+\tif (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&\n+\t static_branch_likely(&have_aes) && likely(may_use_simd())) {\n+\t\tscoped_ksimd()\n+\t\t\t__aes_ce_decrypt(key->inv_k.inv_rndkeys, out, in,\n+\t\t\t\t\t key->nrounds);\n+\t} else {\n+\t\t__aes_arm64_decrypt(key->inv_k.inv_rndkeys, out, in,\n+\t\t\t\t key->nrounds);\n+\t}\n+}\n+\n+#ifdef CONFIG_KERNEL_MODE_NEON\n+#define aes_mod_init_arch aes_mod_init_arch\n+static void aes_mod_init_arch(void)\n+{\n+\tif (cpu_have_named_feature(AES))\n+\t\tstatic_branch_enable(&have_aes);\n+}\n+#endif /* CONFIG_KERNEL_MODE_NEON */\n", "prefixes": [ "v2", "11/35" ] }
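For scripting against a response like the one above, the fields most tooling needs are name, state, check, series, and mbox. A minimal sketch, again in Python with requests, that summarizes this patch and downloads its mbox for use with git am (the output filename is arbitrary):

    import requests

    patch = requests.get("http://patchwork.ozlabs.org/api/patches/2183222/").json()

    # The fields most automation cares about, all present in the response above.
    print("name:  ", patch["name"])
    print("state: ", patch["state"])             # "handled-elsewhere" for this patch
    print("check: ", patch["check"])             # aggregate CI result; "pending" here
    print("series:", patch["series"][0]["name"])

    # "mbox" points at the raw patch in mbox format, suitable for `git am`.
    mbox = requests.get(patch["mbox"])
    mbox.raise_for_status()
    with open("patch.mbox", "wb") as f:
        f.write(mbox.content)

Related sub-resources follow the same pattern: the "comments" and "checks" URLs in the response are themselves API endpoints that return JSON lists.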