
Commit 67daf84

Merge patch series "RISC-V crypto with reworked asm files"
Eric Biggers <[email protected]> says:

This patchset, which applies to v6.8-rc1, adds cryptographic algorithm
implementations accelerated using the RISC-V vector crypto extensions
(https://github.com/riscv/riscv-crypto/releases/download/v1.0.0/riscv-crypto-spec-vector.pdf)
and the RISC-V vector extension
(https://github.com/riscv/riscv-v-spec/releases/download/v1.0/riscv-v-spec-1.0.pdf).
The following algorithms are included: AES in ECB, CBC, CTR, and XTS modes;
ChaCha20; GHASH; SHA-2; SM3; and SM4.

In general, the assembly code requires a 64-bit RISC-V CPU with VLEN >= 128,
little endian byte order, and vector unaligned access support. The ECB, CTR,
XTS, and ChaCha20 code is designed to naturally scale up to larger VLEN values.
Building the assembly code requires tip-of-tree binutils (future 2.42) or
tip-of-tree clang (future 18.x). All algorithms pass testing in QEMU, using
CONFIG_CRYPTO_MANAGER_EXTRA_TESTS=y.

Much of the assembly code is derived from OpenSSL code that was added by
openssl/openssl#21923. It has been cleaned up for integration with the kernel,
e.g. reducing code duplication, eliminating use of .inst and perlasm, and
fixing a few bugs.

This patchset incorporates the work of multiple people, including Jerry Shih,
Heiko Stuebner, Christoph Müllner, Phoebe Chen, Charalampos Mitrodimas, and
myself. It went through several versions from Heiko (last version:
https://lore.kernel.org/linux-crypto/[email protected]), then several
versions from Jerry (last version:
https://lore.kernel.org/linux-crypto/[email protected]), then finally
several versions from me. Thanks to everyone who has contributed to this
patchset or its prerequisites.

* b4-shazam-merge:
  crypto: riscv - add vector crypto accelerated SM4
  crypto: riscv - add vector crypto accelerated SM3
  crypto: riscv - add vector crypto accelerated SHA-{512,384}
  crypto: riscv - add vector crypto accelerated SHA-{256,224}
  crypto: riscv - add vector crypto accelerated GHASH
  crypto: riscv - add vector crypto accelerated ChaCha20
  crypto: riscv - add vector crypto accelerated AES-{ECB,CBC,CTR,XTS}
  RISC-V: hook new crypto subdir into build-system
  RISC-V: add TOOLCHAIN_HAS_VECTOR_CRYPTO
  RISC-V: add helper function to read the vector VLEN

Link: https://lore.kernel.org/r/[email protected]
Signed-off-by: Palmer Dabbelt <[email protected]>
2 parents 021d234 + b8d0635 commit 67daf84

23 files changed: 3274 additions, 0 deletions

arch/riscv/Kbuild

Lines changed: 1 addition & 0 deletions
@@ -2,6 +2,7 @@
 
 obj-y += kernel/ mm/ net/
 obj-$(CONFIG_BUILTIN_DTB) += boot/dts/
+obj-$(CONFIG_CRYPTO) += crypto/
 obj-y += errata/
 obj-$(CONFIG_KVM) += kvm/

arch/riscv/Kconfig

Lines changed: 7 additions & 0 deletions
@@ -581,6 +581,13 @@ config TOOLCHAIN_HAS_ZBB
 	depends on LLD_VERSION >= 150000 || LD_VERSION >= 23900
 	depends on AS_HAS_OPTION_ARCH
 
+# This symbol indicates that the toolchain supports all v1.0 vector crypto
+# extensions, including Zvk*, Zvbb, and Zvbc. LLVM added all of these at once.
+# binutils added all except Zvkb, then added Zvkb. So we just check for Zvkb.
+config TOOLCHAIN_HAS_VECTOR_CRYPTO
+	def_bool $(as-instr, .option arch$(comma) +zvkb)
+	depends on AS_HAS_OPTION_ARCH
+
 config RISCV_ISA_ZBB
 	bool "Zbb extension support for bit manipulation instructions"
 	depends on TOOLCHAIN_HAS_ZBB
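The $(as-instr, ...) probe above simply checks whether the assembler accepts
an ".option arch" directive naming Zvkb. This matters because the crypto .S
files in this series use the same per-file mechanism to enable extensions
without requiring them in the global -march string. A minimal sketch of the
pattern (illustrative only, not a hunk from this commit):

	.text
	.option	arch, +zvkb
	// With Zvkb enabled for this file, its instructions now assemble,
	// e.g. a vector rotate right by immediate:
	vror.vi	v1, v2, 8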

arch/riscv/crypto/Kconfig

Lines changed: 93 additions & 0 deletions
@@ -0,0 +1,93 @@
# SPDX-License-Identifier: GPL-2.0

menu "Accelerated Cryptographic Algorithms for CPU (riscv)"

config CRYPTO_AES_RISCV64
	tristate "Ciphers: AES, modes: ECB, CBC, CTR, XTS"
	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
	select CRYPTO_ALGAPI
	select CRYPTO_LIB_AES
	select CRYPTO_SKCIPHER
	help
	  Block cipher: AES cipher algorithms
	  Length-preserving ciphers: AES with ECB, CBC, CTR, XTS

	  Architecture: riscv64 using:
	  - Zvkned vector crypto extension
	  - Zvbb vector extension (XTS)
	  - Zvkb vector crypto extension (CTR)
	  - Zvkg vector crypto extension (XTS)

config CRYPTO_CHACHA_RISCV64
	tristate "Ciphers: ChaCha"
	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
	select CRYPTO_SKCIPHER
	select CRYPTO_LIB_CHACHA_GENERIC
	help
	  Length-preserving ciphers: ChaCha20 stream cipher algorithm

	  Architecture: riscv64 using:
	  - Zvkb vector crypto extension

config CRYPTO_GHASH_RISCV64
	tristate "Hash functions: GHASH"
	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
	select CRYPTO_GCM
	help
	  GCM GHASH function (NIST SP 800-38D)

	  Architecture: riscv64 using:
	  - Zvkg vector crypto extension

config CRYPTO_SHA256_RISCV64
	tristate "Hash functions: SHA-224 and SHA-256"
	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
	select CRYPTO_SHA256
	help
	  SHA-224 and SHA-256 secure hash algorithm (FIPS 180)

	  Architecture: riscv64 using:
	  - Zvknha or Zvknhb vector crypto extensions
	  - Zvkb vector crypto extension

config CRYPTO_SHA512_RISCV64
	tristate "Hash functions: SHA-384 and SHA-512"
	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
	select CRYPTO_SHA512
	help
	  SHA-384 and SHA-512 secure hash algorithm (FIPS 180)

	  Architecture: riscv64 using:
	  - Zvknhb vector crypto extension
	  - Zvkb vector crypto extension

config CRYPTO_SM3_RISCV64
	tristate "Hash functions: SM3 (ShangMi 3)"
	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
	select CRYPTO_HASH
	select CRYPTO_SM3
	help
	  SM3 (ShangMi 3) secure hash function (OSCCA GM/T 0004-2012)

	  Architecture: riscv64 using:
	  - Zvksh vector crypto extension
	  - Zvkb vector crypto extension

config CRYPTO_SM4_RISCV64
	tristate "Ciphers: SM4 (ShangMi 4)"
	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
	select CRYPTO_ALGAPI
	select CRYPTO_SM4
	help
	  SM4 block cipher algorithm (OSCCA GB/T 32907-2016,
	  ISO/IEC 18033-3:2010/Amd 1:2021)

	  SM4 (GBT.32907-2016) is a cryptographic standard issued by the
	  Organization of State Commercial Administration of China (OSCCA)
	  as an authorized cryptographic algorithm for use within China.

	  Architecture: riscv64 using:
	  - Zvksed vector crypto extension
	  - Zvkb vector crypto extension

endmenu
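For reference, a minimal .config fragment (an assumed example, not part of
this commit) that enables all seven implementations as modules, together with
the extra crypto self-tests the cover letter says were used for testing:

	CONFIG_CRYPTO_AES_RISCV64=m
	CONFIG_CRYPTO_CHACHA_RISCV64=m
	CONFIG_CRYPTO_GHASH_RISCV64=m
	CONFIG_CRYPTO_SHA256_RISCV64=m
	CONFIG_CRYPTO_SHA512_RISCV64=m
	CONFIG_CRYPTO_SM3_RISCV64=m
	CONFIG_CRYPTO_SM4_RISCV64=m
	CONFIG_CRYPTO_MANAGER_EXTRA_TESTS=y

Each option takes effect only when its dependencies (64BIT, RISCV_ISA_V, and
TOOLCHAIN_HAS_VECTOR_CRYPTO) resolve to y.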

arch/riscv/crypto/Makefile

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
# SPDX-License-Identifier: GPL-2.0-only

obj-$(CONFIG_CRYPTO_AES_RISCV64) += aes-riscv64.o
aes-riscv64-y := aes-riscv64-glue.o aes-riscv64-zvkned.o \
		 aes-riscv64-zvkned-zvbb-zvkg.o aes-riscv64-zvkned-zvkb.o

obj-$(CONFIG_CRYPTO_CHACHA_RISCV64) += chacha-riscv64.o
chacha-riscv64-y := chacha-riscv64-glue.o chacha-riscv64-zvkb.o

obj-$(CONFIG_CRYPTO_GHASH_RISCV64) += ghash-riscv64.o
ghash-riscv64-y := ghash-riscv64-glue.o ghash-riscv64-zvkg.o

obj-$(CONFIG_CRYPTO_SHA256_RISCV64) += sha256-riscv64.o
sha256-riscv64-y := sha256-riscv64-glue.o sha256-riscv64-zvknha_or_zvknhb-zvkb.o

obj-$(CONFIG_CRYPTO_SHA512_RISCV64) += sha512-riscv64.o
sha512-riscv64-y := sha512-riscv64-glue.o sha512-riscv64-zvknhb-zvkb.o

obj-$(CONFIG_CRYPTO_SM3_RISCV64) += sm3-riscv64.o
sm3-riscv64-y := sm3-riscv64-glue.o sm3-riscv64-zvksh-zvkb.o

obj-$(CONFIG_CRYPTO_SM4_RISCV64) += sm4-riscv64.o
sm4-riscv64-y := sm4-riscv64-glue.o sm4-riscv64-zvksed-zvkb.o
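Each module pairs one C glue file with one or more assembly files: the glue
file registers the algorithm with the crypto API and declares the assembly
routines as ordinary C functions. A rough sketch of that pattern (hypothetical
function name and signature, for illustration only):

	/* In a *-glue.c file: the asm entry point is declared like this. */
	#include <linux/linkage.h>
	#include <linux/types.h>

	/* Hypothetical: process len bytes of data with the vector unit. */
	asmlinkage void example_transform_zvkb(u8 *out, const u8 *in,
					       size_t len);

The glue code brackets calls to such functions with kernel_vector_begin() /
kernel_vector_end() so that kernel-mode vector state is saved and restored
around the assembly.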

arch/riscv/crypto/aes-macros.S

Lines changed: 156 additions & 0 deletions
@@ -0,0 +1,156 @@
/* SPDX-License-Identifier: Apache-2.0 OR BSD-2-Clause */
//
// This file is dual-licensed, meaning that you can use it under your
// choice of either of the following two licenses:
//
// Copyright 2023 The OpenSSL Project Authors. All Rights Reserved.
//
// Licensed under the Apache License 2.0 (the "License"). You can obtain
// a copy in the file LICENSE in the source distribution or at
// https://www.openssl.org/source/license.html
//
// or
//
// Copyright (c) 2023, Christoph Müllner <[email protected]>
// Copyright (c) 2023, Phoebe Chen <[email protected]>
// Copyright (c) 2023, Jerry Shih <[email protected]>
// Copyright 2024 Google LLC
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
//    notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
//    notice, this list of conditions and the following disclaimer in the
//    documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// This file contains macros that are shared by the other aes-*.S files. The
// generated code of these macros depends on the following RISC-V extensions:
// - RV64I
// - RISC-V Vector ('V') with VLEN >= 128
// - RISC-V Vector AES block cipher extension ('Zvkned')

// Loads the AES round keys from \keyp into vector registers and jumps to code
// specific to the length of the key. Specifically:
//   - If AES-128, loads round keys into v1-v11 and jumps to \label128.
//   - If AES-192, loads round keys into v1-v13 and jumps to \label192.
//   - If AES-256, loads round keys into v1-v15 and continues onwards.
//
// Also sets vl=4 and vtype=e32,m1,ta,ma. Clobbers t0 and t1.
.macro	aes_begin	keyp, label128, label192
	lwu		t0, 480(\keyp)	// t0 = key length in bytes
	li		t1, 24		// t1 = key length for AES-192
	vsetivli	zero, 4, e32, m1, ta, ma
	vle32.v		v1, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v2, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v3, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v4, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v5, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v6, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v7, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v8, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v9, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v10, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v11, (\keyp)
	blt		t0, t1, \label128	// If AES-128, goto label128.
	addi		\keyp, \keyp, 16
	vle32.v		v12, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v13, (\keyp)
	beq		t0, t1, \label192	// If AES-192, goto label192.
	// Else, it's AES-256.
	addi		\keyp, \keyp, 16
	vle32.v		v14, (\keyp)
	addi		\keyp, \keyp, 16
	vle32.v		v15, (\keyp)
.endm
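// Note: the "480" above works because \keyp points to a struct crypto_aes_ctx
// (include/crypto/aes.h): two 240-byte round-key arrays (key_enc, key_dec)
// followed by a u32 key_length, which therefore lives at byte offset
// 2 * 240 = 480.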
// Encrypts \data using zvkned instructions, using the round keys loaded into
// v1-v11 (for AES-128), v1-v13 (for AES-192), or v1-v15 (for AES-256). \keylen
// is the AES key length in bits. vl and vtype must already be set
// appropriately. Note that if vl > 4, multiple blocks are encrypted.
.macro	aes_encrypt	data, keylen
	vaesz.vs	\data, v1
	vaesem.vs	\data, v2
	vaesem.vs	\data, v3
	vaesem.vs	\data, v4
	vaesem.vs	\data, v5
	vaesem.vs	\data, v6
	vaesem.vs	\data, v7
	vaesem.vs	\data, v8
	vaesem.vs	\data, v9
	vaesem.vs	\data, v10
.if \keylen == 128
	vaesef.vs	\data, v11
.elseif \keylen == 192
	vaesem.vs	\data, v11
	vaesem.vs	\data, v12
	vaesef.vs	\data, v13
.else
	vaesem.vs	\data, v11
	vaesem.vs	\data, v12
	vaesem.vs	\data, v13
	vaesem.vs	\data, v14
	vaesef.vs	\data, v15
.endif
.endm

// Same as aes_encrypt, but decrypts instead of encrypts.
.macro	aes_decrypt	data, keylen
.if \keylen == 128
	vaesz.vs	\data, v11
.elseif \keylen == 192
	vaesz.vs	\data, v13
	vaesdm.vs	\data, v12
	vaesdm.vs	\data, v11
.else
	vaesz.vs	\data, v15
	vaesdm.vs	\data, v14
	vaesdm.vs	\data, v13
	vaesdm.vs	\data, v12
	vaesdm.vs	\data, v11
.endif
	vaesdm.vs	\data, v10
	vaesdm.vs	\data, v9
	vaesdm.vs	\data, v8
	vaesdm.vs	\data, v7
	vaesdm.vs	\data, v6
	vaesdm.vs	\data, v5
	vaesdm.vs	\data, v4
	vaesdm.vs	\data, v3
	vaesdm.vs	\data, v2
	vaesdf.vs	\data, v1
.endm

// Expands to aes_encrypt or aes_decrypt according to \enc, which is 1 or 0.
.macro	aes_crypt	data, enc, keylen
.if \enc
	aes_encrypt	\data, \keylen
.else
	aes_decrypt	\data, \keylen
.endif
.endm
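To make the calling convention concrete, here is a minimal sketch (assumed
code, not part of this commit) of how a single-block encrypt routine could
combine these macros. aes_begin leaves vl=4 with 32-bit elements selected, so
one vle32.v/vse32.v pair moves exactly one 16-byte block, and the numeric
labels receive the AES-128 and AES-192 paths:

	// Hypothetical: a0 = input block, a1 = output block,
	// a2 = pointer to the expanded key (struct crypto_aes_ctx).
	SYM_FUNC_START(example_aes_encrypt_block)
		aes_begin	a2, 128f, 192f
		vle32.v		v16, (a0)	// AES-256 falls through to here
		aes_encrypt	v16, 256
		vse32.v		v16, (a1)
		ret
	128:
		vle32.v		v16, (a0)	// round keys already in v1-v11
		aes_encrypt	v16, 128
		vse32.v		v16, (a1)
		ret
	192:
		vle32.v		v16, (a0)	// round keys already in v1-v13
		aes_encrypt	v16, 192
		vse32.v		v16, (a1)
		ret
	SYM_FUNC_END(example_aes_encrypt_block)

A decrypt variant would be identical except that it would invoke aes_decrypt
(or aes_crypt with \enc set to 0): aes_begin loads the same round keys either
way, since the Zvkned decrypt instructions consume the encryption key schedule
in reverse order.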
