Commit 187d080

Merge tag 'libcrypto-fixes-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux
Pull crypto library fixes from Eric Biggers:
 "Fixes for some recent regressions as well as some longstanding issues:

   - Fix incorrect output from the arm64 NEON implementation of GHASH

   - Merge the ksimd scopes in the arm64 XTS code to reduce stack usage

   - Roll up the BLAKE2b round loop on 32-bit kernels to greatly reduce
     code size and stack usage

   - Add missing RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS dependency

   - Fix chacha-riscv64-zvkb.S to not use frame pointer for data"

* tag 'libcrypto-fixes-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux:
  crypto: arm64/ghash - Fix incorrect output from ghash-neon
  crypto/arm64: sm4/xts - Merge ksimd scopes to reduce stack bloat
  crypto/arm64: aes/xts - Use single ksimd scope to reduce stack bloat
  lib/crypto: blake2s: Replace manual unrolling with unrolled_full
  lib/crypto: blake2b: Roll up BLAKE2b round loop on 32-bit
  lib/crypto: riscv: Depend on RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
  lib/crypto: riscv/chacha: Avoid s0/fp register
2 parents 35ebee7 + f6a4587 commit 187d080

File tree: 10 files changed, +130 −142 lines

arch/arm64/crypto/aes-glue.c

Lines changed: 36 additions & 39 deletions
@@ -549,38 +549,37 @@ static int __maybe_unused xts_encrypt(struct skcipher_request *req)
 		tail = 0;
 	}
 
-	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
-		int nbytes = walk.nbytes;
+	scoped_ksimd() {
+		for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
+			int nbytes = walk.nbytes;
 
-		if (walk.nbytes < walk.total)
-			nbytes &= ~(AES_BLOCK_SIZE - 1);
+			if (walk.nbytes < walk.total)
+				nbytes &= ~(AES_BLOCK_SIZE - 1);
 
-		scoped_ksimd()
 			aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 					ctx->key1.key_enc, rounds, nbytes,
 					ctx->key2.key_enc, walk.iv, first);
-		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
-	}
+			err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
+		}
 
-	if (err || likely(!tail))
-		return err;
+		if (err || likely(!tail))
+			return err;
 
-	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
-	if (req->dst != req->src)
-		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
+		dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
 
-	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
-				   req->iv);
+		skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
+					   req->iv);
 
-	err = skcipher_walk_virt(&walk, &subreq, false);
-	if (err)
-		return err;
+		err = skcipher_walk_virt(&walk, &subreq, false);
+		if (err)
+			return err;
 
-	scoped_ksimd()
 		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				ctx->key1.key_enc, rounds, walk.nbytes,
 				ctx->key2.key_enc, walk.iv, first);
-
+	}
 	return skcipher_walk_done(&walk, 0);
 }
 
@@ -619,39 +618,37 @@ static int __maybe_unused xts_decrypt(struct skcipher_request *req)
 		tail = 0;
 	}
 
-	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
-		int nbytes = walk.nbytes;
+	scoped_ksimd() {
+		for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
+			int nbytes = walk.nbytes;
 
-		if (walk.nbytes < walk.total)
-			nbytes &= ~(AES_BLOCK_SIZE - 1);
+			if (walk.nbytes < walk.total)
+				nbytes &= ~(AES_BLOCK_SIZE - 1);
 
-		scoped_ksimd()
 			aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 					ctx->key1.key_dec, rounds, nbytes,
 					ctx->key2.key_enc, walk.iv, first);
-		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
-	}
+			err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
+		}
 
-	if (err || likely(!tail))
-		return err;
-
-	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
-	if (req->dst != req->src)
-		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
+		if (err || likely(!tail))
+			return err;
 
-	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
-				   req->iv);
+		dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
 
-	err = skcipher_walk_virt(&walk, &subreq, false);
-	if (err)
-		return err;
+		skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
+					   req->iv);
 
+		err = skcipher_walk_virt(&walk, &subreq, false);
+		if (err)
+			return err;
 
-	scoped_ksimd()
 		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				ctx->key1.key_dec, rounds, walk.nbytes,
 				ctx->key2.key_enc, walk.iv, first);
-
+	}
 	return skcipher_walk_done(&walk, 0);
 }
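The pattern of this change is the same in both hunks: the per-iteration scoped_ksimd() regions are hoisted into one scope around the whole walk. Per the commit subjects, the point is to reduce stack bloat; presumably each separate scope keeps its own SIMD bookkeeping live in the caller's frame, so a single scope per function is cheaper. Below is a minimal, self-contained C sketch of that restructuring. It is not the kernel's scoped_ksimd() (which, as I understand it, brackets kernel_neon_begin()/kernel_neon_end() on arm64); scoped_simd_demo(), simd_begin(), simd_end(), and process_block() are hypothetical stand-ins used only to show the before/after shape.

/*
 * Stand-alone sketch: merging per-iteration SIMD scopes into one
 * outer scope.  All names here are illustrative stand-ins.
 */
#include <stdio.h>

static int begin_calls, end_calls;

static void simd_begin(void) { begin_calls++; }
static void simd_end(void)   { end_calls++; }

/* Run the attached statement exactly once, bracketed by begin/end. */
#define scoped_simd_demo() \
	for (int _once = (simd_begin(), 1); _once; _once = (simd_end(), 0))

static void process_block(int i) { printf("block %d\n", i); }

int main(void)
{
	/* Old shape: one SIMD scope per loop iteration (4 begin/end pairs). */
	for (int i = 0; i < 4; i++)
		scoped_simd_demo()
			process_block(i);

	/* New shape: one SIMD scope wrapping the whole loop (1 pair). */
	scoped_simd_demo() {
		for (int i = 0; i < 4; i++)
			process_block(i);
	}

	/* Prints begin=5 end=5: four pairs from the old shape, one from the new. */
	printf("begin=%d end=%d\n", begin_calls, end_calls);
	return 0;
}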

arch/arm64/crypto/aes-neonbs-glue.c

Lines changed: 21 additions & 23 deletions
@@ -312,13 +312,13 @@ static int __xts_crypt(struct skcipher_request *req, bool encrypt,
 	if (err)
 		return err;
 
-	while (walk.nbytes >= AES_BLOCK_SIZE) {
-		int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
-		out = walk.dst.virt.addr;
-		in = walk.src.virt.addr;
-		nbytes = walk.nbytes;
+	scoped_ksimd() {
+		while (walk.nbytes >= AES_BLOCK_SIZE) {
+			int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
+			out = walk.dst.virt.addr;
+			in = walk.src.virt.addr;
+			nbytes = walk.nbytes;
 
-		scoped_ksimd() {
 			if (blocks >= 8) {
 				if (first == 1)
 					neon_aes_ecb_encrypt(walk.iv, walk.iv,
@@ -344,30 +344,28 @@ static int __xts_crypt(struct skcipher_request *req, bool encrypt,
 						    ctx->twkey, walk.iv, first);
 				nbytes = first = 0;
 			}
+			err = skcipher_walk_done(&walk, nbytes);
 		}
-		err = skcipher_walk_done(&walk, nbytes);
-	}
 
-	if (err || likely(!tail))
-		return err;
+		if (err || likely(!tail))
+			return err;
 
-	/* handle ciphertext stealing */
-	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
-	if (req->dst != req->src)
-		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
+		/* handle ciphertext stealing */
+		dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
 
-	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
-				   req->iv);
+		skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
+					   req->iv);
 
-	err = skcipher_walk_virt(&walk, req, false);
-	if (err)
-		return err;
+		err = skcipher_walk_virt(&walk, req, false);
+		if (err)
+			return err;
 
-	out = walk.dst.virt.addr;
-	in = walk.src.virt.addr;
-	nbytes = walk.nbytes;
+		out = walk.dst.virt.addr;
+		in = walk.src.virt.addr;
+		nbytes = walk.nbytes;
 
-	scoped_ksimd() {
 		if (encrypt)
 			neon_aes_xts_encrypt(out, in, ctx->cts.key_enc,
 					     ctx->key.rounds, nbytes, ctx->twkey,

arch/arm64/crypto/ghash-ce-glue.c

Lines changed: 1 addition & 1 deletion
@@ -133,7 +133,7 @@ static int ghash_finup(struct shash_desc *desc, const u8 *src,
 		u8 buf[GHASH_BLOCK_SIZE] = {};
 
 		memcpy(buf, src, len);
-		ghash_do_simd_update(1, ctx->digest, src, key, NULL,
+		ghash_do_simd_update(1, ctx->digest, buf, key, NULL,
 				     pmull_ghash_update_p8);
 		memzero_explicit(buf, sizeof(buf));
 	}
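The one-word fix above explains the "incorrect output" in the commit subject: the partial final chunk is copied into the zero-initialized buf so the last GHASH block is zero-padded, but the old code still passed src to the SIMD update, so the padding never took effect and the NEON path processed 16 bytes starting at src, i.e. whatever happened to follow the short input. Here is a stand-alone sketch of the pad-then-process pattern; BLOCK_SIZE, process_block(), and finup() are hypothetical names, not the kernel API.

/*
 * Pattern illustrated: a partial final block must be copied into a
 * zero-padded buffer, and it is that buffer -- not the original
 * pointer -- that is handed to the block function.
 */
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 16

static void process_block(const unsigned char block[BLOCK_SIZE])
{
	for (int i = 0; i < BLOCK_SIZE; i++)
		printf("%02x", block[i]);
	printf("\n");
}

static void finup(const unsigned char *src, size_t len)
{
	if (len) {		/* len is assumed to be < BLOCK_SIZE here */
		unsigned char buf[BLOCK_SIZE] = { 0 };

		memcpy(buf, src, len);
		process_block(buf);	/* correct: zero-padded copy */
		/* process_block(src) would read past the short input */
		memset(buf, 0, sizeof(buf));
	}
}

int main(void)
{
	const unsigned char tail[5] = { 'h', 'e', 'l', 'l', 'o' };

	finup(tail, sizeof(tail));
	return 0;
}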

arch/arm64/crypto/sm4-ce-glue.c

Lines changed: 20 additions & 22 deletions
@@ -346,11 +346,11 @@ static int sm4_xts_crypt(struct skcipher_request *req, bool encrypt)
 		tail = 0;
 	}
 
-	while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {
-		if (nbytes < walk.total)
-			nbytes &= ~(SM4_BLOCK_SIZE - 1);
+	scoped_ksimd() {
+		while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {
+			if (nbytes < walk.total)
+				nbytes &= ~(SM4_BLOCK_SIZE - 1);
 
-		scoped_ksimd() {
 			if (encrypt)
 				sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
 					       walk.src.virt.addr, walk.iv, nbytes,
@@ -359,32 +359,30 @@ static int sm4_xts_crypt(struct skcipher_request *req, bool encrypt)
 				sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
 					       walk.src.virt.addr, walk.iv, nbytes,
 					       rkey2_enc);
-		}
 
-		rkey2_enc = NULL;
+			rkey2_enc = NULL;
 
-		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
-		if (err)
-			return err;
-	}
+			err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
+			if (err)
+				return err;
+		}
 
-	if (likely(tail == 0))
-		return 0;
+		if (likely(tail == 0))
+			return 0;
 
-	/* handle ciphertext stealing */
+		/* handle ciphertext stealing */
 
-	dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
-	if (req->dst != req->src)
-		dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);
+		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);
 
-	skcipher_request_set_crypt(&subreq, src, dst, SM4_BLOCK_SIZE + tail,
-				   req->iv);
+		skcipher_request_set_crypt(&subreq, src, dst,
+					   SM4_BLOCK_SIZE + tail, req->iv);
 
-	err = skcipher_walk_virt(&walk, &subreq, false);
-	if (err)
-		return err;
+		err = skcipher_walk_virt(&walk, &subreq, false);
+		if (err)
+			return err;
 
-	scoped_ksimd() {
 		if (encrypt)
 			sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
 				       walk.src.virt.addr, walk.iv, walk.nbytes,

arch/riscv/crypto/Kconfig

Lines changed: 8 additions & 4 deletions
@@ -4,7 +4,8 @@ menu "Accelerated Cryptographic Algorithms for CPU (riscv)"
 
 config CRYPTO_AES_RISCV64
 	tristate "Ciphers: AES, modes: ECB, CBC, CTS, CTR, XTS"
-	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+	depends on 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+		   RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
 	select CRYPTO_ALGAPI
 	select CRYPTO_LIB_AES
 	select CRYPTO_SKCIPHER
@@ -20,7 +21,8 @@ config CRYPTO_AES_RISCV64
 
 config CRYPTO_GHASH_RISCV64
 	tristate "Hash functions: GHASH"
-	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+	depends on 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+		   RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
 	select CRYPTO_GCM
 	help
 	  GCM GHASH function (NIST SP 800-38D)
@@ -30,7 +32,8 @@ config CRYPTO_GHASH_RISCV64
 
 config CRYPTO_SM3_RISCV64
 	tristate "Hash functions: SM3 (ShangMi 3)"
-	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+	depends on 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+		   RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
 	select CRYPTO_HASH
 	select CRYPTO_LIB_SM3
 	help
@@ -42,7 +45,8 @@ config CRYPTO_SM3_RISCV64
 
 config CRYPTO_SM4_RISCV64
 	tristate "Ciphers: SM4 (ShangMi 4)"
-	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+	depends on 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+		   RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
 	select CRYPTO_ALGAPI
 	select CRYPTO_SM4
 	help

lib/crypto/Kconfig

Lines changed: 6 additions & 3 deletions
@@ -61,7 +61,8 @@ config CRYPTO_LIB_CHACHA_ARCH
 	default y if ARM64 && KERNEL_MODE_NEON
 	default y if MIPS && CPU_MIPS32_R2
 	default y if PPC64 && CPU_LITTLE_ENDIAN && VSX
-	default y if RISCV && 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+	default y if RISCV && 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+		     RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
 	default y if S390
 	default y if X86_64
 
@@ -184,7 +185,8 @@ config CRYPTO_LIB_SHA256_ARCH
 	default y if ARM64
 	default y if MIPS && CPU_CAVIUM_OCTEON
 	default y if PPC && SPE
-	default y if RISCV && 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+	default y if RISCV && 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+		     RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
 	default y if S390
 	default y if SPARC64
 	default y if X86_64
@@ -202,7 +204,8 @@ config CRYPTO_LIB_SHA512_ARCH
 	default y if ARM && !CPU_V7M
 	default y if ARM64
 	default y if MIPS && CPU_CAVIUM_OCTEON
-	default y if RISCV && 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+	default y if RISCV && 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+		     RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
 	default y if S390
 	default y if SPARC64
 	default y if X86_64

lib/crypto/Makefile

Lines changed: 0 additions & 1 deletion
@@ -33,7 +33,6 @@ obj-$(CONFIG_CRYPTO_LIB_GF128MUL) += gf128mul.o
 
 obj-$(CONFIG_CRYPTO_LIB_BLAKE2B) += libblake2b.o
 libblake2b-y := blake2b.o
-CFLAGS_blake2b.o := -Wframe-larger-than=4096 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=105930
 ifeq ($(CONFIG_CRYPTO_LIB_BLAKE2B_ARCH),y)
 CFLAGS_blake2b.o += -I$(src)/$(SRCARCH)
 libblake2b-$(CONFIG_ARM) += arm/blake2b-neon-core.o

lib/crypto/blake2b.c

Lines changed: 20 additions & 24 deletions
@@ -14,6 +14,7 @@
 #include <linux/kernel.h>
 #include <linux/module.h>
 #include <linux/string.h>
+#include <linux/unroll.h>
 #include <linux/types.h>
 
 static const u8 blake2b_sigma[12][16] = {
@@ -73,31 +74,26 @@ blake2b_compress_generic(struct blake2b_ctx *ctx,
 		b = ror64(b ^ c, 63); \
 	} while (0)
 
-#define ROUND(r) do { \
-	G(r, 0, v[0], v[ 4], v[ 8], v[12]); \
-	G(r, 1, v[1], v[ 5], v[ 9], v[13]); \
-	G(r, 2, v[2], v[ 6], v[10], v[14]); \
-	G(r, 3, v[3], v[ 7], v[11], v[15]); \
-	G(r, 4, v[0], v[ 5], v[10], v[15]); \
-	G(r, 5, v[1], v[ 6], v[11], v[12]); \
-	G(r, 6, v[2], v[ 7], v[ 8], v[13]); \
-	G(r, 7, v[3], v[ 4], v[ 9], v[14]); \
-} while (0)
-	ROUND(0);
-	ROUND(1);
-	ROUND(2);
-	ROUND(3);
-	ROUND(4);
-	ROUND(5);
-	ROUND(6);
-	ROUND(7);
-	ROUND(8);
-	ROUND(9);
-	ROUND(10);
-	ROUND(11);
-
+#ifdef CONFIG_64BIT
+	/*
+	 * Unroll the rounds loop to enable constant-folding of the
+	 * blake2b_sigma values.  Seems worthwhile on 64-bit kernels.
+	 * Not worthwhile on 32-bit kernels because the code size is
+	 * already so large there due to BLAKE2b using 64-bit words.
+	 */
+	unrolled_full
+#endif
+	for (int r = 0; r < 12; r++) {
+		G(r, 0, v[0], v[4], v[8], v[12]);
+		G(r, 1, v[1], v[5], v[9], v[13]);
+		G(r, 2, v[2], v[6], v[10], v[14]);
+		G(r, 3, v[3], v[7], v[11], v[15]);
+		G(r, 4, v[0], v[5], v[10], v[15]);
+		G(r, 5, v[1], v[6], v[11], v[12]);
+		G(r, 6, v[2], v[7], v[8], v[13]);
+		G(r, 7, v[3], v[4], v[9], v[14]);
+	}
 #undef G
-#undef ROUND
 
 	for (i = 0; i < 8; ++i)
 		ctx->h[i] ^= v[i] ^ v[i + 8];
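Here the twelve manual ROUND() expansions become a rolled 12-iteration loop; on 64-bit builds the unrolled_full annotation from <linux/unroll.h> asks the compiler to unroll it fully again, so the blake2b_sigma[r][i] indices stay compile-time constants, while 32-bit builds keep the compact rolled form (which is why the Makefile can drop the -Wframe-larger-than=4096 workaround above). The stand-alone sketch below shows the same idea with a plain GCC-style unroll pragma, used as an assumption about what unrolled_full roughly expands to; sigma, mix(), and the arithmetic are made-up illustrations, not the BLAKE2b round function.

/*
 * Illustration of a full-unroll hint: once the loop is unrolled, the
 * loop counter r is a constant in each copy of the body, so the
 * sigma[r][...] lookups can fold to immediates instead of run-time
 * table loads.
 */
#include <stdint.h>
#include <stdio.h>

static const uint8_t sigma[4][4] = {
	{ 0, 1, 2, 3 },
	{ 3, 2, 1, 0 },
	{ 1, 3, 0, 2 },
	{ 2, 0, 3, 1 },
};

static uint64_t mix(const uint64_t m[4])
{
	uint64_t acc = 0;

	/* Full unroll: r is a compile-time constant in each unrolled copy. */
#pragma GCC unroll 4
	for (int r = 0; r < 4; r++)
		acc += m[sigma[r][0]] ^ (m[sigma[r][1]] << r);

	return acc;
}

int main(void)
{
	const uint64_t m[4] = { 10, 20, 30, 40 };

	printf("%llu\n", (unsigned long long)mix(m));
	return 0;
}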
