1- From 3506e5fdbfc8ec0d25f347d1bdda39a292a10b90 Mon Sep 17 00:00:00 2001
1+ From e533f0efb7556ea598ad9f92a1d7ca9cd1db9ac7 Mon Sep 17 00:00:00 2001
22From: MBCX <github.small792@passinbox.com>
33Date: Wed, 17 Sep 2025 20:49:06 -0400
44Subject: [PATCH] Add missing asm Loongarch instruction for `_cpu_pause` and
55 add memory clobbers for the rest.
66
77---
8- core/os/spin_lock.h | 10 +++++++---
9- 1 file changed, 7 insertions(+), 3 deletions(-)
8+ core/os/spin_lock.h | 20 +++++++++++++++++---
9+ 1 file changed, 17 insertions(+), 3 deletions(-)
1010
1111diff --git a/core/os/spin_lock.h b/core/os/spin_lock.h
12- index 7a5051035373..968d2cdcdfd0 100644
12+ index 7a5051035373..ae9ce769a8d2 100644
1313--- a/core/os/spin_lock.h
1414+++ b/core/os/spin_lock.h
15- @@ -80,11 +80,15 @@ _ALWAYS_INLINE_ static void _cpu_pause() {
15+ @@ -67,6 +67,11 @@ class SpinLock {
16+
17+ #include <atomic>
18+
19+ + #if defined(__loongarch64)
20+ + // For __ibar C intrinsic.
21+ + #include <larchintrin.h>
22+ + #endif
23+ +
24+ _ALWAYS_INLINE_ static void _cpu_pause() {
25+ #if defined(_MSC_VER)
26+ // ----- MSVC.
27+ @@ -80,11 +85,20 @@ _ALWAYS_INLINE_ static void _cpu_pause() {
1628 #if defined(__i386__) || defined(__x86_64__) // x86.
1729 __builtin_ia32_pause();
1830 #elif defined(__arm__) || defined(__aarch64__) // ARM.
@@ -27,7 +39,12 @@ index 7a5051035373..968d2cdcdfd0 100644
2739- asm volatile(".insn i 0x0F, 0, x0, x0, 0x010");
2840+ asm volatile(".insn i 0x0F, 0, x0, x0, 0x010" ::: "memory");
2941+ #elif defined(__loongarch64) // Loongarch64.
30- + asm volatile("nop" ::: "memory");
42+ + // Use "ibar 0" repeated 32 times to
43+ + // simulate the delay of the x86 pause instruction (approx 140 cycles).
44+ + // See PR #110639#issuecomment-3675019388
45+ + for (int i = 0; i < 32; i++) {
46+ + __ibar(0);
47+ + }
3148 #endif
3249 #endif
3350 }
0 commit comments