[PATCH v3 4/5] arch,locking/atomic: hexagon: add arch_cmpxchg[64]_local

From: wuqiang.matt
Date: Tue Nov 21 2023 - 09:25:31 EST


hexagon does not implement arch_cmpxchg_local, which causes build
failures for any reference to try_cmpxchg_local, as reported by the
kernel test robot.

This patch implements arch_cmpxchg[64]_local with the native cmpxchg
variant if the corresponding data size is supported; otherwise
__generic_cmpxchg[64]_local is used.
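
For illustration only (hypothetical caller, not part of this patch):
a 32-bit try_cmpxchg_local() user that previously failed to build on
hexagon, and with this patch resolves to the native __cmpxchg_32()
path:

  #include <linux/atomic.h>

  static bool claim_slot(unsigned long *slot)
  {
  	unsigned long old = 0;

  	/* expands to arch_cmpxchg_local() -> __cmpxchg_32() on hexagon */
  	return try_cmpxchg_local(slot, &old, 1UL);
  }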

Reported-by: kernel test robot <lkp@xxxxxxxxx>
Closes: https://lore.kernel.org/oe-kbuild-all/202310272207.tLPflya4-lkp@xxxxxxxxx/

Signed-off-by: wuqiang.matt <wuqiang.matt@xxxxxxxxxxxxx>
Reviewed-by: Masami Hiramatsu (Google) <mhiramat@xxxxxxxxxx>
---
arch/hexagon/include/asm/cmpxchg.h | 51 +++++++++++++++++++++++++++++-
1 file changed, 50 insertions(+), 1 deletion(-)

diff --git a/arch/hexagon/include/asm/cmpxchg.h b/arch/hexagon/include/asm/cmpxchg.h
index bf6cf5579cf4..302fa30f25aa 100644
--- a/arch/hexagon/include/asm/cmpxchg.h
+++ b/arch/hexagon/include/asm/cmpxchg.h
@@ -8,6 +8,8 @@
#ifndef _ASM_CMPXCHG_H
#define _ASM_CMPXCHG_H

+#include <linux/build_bug.h>
+
/*
* __arch_xchg - atomically exchange a register and a memory location
* @x: value to swap
@@ -51,13 +53,15 @@ __arch_xchg(unsigned long x, volatile void *ptr, int size)
* variable casting.
*/

-#define arch_cmpxchg(ptr, old, new) \
+#define __cmpxchg_32(ptr, old, new) \
({ \
__typeof__(ptr) __ptr = (ptr); \
__typeof__(*(ptr)) __old = (old); \
__typeof__(*(ptr)) __new = (new); \
__typeof__(*(ptr)) __oldval = 0; \
\
+ BUILD_BUG_ON(sizeof(*(ptr)) != 4); \
+ \
asm volatile( \
"1: %0 = memw_locked(%1);\n" \
" { P0 = cmp.eq(%0,%2);\n" \
@@ -72,4 +76,49 @@ __arch_xchg(unsigned long x, volatile void *ptr, int size)
__oldval; \
})

+#define __cmpxchg(ptr, old, val, size) \
+({ \
+ __typeof__(*(ptr)) oldval; \
+ \
+ switch (size) { \
+ case 4: \
+ oldval = __cmpxchg_32(ptr, old, val); \
+ break; \
+ default: \
+ BUILD_BUG(); \
+ oldval = val; \
+ break; \
+ } \
+ \
+ oldval; \
+})
+
+#define arch_cmpxchg(ptr, o, n) __cmpxchg((ptr), (o), (n), sizeof(*(ptr)))
+
+/*
+ * Always make arch_cmpxchg[64]_local available: native cmpxchg is
+ * used if the size is supported, else __generic_cmpxchg[64]_local.
+ */
+#include <asm-generic/cmpxchg-local.h>
+
+#define arch_cmpxchg_local(ptr, old, val) \
+({ \
+ __typeof__(*(ptr)) __retval; \
+ int __size = sizeof(*(ptr)); \
+ \
+ switch (__size) { \
+ case 4: \
+ __retval = __cmpxchg_32(ptr, old, val); \
+ break; \
+ default: \
+ __retval = __generic_cmpxchg_local(ptr, old, \
+ val, __size); \
+ break; \
+ } \
+ \
+ __retval; \
+})
+
+#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
+
#endif /* _ASM_CMPXCHG_H */
--
2.40.1