Diffstat (limited to 'include/linux/atomic/atomic-arch-fallback.h')
-rw-r--r--   include/linux/atomic/atomic-arch-fallback.h   3166
1 file changed, 1664 insertions(+), 1502 deletions(-)
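This patch replaces the old nested "#ifndef arch_<op>_relaxed" fallback blocks with flat, per-ordering selection chains: each raw_<op>() resolves at preprocessing time to the arch-provided primitive with the matching (or stronger) ordering if one exists, and otherwise builds the requested ordering out of the _relaxed form plus explicit fences. For orientation, the __atomic_op_acquire()/__atomic_op_release()/__atomic_op_fence() helpers invoked throughout are defined in <linux/atomic.h> roughly as follows (a sketch quoted for context, not part of this diff):

#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	__atomic_acquire_fence();	/* order later accesses after the op */	\
	__ret;								\
})

#define __atomic_op_release(op, args...)				\
({									\
	__atomic_release_fence();	/* order earlier accesses before the op */	\
	op##_relaxed(args);						\
})

#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	__atomic_pre_full_fence();					\
	__ret = op##_relaxed(args);					\
	__atomic_post_full_fence();					\
	__ret;								\
})

Where no arch primitive exists at all, the new chains instead expand to a call to an undefined extern such as raw_xchg_not_implemented(), so any accidental use of an unavailable operation becomes a link-time error rather than a silent miscompile.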
diff --git a/include/linux/atomic/atomic-arch-fallback.h b/include/linux/atomic/atomic-arch-fallback.h
index 1a2d81dbc2e4..99bc1a871dc1 100644
--- a/include/linux/atomic/atomic-arch-fallback.h
+++ b/include/linux/atomic/atomic-arch-fallback.h
@@ -8,2749 +8,2911 @@
#include <linux/compiler.h>
-#ifndef arch_xchg_relaxed
-#define arch_xchg_acquire arch_xchg
-#define arch_xchg_release arch_xchg
-#define arch_xchg_relaxed arch_xchg
-#else /* arch_xchg_relaxed */
-
-#ifndef arch_xchg_acquire
-#define arch_xchg_acquire(...) \
- __atomic_op_acquire(arch_xchg, __VA_ARGS__)
+#if defined(arch_xchg)
+#define raw_xchg arch_xchg
+#elif defined(arch_xchg_relaxed)
+#define raw_xchg(...) \
+ __atomic_op_fence(arch_xchg, __VA_ARGS__)
+#else
+extern void raw_xchg_not_implemented(void);
+#define raw_xchg(...) raw_xchg_not_implemented()
#endif
-#ifndef arch_xchg_release
-#define arch_xchg_release(...) \
- __atomic_op_release(arch_xchg, __VA_ARGS__)
+#if defined(arch_xchg_acquire)
+#define raw_xchg_acquire arch_xchg_acquire
+#elif defined(arch_xchg_relaxed)
+#define raw_xchg_acquire(...) \
+ __atomic_op_acquire(arch_xchg, __VA_ARGS__)
+#elif defined(arch_xchg)
+#define raw_xchg_acquire arch_xchg
+#else
+extern void raw_xchg_acquire_not_implemented(void);
+#define raw_xchg_acquire(...) raw_xchg_acquire_not_implemented()
#endif
-#ifndef arch_xchg
-#define arch_xchg(...) \
- __atomic_op_fence(arch_xchg, __VA_ARGS__)
+#if defined(arch_xchg_release)
+#define raw_xchg_release arch_xchg_release
+#elif defined(arch_xchg_relaxed)
+#define raw_xchg_release(...) \
+ __atomic_op_release(arch_xchg, __VA_ARGS__)
+#elif defined(arch_xchg)
+#define raw_xchg_release arch_xchg
+#else
+extern void raw_xchg_release_not_implemented(void);
+#define raw_xchg_release(...) raw_xchg_release_not_implemented()
+#endif
+
+#if defined(arch_xchg_relaxed)
+#define raw_xchg_relaxed arch_xchg_relaxed
+#elif defined(arch_xchg)
+#define raw_xchg_relaxed arch_xchg
+#else
+extern void raw_xchg_relaxed_not_implemented(void);
+#define raw_xchg_relaxed(...) raw_xchg_relaxed_not_implemented()
+#endif
+
+#if defined(arch_cmpxchg)
+#define raw_cmpxchg arch_cmpxchg
+#elif defined(arch_cmpxchg_relaxed)
+#define raw_cmpxchg(...) \
+ __atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
+#else
+extern void raw_cmpxchg_not_implemented(void);
+#define raw_cmpxchg(...) raw_cmpxchg_not_implemented()
#endif
-#endif /* arch_xchg_relaxed */
-
-#ifndef arch_cmpxchg_relaxed
-#define arch_cmpxchg_acquire arch_cmpxchg
-#define arch_cmpxchg_release arch_cmpxchg
-#define arch_cmpxchg_relaxed arch_cmpxchg
-#else /* arch_cmpxchg_relaxed */
-
-#ifndef arch_cmpxchg_acquire
-#define arch_cmpxchg_acquire(...) \
+#if defined(arch_cmpxchg_acquire)
+#define raw_cmpxchg_acquire arch_cmpxchg_acquire
+#elif defined(arch_cmpxchg_relaxed)
+#define raw_cmpxchg_acquire(...) \
__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
+#elif defined(arch_cmpxchg)
+#define raw_cmpxchg_acquire arch_cmpxchg
+#else
+extern void raw_cmpxchg_acquire_not_implemented(void);
+#define raw_cmpxchg_acquire(...) raw_cmpxchg_acquire_not_implemented()
#endif
-#ifndef arch_cmpxchg_release
-#define arch_cmpxchg_release(...) \
+#if defined(arch_cmpxchg_release)
+#define raw_cmpxchg_release arch_cmpxchg_release
+#elif defined(arch_cmpxchg_relaxed)
+#define raw_cmpxchg_release(...) \
__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
+#elif defined(arch_cmpxchg)
+#define raw_cmpxchg_release arch_cmpxchg
+#else
+extern void raw_cmpxchg_release_not_implemented(void);
+#define raw_cmpxchg_release(...) raw_cmpxchg_release_not_implemented()
+#endif
+
+#if defined(arch_cmpxchg_relaxed)
+#define raw_cmpxchg_relaxed arch_cmpxchg_relaxed
+#elif defined(arch_cmpxchg)
+#define raw_cmpxchg_relaxed arch_cmpxchg
+#else
+extern void raw_cmpxchg_relaxed_not_implemented(void);
+#define raw_cmpxchg_relaxed(...) raw_cmpxchg_relaxed_not_implemented()
+#endif
+
+#if defined(arch_cmpxchg64)
+#define raw_cmpxchg64 arch_cmpxchg64
+#elif defined(arch_cmpxchg64_relaxed)
+#define raw_cmpxchg64(...) \
+ __atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
+#else
+extern void raw_cmpxchg64_not_implemented(void);
+#define raw_cmpxchg64(...) raw_cmpxchg64_not_implemented()
#endif
-#ifndef arch_cmpxchg
-#define arch_cmpxchg(...) \
- __atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
-#endif
-
-#endif /* arch_cmpxchg_relaxed */
-
-#ifndef arch_cmpxchg64_relaxed
-#define arch_cmpxchg64_acquire arch_cmpxchg64
-#define arch_cmpxchg64_release arch_cmpxchg64
-#define arch_cmpxchg64_relaxed arch_cmpxchg64
-#else /* arch_cmpxchg64_relaxed */
-
-#ifndef arch_cmpxchg64_acquire
-#define arch_cmpxchg64_acquire(...) \
+#if defined(arch_cmpxchg64_acquire)
+#define raw_cmpxchg64_acquire arch_cmpxchg64_acquire
+#elif defined(arch_cmpxchg64_relaxed)
+#define raw_cmpxchg64_acquire(...) \
__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
+#elif defined(arch_cmpxchg64)
+#define raw_cmpxchg64_acquire arch_cmpxchg64
+#else
+extern void raw_cmpxchg64_acquire_not_implemented(void);
+#define raw_cmpxchg64_acquire(...) raw_cmpxchg64_acquire_not_implemented()
#endif
-#ifndef arch_cmpxchg64_release
-#define arch_cmpxchg64_release(...) \
+#if defined(arch_cmpxchg64_release)
+#define raw_cmpxchg64_release arch_cmpxchg64_release
+#elif defined(arch_cmpxchg64_relaxed)
+#define raw_cmpxchg64_release(...) \
__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
+#elif defined(arch_cmpxchg64)
+#define raw_cmpxchg64_release arch_cmpxchg64
+#else
+extern void raw_cmpxchg64_release_not_implemented(void);
+#define raw_cmpxchg64_release(...) raw_cmpxchg64_release_not_implemented()
+#endif
+
+#if defined(arch_cmpxchg64_relaxed)
+#define raw_cmpxchg64_relaxed arch_cmpxchg64_relaxed
+#elif defined(arch_cmpxchg64)
+#define raw_cmpxchg64_relaxed arch_cmpxchg64
+#else
+extern void raw_cmpxchg64_relaxed_not_implemented(void);
+#define raw_cmpxchg64_relaxed(...) raw_cmpxchg64_relaxed_not_implemented()
+#endif
+
+#if defined(arch_cmpxchg128)
+#define raw_cmpxchg128 arch_cmpxchg128
+#elif defined(arch_cmpxchg128_relaxed)
+#define raw_cmpxchg128(...) \
+ __atomic_op_fence(arch_cmpxchg128, __VA_ARGS__)
+#else
+extern void raw_cmpxchg128_not_implemented(void);
+#define raw_cmpxchg128(...) raw_cmpxchg128_not_implemented()
#endif
-#ifndef arch_cmpxchg64
-#define arch_cmpxchg64(...) \
- __atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
-#endif
-
-#endif /* arch_cmpxchg64_relaxed */
-
-#ifndef arch_cmpxchg128_relaxed
-#define arch_cmpxchg128_acquire arch_cmpxchg128
-#define arch_cmpxchg128_release arch_cmpxchg128
-#define arch_cmpxchg128_relaxed arch_cmpxchg128
-#else /* arch_cmpxchg128_relaxed */
-
-#ifndef arch_cmpxchg128_acquire
-#define arch_cmpxchg128_acquire(...) \
+#if defined(arch_cmpxchg128_acquire)
+#define raw_cmpxchg128_acquire arch_cmpxchg128_acquire
+#elif defined(arch_cmpxchg128_relaxed)
+#define raw_cmpxchg128_acquire(...) \
__atomic_op_acquire(arch_cmpxchg128, __VA_ARGS__)
+#elif defined(arch_cmpxchg128)
+#define raw_cmpxchg128_acquire arch_cmpxchg128
+#else
+extern void raw_cmpxchg128_acquire_not_implemented(void);
+#define raw_cmpxchg128_acquire(...) raw_cmpxchg128_acquire_not_implemented()
#endif
-#ifndef arch_cmpxchg128_release
-#define arch_cmpxchg128_release(...) \
+#if defined(arch_cmpxchg128_release)
+#define raw_cmpxchg128_release arch_cmpxchg128_release
+#elif defined(arch_cmpxchg128_relaxed)
+#define raw_cmpxchg128_release(...) \
__atomic_op_release(arch_cmpxchg128, __VA_ARGS__)
-#endif
-
-#ifndef arch_cmpxchg128
-#define arch_cmpxchg128(...) \
- __atomic_op_fence(arch_cmpxchg128, __VA_ARGS__)
-#endif
-
-#endif /* arch_cmpxchg128_relaxed */
-
-#ifndef arch_try_cmpxchg_relaxed
-#ifdef arch_try_cmpxchg
-#define arch_try_cmpxchg_acquire arch_try_cmpxchg
-#define arch_try_cmpxchg_release arch_try_cmpxchg
-#define arch_try_cmpxchg_relaxed arch_try_cmpxchg
-#endif /* arch_try_cmpxchg */
-
-#ifndef arch_try_cmpxchg
-#define arch_try_cmpxchg(_ptr, _oldp, _new) \
+#elif defined(arch_cmpxchg128)
+#define raw_cmpxchg128_release arch_cmpxchg128
+#else
+extern void raw_cmpxchg128_release_not_implemented(void);
+#define raw_cmpxchg128_release(...) raw_cmpxchg128_release_not_implemented()
+#endif
+
+#if defined(arch_cmpxchg128_relaxed)
+#define raw_cmpxchg128_relaxed arch_cmpxchg128_relaxed
+#elif defined(arch_cmpxchg128)
+#define raw_cmpxchg128_relaxed arch_cmpxchg128
+#else
+extern void raw_cmpxchg128_relaxed_not_implemented(void);
+#define raw_cmpxchg128_relaxed(...) raw_cmpxchg128_relaxed_not_implemented()
+#endif
+
+#if defined(arch_try_cmpxchg)
+#define raw_try_cmpxchg arch_try_cmpxchg
+#elif defined(arch_try_cmpxchg_relaxed)
+#define raw_try_cmpxchg(...) \
+ __atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
+#else
+#define raw_try_cmpxchg(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg */
+#endif
-#ifndef arch_try_cmpxchg_acquire
-#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg_acquire)
+#define raw_try_cmpxchg_acquire arch_try_cmpxchg_acquire
+#elif defined(arch_try_cmpxchg_relaxed)
+#define raw_try_cmpxchg_acquire(...) \
+ __atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
+#elif defined(arch_try_cmpxchg)
+#define raw_try_cmpxchg_acquire arch_try_cmpxchg
+#else
+#define raw_try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg_acquire((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg_acquire */
+#endif
-#ifndef arch_try_cmpxchg_release
-#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg_release)
+#define raw_try_cmpxchg_release arch_try_cmpxchg_release
+#elif defined(arch_try_cmpxchg_relaxed)
+#define raw_try_cmpxchg_release(...) \
+ __atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
+#elif defined(arch_try_cmpxchg)
+#define raw_try_cmpxchg_release arch_try_cmpxchg
+#else
+#define raw_try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg_release((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg_release */
+#endif
-#ifndef arch_try_cmpxchg_relaxed
-#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg_relaxed)
+#define raw_try_cmpxchg_relaxed arch_try_cmpxchg_relaxed
+#elif defined(arch_try_cmpxchg)
+#define raw_try_cmpxchg_relaxed arch_try_cmpxchg
+#else
+#define raw_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg_relaxed((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg_relaxed */
-
-#else /* arch_try_cmpxchg_relaxed */
-
-#ifndef arch_try_cmpxchg_acquire
-#define arch_try_cmpxchg_acquire(...) \
- __atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
#endif
-#ifndef arch_try_cmpxchg_release
-#define arch_try_cmpxchg_release(...) \
- __atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
-#endif
-
-#ifndef arch_try_cmpxchg
-#define arch_try_cmpxchg(...) \
- __atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
-#endif
-
-#endif /* arch_try_cmpxchg_relaxed */
-
-#ifndef arch_try_cmpxchg64_relaxed
-#ifdef arch_try_cmpxchg64
-#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64
-#define arch_try_cmpxchg64_release arch_try_cmpxchg64
-#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64
-#endif /* arch_try_cmpxchg64 */
-
-#ifndef arch_try_cmpxchg64
-#define arch_try_cmpxchg64(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg64)
+#define raw_try_cmpxchg64 arch_try_cmpxchg64
+#elif defined(arch_try_cmpxchg64_relaxed)
+#define raw_try_cmpxchg64(...) \
+ __atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
+#else
+#define raw_try_cmpxchg64(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg64((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg64 */
+#endif
-#ifndef arch_try_cmpxchg64_acquire
-#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg64_acquire)
+#define raw_try_cmpxchg64_acquire arch_try_cmpxchg64_acquire
+#elif defined(arch_try_cmpxchg64_relaxed)
+#define raw_try_cmpxchg64_acquire(...) \
+ __atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
+#elif defined(arch_try_cmpxchg64)
+#define raw_try_cmpxchg64_acquire arch_try_cmpxchg64
+#else
+#define raw_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg64_acquire((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg64_acquire */
+#endif
-#ifndef arch_try_cmpxchg64_release
-#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg64_release)
+#define raw_try_cmpxchg64_release arch_try_cmpxchg64_release
+#elif defined(arch_try_cmpxchg64_relaxed)
+#define raw_try_cmpxchg64_release(...) \
+ __atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
+#elif defined(arch_try_cmpxchg64)
+#define raw_try_cmpxchg64_release arch_try_cmpxchg64
+#else
+#define raw_try_cmpxchg64_release(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg64_release((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg64_release */
+#endif
-#ifndef arch_try_cmpxchg64_relaxed
-#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg64_relaxed)
+#define raw_try_cmpxchg64_relaxed arch_try_cmpxchg64_relaxed
+#elif defined(arch_try_cmpxchg64)
+#define raw_try_cmpxchg64_relaxed arch_try_cmpxchg64
+#else
+#define raw_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg64_relaxed */
-
-#else /* arch_try_cmpxchg64_relaxed */
-
-#ifndef arch_try_cmpxchg64_acquire
-#define arch_try_cmpxchg64_acquire(...) \
- __atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
-#endif
-
-#ifndef arch_try_cmpxchg64_release
-#define arch_try_cmpxchg64_release(...) \
- __atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
-#endif
-
-#ifndef arch_try_cmpxchg64
-#define arch_try_cmpxchg64(...) \
- __atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
#endif
-#endif /* arch_try_cmpxchg64_relaxed */
-
-#ifndef arch_try_cmpxchg128_relaxed
-#ifdef arch_try_cmpxchg128
-#define arch_try_cmpxchg128_acquire arch_try_cmpxchg128
-#define arch_try_cmpxchg128_release arch_try_cmpxchg128
-#define arch_try_cmpxchg128_relaxed arch_try_cmpxchg128
-#endif /* arch_try_cmpxchg128 */
-
-#ifndef arch_try_cmpxchg128
-#define arch_try_cmpxchg128(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg128)
+#define raw_try_cmpxchg128 arch_try_cmpxchg128
+#elif defined(arch_try_cmpxchg128_relaxed)
+#define raw_try_cmpxchg128(...) \
+ __atomic_op_fence(arch_try_cmpxchg128, __VA_ARGS__)
+#else
+#define raw_try_cmpxchg128(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg128((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg128((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg128 */
+#endif
-#ifndef arch_try_cmpxchg128_acquire
-#define arch_try_cmpxchg128_acquire(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg128_acquire)
+#define raw_try_cmpxchg128_acquire arch_try_cmpxchg128_acquire
+#elif defined(arch_try_cmpxchg128_relaxed)
+#define raw_try_cmpxchg128_acquire(...) \
+ __atomic_op_acquire(arch_try_cmpxchg128, __VA_ARGS__)
+#elif defined(arch_try_cmpxchg128)
+#define raw_try_cmpxchg128_acquire arch_try_cmpxchg128
+#else
+#define raw_try_cmpxchg128_acquire(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg128_acquire((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg128_acquire((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg128_acquire */
+#endif
-#ifndef arch_try_cmpxchg128_release
-#define arch_try_cmpxchg128_release(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg128_release)
+#define raw_try_cmpxchg128_release arch_try_cmpxchg128_release
+#elif defined(arch_try_cmpxchg128_relaxed)
+#define raw_try_cmpxchg128_release(...) \
+ __atomic_op_release(arch_try_cmpxchg128, __VA_ARGS__)
+#elif defined(arch_try_cmpxchg128)
+#define raw_try_cmpxchg128_release arch_try_cmpxchg128
+#else
+#define raw_try_cmpxchg128_release(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg128_release((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg128_release((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg128_release */
+#endif
-#ifndef arch_try_cmpxchg128_relaxed
-#define arch_try_cmpxchg128_relaxed(_ptr, _oldp, _new) \
+#if defined(arch_try_cmpxchg128_relaxed)
+#define raw_try_cmpxchg128_relaxed arch_try_cmpxchg128_relaxed
+#elif defined(arch_try_cmpxchg128)
+#define raw_try_cmpxchg128_relaxed arch_try_cmpxchg128
+#else
+#define raw_try_cmpxchg128_relaxed(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg128_relaxed((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg128_relaxed((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg128_relaxed */
-
-#else /* arch_try_cmpxchg128_relaxed */
-
-#ifndef arch_try_cmpxchg128_acquire
-#define arch_try_cmpxchg128_acquire(...) \
- __atomic_op_acquire(arch_try_cmpxchg128, __VA_ARGS__)
#endif
-#ifndef arch_try_cmpxchg128_release
-#define arch_try_cmpxchg128_release(...) \
- __atomic_op_release(arch_try_cmpxchg128, __VA_ARGS__)
-#endif
+#define raw_cmpxchg_local arch_cmpxchg_local
-#ifndef arch_try_cmpxchg128
-#define arch_try_cmpxchg128(...) \
- __atomic_op_fence(arch_try_cmpxchg128, __VA_ARGS__)
+#ifdef arch_try_cmpxchg_local
+#define raw_try_cmpxchg_local arch_try_cmpxchg_local
+#else
+#define raw_try_cmpxchg_local(_ptr, _oldp, _new) \
+({ \
+ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+ ___r = raw_cmpxchg_local((_ptr), ___o, (_new)); \
+ if (unlikely(___r != ___o)) \
+ *___op = ___r; \
+ likely(___r == ___o); \
+})
#endif
-#endif /* arch_try_cmpxchg128_relaxed */
+#define raw_cmpxchg64_local arch_cmpxchg64_local
-#ifndef arch_try_cmpxchg_local
-#define arch_try_cmpxchg_local(_ptr, _oldp, _new) \
+#ifdef arch_try_cmpxchg64_local
+#define raw_try_cmpxchg64_local arch_try_cmpxchg64_local
+#else
+#define raw_try_cmpxchg64_local(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg_local((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg64_local((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg_local */
+#endif
+
+#define raw_cmpxchg128_local arch_cmpxchg128_local
-#ifndef arch_try_cmpxchg64_local
-#define arch_try_cmpxchg64_local(_ptr, _oldp, _new) \
+#ifdef arch_try_cmpxchg128_local
+#define raw_try_cmpxchg128_local arch_try_cmpxchg128_local
+#else
+#define raw_try_cmpxchg128_local(_ptr, _oldp, _new) \
({ \
typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
- ___r = arch_cmpxchg64_local((_ptr), ___o, (_new)); \
+ ___r = raw_cmpxchg128_local((_ptr), ___o, (_new)); \
if (unlikely(___r != ___o)) \
*___op = ___r; \
likely(___r == ___o); \
})
-#endif /* arch_try_cmpxchg64_local */
+#endif
+
+#define raw_sync_cmpxchg arch_sync_cmpxchg
+
+#define raw_atomic_read arch_atomic_read
-#ifndef arch_atomic_read_acquire
+#if defined(arch_atomic_read_acquire)
+#define raw_atomic_read_acquire arch_atomic_read_acquire
+#elif defined(arch_atomic_read)
+#define raw_atomic_read_acquire arch_atomic_read
+#else
static __always_inline int
-arch_atomic_read_acquire(const atomic_t *v)
+raw_atomic_read_acquire(const atomic_t *v)
{
int ret;
if (__native_word(atomic_t)) {
ret = smp_load_acquire(&(v)->counter);
} else {
- ret = arch_atomic_read(v);
+ ret = raw_atomic_read(v);
__atomic_acquire_fence();
}
return ret;
}
-#define arch_atomic_read_acquire arch_atomic_read_acquire
#endif
-#ifndef arch_atomic_set_release
+#define raw_atomic_set arch_atomic_set
+
+#if defined(arch_atomic_set_release)
+#define raw_atomic_set_release arch_atomic_set_release
+#elif defined(arch_atomic_set)
+#define raw_atomic_set_release arch_atomic_set
+#else
static __always_inline void
-arch_atomic_set_release(atomic_t *v, int i)
+raw_atomic_set_release(atomic_t *v, int i)
{
if (__native_word(atomic_t)) {
smp_store_release(&(v)->counter, i);
} else {
__atomic_release_fence();
- arch_atomic_set(v, i);
+ raw_atomic_set(v, i);
}
}
-#define arch_atomic_set_release arch_atomic_set_release
#endif
-#ifndef arch_atomic_add_return_relaxed
-#define arch_atomic_add_return_acquire arch_atomic_add_return
-#define arch_atomic_add_return_release arch_atomic_add_return
-#define arch_atomic_add_return_relaxed arch_atomic_add_return
-#else /* arch_atomic_add_return_relaxed */
+#define raw_atomic_add arch_atomic_add
+
+#if defined(arch_atomic_add_return)
+#define raw_atomic_add_return arch_atomic_add_return
+#elif defined(arch_atomic_add_return_relaxed)
+static __always_inline int
+raw_atomic_add_return(int i, atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = arch_atomic_add_return_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#else
+#error "Unable to define raw_atomic_add_return"
+#endif
-#ifndef arch_atomic_add_return_acquire
+#if defined(arch_atomic_add_return_acquire)
+#define raw_atomic_add_return_acquire arch_atomic_add_return_acquire
+#elif defined(arch_atomic_add_return_relaxed)
static __always_inline int
-arch_atomic_add_return_acquire(int i, atomic_t *v)
+raw_atomic_add_return_acquire(int i, atomic_t *v)
{
int ret = arch_atomic_add_return_relaxed(i, v);
__atomic_acquire_fence();
return ret;
}
-#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
+#elif defined(arch_atomic_add_return)
+#define raw_atomic_add_return_acquire arch_atomic_add_return
+#else
+#error "Unable to define raw_atomic_add_return_acquire"
#endif
-#ifndef arch_atomic_add_return_release
+#if defined(arch_atomic_add_return_release)
+#define raw_atomic_add_return_release arch_atomic_add_return_release
+#elif defined(arch_atomic_add_return_relaxed)
static __always_inline int
-arch_atomic_add_return_release(int i, atomic_t *v)
+raw_atomic_add_return_release(int i, atomic_t *v)
{
__atomic_release_fence();
return arch_atomic_add_return_relaxed(i, v);
}
-#define arch_atomic_add_return_release arch_atomic_add_return_release
+#elif defined(arch_atomic_add_return)
+#define raw_atomic_add_return_release arch_atomic_add_return
+#else
+#error "Unable to define raw_atomic_add_return_release"
+#endif
+
+#if defined(arch_atomic_add_return_relaxed)
+#define raw_atomic_add_return_relaxed arch_atomic_add_return_relaxed
+#elif defined(arch_atomic_add_return)
+#define raw_atomic_add_return_relaxed arch_atomic_add_return
+#else
+#error "Unable to define raw_atomic_add_return_relaxed"
#endif
-#ifndef arch_atomic_add_return
+#if defined(arch_atomic_fetch_add)
+#define raw_atomic_fetch_add arch_atomic_fetch_add
+#elif defined(arch_atomic_fetch_add_relaxed)
static __always_inline int
-arch_atomic_add_return(int i, atomic_t *v)
+raw_atomic_fetch_add(int i, atomic_t *v)
{
int ret;
__atomic_pre_full_fence();
- ret = arch_atomic_add_return_relaxed(i, v);
+ ret = arch_atomic_fetch_add_relaxed(i, v);
__atomic_post_full_fence();
return ret;
}
-#define arch_atomic_add_return arch_atomic_add_return
+#else
+#error "Unable to define raw_atomic_fetch_add"
#endif
-#endif /* arch_atomic_add_return_relaxed */
-
-#ifndef arch_atomic_fetch_add_relaxed
-#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
-#define arch_atomic_fetch_add_release arch_atomic_fetch_add
-#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
-#else /* arch_atomic_fetch_add_relaxed */
-
-#ifndef arch_atomic_fetch_add_acquire
+#if defined(arch_atomic_fetch_add_acquire)
+#define raw_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
+#elif defined(arch_atomic_fetch_add_relaxed)
static __always_inline int
-arch_atomic_fetch_add_acquire(int i, atomic_t *v)
+raw_atomic_fetch_add_acquire(int i, atomic_t *v)
{
int ret = arch_atomic_fetch_add_relaxed(i, v);
__atomic_acquire_fence();
return ret;
}
-#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
+#elif defined(arch_atomic_fetch_add)
+#define raw_atomic_fetch_add_acquire arch_atomic_fetch_add
+#else
+#error "Unable to define raw_atomic_fetch_add_acquire"
#endif
-#ifndef arch_atomic_fetch_add_release
+#if defined(arch_atomic_fetch_add_release)
+#define raw_atomic_fetch_add_release arch_atomic_fetch_add_release
+#elif defined(arch_atomic_fetch_add_relaxed)
static __always_inline int
-arch_atomic_fetch_add_release(int i, atomic_t *v)
+raw_atomic_fetch_add_release(int i, atomic_t *v)
{
__atomic_release_fence();
return arch_atomic_fetch_add_relaxed(i, v);
}
-#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
+#elif defined(arch_atomic_fetch_add)
+#define raw_atomic_fetch_add_release arch_atomic_fetch_add
+#else
+#error "Unable to define raw_atomic_fetch_add_release"
+#endif
+
+#if defined(arch_atomic_fetch_add_relaxed)
+#define raw_atomic_fetch_add_relaxed arch_atomic_fetch_add_relaxed
+#elif defined(arch_atomic_fetch_add)
+#define raw_atomic_fetch_add_relaxed arch_atomic_fetch_add
+#else
+#error "Unable to define raw_atomic_fetch_add_relaxed"
#endif
-#ifndef arch_atomic_fetch_add
+#define raw_atomic_sub arch_atomic_sub
+
+#if defined(arch_atomic_sub_return)
+#define raw_atomic_sub_return arch_atomic_sub_return
+#elif defined(arch_atomic_sub_return_relaxed)
static __always_inline int
-arch_atomic_fetch_add(int i, atomic_t *v)
+raw_atomic_sub_return(int i, atomic_t *v)
{
int ret;
__atomic_pre_full_fence();
- ret = arch_atomic_fetch_add_relaxed(i, v);
+ ret = arch_atomic_sub_return_relaxed(i, v);
__atomic_post_full_fence();
return ret;
}
-#define arch_atomic_fetch_add arch_atomic_fetch_add
+#else
+#error "Unable to define raw_atomic_sub_return"
#endif
-#endif /* arch_atomic_fetch_add_relaxed */
-
-#ifndef arch_atomic_sub_return_relaxed
-#define arch_atomic_sub_return_acquire arch_atomic_sub_return
-#define arch_atomic_sub_return_release arch_atomic_sub_return
-#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
-#else /* arch_atomic_sub_return_relaxed */
-
-#ifndef arch_atomic_sub_return_acquire
+#if defined(arch_atomic_sub_return_acquire)
+#define raw_atomic_sub_return_acquire arch_atomic_sub_return_acquire
+#elif defined(arch_atomic_sub_return_relaxed)
static __always_inline int
-arch_atomic_sub_return_acquire(int i, atomic_t *v)
+raw_atomic_sub_return_acquire(int i, atomic_t *v)
{
int ret = arch_atomic_sub_return_relaxed(i, v);
__atomic_acquire_fence();
return ret;
}
-#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
+#elif defined(arch_atomic_sub_return)
+#define raw_atomic_sub_return_acquire arch_atomic_sub_return
+#else
+#error "Unable to define raw_atomic_sub_return_acquire"
#endif
-#ifndef arch_atomic_sub_return_release
+#if defined(arch_atomic_sub_return_release)
+#define raw_atomic_sub_return_release arch_atomic_sub_return_release
+#elif defined(arch_atomic_sub_return_relaxed)
static __always_inline int
-arch_atomic_sub_return_release(int i, atomic_t *v)
+raw_atomic_sub_return_release(int i, atomic_t *v)
{
__atomic_release_fence();
return arch_atomic_sub_return_relaxed(i, v);
}
-#define arch_atomic_sub_return_release arch_atomic_sub_return_release
+#elif defined(arch_atomic_sub_return)
+#define raw_atomic_sub_return_release arch_atomic_sub_return
+#else
+#error "Unable to define raw_atomic_sub_return_release"
#endif
-#ifndef arch_atomic_sub_return
+#if defined(arch_atomic_sub_return_relaxed)
+#define raw_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed
+#elif defined(arch_atomic_sub_return)
+#define raw_atomic_sub_return_relaxed arch_atomic_sub_return
+#else
+#error "Unable to define raw_atomic_sub_return_relaxed"
+#endif
+
+#if defined(arch_atomic_fetch_sub)
+#define raw_atomic_fetch_sub arch_atomic_fetch_sub
+#elif defined(arch_atomic_fetch_sub_relaxed)
static __always_inline int
-arch_atomic_sub_return(int i, atomic_t *v)
+raw_atomic_fetch_sub(int i, atomic_t *v)
{
int ret;
__atomic_pre_full_fence();
- ret = arch_atomic_sub_return_relaxed(i, v);
+ ret = arch_atomic_fetch_sub_relaxed(i, v);
__atomic_post_full_fence();
return ret;
}
-#define arch_atomic_sub_return arch_atomic_sub_return
+#else
+#error "Unable to define raw_atomic_fetch_sub"
#endif
-#endif /* arch_atomic_sub_return_relaxed */
-
-#ifndef arch_atomic_fetch_sub_relaxed
-#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
-#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
-#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
-#else /* arch_atomic_fetch_sub_relaxed */
-
-#ifndef arch_atomic_fetch_sub_acquire
+#if defined(arch_atomic_fetch_sub_acquire)
+#define raw_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
+#elif defined(arch_atomic_fetch_sub_relaxed)
static __always_inline int
-arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
+raw_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
int ret = arch_atomic_fetch_sub_relaxed(i, v);
__atomic_acquire_fence();
return ret;
}
-#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
+#elif defined(arch_atomic_fetch_sub)
+#define raw_atomic_fetch_sub_acquire arch_atomic_fetch_sub
+#else
+#error "Unable to define raw_atomic_fetch_sub_acquire"
#endif
-#ifndef arch_atomic_fetch_sub_release
+#if defined(arch_atomic_fetch_sub_release)
+#define raw_atomic_fetch_sub_release arch_atomic_fetch_sub_release
+#elif defined(arch_atomic_fetch_sub_relaxed)
static __always_inline int
-arch_atomic_fetch_sub_release(int i, atomic_t *v)
+raw_atomic_fetch_sub_release(int i, atomic_t *v)
{
__atomic_release_fence();
return arch_atomic_fetch_sub_relaxed(i, v);
}
-#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
+#elif defined(arch_atomic_fetch_sub)
+#define raw_atomic_fetch_sub_release arch_atomic_fetch_sub
+#else
+#error "Unable to define raw_atomic_fetch_sub_release"
#endif
-#ifndef arch_atomic_fetch_sub
-static __always_inline int
-arch_atomic_fetch_sub(int i, atomic_t *v)
-{
- int ret;
- __atomic_pre_full_fence();
- ret = arch_atomic_fetch_sub_relaxed(i, v);
- __atomic_post_full_fence();
- return ret;
-}
-#define arch_atomic_fetch_sub arch_atomic_fetch_sub
+#if defined(arch_atomic_fetch_sub_relaxed)
+#define raw_atomic_fetch_sub_relaxed arch_atomic_fetch_sub_relaxed
+#elif defined(arch_atomic_fetch_sub)
+#define raw_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
+#else
+#error "Unable to define raw_atomic_fetch_sub_relaxed"
#endif
-#endif /* arch_atomic_fetch_sub_relaxed */
-
-#ifndef arch_atomic_inc
+#if defined(arch_atomic_inc)
+#define raw_atomic_inc arch_atomic_inc
+#else
static __always_inline void
-arch_atomic_inc(atomic_t *v)
+raw_atomic_inc(atomic_t *v)
{
- arch_atomic_add(1, v);
+ raw_atomic_add(1, v);
}
-#define arch_atomic_inc arch_atomic_inc
#endif
-#ifndef arch_atomic_inc_return_relaxed
-#ifdef arch_atomic_inc_return
-#define arch_atomic_inc_return_acquire arch_atomic_inc_return
-#define arch_atomic_inc_return_release arch_atomic_inc_return
-#define arch_atomic_inc_return_relaxed arch_atomic_inc_return
-#endif /* arch_atomic_inc_return */
-
-#ifndef arch_atomic_inc_return
+#if defined(arch_atomic_inc_return)
+#define raw_atomic_inc_return arch_atomic_inc_return
+#elif defined(arch_atomic_inc_return_relaxed)
static __always_inline int
-arch_atomic_inc_return(atomic_t *v)
+raw_atomic_inc_return(atomic_t *v)
{
- return arch_atomic_add_return(1, v);
+ int ret;
+ __atomic_pre_full_fence();
+ ret = arch_atomic_inc_return_relaxed(v);
+ __atomic_post_full_fence();
+ return ret;
}
-#define arch_atomic_inc_return arch_atomic_inc_return
-#endif
-
-#ifndef arch_atomic_inc_return_acquire
+#else
static __always_inline int
-arch_atomic_inc_return_acquire(atomic_t *v)
+raw_atomic_inc_return(atomic_t *v)
{
- return arch_atomic_add_return_acquire(1, v);
+ return raw_atomic_add_return(1, v);
}
-#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif
-#ifndef arch_atomic_inc_return_release
+#if defined(arch_atomic_inc_return_acquire)
+#define raw_atomic_inc_return_acquire arch_atomic_inc_return_acquire
+#elif defined(arch_atomic_inc_return_relaxed)
static __always_inline int
-arch_atomic_inc_return_release(atomic_t *v)
+raw_atomic_inc_return_acquire(atomic_t *v)
{
- return arch_atomic_add_return_release(1, v);
+ int ret = arch_atomic_inc_return_relaxed(v);
+ __atomic_acquire_fence();
+ return ret;
}
-#define arch_atomic_inc_return_release arch_atomic_inc_return_release
-#endif
-
-#ifndef arch_atomic_inc_return_relaxed
+#elif defined(arch_atomic_inc_return)
+#define raw_atomic_inc_return_acquire arch_atomic_inc_return
+#else
static __always_inline int
-arch_atomic_inc_return_relaxed(atomic_t *v)
+raw_atomic_inc_return_acquire(atomic_t *v)
{
- return arch_atomic_add_return_relaxed(1, v);
+ return raw_atomic_add_return_acquire(1, v);
}
-#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
#endif
-#else /* arch_atomic_inc_return_relaxed */
-
-#ifndef arch_atomic_inc_return_acquire
+#if defined(arch_atomic_inc_return_release)
+#define raw_atomic_inc_return_release arch_atomic_inc_return_release
+#elif defined(arch_atomic_inc_return_relaxed)