28 files changed, 2403 insertions, 82 deletions
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index 9543b5e0534d..a08890da696c 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -101,8 +101,8 @@ static inline long arch_atomic64_dec_if_positive(atomic64_t *v)

#define ATOMIC_INIT(i)	{ (i) }

-#define arch_atomic_read(v)		READ_ONCE((v)->counter)
-#define arch_atomic_set(v, i)		WRITE_ONCE(((v)->counter), (i))
+#define arch_atomic_read(v)		__READ_ONCE((v)->counter)
+#define arch_atomic_set(v, i)		__WRITE_ONCE(((v)->counter), (i))

#define arch_atomic_add_return_relaxed	arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire	arch_atomic_add_return_acquire
@@ -225,6 +225,6 @@ static inline long arch_atomic64_dec_if_positive(atomic64_t *v)

#define arch_atomic64_dec_if_positive	arch_atomic64_dec_if_positive

-#include <asm-generic/atomic-instrumented.h>
+#define ARCH_ATOMIC

#endif /* __ASM_ATOMIC_H */
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 115127c7ad28..a9ae58826074 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -28,7 +28,7 @@ static __always_inline int arch_atomic_read(const atomic_t *v)
	 * Note for KASAN: we deliberately don't use READ_ONCE_NOCHECK() here,
	 * it's non-inlined function that increases binary size and stack usage.
	 */
-	return READ_ONCE((v)->counter);
+	return __READ_ONCE((v)->counter);
}

/**
@@ -40,7 +40,7 @@ static __always_inline int arch_atomic_read(const atomic_t *v)
 */
static __always_inline void arch_atomic_set(atomic_t *v, int i)
{
-	WRITE_ONCE(v->counter, i);
+	__WRITE_ONCE(v->counter, i);
}

/**
@@ -166,6 +166,7 @@ static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}
+#define arch_atomic_add_return arch_atomic_add_return

/**
 * arch_atomic_sub_return - subtract integer and return
@@ -178,32 +179,37 @@ static __always_inline int arch_atomic_sub_return(int i, atomic_t *v)
{
	return arch_atomic_add_return(-i, v);
}
+#define arch_atomic_sub_return arch_atomic_sub_return

static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
	return xadd(&v->counter, i);
}
+#define arch_atomic_fetch_add arch_atomic_fetch_add

static __always_inline int arch_atomic_fetch_sub(int i, atomic_t *v)
{
	return xadd(&v->counter, -i);
}
+#define arch_atomic_fetch_sub arch_atomic_fetch_sub

static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
+#define arch_atomic_cmpxchg arch_atomic_cmpxchg

-#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	return try_cmpxchg(&v->counter, old, new);
}
+#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg

static inline int arch_atomic_xchg(atomic_t *v, int new)
{
	return arch_xchg(&v->counter, new);
}
+#define arch_atomic_xchg arch_atomic_xchg

static inline void arch_atomic_and(int i, atomic_t *v)
{
@@ -221,6 +227,7 @@ static inline int arch_atomic_fetch_and(int i, atomic_t *v)

	return val;
}
+#define arch_atomic_fetch_and arch_atomic_fetch_and

static inline void arch_atomic_or(int i, atomic_t *v)
{
@@ -238,6 +245,7 @@ static inline int arch_atomic_fetch_or(int i, atomic_t *v)

	return val;
}
+#define arch_atomic_fetch_or arch_atomic_fetch_or

static inline void arch_atomic_xor(int i, atomic_t *v)
{
@@ -255,6 +263,7 @@ static inline int arch_atomic_fetch_xor(int i, atomic_t *v)

	return val;
}
+#define arch_atomic_fetch_xor arch_atomic_fetch_xor

#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
@@ -262,6 +271,6 @@ static inline int arch_atomic_fetch_xor(int i, atomic_t *v)
# include <asm/atomic64_64.h>
#endif

-#include <asm-generic/atomic-instrumented.h>
+#define ARCH_ATOMIC

#endif /* _ASM_X86_ATOMIC_H */
diff --git a/arch/x86/include/asm/atomic64_32.h b/arch/x86/include/asm/atomic64_32.h
index 52cfaecb13f9..5efd01b548d1 100644
--- a/arch/x86/include/asm/atomic64_32.h
+++ b/arch/x86/include/asm/atomic64_32.h
@@ -75,6 +75,7 @@ static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
{
	return arch_cmpxchg64(&v->counter, o, n);
}
+#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

/**
 * arch_atomic64_xchg - xchg atomic64 variable
@@ -94,6 +95,7 @@ static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
			     : "memory");
	return o;
}
+#define arch_atomic64_xchg arch_atomic64_xchg

/**
 * arch_atomic64_set - set atomic64 variable
@@ -138,6 +140,7 @@ static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
+#define arch_atomic64_add_return arch_atomic64_add_return

/*
 * Other variants with different arithmetic operators:
@@ -149,6 +152,7 @@ static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
+#define arch_atomic64_sub_return arch_atomic64_sub_return

static inline s64 arch_atomic64_inc_return(atomic64_t *v)
{
@@ -242,6 +246,7 @@ static inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
			     "S" (v) : "memory");
	return (int)a;
}
+#define arch_atomic64_add_unless arch_atomic64_add_unless

static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
@@ -281,6 +286,7 @@ static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)

	return old;
}
+#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
@@ -299,6 +305,7 @@ static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)

	return old;
}
+#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
@@ -317,6 +324,7 @@ static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)

	return old;
}
+#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
@@ -327,6 +335,7 @@ static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)

	return old;
}
+#define arch_atomic64_fetch_add arch_atomic64_fetch_add

#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))
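The repeated self-referential defines added above (#define arch_atomic64_xchg arch_atomic64_xchg and friends) are not redundant: the generated fallback header introduced later in this patch probes each operation with the preprocessor, and a plain C function is invisible to #ifdef, so every op an architecture implements must also be advertised as a macro of the same name. A minimal sketch of the detection pattern follows; arch_atomic_foo and arch_atomic_bar are purely hypothetical ops used for illustration, not anything in the kernel:

/* arch header: implement an op and advertise it to the preprocessor */
static inline int arch_atomic_foo(atomic_t *v)
{
        return 0;       /* stand-in for a real arch implementation */
}
#define arch_atomic_foo arch_atomic_foo

/* generated fallback header: only synthesize what the arch left out */
#ifndef arch_atomic_foo
static inline int arch_atomic_foo(atomic_t *v)
{
        return arch_atomic_bar(v);      /* built from another arch op */
}
#define arch_atomic_foo arch_atomic_foo
#endif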
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
index 95c6ceac66b9..809bd010a751 100644
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -19,7 +19,7 @@
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
-	return READ_ONCE((v)->counter);
+	return __READ_ONCE((v)->counter);
}

/**
@@ -31,7 +31,7 @@ static inline s64 arch_atomic64_read(const atomic64_t *v)
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
-	WRITE_ONCE(v->counter, i);
+	__WRITE_ONCE(v->counter, i);
}

/**
@@ -159,37 +159,43 @@ static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}
+#define arch_atomic64_add_return arch_atomic64_add_return

static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(-i, v);
}
+#define arch_atomic64_sub_return arch_atomic64_sub_return

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}
+#define arch_atomic64_fetch_add arch_atomic64_fetch_add

static inline s64 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}
+#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub

static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
+#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

-#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return try_cmpxchg(&v->counter, old, new);
}
+#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg

static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
	return arch_xchg(&v->counter, new);
}
+#define arch_atomic64_xchg arch_atomic64_xchg

static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
@@ -207,6 +213,7 @@ static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
	} while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}
+#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
@@ -224,6 +231,7 @@ static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
	} while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}
+#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
@@ -241,5 +249,6 @@ static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
	} while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}
+#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

#endif /* _ASM_X86_ATOMIC64_64_H */
diff --git a/include/linux/atomic-arch-fallback.h b/include/linux/atomic-arch-fallback.h
new file mode 100644
index 000000000000..bcb6aa27cfa6
--- /dev/null
+++ b/include/linux/atomic-arch-fallback.h
@@ -0,0 +1,2291 @@
+// SPDX-License-Identifier: GPL-2.0
+
+// Generated by scripts/atomic/gen-atomic-fallback.sh
+// DO NOT MODIFY THIS FILE DIRECTLY
+
+#ifndef _LINUX_ATOMIC_FALLBACK_H
+#define _LINUX_ATOMIC_FALLBACK_H
+
+#include <linux/compiler.h>
+
+#ifndef arch_xchg_relaxed
+#define arch_xchg_relaxed	arch_xchg
+#define arch_xchg_acquire	arch_xchg
+#define arch_xchg_release	arch_xchg
+#else /* arch_xchg_relaxed */
+
+#ifndef arch_xchg_acquire
+#define arch_xchg_acquire(...) \
+	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
+#endif
+
+#ifndef arch_xchg_release
+#define arch_xchg_release(...) \
+	__atomic_op_release(arch_xchg, __VA_ARGS__)
+#endif
+
+#ifndef arch_xchg
+#define arch_xchg(...) \
+	__atomic_op_fence(arch_xchg, __VA_ARGS__)
+#endif
+
+#endif /* arch_xchg_relaxed */
+
+#ifndef arch_cmpxchg_relaxed
+#define arch_cmpxchg_relaxed	arch_cmpxchg
+#define arch_cmpxchg_acquire	arch_cmpxchg
+#define arch_cmpxchg_release	arch_cmpxchg
+#else /* arch_cmpxchg_relaxed */
+
+#ifndef arch_cmpxchg_acquire
+#define arch_cmpxchg_acquire(...) \
+	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
+#endif
+
+#ifndef arch_cmpxchg_release
+#define arch_cmpxchg_release(...) \
+	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
+#endif
+
+#ifndef arch_cmpxchg
+#define arch_cmpxchg(...) \
+	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
+#endif
+
+#endif /* arch_cmpxchg_relaxed */
+
+#ifndef arch_cmpxchg64_relaxed
+#define arch_cmpxchg64_relaxed	arch_cmpxchg64
+#define arch_cmpxchg64_acquire	arch_cmpxchg64
+#define arch_cmpxchg64_release	arch_cmpxchg64
+#else /* arch_cmpxchg64_relaxed */
+
+#ifndef arch_cmpxchg64_acquire
+#define arch_cmpxchg64_acquire(...) \
+	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
+#endif
+
+#ifndef arch_cmpxchg64_release
+#define arch_cmpxchg64_release(...) \
+	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
+#endif
+
+#ifndef arch_cmpxchg64
+#define arch_cmpxchg64(...) \
+	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
+#endif
+
+#endif /* arch_cmpxchg64_relaxed */
+
+#ifndef arch_atomic_read_acquire
+static __always_inline int
+arch_atomic_read_acquire(const atomic_t *v)
+{
+	return smp_load_acquire(&(v)->counter);
+}
+#define arch_atomic_read_acquire arch_atomic_read_acquire
+#endif
+
+#ifndef arch_atomic_set_release
+static __always_inline void
+arch_atomic_set_release(atomic_t *v, int i)
+{
+	smp_store_release(&(v)->counter, i);
+}
+#define arch_atomic_set_release arch_atomic_set_release
+#endif
+
+#ifndef arch_atomic_add_return_relaxed
+#define arch_atomic_add_return_acquire arch_atomic_add_return
+#define arch_atomic_add_return_release arch_atomic_add_return
+#define arch_atomic_add_return_relaxed arch_atomic_add_return
+#else /* arch_atomic_add_return_relaxed */
+
+#ifndef arch_atomic_add_return_acquire
+static __always_inline int
+arch_atomic_add_return_acquire(int i, atomic_t *v)
+{
+	int ret = arch_atomic_add_return_relaxed(i, v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
+#endif
+
+#ifndef arch_atomic_add_return_release
+static __always_inline int
+arch_atomic_add_return_release(int i, atomic_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic_add_return_relaxed(i, v);
+}
+#define arch_atomic_add_return_release arch_atomic_add_return_release
+#endif
+
+#ifndef arch_atomic_add_return
+static __always_inline int
+arch_atomic_add_return(int i, atomic_t *v)
+{
+	int ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic_add_return_relaxed(i, v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic_add_return arch_atomic_add_return
+#endif
+
+#endif /* arch_atomic_add_return_relaxed */
+
+#ifndef arch_atomic_fetch_add_relaxed
+#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
+#define arch_atomic_fetch_add_release arch_atomic_fetch_add
+#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
+#else /* arch_atomic_fetch_add_relaxed */
+
+#ifndef arch_atomic_fetch_add_acquire
+static __always_inline int
+arch_atomic_fetch_add_acquire(int i, atomic_t *v)
+{
+	int ret = arch_atomic_fetch_add_relaxed(i, v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
+#endif
+
+#ifndef arch_atomic_fetch_add_release
+static __always_inline int
+arch_atomic_fetch_add_release(int i, atomic_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic_fetch_add_relaxed(i, v);
+}
+#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
+#endif
+
+#ifndef arch_atomic_fetch_add
+static __always_inline int
+arch_atomic_fetch_add(int i, atomic_t *v)
+{
+	int ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic_fetch_add_relaxed(i, v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic_fetch_add arch_atomic_fetch_add
+#endif
+
+#endif /* arch_atomic_fetch_add_relaxed */
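The arch_xchg()/arch_cmpxchg() ordering wrappers near the top of this file lean on __atomic_op_acquire(), __atomic_op_release() and __atomic_op_fence(), which are not part of this patch; at the time they were defined in include/linux/atomic.h and look roughly like the following (reproduced for context, lightly abridged):

#define __atomic_op_acquire(op, args...)                                \
({                                                                      \
        typeof(op##_relaxed(args)) __ret = op##_relaxed(args);         \
        __atomic_acquire_fence();                                       \
        __ret;                                                          \
})

#define __atomic_op_release(op, args...)                                \
({                                                                      \
        __atomic_release_fence();                                       \
        op##_relaxed(args);                                             \
})

#define __atomic_op_fence(op, args...)                                  \
({                                                                      \
        typeof(op##_relaxed(args)) __ret;                               \
        __atomic_pre_full_fence();                                      \
        __ret = op##_relaxed(args);                                     \
        __atomic_post_full_fence();                                     \
        __ret;                                                          \
})

This mirrors the open-coded pattern in the per-op fallbacks above: acquire variants fence after the relaxed access, release variants fence before it, and fully ordered variants bracket it with both halves of a full barrier.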
+#ifndef arch_atomic_sub_return_relaxed
+#define arch_atomic_sub_return_acquire arch_atomic_sub_return
+#define arch_atomic_sub_return_release arch_atomic_sub_return
+#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
+#else /* arch_atomic_sub_return_relaxed */
+
+#ifndef arch_atomic_sub_return_acquire
+static __always_inline int
+arch_atomic_sub_return_acquire(int i, atomic_t *v)
+{
+	int ret = arch_atomic_sub_return_relaxed(i, v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
+#endif
+
+#ifndef arch_atomic_sub_return_release
+static __always_inline int
+arch_atomic_sub_return_release(int i, atomic_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic_sub_return_relaxed(i, v);
+}
+#define arch_atomic_sub_return_release arch_atomic_sub_return_release
+#endif
+
+#ifndef arch_atomic_sub_return
+static __always_inline int
+arch_atomic_sub_return(int i, atomic_t *v)
+{
+	int ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic_sub_return_relaxed(i, v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic_sub_return arch_atomic_sub_return
+#endif
+
+#endif /* arch_atomic_sub_return_relaxed */
+
+#ifndef arch_atomic_fetch_sub_relaxed
+#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
+#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
+#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
+#else /* arch_atomic_fetch_sub_relaxed */
+
+#ifndef arch_atomic_fetch_sub_acquire
+static __always_inline int
+arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
+{
+	int ret = arch_atomic_fetch_sub_relaxed(i, v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
+#endif
+
+#ifndef arch_atomic_fetch_sub_release
+static __always_inline int
+arch_atomic_fetch_sub_release(int i, atomic_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic_fetch_sub_relaxed(i, v);
+}
+#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
+#endif
+
+#ifndef arch_atomic_fetch_sub
+static __always_inline int
+arch_atomic_fetch_sub(int i, atomic_t *v)
+{
+	int ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic_fetch_sub_relaxed(i, v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic_fetch_sub arch_atomic_fetch_sub
+#endif
+
+#endif /* arch_atomic_fetch_sub_relaxed */
+
+#ifndef arch_atomic_inc
+static __always_inline void
+arch_atomic_inc(atomic_t *v)
+{
+	arch_atomic_add(1, v);
+}
+#define arch_atomic_inc arch_atomic_inc
+#endif
+
+#ifndef arch_atomic_inc_return_relaxed
+#ifdef arch_atomic_inc_return
+#define arch_atomic_inc_return_acquire arch_atomic_inc_return
+#define arch_atomic_inc_return_release arch_atomic_inc_return
+#define arch_atomic_inc_return_relaxed arch_atomic_inc_return
+#endif /* arch_atomic_inc_return */
+
+#ifndef arch_atomic_inc_return
+static __always_inline int
+arch_atomic_inc_return(atomic_t *v)
+{
+	return arch_atomic_add_return(1, v);
+}
+#define arch_atomic_inc_return arch_atomic_inc_return
+#endif
+
+#ifndef arch_atomic_inc_return_acquire
+static __always_inline int
+arch_atomic_inc_return_acquire(atomic_t *v)
+{
+	return arch_atomic_add_return_acquire(1, v);
+}
+#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
+#endif
+
+#ifndef arch_atomic_inc_return_release
+static __always_inline int
+arch_atomic_inc_return_release(atomic_t *v)
+{
+	return arch_atomic_add_return_release(1, v);
+}
+#define arch_atomic_inc_return_release arch_atomic_inc_return_release
+#endif
+
+#ifndef arch_atomic_inc_return_relaxed
+static __always_inline int
+arch_atomic_inc_return_relaxed(atomic_t *v)
+{
+	return arch_atomic_add_return_relaxed(1, v);
+}
+#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
+#endif
+
+#else /* arch_atomic_inc_return_relaxed */
+
+#ifndef arch_atomic_inc_return_acquire
+static __always_inline int
+arch_atomic_inc_return_acquire(atomic_t *v)
+{
+	int ret = arch_atomic_inc_return_relaxed(v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
+#endif
+
+#ifndef arch_atomic_inc_return_release
+static __always_inline int
+arch_atomic_inc_return_release(atomic_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic_inc_return_relaxed(v);
+}
+#define arch_atomic_inc_return_release arch_atomic_inc_return_release
+#endif
+
+#ifndef arch_atomic_inc_return
+static __always_inline int
+arch_atomic_inc_return(atomic_t *v)
+{
+	int ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic_inc_return_relaxed(v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic_inc_return arch_atomic_inc_return
+#endif
+
+#endif /* arch_atomic_inc_return_relaxed */
+
+#ifndef arch_atomic_fetch_inc_relaxed
+#ifdef arch_atomic_fetch_inc
+#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
+#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
+#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
+#endif /* arch_atomic_fetch_inc */
+
+#ifndef arch_atomic_fetch_inc
+static __always_inline int
+arch_atomic_fetch_inc(atomic_t *v)
+{
+	return arch_atomic_fetch_add(1, v);
+}
+#define arch_atomic_fetch_inc arch_atomic_fetch_inc
+#endif
+
+#ifndef arch_atomic_fetch_inc_acquire
+static __always_inline int
+arch_atomic_fetch_inc_acquire(atomic_t *v)
+{
+	return arch_atomic_fetch_add_acquire(1, v);
+}
+#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
+#endif
+
+#ifndef arch_atomic_fetch_inc_release
+static __always_inline int
+arch_atomic_fetch_inc_release(atomic_t *v)
+{
+	return arch_atomic_fetch_add_release(1, v);
+}
+#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
+#endif
+
+#ifndef arch_atomic_fetch_inc_relaxed
+static __always_inline int
+arch_atomic_fetch_inc_relaxed(atomic_t *v)
+{
+	return arch_atomic_fetch_add_relaxed(1, v);
+}
+#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
+#endif
+
+#else /* arch_atomic_fetch_inc_relaxed */
+
+#ifndef arch_atomic_fetch_inc_acquire
+static __always_inline int
+arch_atomic_fetch_inc_acquire(atomic_t *v)
+{
+	int ret = arch_atomic_fetch_inc_relaxed(v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
+#endif
+
+#ifndef arch_atomic_fetch_inc_release
+static __always_inline int
+arch_atomic_fetch_inc_release(atomic_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic_fetch_inc_relaxed(v);
+}
+#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
+#endif
+
+#ifndef arch_atomic_fetch_inc
+static __always_inline int
+arch_atomic_fetch_inc(atomic_t *v)
+{
+	int ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic_fetch_inc_relaxed(v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic_fetch_inc arch_atomic_fetch_inc
+#endif
+
+#endif /* arch_atomic_fetch_inc_relaxed */
+
+#ifndef arch_atomic_dec
+static __always_inline void
+arch_atomic_dec(atomic_t *v)
+{
+	arch_atomic_sub(1, v);
+}
+#define arch_atomic_dec arch_atomic_dec
+#endif
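The inc_return/fetch_inc blocks above have two regimes: an architecture that provides only fully ordered ops gets every ordering variant aliased to them, while one that provides _relaxed ops gets the stronger orderings composed from explicit fences. As a worked illustration (not code from the patch), on x86 after this change, which defines arch_atomic_add_return() but no _relaxed variants, an acquire increment simply resolves to the fully ordered add:

/*
 * Illustration only: how the fallback chain resolves on an arch that
 * defines just a fully ordered arch_atomic_add_return().
 *
 *   arch_atomic_add_return_relaxed    ->  arch_atomic_add_return
 *   arch_atomic_inc_return(v)         ->  arch_atomic_add_return(1, v)
 *   arch_atomic_inc_return_acquire(v) ->  arch_atomic_add_return_acquire(1, v)
 *                                     ->  arch_atomic_add_return(1, v)
 */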
+#ifndef arch_atomic_dec_return_relaxed
+#ifdef arch_atomic_dec_return
+#define arch_atomic_dec_return_acquire arch_atomic_dec_return
+#define arch_atomic_dec_return_release arch_atomic_dec_return
+#define arch_atomic_dec_return_relaxed arch_atomic_dec_return
+#endif /* arch_atomic_dec_return */
+
+#ifndef arch_atomic_dec_return
+static __always_inline int
+arch_atomic_dec_return(atomic_t *v)
+{
+	return arch_atomic_sub_return(1, v);
+}
+#define arch_atomic_dec_return arch_atomic_dec_return
+#endif
+
+#ifndef arch_atomic_dec_return_acquire
+static __always_inline int
+arch_atomic_dec_return_acquire(atomic_t *v)
+{
+	return arch_atomic_sub_return_acquire(1, v);
+}
+#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
+#endif
+
+#ifndef arch_atomic_dec_return_release
+static __always_inline int
+arch_atomic_dec_return_release(atomic_t *v)
+{
+	return arch_atomic_sub_return_release(1, v);
+}
+#define arch_atomic_dec_return_release arch_atomic_dec_return_release
+#endif
+
+#ifndef arch_atomic_dec_return_relaxed
+static __always_inline int
+arch_atomic_dec_return_relaxed(atomic_t *v)
+{
+	return arch_atomic_sub_return_relaxed(1, v);
+}
+#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
+#endif
+
+#else /* arch_atomic_dec_return_relaxed */
+
+#ifndef arch_atomic_dec_return_acquire
+static __always_inline int
+arch_atomic_dec_return_acquire(atomic_t *v)
+{
+	int ret = arch_atomic_dec_return_relaxed(v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
+#endif
+
+#ifndef arch_atomic_dec_return_release
+static __always_inline int
+arch_atomic_dec_return_release(atomic_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic_dec_return_relaxed(v);
+}
+#define arch_atomic_dec_return_release arch_atomic_dec_return_release
+#endif
+
+#ifndef arch_atomic_dec_return
+static __always_inline int
+arch_atomic_dec_return(atomic_t *v)
+{
+	int ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic_dec_return_relaxed(v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic_dec_return arch_atomic_dec_return
+#endif
+
+#endif /* arch_atomic_dec_return_relaxed */
+
+#ifndef arch_atomic_fetch_dec_relaxed
+#ifdef arch_atomic_fetch_dec
+#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
+#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
+#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
+#endif /* arch_atomic_fetch_dec */
+
+#ifndef arch_atomic_fetch_dec
+static __always_inline int
+arch_atomic_fetch_dec(atomic_t *v)
+{
+	return arch_atomic_fetch_sub(1, v);
+}
+#define arch_atomic_fetch_dec arch_atomic_fetch_dec
+#endif
+
+#ifndef arch_atomic_fetch_dec_acquire
+static __always_inline int
+arch_atomic_fetch_dec_acquire(atomic_t *v)
+{
+	return arch_atomic_fetch_sub_acquire(1, v);
+}
+#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
+#endif
+
+#ifndef arch_atomic_fetch_dec_release
+static __always_inline int
+arch_atomic_fetch_dec_release(atomic_t *v)
+{
+	return arch_atomic_fetch_sub_release(1, v);
+}
+#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
+#endif
+
+#ifndef arch_atomic_fetch_dec_relaxed
+static __always_inline int
+arch_atomic_fetch_dec_relaxed(atomic_t *v)
+{
+	return arch_atomic_fetch_sub_relaxed(1, v);
+}
+#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
+#endif
+
+#else /* arch_atomic_fetch_dec_relaxed */
+
+#ifndef arch_atomic_fetch_dec_acquire
+static __always_inline int
+arch_atomic_fetch_dec_acquire(atomic_t *v)
+{
+	int ret = arch_atomic_fetch_dec_relaxed(v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
+#endif
+
+#ifndef arch_atomic_fetch_dec_release
+static __always_inline int
+arch_atomic_fetch_dec_release(atomic_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic_fetch_dec_relaxed(v);
+}
+#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
+#endif
+
+#ifndef arch_atomic_fetch_dec
+static __always_inline int
+arch_atomic_fetch_dec(atomic_t *v)
+{
+	int ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic_fetch_dec_relaxed(v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic_fetch_dec arch_atomic_fetch_dec
+#endif
+
+#endif /* arch_atomic_fetch_dec_relaxed */
+
+#ifndef arch_atomic_fetch_and_relaxed
+#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
+#define arch_atomic_fetch_and_release arch_atomic_fetch_and
+#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
+#else /* arch_atomic_fetch_and_relaxed */
+
+#ifndef arch_atomic_fetch_and_acquire
+static __always_inline int
+arch_atomic_fetch_and_acquire(int i, atomic_t *v)
+{
+	int ret = arch_atomic_fetch_and_relaxed(i, v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
+#endif
+
+#ifndef arch_atomic_fetch_and_release
+static __always_inline int
+arch_atomic_fetch_and_release(int i, atomic_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic_fetch_and_relaxed(i, v);
+}
+#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
+#endif
+
+#ifndef arch_atomic_fetch_and
+static __always_inline int
+arch_atomic_fetch_and(int i, atomic_t *v)
+{
+	int ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic_fetch_and_relaxed(i, v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic_fetch_and arch_atomic_fetch_and
+#endif
+
+#endif /* arch_atomic_fetch_and_relaxed */
+
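The excerpt stops partway through the generated file; per its hunk header the full header runs to 2291 lines and continues with the remaining atomic_t and atomic64_t operations. Note also that the arch headers above no longer include asm-generic/atomic-instrumented.h themselves; they only define ARCH_ATOMIC, and the generic layer (in parts of this commit not shown here) stacks the fallbacks and the instrumented atomic_*() wrappers on top of the arch_atomic_*() ops. A rough sketch of what such an instrumented wrapper looks like, with the sanitizer hook name given only as an assumption since that file is not part of this excerpt:

static __always_inline int atomic_read(const atomic_t *v)
{
        /* sanitizer hook; the exact helper name varies by kernel version */
        instrument_atomic_read(v, sizeof(*v));
        return arch_atomic_read(v);
}

With arch_atomic_read() switched to the bare __READ_ONCE() above, the access is annotated once, by this outer wrapper, rather than being instrumented a second time inside the arch implementation.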