Diffstat (limited to 'include/linux/atomic/atomic-long.h')
-rw-r--r--	include/linux/atomic/atomic-long.h	2122
1 file changed, 1435 insertions(+), 687 deletions(-)
diff --git a/include/linux/atomic/atomic-long.h b/include/linux/atomic/atomic-long.h
index 2fc51ba66beb..c82947170ddc 100644
--- a/include/linux/atomic/atomic-long.h
+++ b/include/linux/atomic/atomic-long.h
@@ -21,1030 +21,1778 @@ typedef atomic_t atomic_long_t;
#define atomic_long_cond_read_relaxed atomic_cond_read_relaxed
#endif
-#ifdef CONFIG_64BIT
-
-static __always_inline long
-arch_atomic_long_read(const atomic_long_t *v)
-{
- return arch_atomic64_read(v);
-}
-
-static __always_inline long
-arch_atomic_long_read_acquire(const atomic_long_t *v)
-{
- return arch_atomic64_read_acquire(v);
-}
-
-static __always_inline void
-arch_atomic_long_set(atomic_long_t *v, long i)
-{
- arch_atomic64_set(v, i);
-}
-
-static __always_inline void
-arch_atomic_long_set_release(atomic_long_t *v, long i)
-{
- arch_atomic64_set_release(v, i);
-}
-
-static __always_inline void
-arch_atomic_long_add(long i, atomic_long_t *v)
-{
- arch_atomic64_add(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_add_return(long i, atomic_long_t *v)
-{
- return arch_atomic64_add_return(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
-{
- return arch_atomic64_add_return_acquire(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_add_return_release(long i, atomic_long_t *v)
-{
- return arch_atomic64_add_return_release(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
-{
- return arch_atomic64_add_return_relaxed(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_add(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_add(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_add_acquire(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_add_release(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_add_relaxed(i, v);
-}
-
-static __always_inline void
-arch_atomic_long_sub(long i, atomic_long_t *v)
-{
- arch_atomic64_sub(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_sub_return(long i, atomic_long_t *v)
-{
- return arch_atomic64_sub_return(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
-{
- return arch_atomic64_sub_return_acquire(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
-{
- return arch_atomic64_sub_return_release(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
-{
- return arch_atomic64_sub_return_relaxed(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_sub(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_sub_acquire(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_sub_release(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_sub_relaxed(i, v);
-}
-
-static __always_inline void
-arch_atomic_long_inc(atomic_long_t *v)
-{
- arch_atomic64_inc(v);
-}
-
-static __always_inline long
-arch_atomic_long_inc_return(atomic_long_t *v)
-{
- return arch_atomic64_inc_return(v);
-}
-
-static __always_inline long
-arch_atomic_long_inc_return_acquire(atomic_long_t *v)
-{
- return arch_atomic64_inc_return_acquire(v);
-}
-
-static __always_inline long
-arch_atomic_long_inc_return_release(atomic_long_t *v)
-{
- return arch_atomic64_inc_return_release(v);
-}
-
-static __always_inline long
-arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
-{
- return arch_atomic64_inc_return_relaxed(v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_inc(atomic_long_t *v)
-{
- return arch_atomic64_fetch_inc(v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
-{
- return arch_atomic64_fetch_inc_acquire(v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_inc_release(atomic_long_t *v)
-{
- return arch_atomic64_fetch_inc_release(v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
-{
- return arch_atomic64_fetch_inc_relaxed(v);
-}
-
-static __always_inline void
-arch_atomic_long_dec(atomic_long_t *v)
-{
- arch_atomic64_dec(v);
-}
-
-static __always_inline long
-arch_atomic_long_dec_return(atomic_long_t *v)
-{
- return arch_atomic64_dec_return(v);
-}
-
-static __always_inline long
-arch_atomic_long_dec_return_acquire(atomic_long_t *v)
-{
- return arch_atomic64_dec_return_acquire(v);
-}
-
-static __always_inline long
-arch_atomic_long_dec_return_release(atomic_long_t *v)
-{
- return arch_atomic64_dec_return_release(v);
-}
-
-static __always_inline long
-arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
-{
- return arch_atomic64_dec_return_relaxed(v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_dec(atomic_long_t *v)
-{
- return arch_atomic64_fetch_dec(v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
-{
- return arch_atomic64_fetch_dec_acquire(v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_dec_release(atomic_long_t *v)
-{
- return arch_atomic64_fetch_dec_release(v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
-{
- return arch_atomic64_fetch_dec_relaxed(v);
-}
-
-static __always_inline void
-arch_atomic_long_and(long i, atomic_long_t *v)
-{
- arch_atomic64_and(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_and(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_and(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_and_acquire(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_and_release(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_and_relaxed(i, v);
-}
-
-static __always_inline void
-arch_atomic_long_andnot(long i, atomic_long_t *v)
-{
- arch_atomic64_andnot(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_andnot(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_andnot(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_andnot_acquire(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_andnot_release(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_andnot_relaxed(i, v);
-}
-
-static __always_inline void
-arch_atomic_long_or(long i, atomic_long_t *v)
-{
- arch_atomic64_or(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_or(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_or(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_or_acquire(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_or_release(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_or_release(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_or_relaxed(i, v);
-}
-
-static __always_inline void
-arch_atomic_long_xor(long i, atomic_long_t *v)
-{
- arch_atomic64_xor(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_xor(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_xor(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_xor_acquire(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_xor_release(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
-{
- return arch_atomic64_fetch_xor_relaxed(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_xchg(atomic_long_t *v, long i)
-{
- return arch_atomic64_xchg(v, i);
-}
-
-static __always_inline long
-arch_atomic_long_xchg_acquire(atomic_long_t *v, long i)
-{
- return arch_atomic64_xchg_acquire(v, i);
-}
-
-static __always_inline long
-arch_atomic_long_xchg_release(atomic_long_t *v, long i)
-{
- return arch_atomic64_xchg_release(v, i);
-}
-
-static __always_inline long
-arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
-{
- return arch_atomic64_xchg_relaxed(v, i);
-}
-
-static __always_inline long
-arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
-{
- return arch_atomic64_cmpxchg(v, old, new);
-}
-
-static __always_inline long
-arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
+/**
+ * raw_atomic_long_read() - atomic load with relaxed ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically loads the value of @v with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_read() elsewhere.
+ *
+ * Return: The value loaded from @v.
+ */
+static __always_inline long
+raw_atomic_long_read(const atomic_long_t *v)
{
- return arch_atomic64_cmpxchg_acquire(v, old, new);
-}
-
-static __always_inline long
-arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
-{
- return arch_atomic64_cmpxchg_release(v, old, new);
-}
-
-static __always_inline long
-arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
-{
- return arch_atomic64_cmpxchg_relaxed(v, old, new);
-}
-
-static __always_inline bool
-arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
-{
- return arch_atomic64_try_cmpxchg(v, (s64 *)old, new);
-}
-
-static __always_inline bool
-arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
-{
- return arch_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
-}
-
-static __always_inline bool
-arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
-{
- return arch_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
-}
-
-static __always_inline bool
-arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
-{
- return arch_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
-}
-
-static __always_inline bool
-arch_atomic_long_sub_and_test(long i, atomic_long_t *v)
-{
- return arch_atomic64_sub_and_test(i, v);
-}
-
-static __always_inline bool
-arch_atomic_long_dec_and_test(atomic_long_t *v)
-{
- return arch_atomic64_dec_and_test(v);
-}
-
-static __always_inline bool
-arch_atomic_long_inc_and_test(atomic_long_t *v)
-{
- return arch_atomic64_inc_and_test(v);
-}
-
-static __always_inline bool
-arch_atomic_long_add_negative(long i, atomic_long_t *v)
-{
- return arch_atomic64_add_negative(i, v);
-}
-
-static __always_inline bool
-arch_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
-{
- return arch_atomic64_add_negative_acquire(i, v);
-}
-
-static __always_inline bool
-arch_atomic_long_add_negative_release(long i, atomic_long_t *v)
-{
- return arch_atomic64_add_negative_release(i, v);
-}
-
-static __always_inline bool
-arch_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
-{
- return arch_atomic64_add_negative_relaxed(i, v);
-}
-
-static __always_inline long
-arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
-{
- return arch_atomic64_fetch_add_unless(v, a, u);
-}
-
-static __always_inline bool
-arch_atomic_long_add_unless(atomic_long_t *v, long a, long u)
-{
- return arch_atomic64_add_unless(v, a, u);
-}
-
-static __always_inline bool
-arch_atomic_long_inc_not_zero(atomic_long_t *v)
-{
- return arch_atomic64_inc_not_zero(v);
-}
-
-static __always_inline bool
-arch_atomic_long_inc_unless_negative(atomic_long_t *v)
-{
- return arch_atomic64_inc_unless_negative(v);
-}
-
-static __always_inline bool
-arch_atomic_long_dec_unless_positive(atomic_long_t *v)
-{
- return arch_atomic64_dec_unless_positive(v);
-}
-
-static __always_inline long
-arch_atomic_long_dec_if_positive(atomic_long_t *v)
-{
- return arch_atomic64_dec_if_positive(v);
-}
-
-#else /* CONFIG_64BIT */
-
-static __always_inline long
-arch_atomic_long_read(const atomic_long_t *v)
-{
- return arch_atomic_read(v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_read(v);
+#else
+ return raw_atomic_read(v);
+#endif
}
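A usage sketch, not part of the patch (the helper and counter names below are hypothetical): a noinstr-safe relaxed snapshot of a long-sized counter, assuming <linux/atomic.h> has been included.

/* Hypothetical helper: take a relaxed snapshot of a statistics counter.
 * raw_atomic_long_read() carries no instrumentation, so it is usable
 * from noinstr code; plain atomic_long_read() is preferred elsewhere.
 */
static __always_inline long stats_snapshot(const atomic_long_t *counter)
{
	return raw_atomic_long_read(counter);
}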
+/**
+ * raw_atomic_long_read_acquire() - atomic load with acquire ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically loads the value of @v with acquire ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_read_acquire() elsewhere.
+ *
+ * Return: The value loaded from @v.
+ */
static __always_inline long
-arch_atomic_long_read_acquire(const atomic_long_t *v)
+raw_atomic_long_read_acquire(const atomic_long_t *v)
{
- return arch_atomic_read_acquire(v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_read_acquire(v);
+#else
+ return raw_atomic_read_acquire(v);
+#endif
}
+/**
+ * raw_atomic_long_set() - atomic set with relaxed ordering
+ * @v: pointer to atomic_long_t
+ * @i: long value to assign
+ *
+ * Atomically sets @v to @i with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_set() elsewhere.
+ *
+ * Return: Nothing.
+ */
static __always_inline void
-arch_atomic_long_set(atomic_long_t *v, long i)
+raw_atomic_long_set(atomic_long_t *v, long i)
{
- arch_atomic_set(v, i);
+#ifdef CONFIG_64BIT
+ raw_atomic64_set(v, i);
+#else
+ raw_atomic_set(v, i);
+#endif
}
+/**
+ * raw_atomic_long_set_release() - atomic set with release ordering
+ * @v: pointer to atomic_long_t
+ * @i: long value to assign
+ *
+ * Atomically sets @v to @i with release ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_set_release() elsewhere.
+ *
+ * Return: Nothing.
+ */
static __always_inline void
-arch_atomic_long_set_release(atomic_long_t *v, long i)
+raw_atomic_long_set_release(atomic_long_t *v, long i)
{
- arch_atomic_set_release(v, i);
+#ifdef CONFIG_64BIT
+ raw_atomic64_set_release(v, i);
+#else
+ raw_atomic_set_release(v, i);
+#endif
}
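An illustrative pairing of the release store documented above with the matching acquire load (hypothetical names, not from the patch): the release store orders earlier writes before it, and the acquire load orders later reads after it.

/* Writer: data written before this call is visible to any reader that
 * observes the new generation via an acquire load.
 */
static void publish_generation(atomic_long_t *gen, long next)
{
	raw_atomic_long_set_release(gen, next);
}

/* Reader: pairs with publish_generation(); reads performed after this
 * load see everything written before the matching release store.
 */
static long read_generation(const atomic_long_t *gen)
{
	return raw_atomic_long_read_acquire(gen);
}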
+/**
+ * raw_atomic_long_add() - atomic add with relaxed ordering
+ * @i: long value to add
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + @i) with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_add() elsewhere.
+ *
+ * Return: Nothing.
+ */
static __always_inline void
-arch_atomic_long_add(long i, atomic_long_t *v)
+raw_atomic_long_add(long i, atomic_long_t *v)
{
- arch_atomic_add(i, v);
+#ifdef CONFIG_64BIT
+ raw_atomic64_add(i, v);
+#else
+ raw_atomic_add(i, v);
+#endif
}
+/**
+ * raw_atomic_long_add_return() - atomic add with full ordering
+ * @i: long value to add
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + @i) with full ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_add_return() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_add_return(long i, atomic_long_t *v)
+raw_atomic_long_add_return(long i, atomic_long_t *v)
{
- return arch_atomic_add_return(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_add_return(i, v);
+#else
+ return raw_atomic_add_return(i, v);
+#endif
}
+/**
+ * raw_atomic_long_add_return_acquire() - atomic add with acquire ordering
+ * @i: long value to add
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + @i) with acquire ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_add_return_acquire() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
+raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
- return arch_atomic_add_return_acquire(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_add_return_acquire(i, v);
+#else
+ return raw_atomic_add_return_acquire(i, v);
+#endif
}
+/**
+ * raw_atomic_long_add_return_release() - atomic add with release ordering
+ * @i: long value to add
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + @i) with release ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_add_return_release() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_add_return_release(long i, atomic_long_t *v)
+raw_atomic_long_add_return_release(long i, atomic_long_t *v)
{
- return arch_atomic_add_return_release(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_add_return_release(i, v);
+#else
+ return raw_atomic_add_return_release(i, v);
+#endif
}
+/**
+ * raw_atomic_long_add_return_relaxed() - atomic add with relaxed ordering
+ * @i: long value to add
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + @i) with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_add_return_relaxed() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
+raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
- return arch_atomic_add_return_relaxed(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_add_return_relaxed(i, v);
+#else
+ return raw_atomic_add_return_relaxed(i, v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_add() - atomic add with full ordering
+ * @i: long value to add
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + @i) with full ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_add() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_add(long i, atomic_long_t *v)
+raw_atomic_long_fetch_add(long i, atomic_long_t *v)
{
- return arch_atomic_fetch_add(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_add(i, v);
+#else
+ return raw_atomic_fetch_add(i, v);
+#endif
}
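To illustrate the "original value" return documented above (hypothetical helper, not part of the patch): fetch_add returns the value @v held before the addition, whereas add_return would yield the updated value.

/* Hypothetical: hand out a contiguous range of 'n' ids. The old value
 * of next_id is the first id of the range; next_id ends up just past it.
 */
static long alloc_id_range(atomic_long_t *next_id, long n)
{
	return raw_atomic_long_fetch_add(n, next_id);
}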
+/**
+ * raw_atomic_long_fetch_add_acquire() - atomic add with acquire ordering
+ * @i: long value to add
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + @i) with acquire ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_add_acquire() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
+raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
- return arch_atomic_fetch_add_acquire(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_add_acquire(i, v);
+#else
+ return raw_atomic_fetch_add_acquire(i, v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_add_release() - atomic add with release ordering
+ * @i: long value to add
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + @i) with release ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_add_release() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
+raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
- return arch_atomic_fetch_add_release(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_add_release(i, v);
+#else
+ return raw_atomic_fetch_add_release(i, v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
+ * @i: long value to add
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + @i) with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_add_relaxed() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
+raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
- return arch_atomic_fetch_add_relaxed(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_add_relaxed(i, v);
+#else
+ return raw_atomic_fetch_add_relaxed(i, v);
+#endif
}
+/**
+ * raw_atomic_long_sub() - atomic subtract with relaxed ordering
+ * @i: long value to subtract
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v - @i) with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_sub() elsewhere.
+ *
+ * Return: Nothing.
+ */
static __always_inline void
-arch_atomic_long_sub(long i, atomic_long_t *v)
+raw_atomic_long_sub(long i, atomic_long_t *v)
{
- arch_atomic_sub(i, v);
+#ifdef CONFIG_64BIT
+ raw_atomic64_sub(i, v);
+#else
+ raw_atomic_sub(i, v);
+#endif
}
+/**
+ * raw_atomic_long_sub_return() - atomic subtract with full ordering
+ * @i: long value to subtract
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v - @i) with full ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_sub_return() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_sub_return(long i, atomic_long_t *v)
+raw_atomic_long_sub_return(long i, atomic_long_t *v)
{
- return arch_atomic_sub_return(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_sub_return(i, v);
+#else
+ return raw_atomic_sub_return(i, v);
+#endif
}
+/**
+ * raw_atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
+ * @i: long value to subtract
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v - @i) with acquire ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_sub_return_acquire() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
+raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
- return arch_atomic_sub_return_acquire(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_sub_return_acquire(i, v);
+#else
+ return raw_atomic_sub_return_acquire(i, v);
+#endif
}
+/**
+ * raw_atomic_long_sub_return_release() - atomic subtract with release ordering
+ * @i: long value to subtract
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v - @i) with release ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_sub_return_release() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
+raw_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
- return arch_atomic_sub_return_release(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_sub_return_release(i, v);
+#else
+ return raw_atomic_sub_return_release(i, v);
+#endif
}
+/**
+ * raw_atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
+ * @i: long value to subtract
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v - @i) with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_sub_return_relaxed() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
+raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
- return arch_atomic_sub_return_relaxed(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_sub_return_relaxed(i, v);
+#else
+ return raw_atomic_sub_return_relaxed(i, v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_sub() - atomic subtract with full ordering
+ * @i: long value to subtract
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v - @i) with full ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_sub() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
+raw_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
- return arch_atomic_fetch_sub(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_sub(i, v);
+#else
+ return raw_atomic_fetch_sub(i, v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
+ * @i: long value to subtract
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v - @i) with acquire ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_sub_acquire() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
+raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
- return arch_atomic_fetch_sub_acquire(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_sub_acquire(i, v);
+#else
+ return raw_atomic_fetch_sub_acquire(i, v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_sub_release() - atomic subtract with release ordering
+ * @i: long value to subtract
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v - @i) with release ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_sub_release() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
+raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
- return arch_atomic_fetch_sub_release(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_sub_release(i, v);
+#else
+ return raw_atomic_fetch_sub_release(i, v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
+ * @i: long value to subtract
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v - @i) with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_sub_relaxed() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
+raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
- return arch_atomic_fetch_sub_relaxed(i, v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_sub_relaxed(i, v);
+#else
+ return raw_atomic_fetch_sub_relaxed(i, v);
+#endif
}
+/**
+ * raw_atomic_long_inc() - atomic increment with relaxed ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + 1) with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_inc() elsewhere.
+ *
+ * Return: Nothing.
+ */
static __always_inline void
-arch_atomic_long_inc(atomic_long_t *v)
+raw_atomic_long_inc(atomic_long_t *v)
{
- arch_atomic_inc(v);
+#ifdef CONFIG_64BIT
+ raw_atomic64_inc(v);
+#else
+ raw_atomic_inc(v);
+#endif
}
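A minimal sketch of the void, relaxed increment (hypothetical counter name): suitable for event counting where only atomicity of the update matters.

/* Hypothetical event counter bumped from noinstr context; no return
 * value and no ordering beyond the atomicity of the increment itself.
 */
static __always_inline void count_event(atomic_long_t *nr_events)
{
	raw_atomic_long_inc(nr_events);
}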
+/**
+ * raw_atomic_long_inc_return() - atomic increment with full ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + 1) with full ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_inc_return() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_inc_return(atomic_long_t *v)
+raw_atomic_long_inc_return(atomic_long_t *v)
{
- return arch_atomic_inc_return(v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_inc_return(v);
+#else
+ return raw_atomic_inc_return(v);
+#endif
}
+/**
+ * raw_atomic_long_inc_return_acquire() - atomic increment with acquire ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + 1) with acquire ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_inc_return_acquire() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_inc_return_acquire(atomic_long_t *v)
+raw_atomic_long_inc_return_acquire(atomic_long_t *v)
{
- return arch_atomic_inc_return_acquire(v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_inc_return_acquire(v);
+#else
+ return raw_atomic_inc_return_acquire(v);
+#endif
}
+/**
+ * raw_atomic_long_inc_return_release() - atomic increment with release ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + 1) with release ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_inc_return_release() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_inc_return_release(atomic_long_t *v)
+raw_atomic_long_inc_return_release(atomic_long_t *v)
{
- return arch_atomic_inc_return_release(v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_inc_return_release(v);
+#else
+ return raw_atomic_inc_return_release(v);
+#endif
}
+/**
+ * raw_atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + 1) with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_inc_return_relaxed() elsewhere.
+ *
+ * Return: The updated value of @v.
+ */
static __always_inline long
-arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
+raw_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
- return arch_atomic_inc_return_relaxed(v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_inc_return_relaxed(v);
+#else
+ return raw_atomic_inc_return_relaxed(v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_inc() - atomic increment with full ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + 1) with full ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_inc() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_inc(atomic_long_t *v)
+raw_atomic_long_fetch_inc(atomic_long_t *v)
{
- return arch_atomic_fetch_inc(v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_inc(v);
+#else
+ return raw_atomic_fetch_inc(v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + 1) with acquire ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_inc_acquire() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
+raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
- return arch_atomic_fetch_inc_acquire(v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_inc_acquire(v);
+#else
+ return raw_atomic_fetch_inc_acquire(v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_inc_release() - atomic increment with release ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + 1) with release ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_inc_release() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_inc_release(atomic_long_t *v)
+raw_atomic_long_fetch_inc_release(atomic_long_t *v)
{
- return arch_atomic_fetch_inc_release(v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_inc_release(v);
+#else
+ return raw_atomic_fetch_inc_release(v);
+#endif
}
+/**
+ * raw_atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
+ * @v: pointer to atomic_long_t
+ *
+ * Atomically updates @v to (@v + 1) with relaxed ordering.
+ *
+ * Safe to use in noinstr code; prefer atomic_long_fetch_inc_relaxed() elsewhere.
+ *
+ * Return: The original value of @v.
+ */
static __always_inline long
-arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
+raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
- return arch_atomic_fetch_inc_relaxed(v);
+#ifdef CONFIG_64BIT
+ return raw_atomic64_fetch_inc_relaxed(v);