mirror of
https://git.kernel.org/pub/scm/linux/kernel/git/herbert/cryptodev-2.6.git
synced 2026-04-23 05:56:14 -04:00
Currently the atomics are documented in Documentation/atomic_t.txt, and have no kerneldoc comments. There are a sufficient number of gotchas (e.g. semantics, noinstr-safety) that it would be nice to have comments to call these out, and it would be nice to have kerneldoc comments such that these can be collated. While it's possible to derive the semantics from the code, this can be painful given the amount of indirection we currently have (e.g. fallback paths), and it's easy to be misled by naming, e.g. * The unconditional void-returning ops *only* have relaxed variants without a _relaxed suffix, and can easily be mistaken for being fully ordered. It would be nice to give these a _relaxed() suffix, but this would result in significant churn throughout the kernel. * Our naming of conditional and unconditional+test ops is rather inconsistent, and it can be difficult to derive the name of an operation, or to identify where an op is conditional or unconditional+test. Some ops are clearly conditional: - dec_if_positive - add_unless - dec_unless_positive - inc_unless_negative Some ops are clearly unconditional+test: - sub_and_test - dec_and_test - inc_and_test However, what exactly those ops test is not obvious. A _test_zero suffix might be clearer. Others could be read ambiguously: - inc_not_zero // conditional - add_negative // unconditional+test It would probably be worth renaming these, e.g. to inc_unless_zero and add_test_negative. As a step towards making this more consistent and easier to understand, this patch adds kerneldoc comments for all generated *atomic*_*() functions. These are generated from templates, with some common text shared, making it easy to extend these in future if necessary. I've tried to make these as consistent and clear as possible, and I've deliberately ensured: * All ops have their ordering explicitly mentioned in the short and long description. * All test ops have "test" in their short description. 
* All ops are described as an expression using their usual C operator. For example: andnot: "Atomically updates @v to (@v & ~@i)" inc: "Atomically updates @v to (@v + 1)" Which may be clearer to non-native English speakers, and allows all the operations to be described in the same style. * All conditional ops have their condition described as an expression using the usual C operators. For example: add_unless: "If (@v != @u), atomically updates @v to (@v + @i)" cmpxchg: "If (@v == @old), atomically updates @v to @new" Which may be clearer to non-native English speakers, and allows all the operations to be described in the same style. * All bitwise ops (and,andnot,or,xor) explicitly mention that they are bitwise in their short description, so that they are not mistaken for performing their logical equivalents. * The noinstr safety of each op is explicitly described, with a description of whether or not to use the raw_ form of the op. There should be no functional change as a result of this patch. Reported-by: Paul E. McKenney <paulmck@kernel.org> Signed-off-by: Mark Rutland <mark.rutland@arm.com> Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org> Reviewed-by: Kees Cook <keescook@chromium.org> Link: https://lore.kernel.org/r/20230605070124.3741859-26-mark.rutland@arm.com
1799 lines
45 KiB
C
1799 lines
45 KiB
C
// SPDX-License-Identifier: GPL-2.0
|
|
|
|
// Generated by scripts/atomic/gen-atomic-long.sh
|
|
// DO NOT MODIFY THIS FILE DIRECTLY
|
|
|
|
#ifndef _LINUX_ATOMIC_LONG_H
|
|
#define _LINUX_ATOMIC_LONG_H
|
|
|
|
#include <linux/compiler.h>
|
|
#include <asm/types.h>
|
|
|
|
#ifdef CONFIG_64BIT
|
|
typedef atomic64_t atomic_long_t;
|
|
#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
|
|
#define atomic_long_cond_read_acquire atomic64_cond_read_acquire
|
|
#define atomic_long_cond_read_relaxed atomic64_cond_read_relaxed
|
|
#else
|
|
typedef atomic_t atomic_long_t;
|
|
#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
|
|
#define atomic_long_cond_read_acquire atomic_cond_read_acquire
|
|
#define atomic_long_cond_read_relaxed atomic_cond_read_relaxed
|
|
#endif
|
|
|
|
/**
|
|
* raw_atomic_long_read() - atomic load with relaxed ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically loads the value of @v with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_read() elsewhere.
|
|
*
|
|
* Return: The value loaded from @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_read(const atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_read(v);
|
|
#else
|
|
return raw_atomic_read(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_read_acquire() - atomic load with acquire ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically loads the value of @v with acquire ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_read_acquire() elsewhere.
|
|
*
|
|
* Return: The value loaded from @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_read_acquire(const atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_read_acquire(v);
|
|
#else
|
|
return raw_atomic_read_acquire(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_set() - atomic set with relaxed ordering
|
|
* @v: pointer to atomic_long_t
|
|
* @i: long value to assign
|
|
*
|
|
* Atomically sets @v to @i with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_set() elsewhere.
|
|
*
|
|
* Return: Nothing.
|
|
*/
|
|
static __always_inline void
|
|
raw_atomic_long_set(atomic_long_t *v, long i)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
raw_atomic64_set(v, i);
|
|
#else
|
|
raw_atomic_set(v, i);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_set_release() - atomic set with release ordering
|
|
* @v: pointer to atomic_long_t
|
|
* @i: long value to assign
|
|
*
|
|
* Atomically sets @v to @i with release ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_set_release() elsewhere.
|
|
*
|
|
* Return: Nothing.
|
|
*/
|
|
static __always_inline void
|
|
raw_atomic_long_set_release(atomic_long_t *v, long i)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
raw_atomic64_set_release(v, i);
|
|
#else
|
|
raw_atomic_set_release(v, i);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_add() - atomic add with relaxed ordering
|
|
* @i: long value to add
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + @i) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_add() elsewhere.
|
|
*
|
|
* Return: Nothing.
|
|
*/
|
|
static __always_inline void
|
|
raw_atomic_long_add(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
raw_atomic64_add(i, v);
|
|
#else
|
|
raw_atomic_add(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_add_return() - atomic add with full ordering
|
|
* @i: long value to add
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + @i) with full ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_add_return() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_add_return(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_add_return(i, v);
|
|
#else
|
|
return raw_atomic_add_return(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_add_return_acquire() - atomic add with acquire ordering
|
|
* @i: long value to add
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + @i) with acquire ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_add_return_acquire() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_add_return_acquire(i, v);
|
|
#else
|
|
return raw_atomic_add_return_acquire(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_add_return_release() - atomic add with release ordering
|
|
* @i: long value to add
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + @i) with release ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_add_return_release() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_add_return_release(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_add_return_release(i, v);
|
|
#else
|
|
return raw_atomic_add_return_release(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_add_return_relaxed() - atomic add with relaxed ordering
|
|
* @i: long value to add
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + @i) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_add_return_relaxed() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_add_return_relaxed(i, v);
|
|
#else
|
|
return raw_atomic_add_return_relaxed(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_add() - atomic add with full ordering
|
|
* @i: long value to add
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + @i) with full ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_add() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_add(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_add(i, v);
|
|
#else
|
|
return raw_atomic_fetch_add(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_add_acquire() - atomic add with acquire ordering
|
|
* @i: long value to add
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + @i) with acquire ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_add_acquire() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_add_acquire(i, v);
|
|
#else
|
|
return raw_atomic_fetch_add_acquire(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_add_release() - atomic add with release ordering
|
|
* @i: long value to add
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + @i) with release ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_add_release() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_add_release(i, v);
|
|
#else
|
|
return raw_atomic_fetch_add_release(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
|
|
* @i: long value to add
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + @i) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_add_relaxed() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_add_relaxed(i, v);
|
|
#else
|
|
return raw_atomic_fetch_add_relaxed(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_sub() - atomic subtract with relaxed ordering
|
|
* @i: long value to subtract
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - @i) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_sub() elsewhere.
|
|
*
|
|
* Return: Nothing.
|
|
*/
|
|
static __always_inline void
|
|
raw_atomic_long_sub(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
raw_atomic64_sub(i, v);
|
|
#else
|
|
raw_atomic_sub(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_sub_return() - atomic subtract with full ordering
|
|
* @i: long value to subtract
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - @i) with full ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_sub_return() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_sub_return(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_sub_return(i, v);
|
|
#else
|
|
return raw_atomic_sub_return(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
|
|
* @i: long value to subtract
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - @i) with acquire ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_sub_return_acquire() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_sub_return_acquire(i, v);
|
|
#else
|
|
return raw_atomic_sub_return_acquire(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_sub_return_release() - atomic subtract with release ordering
|
|
* @i: long value to subtract
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - @i) with release ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_sub_return_release() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_sub_return_release(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_sub_return_release(i, v);
|
|
#else
|
|
return raw_atomic_sub_return_release(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
|
|
* @i: long value to subtract
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - @i) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_sub_return_relaxed() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_sub_return_relaxed(i, v);
|
|
#else
|
|
return raw_atomic_sub_return_relaxed(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_sub() - atomic subtract with full ordering
|
|
* @i: long value to subtract
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - @i) with full ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_sub() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_sub(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_sub(i, v);
|
|
#else
|
|
return raw_atomic_fetch_sub(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
|
|
* @i: long value to subtract
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - @i) with acquire ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_sub_acquire() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_sub_acquire(i, v);
|
|
#else
|
|
return raw_atomic_fetch_sub_acquire(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_sub_release() - atomic subtract with release ordering
|
|
* @i: long value to subtract
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - @i) with release ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_sub_release() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_sub_release(i, v);
|
|
#else
|
|
return raw_atomic_fetch_sub_release(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
|
|
* @i: long value to subtract
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - @i) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_sub_relaxed() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_sub_relaxed(i, v);
|
|
#else
|
|
return raw_atomic_fetch_sub_relaxed(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_inc() - atomic increment with relaxed ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + 1) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_inc() elsewhere.
|
|
*
|
|
* Return: Nothing.
|
|
*/
|
|
static __always_inline void
|
|
raw_atomic_long_inc(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
raw_atomic64_inc(v);
|
|
#else
|
|
raw_atomic_inc(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_inc_return() - atomic increment with full ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + 1) with full ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_inc_return() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_inc_return(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_inc_return(v);
|
|
#else
|
|
return raw_atomic_inc_return(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_inc_return_acquire() - atomic increment with acquire ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + 1) with acquire ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_inc_return_acquire() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_inc_return_acquire(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_inc_return_acquire(v);
|
|
#else
|
|
return raw_atomic_inc_return_acquire(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_inc_return_release() - atomic increment with release ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + 1) with release ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_inc_return_release() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_inc_return_release(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_inc_return_release(v);
|
|
#else
|
|
return raw_atomic_inc_return_release(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + 1) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_inc_return_relaxed() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_inc_return_relaxed(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_inc_return_relaxed(v);
|
|
#else
|
|
return raw_atomic_inc_return_relaxed(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_inc() - atomic increment with full ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + 1) with full ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_inc() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_inc(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_inc(v);
|
|
#else
|
|
return raw_atomic_fetch_inc(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + 1) with acquire ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_inc_acquire() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_inc_acquire(v);
|
|
#else
|
|
return raw_atomic_fetch_inc_acquire(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_inc_release() - atomic increment with release ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + 1) with release ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_inc_release() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_inc_release(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_inc_release(v);
|
|
#else
|
|
return raw_atomic_fetch_inc_release(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v + 1) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_inc_relaxed() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_inc_relaxed(v);
|
|
#else
|
|
return raw_atomic_fetch_inc_relaxed(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_dec() - atomic decrement with relaxed ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - 1) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_dec() elsewhere.
|
|
*
|
|
* Return: Nothing.
|
|
*/
|
|
static __always_inline void
|
|
raw_atomic_long_dec(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
raw_atomic64_dec(v);
|
|
#else
|
|
raw_atomic_dec(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_dec_return() - atomic decrement with full ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - 1) with full ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_dec_return() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_dec_return(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_dec_return(v);
|
|
#else
|
|
return raw_atomic_dec_return(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_dec_return_acquire() - atomic decrement with acquire ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - 1) with acquire ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_dec_return_acquire() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_dec_return_acquire(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_dec_return_acquire(v);
|
|
#else
|
|
return raw_atomic_dec_return_acquire(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_dec_return_release() - atomic decrement with release ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - 1) with release ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_dec_return_release() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_dec_return_release(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_dec_return_release(v);
|
|
#else
|
|
return raw_atomic_dec_return_release(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - 1) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_dec_return_relaxed() elsewhere.
|
|
*
|
|
* Return: The updated value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_dec_return_relaxed(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_dec_return_relaxed(v);
|
|
#else
|
|
return raw_atomic_dec_return_relaxed(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_dec() - atomic decrement with full ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - 1) with full ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_dec() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_dec(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_dec(v);
|
|
#else
|
|
return raw_atomic_fetch_dec(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - 1) with acquire ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_dec_acquire() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_dec_acquire(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_dec_acquire(v);
|
|
#else
|
|
return raw_atomic_fetch_dec_acquire(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_dec_release() - atomic decrement with release ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - 1) with release ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_dec_release() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_dec_release(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_dec_release(v);
|
|
#else
|
|
return raw_atomic_fetch_dec_release(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v - 1) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_dec_relaxed() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_dec_relaxed(v);
|
|
#else
|
|
return raw_atomic_fetch_dec_relaxed(v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_and() - atomic bitwise AND with relaxed ordering
|
|
* @i: long value
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v & @i) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_and() elsewhere.
|
|
*
|
|
* Return: Nothing.
|
|
*/
|
|
static __always_inline void
|
|
raw_atomic_long_and(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
raw_atomic64_and(i, v);
|
|
#else
|
|
raw_atomic_and(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_and() - atomic bitwise AND with full ordering
|
|
* @i: long value
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v & @i) with full ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_and() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_and(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_and(i, v);
|
|
#else
|
|
return raw_atomic_fetch_and(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_and_acquire() - atomic bitwise AND with acquire ordering
|
|
* @i: long value
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v & @i) with acquire ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_and_acquire() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_and_acquire(i, v);
|
|
#else
|
|
return raw_atomic_fetch_and_acquire(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_and_release() - atomic bitwise AND with release ordering
|
|
* @i: long value
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v & @i) with release ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_and_release() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_and_release(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_and_release(i, v);
|
|
#else
|
|
return raw_atomic_fetch_and_release(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
|
|
* @i: long value
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v & @i) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_and_relaxed() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_and_relaxed(i, v);
|
|
#else
|
|
return raw_atomic_fetch_and_relaxed(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
|
|
* @i: long value
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v & ~@i) with relaxed ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_andnot() elsewhere.
|
|
*
|
|
* Return: Nothing.
|
|
*/
|
|
static __always_inline void
|
|
raw_atomic_long_andnot(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
raw_atomic64_andnot(i, v);
|
|
#else
|
|
raw_atomic_andnot(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
|
|
* raw_atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
|
|
* @i: long value
|
|
* @v: pointer to atomic_long_t
|
|
*
|
|
* Atomically updates @v to (@v & ~@i) with full ordering.
|
|
*
|
|
* Safe to use in noinstr code; prefer atomic_long_fetch_andnot() elsewhere.
|
|
*
|
|
* Return: The original value of @v.
|
|
*/
|
|
static __always_inline long
|
|
raw_atomic_long_fetch_andnot(long i, atomic_long_t *v)
|
|
{
|
|
#ifdef CONFIG_64BIT
|
|
return raw_atomic64_fetch_andnot(i, v);
|
|
#else
|
|
return raw_atomic_fetch_andnot(i, v);
|
|
#endif
|
|
}
|
|
|
|
/**
 * raw_atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot_acquire(i, v);
#else
	return raw_atomic_fetch_andnot_acquire(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot_release(i, v);
#else
	return raw_atomic_fetch_andnot_release(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot_relaxed(i, v);
#else
	return raw_atomic_fetch_andnot_relaxed(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_or() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_or() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_or(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_or(i, v);
#else
	raw_atomic_or(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_or() - atomic bitwise OR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or(i, v);
#else
	return raw_atomic_fetch_or(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or_acquire(i, v);
#else
	return raw_atomic_fetch_or_acquire(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or_release(i, v);
#else
	return raw_atomic_fetch_or_release(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or_relaxed(i, v);
#else
	return raw_atomic_fetch_or_relaxed(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_xor() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xor() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_xor(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_xor(i, v);
#else
	raw_atomic_xor(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor(i, v);
#else
	return raw_atomic_fetch_xor(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_acquire(i, v);
#else
	return raw_atomic_fetch_xor_acquire(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_release(i, v);
#else
	return raw_atomic_fetch_xor_release(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_relaxed(i, v);
#else
	return raw_atomic_fetch_xor_relaxed(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_xchg() - atomic exchange with full ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg(v, new);
#else
	return raw_atomic_xchg(v, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_xchg_acquire() - atomic exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_acquire(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_acquire(v, new);
#else
	return raw_atomic_xchg_acquire(v, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_xchg_release() - atomic exchange with release ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_release(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_release(v, new);
#else
	return raw_atomic_xchg_release(v, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_relaxed(v, new);
#else
	return raw_atomic_xchg_relaxed(v, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg(v, old, new);
#else
	return raw_atomic_cmpxchg(v, old, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_acquire(v, old, new);
#else
	return raw_atomic_cmpxchg_acquire(v, old, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_release(v, old, new);
#else
	return raw_atomic_cmpxchg_release(v, old, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_relaxed(v, old, new);
#else
	return raw_atomic_cmpxchg_relaxed(v, old, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg(v, (int *)old, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_acquire() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_acquire(v, (int *)old, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_release() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_release(v, (int *)old, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_relaxed() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_and_test(i, v);
#else
	return raw_atomic_sub_and_test(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_and_test(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_and_test(v);
#else
	return raw_atomic_dec_and_test(v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_and_test(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_and_test(v);
#else
	return raw_atomic_inc_and_test(v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_add_negative() - atomic add and test if negative with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative(i, v);
#else
	return raw_atomic_add_negative(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_acquire() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_acquire(i, v);
#else
	return raw_atomic_add_negative_acquire(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_add_negative_release() - atomic add and test if negative with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_release() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_release(i, v);
#else
	return raw_atomic_add_negative_release(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_relaxed() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_relaxed(i, v);
#else
	return raw_atomic_add_negative_relaxed(i, v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_fetch_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_unless() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add_unless(v, a, u);
#else
	return raw_atomic_fetch_add_unless(v, a, u);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_unless() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_unless(v, a, u);
#else
	return raw_atomic_add_unless(v, a, u);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_inc_not_zero() - atomic increment unless zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_not_zero() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_not_zero(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_not_zero(v);
#else
	return raw_atomic_inc_not_zero(v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_unless_negative() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_unless_negative(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_unless_negative(v);
#else
	return raw_atomic_inc_unless_negative(v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_unless_positive() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_unless_positive(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_unless_positive(v);
#else
	return raw_atomic_dec_unless_positive(v);
#endif
}
|
|
|
|
/**
 * raw_atomic_long_dec_if_positive() - atomic decrement if positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_if_positive() elsewhere.
 *
 * Return: The old value of (@v - 1), regardless of whether @v was updated.
 */
static __always_inline long
raw_atomic_long_dec_if_positive(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_if_positive(v);
#else
	return raw_atomic_dec_if_positive(v);
#endif
}
|
|
|
|
#endif /* _LINUX_ATOMIC_LONG_H */
|
|
// 029d2e3a493086671e874a4c2e0e42084be42403
|