Skip to content

Commit ade5ef9

Browse files
mrutland-arm authored and ingomolnar committed
atomics: Make conditional ops return 'bool'
Some of the atomics return a status value, which is a boolean value describing whether the operation was performed. To make it clear that this is a boolean value, let's update the common fallbacks to return bool, fixing up the return values and comments likewise. At the same time, let's simplify the description of the operations in their respective comments. The instrumented atomics and generic atomic64 implementation are updated accordingly. Note that atomic64_dec_if_positive() doesn't follow the usual test op pattern, and returns the would-be decremented value. This is not changed. Signed-off-by: Mark Rutland <mark.rutland@arm.com> Reviewed-by: Will Deacon <will.deacon@arm.com> Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org> Cc: Boqun Feng <boqun.feng@gmail.com> Cc: Linus Torvalds <torvalds@linux-foundation.org> Cc: Michael Ellerman <mpe@ellerman.id.au> Cc: Thomas Gleixner <tglx@linutronix.de> Link: https://lore.kernel.org/lkml/20180621121321.4761-5-mark.rutland@arm.com Signed-off-by: Ingo Molnar <mingo@kernel.org>
1 parent f74445b commit ade5ef9

File tree

4 files changed

+19
-16
lines changed

4 files changed

+19
-16
lines changed

include/asm-generic/atomic-instrumented.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -205,7 +205,7 @@ static __always_inline s64 atomic64_dec_return(atomic64_t *v)
205205
return arch_atomic64_dec_return(v);
206206
}
207207

208-
static __always_inline s64 atomic64_inc_not_zero(atomic64_t *v)
208+
static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
209209
{
210210
kasan_check_write(v, sizeof(*v));
211211
return arch_atomic64_inc_not_zero(v);

include/asm-generic/atomic64.h

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
*/
1212
#ifndef _ASM_GENERIC_ATOMIC64_H
1313
#define _ASM_GENERIC_ATOMIC64_H
14+
#include <linux/types.h>
1415

1516
typedef struct {
1617
long long counter;
@@ -52,7 +53,7 @@ ATOMIC64_OPS(xor)
5253
extern long long atomic64_dec_if_positive(atomic64_t *v);
5354
extern long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n);
5455
extern long long atomic64_xchg(atomic64_t *v, long long new);
55-
extern int atomic64_add_unless(atomic64_t *v, long long a, long long u);
56+
extern bool atomic64_add_unless(atomic64_t *v, long long a, long long u);
5657

5758
#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
5859
#define atomic64_inc(v) atomic64_add(1LL, (v))

include/linux/atomic.h

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22
/* Atomic operations usable in machine independent code */
33
#ifndef _LINUX_ATOMIC_H
44
#define _LINUX_ATOMIC_H
5+
#include <linux/types.h>
6+
57
#include <asm/atomic.h>
68
#include <asm/barrier.h>
79

@@ -525,10 +527,10 @@
525527
* @a: the amount to add to v...
526528
* @u: ...unless v is equal to u.
527529
*
528-
* Atomically adds @a to @v, so long as @v was not already @u.
529-
* Returns non-zero if @v was not @u, and zero otherwise.
530+
* Atomically adds @a to @v, if @v was not already @u.
531+
* Returns true if the addition was done.
530532
*/
531-
static inline int atomic_add_unless(atomic_t *v, int a, int u)
533+
static inline bool atomic_add_unless(atomic_t *v, int a, int u)
532534
{
533535
return atomic_fetch_add_unless(v, a, u) != u;
534536
}
@@ -537,8 +539,8 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
537539
* atomic_inc_not_zero - increment unless the number is zero
538540
* @v: pointer of type atomic_t
539541
*
540-
* Atomically increments @v by 1, so long as @v is non-zero.
541-
* Returns non-zero if @v was non-zero, and zero otherwise.
542+
* Atomically increments @v by 1, if @v is non-zero.
543+
* Returns true if the increment was done.
542544
*/
543545
#ifndef atomic_inc_not_zero
544546
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
@@ -572,28 +574,28 @@ static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
572574
#endif
573575

574576
#ifndef atomic_inc_unless_negative
575-
static inline int atomic_inc_unless_negative(atomic_t *p)
577+
static inline bool atomic_inc_unless_negative(atomic_t *p)
576578
{
577579
int v, v1;
578580
for (v = 0; v >= 0; v = v1) {
579581
v1 = atomic_cmpxchg(p, v, v + 1);
580582
if (likely(v1 == v))
581-
return 1;
583+
return true;
582584
}
583-
return 0;
585+
return false;
584586
}
585587
#endif
586588

587589
#ifndef atomic_dec_unless_positive
588-
static inline int atomic_dec_unless_positive(atomic_t *p)
590+
static inline bool atomic_dec_unless_positive(atomic_t *p)
589591
{
590592
int v, v1;
591593
for (v = 0; v <= 0; v = v1) {
592594
v1 = atomic_cmpxchg(p, v, v - 1);
593595
if (likely(v1 == v))
594-
return 1;
596+
return true;
595597
}
596-
return 0;
598+
return false;
597599
}
598600
#endif
599601

lib/atomic64.c

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -178,16 +178,16 @@ long long atomic64_xchg(atomic64_t *v, long long new)
178178
}
179179
EXPORT_SYMBOL(atomic64_xchg);
180180

181-
int atomic64_add_unless(atomic64_t *v, long long a, long long u)
181+
bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
182182
{
183183
unsigned long flags;
184184
raw_spinlock_t *lock = lock_addr(v);
185-
int ret = 0;
185+
bool ret = false;
186186

187187
raw_spin_lock_irqsave(lock, flags);
188188
if (v->counter != u) {
189189
v->counter += a;
190-
ret = 1;
190+
ret = true;
191191
}
192192
raw_spin_unlock_irqrestore(lock, flags);
193193
return ret;

0 commit comments

Comments
 (0)