Skip to content

Commit a6cf7ed

Browse files
antonblanchard
authored and ozbenh committed
powerpc/atomic: Implement atomic*_inc_not_zero
Implement atomic_inc_not_zero and atomic64_inc_not_zero. At the moment we use atomic*_add_unless which requires us to put 0 and 1 constants into registers. We can also avoid a subtract by saving the original value in a second temporary. This removes 3 instructions from fget: - c0000000001b63c0: 39 00 00 00 li r8,0 - c0000000001b63c4: 39 40 00 01 li r10,1 ... - c0000000001b63e8: 7c 0a 00 50 subf r0,r10,r0 Signed-off-by: Anton Blanchard <anton@samba.org> Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
1 parent b1ada60 commit a6cf7ed

File tree

1 file changed

+58
-1
lines changed

1 file changed

+58
-1
lines changed

arch/powerpc/include/asm/atomic.h

Lines changed: 58 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -212,6 +212,36 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
212212
return t;
213213
}
214214

215+
/**
216+
* atomic_inc_not_zero - increment unless the number is zero
217+
* @v: pointer of type atomic_t
218+
*
219+
* Atomically increments @v by 1, so long as @v is non-zero.
220+
* Returns non-zero if @v was non-zero, and zero otherwise.
221+
*/
222+
static __inline__ int atomic_inc_not_zero(atomic_t *v)
223+
{
224+
int t1, t2;
225+
226+
/*
 * Load-linked/store-conditional loop: %0 (t1) receives the value
 * observed by lwarx; if it is zero we branch straight to label 2
 * without storing, otherwise %1 (t2) = t1 + 1 is written back with
 * stwcx., retrying from label 1 if the reservation was lost.
 */
__asm__ __volatile__ (
227+
PPC_ATOMIC_ENTRY_BARRIER
228+
"1: lwarx %0,0,%2 # atomic_inc_not_zero\n\
229+
cmpwi 0,%0,0\n\
230+
beq- 2f\n\
231+
addic %1,%0,1\n"
232+
/* NOTE(review): presumably a workaround for PPC405 erratum 77
 * around the conditional store — confirm against the macro's
 * definition. */
PPC405_ERR77(0,%2)
233+
" stwcx. %1,0,%2\n\
234+
bne- 1b\n"
235+
PPC_ATOMIC_EXIT_BARRIER
236+
"\n\
237+
2:"
238+
: "=&r" (t1), "=&r" (t2)
239+
: "r" (&v->counter)
240+
/* addic updates the carry bit, hence the "xer" clobber;
 * "memory" keeps the compiler from caching v->counter across
 * the asm. */
: "cc", "xer", "memory");
241+
242+
/* t1 is the pre-increment value: non-zero iff the increment
 * was performed (and iff @v was non-zero). */
return t1;
243+
}
244+
/* Self-referencing define — presumably lets generic headers detect
 * that this arch supplies atomic_inc_not_zero() and skip the
 * atomic_add_unless()-based fallback; confirm against asm-generic. */
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
215245

216246
#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
217247
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
@@ -467,7 +497,34 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
467497
return t != u;
468498
}
469499

470-
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
500+
/**
501+
* atomic64_inc_not_zero - increment unless the number is zero
502+
* @v: pointer of type atomic64_t
503+
*
504+
* Atomically increments @v by 1, so long as @v is non-zero.
505+
* Returns non-zero if @v was non-zero, and zero otherwise.
506+
*/
507+
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
508+
{
509+
long t1, t2;
510+
511+
/*
 * 64-bit variant of atomic_inc_not_zero(): ldarx/stdcx. reservation
 * loop. %0 (t1) holds the loaded value; if zero, branch to label 2
 * without storing, else store %1 (t2) = t1 + 1 and retry from
 * label 1 if the reservation was lost.
 */
__asm__ __volatile__ (
512+
PPC_ATOMIC_ENTRY_BARRIER
513+
"1: ldarx %0,0,%2 # atomic64_inc_not_zero\n\
514+
cmpdi 0,%0,0\n\
515+
beq- 2f\n\
516+
addic %1,%0,1\n\
517+
stdcx. %1,0,%2\n\
518+
bne- 1b\n"
519+
PPC_ATOMIC_EXIT_BARRIER
520+
"\n\
521+
2:"
522+
: "=&r" (t1), "=&r" (t2)
523+
: "r" (&v->counter)
524+
/* addic updates the carry bit, hence the "xer" clobber. */
: "cc", "xer", "memory");
525+
526+
/* t1 is the pre-increment value: non-zero iff the increment
 * was performed (and iff @v was non-zero). */
return t1;
527+
}
471528

472529
#endif /* __powerpc64__ */
473530

0 commit comments

Comments
 (0)