Skip to content

Commit d0563a1

Browse files
Pan Xinhui authored and mpe committed
powerpc: Implement {cmp}xchg for u8 and u16
Implement xchg{u8,u16}{local,relaxed}, and cmpxchg{u8,u16}{,local,acquire,relaxed}. It works on all ppc. remove volatile of first parameter in __cmpxchg_local and __cmpxchg Suggested-by: Peter Zijlstra (Intel) <peterz@infradead.org> Signed-off-by: Pan Xinhui <xinhui.pan@linux.vnet.ibm.com> Acked-by: Boqun Feng <boqun.feng@gmail.com> Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org> Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
1 parent 8004ca9 commit d0563a1

File tree

1 file changed

+106
-3
lines changed

1 file changed

+106
-3
lines changed

arch/powerpc/include/asm/cmpxchg.h

Lines changed: 106 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,13 +7,83 @@
77
#include <asm/asm-compat.h>
88
#include <linux/bug.h>
99

10+
/*
 * BITOFF_CAL(size, off) - bit shift that places a sub-word value of
 * 'size' bytes, living at byte offset 'off' within its aligned u32
 * word, into position inside that word.
 *
 * On big-endian the lowest-addressed byte is the most significant, so
 * the shift is counted from the far end of the word; on little-endian
 * the byte offset maps directly to the shift amount.
 */
#ifdef __BIG_ENDIAN
#define BITOFF_CAL(size, off)	((sizeof(u32) - size - off) * BITS_PER_BYTE)
#else
#define BITOFF_CAL(size, off)	(off * BITS_PER_BYTE)
#endif
15+
16+
/*
 * XCHG_GEN(type, sfx, cl) - generate __xchg_<type><sfx>(), an atomic
 * exchange for a sub-word type (u8 or u16) on all ppc.
 *
 * There is no byte/halfword variant of lwarx/stwcx., so the exchange is
 * done on the aligned u32 word containing *p: the old word is loaded
 * with lwarx, the bytes belonging to *p are cleared (andc with
 * prev_mask) and replaced with the shifted new value (or), and the
 * result is conditionally stored with stwcx., retrying on contention.
 *
 * 'cl' is an extra clobber for the asm ("memory" for ordered variants,
 * "cc" only for _relaxed).  Returns the previous sub-word value,
 * shifted back down to bit 0.
 */
#define XCHG_GEN(type, sfx, cl)				\
static inline u32 __xchg_##type##sfx(void *p, u32 val)	\
{							\
	unsigned int prev, prev_mask, tmp, bitoff, off;	\
							\
	/* byte offset of p within its aligned u32 */	\
	off = (unsigned long)p % sizeof(u32);		\
	bitoff = BITOFF_CAL(sizeof(type), off);		\
	p -= off;					\
	val <<= bitoff;					\
	/* mask covering only the target sub-word */	\
	prev_mask = (u32)(type)-1 << bitoff;		\
							\
	__asm__ __volatile__(				\
"1:	lwarx   %0,0,%3\n"				\
"	andc	%1,%0,%5\n"				\
"	or	%1,%1,%4\n"				\
	PPC405_ERR77(0,%3)				\
"	stwcx.	%1,0,%3\n"				\
"	bne-	1b\n"					\
	: "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p)	\
	: "r" (p), "r" (val), "r" (prev_mask)		\
	: "cc", cl);					\
							\
	return prev >> bitoff;				\
}
40+
41+
/*
 * CMPXCHG_GEN(type, sfx, br, br2, cl) - generate __cmpxchg_<type><sfx>(),
 * an atomic compare-and-exchange for a sub-word type (u8 or u16).
 *
 * As with XCHG_GEN, the operation works on the aligned u32 word that
 * contains *p, since lwarx/stwcx. only operate on words: the target
 * bytes are isolated (and with prev_mask), compared against the shifted
 * 'old' value, and only on a match rewritten with the shifted 'new'
 * value via the lwarx/stwcx. retry loop.
 *
 * 'br' / 'br2' are the entry / exit memory-barrier macros expanded
 * before the loop and after a successful store (empty for the _local
 * and _relaxed variants); 'cl' is an extra asm clobber.  Returns the
 * previous sub-word value shifted down to bit 0 — callers detect
 * success by comparing it with 'old'.
 */
#define CMPXCHG_GEN(type, sfx, br, br2, cl)		\
static inline					\
u32 __cmpxchg_##type##sfx(void *p, u32 old, u32 new)	\
{							\
	unsigned int prev, prev_mask, tmp, bitoff, off;	\
							\
	/* byte offset of p within its aligned u32 */	\
	off = (unsigned long)p % sizeof(u32);		\
	bitoff = BITOFF_CAL(sizeof(type), off);		\
	p -= off;					\
	old <<= bitoff;					\
	new <<= bitoff;					\
	/* mask covering only the target sub-word */	\
	prev_mask = (u32)(type)-1 << bitoff;		\
							\
	__asm__ __volatile__(				\
	br						\
"1:	lwarx   %0,0,%3\n"				\
"	and	%1,%0,%6\n"				\
"	cmpw	0,%1,%4\n"				\
"	bne-	2f\n"					\
"	andc	%1,%0,%6\n"				\
"	or	%1,%1,%5\n"				\
	PPC405_ERR77(0,%3)				\
"	stwcx.	%1,0,%3\n"				\
"	bne-	1b\n"					\
	br2						\
	"\n"						\
"2:"							\
	: "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p)	\
	: "r" (p), "r" (old), "r" (new), "r" (prev_mask)	\
	: "cc", cl);					\
							\
	return prev >> bitoff;				\
}
74+
1075
/*
1176
* Atomic exchange
1277
*
1378
* Changes the memory location '*p' to be val and returns
1479
* the previous value stored there.
1580
*/
1681

82+
/*
 * Instantiate the sub-word xchg variants.  Only _local and _relaxed are
 * generated here; the ordered variants use "memory" as the extra
 * clobber (compiler barrier), _relaxed clobbers only "cc".
 */
XCHG_GEN(u8, _local, "memory");
XCHG_GEN(u8, _relaxed, "cc");
XCHG_GEN(u16, _local, "memory");
XCHG_GEN(u16, _relaxed, "cc");
86+
1787
static __always_inline unsigned long
1888
__xchg_u32_local(volatile void *p, unsigned long val)
1989
{
@@ -85,9 +155,13 @@ __xchg_u64_relaxed(u64 *p, unsigned long val)
85155
#endif
86156

87157
static __always_inline unsigned long
88-
__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
158+
__xchg_local(void *ptr, unsigned long x, unsigned int size)
89159
{
90160
switch (size) {
161+
case 1:
162+
return __xchg_u8_local(ptr, x);
163+
case 2:
164+
return __xchg_u16_local(ptr, x);
91165
case 4:
92166
return __xchg_u32_local(ptr, x);
93167
#ifdef CONFIG_PPC64
@@ -103,6 +177,10 @@ static __always_inline unsigned long
103177
__xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
104178
{
105179
switch (size) {
180+
case 1:
181+
return __xchg_u8_relaxed(ptr, x);
182+
case 2:
183+
return __xchg_u16_relaxed(ptr, x);
106184
case 4:
107185
return __xchg_u32_relaxed(ptr, x);
108186
#ifdef CONFIG_PPC64
@@ -131,6 +209,15 @@ __xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
131209
* and return the old value of *p.
132210
*/
133211

212+
/*
 * Instantiate the sub-word cmpxchg variants.  The barrier arguments
 * select the ordering: the fully-ordered variant gets entry+exit
 * barriers, _acquire gets only an acquire barrier after the store,
 * and _local / _relaxed get none.  "memory" vs "cc" as the extra
 * clobber mirrors the xchg instantiations above.
 */
CMPXCHG_GEN(u8, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
CMPXCHG_GEN(u8, _local, , , "memory");
CMPXCHG_GEN(u8, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
CMPXCHG_GEN(u8, _relaxed, , , "cc");
CMPXCHG_GEN(u16, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
CMPXCHG_GEN(u16, _local, , , "memory");
CMPXCHG_GEN(u16, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
CMPXCHG_GEN(u16, _relaxed, , , "cc");
220+
134221
static __always_inline unsigned long
135222
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
136223
{
@@ -312,10 +399,14 @@ __cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new)
312399
#endif
313400

314401
static __always_inline unsigned long
315-
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
402+
__cmpxchg(void *ptr, unsigned long old, unsigned long new,
316403
unsigned int size)
317404
{
318405
switch (size) {
406+
case 1:
407+
return __cmpxchg_u8(ptr, old, new);
408+
case 2:
409+
return __cmpxchg_u16(ptr, old, new);
319410
case 4:
320411
return __cmpxchg_u32(ptr, old, new);
321412
#ifdef CONFIG_PPC64
@@ -328,10 +419,14 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
328419
}
329420

330421
static __always_inline unsigned long
331-
__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
422+
__cmpxchg_local(void *ptr, unsigned long old, unsigned long new,
332423
unsigned int size)
333424
{
334425
switch (size) {
426+
case 1:
427+
return __cmpxchg_u8_local(ptr, old, new);
428+
case 2:
429+
return __cmpxchg_u16_local(ptr, old, new);
335430
case 4:
336431
return __cmpxchg_u32_local(ptr, old, new);
337432
#ifdef CONFIG_PPC64
@@ -348,6 +443,10 @@ __cmpxchg_relaxed(void *ptr, unsigned long old, unsigned long new,
348443
unsigned int size)
349444
{
350445
switch (size) {
446+
case 1:
447+
return __cmpxchg_u8_relaxed(ptr, old, new);
448+
case 2:
449+
return __cmpxchg_u16_relaxed(ptr, old, new);
351450
case 4:
352451
return __cmpxchg_u32_relaxed(ptr, old, new);
353452
#ifdef CONFIG_PPC64
@@ -364,6 +463,10 @@ __cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new,
364463
unsigned int size)
365464
{
366465
switch (size) {
466+
case 1:
467+
return __cmpxchg_u8_acquire(ptr, old, new);
468+
case 2:
469+
return __cmpxchg_u16_acquire(ptr, old, new);
367470
case 4:
368471
return __cmpxchg_u32_acquire(ptr, old, new);
369472
#ifdef CONFIG_PPC64

0 commit comments

Comments
 (0)