#include <asm/asm-compat.h>
#include <linux/bug.h>

+ #ifdef __BIG_ENDIAN
+ #define BITOFF_CAL(size, off)	((sizeof(u32) - size - off) * BITS_PER_BYTE)
+ #else
+ #define BITOFF_CAL(size, off)	(off * BITS_PER_BYTE)
+ #endif
+
+ #define XCHG_GEN(type, sfx, cl) \
+ static inline u32 __xchg_##type##sfx(void *p, u32 val) \
+ { \
+     unsigned int prev, prev_mask, tmp, bitoff, off; \
+     \
+     off = (unsigned long)p % sizeof(u32); \
+     bitoff = BITOFF_CAL(sizeof(type), off); \
+     p -= off; \
+     val <<= bitoff; \
+     prev_mask = (u32)(type)-1 << bitoff; \
+     \
+     __asm__ __volatile__( \
+ "1:  lwarx   %0,0,%3\n" \
+ "    andc    %1,%0,%5\n" \
+ "    or      %1,%1,%4\n" \
+     PPC405_ERR77(0,%3) \
+ "    stwcx.  %1,0,%3\n" \
+ "    bne-    1b\n" \
+     : "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p) \
+     : "r" (p), "r" (val), "r" (prev_mask) \
+     : "cc", cl); \
+     \
+     return prev >> bitoff; \
+ }
+
+ #define CMPXCHG_GEN(type, sfx, br, br2, cl) \
+ static inline \
+ u32 __cmpxchg_##type##sfx(void *p, u32 old, u32 new) \
+ { \
+     unsigned int prev, prev_mask, tmp, bitoff, off; \
+     \
+     off = (unsigned long)p % sizeof(u32); \
+     bitoff = BITOFF_CAL(sizeof(type), off); \
+     p -= off; \
+     old <<= bitoff; \
+     new <<= bitoff; \
+     prev_mask = (u32)(type)-1 << bitoff; \
+     \
+     __asm__ __volatile__( \
+     br \
+ "1:  lwarx   %0,0,%3\n" \
+ "    and     %1,%0,%6\n" \
+ "    cmpw    0,%1,%4\n" \
+ "    bne-    2f\n" \
+ "    andc    %1,%0,%6\n" \
+ "    or      %1,%1,%5\n" \
+     PPC405_ERR77(0,%3) \
+ "    stwcx.  %1,0,%3\n" \
+ "    bne-    1b\n" \
+     br2 \
+     "\n" \
+ "2:" \
+     : "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p) \
+     : "r" (p), "r" (old), "r" (new), "r" (prev_mask) \
+     : "cc", cl); \
+     \
+     return prev >> bitoff; \
+ }
+
/*
 * Atomic exchange
 *
 * Changes the memory location '*p' to be val and returns
 * the previous value stored there.
 */

+ XCHG_GEN(u8, _local, "memory");
+ XCHG_GEN(u8, _relaxed, "cc");
+ XCHG_GEN(u16, _local, "memory");
+ XCHG_GEN(u16, _relaxed, "cc");
+
static __always_inline unsigned long
__xchg_u32_local(volatile void *p, unsigned long val)
{
@@ -85,9 +155,13 @@ __xchg_u64_relaxed(u64 *p, unsigned long val)
#endif

static __always_inline unsigned long
- __xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
+ __xchg_local(void *ptr, unsigned long x, unsigned int size)
{
    switch (size) {
+   case 1:
+       return __xchg_u8_local(ptr, x);
+   case 2:
+       return __xchg_u16_local(ptr, x);
    case 4:
        return __xchg_u32_local(ptr, x);
#ifdef CONFIG_PPC64
@@ -103,6 +177,10 @@ static __always_inline unsigned long
__xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
{
    switch (size) {
+   case 1:
+       return __xchg_u8_relaxed(ptr, x);
+   case 2:
+       return __xchg_u16_relaxed(ptr, x);
    case 4:
        return __xchg_u32_relaxed(ptr, x);
#ifdef CONFIG_PPC64
@@ -131,6 +209,15 @@ __xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
 * and return the old value of *p.
 */

+ CMPXCHG_GEN(u8, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
+ CMPXCHG_GEN(u8, _local, , , "memory");
+ CMPXCHG_GEN(u8, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
+ CMPXCHG_GEN(u8, _relaxed, , , "cc");
+ CMPXCHG_GEN(u16, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
+ CMPXCHG_GEN(u16, _local, , , "memory");
+ CMPXCHG_GEN(u16, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
+ CMPXCHG_GEN(u16, _relaxed, , , "cc");
+
static __always_inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
{
@@ -312,10 +399,14 @@ __cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new)
#endif

static __always_inline unsigned long
- __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
+ __cmpxchg(void *ptr, unsigned long old, unsigned long new,
          unsigned int size)
{
    switch (size) {
+   case 1:
+       return __cmpxchg_u8(ptr, old, new);
+   case 2:
+       return __cmpxchg_u16(ptr, old, new);
    case 4:
        return __cmpxchg_u32(ptr, old, new);
#ifdef CONFIG_PPC64
@@ -328,10 +419,14 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
}

static __always_inline unsigned long
- __cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
+ __cmpxchg_local(void *ptr, unsigned long old, unsigned long new,
          unsigned int size)
{
    switch (size) {
+   case 1:
+       return __cmpxchg_u8_local(ptr, old, new);
+   case 2:
+       return __cmpxchg_u16_local(ptr, old, new);
    case 4:
        return __cmpxchg_u32_local(ptr, old, new);
#ifdef CONFIG_PPC64
@@ -348,6 +443,10 @@ __cmpxchg_relaxed(void *ptr, unsigned long old, unsigned long new,
          unsigned int size)
{
    switch (size) {
+   case 1:
+       return __cmpxchg_u8_relaxed(ptr, old, new);
+   case 2:
+       return __cmpxchg_u16_relaxed(ptr, old, new);
    case 4:
        return __cmpxchg_u32_relaxed(ptr, old, new);
#ifdef CONFIG_PPC64
@@ -364,6 +463,10 @@ __cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new,
          unsigned int size)
{
    switch (size) {
+   case 1:
+       return __cmpxchg_u8_acquire(ptr, old, new);
+   case 2:
+       return __cmpxchg_u16_acquire(ptr, old, new);
    case 4:
        return __cmpxchg_u32_acquire(ptr, old, new);
#ifdef CONFIG_PPC64
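
Note on the approach: XCHG_GEN above emulates a 1- or 2-byte atomic exchange by operating on the aligned 32-bit word that contains the target, shifting the new value into position and touching only the masked bits inside the lwarx/stwcx. retry loop. The stand-alone sketch below shows the same shift/mask technique in portable C, assuming a GCC/Clang-style compiler; the __atomic builtins stand in for the lwarx/stwcx. loop, and the helper name xchg_u8_emulated is invented for illustration, not part of the patch.

#include <stdint.h>

/* Illustrative sketch only, not kernel code: exchange one byte atomically
 * by retrying a word-sized compare-and-swap on the aligned u32 containing it. */
static uint8_t xchg_u8_emulated(uint8_t *p, uint8_t val)
{
    uintptr_t addr = (uintptr_t)p;
    uint32_t *word = (uint32_t *)(addr & ~(uintptr_t)3);  /* containing aligned u32 */
    unsigned off = addr & 3;                               /* byte offset within it  */
#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
    unsigned bitoff = (sizeof(uint32_t) - sizeof(uint8_t) - off) * 8;
#else
    unsigned bitoff = off * 8;
#endif
    uint32_t mask = 0xffu << bitoff;                       /* bits occupied by *p */
    uint32_t newbits = (uint32_t)val << bitoff;
    uint32_t old = __atomic_load_n(word, __ATOMIC_RELAXED);

    /* Retry until the whole word is swapped without interference, the
     * portable equivalent of the lwarx/stwcx. loop; on failure the builtin
     * reloads 'old' from memory before the next attempt. */
    while (!__atomic_compare_exchange_n(word, &old, (old & ~mask) | newbits,
                                        1, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
        ;

    return (uint8_t)(old >> bitoff);                       /* previous byte value */
}

The loop only repeats when another CPU modifies any byte of the same word between the load and the store, which is exactly the condition under which stwcx. fails in the generated helpers.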
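CMPXCHG_GEN follows the same pattern but bails out early (the cmpw / bne- 2f pair) as soon as the targeted bytes no longer hold the expected value. Below is a minimal user-space sketch of that logic, again assuming GCC/Clang builtins and, for brevity, a little-endian bit offset (the macro handles both endiannesses via BITOFF_CAL); the helper name cmpxchg_u16_emulated is illustrative only.

#include <stdint.h>

/* Illustrative sketch only: 16-bit compare-and-swap emulated on the
 * containing 32-bit word, mirroring CMPXCHG_GEN. Returns the previous
 * 16-bit value, matching the kernel helpers' convention. */
static uint16_t cmpxchg_u16_emulated(uint16_t *p, uint16_t expected, uint16_t new_val)
{
    uintptr_t addr = (uintptr_t)p;
    uint32_t *word = (uint32_t *)(addr & ~(uintptr_t)3);
    unsigned bitoff = (addr & 3) * 8;            /* little-endian: low byte first */
    uint32_t mask = 0xffffu << bitoff;
    uint32_t oldbits = (uint32_t)expected << bitoff;
    uint32_t newbits = (uint32_t)new_val << bitoff;
    uint32_t cur = __atomic_load_n(word, __ATOMIC_RELAXED);

    do {
        /* Early exit when the 16-bit field no longer matches 'expected',
         * mirroring the "cmpw ... bne- 2f" path in CMPXCHG_GEN. */
        if ((cur & mask) != oldbits)
            break;
    } while (!__atomic_compare_exchange_n(word, &cur, (cur & ~mask) | newbits,
                                          1, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED));

    return (uint16_t)(cur >> bitoff);            /* previous 16-bit value */
}

As with the generated kernel helpers, the caller detects success by comparing the returned previous value against the expected one.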