@@ -36,6 +36,8 @@
  */
 extern unsigned long __cmpxchg_called_with_bad_pointer(void)
 	__compiletime_error("Bad argument size for cmpxchg");
+extern unsigned long __cmpxchg64_unsupported(void)
+	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
 extern unsigned long __xchg_called_with_bad_pointer(void)
 	__compiletime_error("Bad argument size for xchg");
 
@@ -204,12 +206,102 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 	cmpxchg((ptr), (o), (n));				\
 })
 #else
-#include <asm-generic/cmpxchg-local.h>
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
-#ifndef CONFIG_SMP
-#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
-#endif
-#endif
+
+# include <asm-generic/cmpxchg-local.h>
+# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+
+# ifdef CONFIG_SMP
+
+static inline unsigned long __cmpxchg64(volatile void *ptr,
+					unsigned long long old,
+					unsigned long long new)
+{
+	unsigned long long tmp, ret;
+	unsigned long flags;
+
+	/*
+	 * The assembly below has to combine 32 bit values into a 64 bit
+	 * register, and split 64 bit values from one register into two. If we
+	 * were to take an interrupt in the middle of this we'd only save the
+	 * least significant 32 bits of each register & probably clobber the
+	 * most significant 32 bits of the 64 bit values we're using. In order
+	 * to avoid this we must disable interrupts.
+	 */
+	local_irq_save(flags);
+
+	asm volatile(
+	"	.set	push				\n"
+	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
+	/* Load 64 bits from ptr */
+	"1:	lld	%L0, %3		# __cmpxchg64	\n"
+	/*
+	 * Split the 64 bit value we loaded into the 2 registers that hold the
+	 * ret variable.
+	 */
+	"	dsra	%M0, %L0, 32			\n"
+	"	sll	%L0, %L0, 0			\n"
+	/*
+	 * Compare ret against old, breaking out of the loop if they don't
+	 * match.
+	 */
+	"	bne	%M0, %M4, 2f			\n"
+	"	bne	%L0, %L4, 2f			\n"
+	/*
+	 * Combine the 32 bit halves from the 2 registers that hold the new
+	 * variable into a single 64 bit register.
+	 */
+# if MIPS_ISA_REV >= 2
+	"	move	%L1, %L5			\n"
+	"	dins	%L1, %M5, 32, 32		\n"
+# else
+	"	dsll	%L1, %L5, 32			\n"
+	"	dsrl	%L1, %L1, 32			\n"
+	"	.set	noat				\n"
+	"	dsll	$at, %M5, 32			\n"
+	"	or	%L1, %L1, $at			\n"
+	"	.set	at				\n"
+# endif
+	/* Attempt to store new at ptr */
+	"	scd	%L1, %2				\n"
+	/* If we failed, loop! */
+	"\t" __scbeqz "	%L1, 1b				\n"
+	"	.set	pop				\n"
+	"2:						\n"
+	: "=&r"(ret),
+	  "=&r"(tmp),
+	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
+	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
+	  "r" (old),
+	  "r" (new)
+	: "memory");
+
+	local_irq_restore(flags);
+	return ret;
+}
+
+# define cmpxchg64(ptr, o, n) ({					\
+	unsigned long long __old = (__typeof__(*(ptr)))(o);		\
+	unsigned long long __new = (__typeof__(*(ptr)))(n);		\
+	__typeof__(*(ptr)) __res;					\
+									\
+	/*								\
+	 * We can only use cmpxchg64 if we know that the CPU supports	\
+	 * 64-bits, ie. lld & scd. Our call to __cmpxchg64_unsupported	\
+	 * will cause a build error unless cpu_has_64bits is a		\
+	 * compile-time constant 1.					\
+	 */								\
+	if (cpu_has_64bits && kernel_uses_llsc)				\
+		__res = __cmpxchg64((ptr), __old, __new);		\
+	else								\
+		__res = __cmpxchg64_unsupported();		 	\
+									\
+	__res;								\
+})
+
+# else /* !CONFIG_SMP */
+# define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
+# endif /* !CONFIG_SMP */
+#endif /* !CONFIG_64BIT */
 
 #undef __scbeqz
 
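A note on the `__compiletime_error()` guard declared alongside the other bad-call externs at the top of the diff: it rests on GCC's `error` function attribute. If a call to `__cmpxchg64_unsupported()` survives constant folding and dead-code elimination, the build fails with the quoted message; when `cpu_has_64bits` folds to a compile-time constant 1, the call is removed and nothing is emitted. A minimal standalone sketch of the same pattern (the names below are illustrative, not kernel identifiers, and optimization must be enabled, as it always is for kernel builds):

```c
/* Illustrative stand-ins; not kernel identifiers. */
extern unsigned long unsupported_op(void)
	__attribute__((error("64-bit op not available in this configuration")));

#define HAVE_64BIT_OP 1	/* plays the role of cpu_has_64bits */

static inline unsigned long do_op(void)
{
	if (HAVE_64BIT_OP)
		return 42;
	/*
	 * Eliminated as dead code when HAVE_64BIT_OP is a constant 1;
	 * otherwise the call survives and the build fails with the
	 * message from the error attribute.
	 */
	return unsupported_op();
}

int main(void)
{
	return do_op() == 42 ? 0 : 1;
}
```

Build with `gcc -O2`; changing HAVE_64BIT_OP to 0 reproduces the intended failure mode. At -O0 the dead call may not be eliminated and the error can fire spuriously, which is exactly why the macro's comment insists that `cpu_has_64bits` be a compile-time constant.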
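On the usage side, the `cmpxchg64()` this adds keeps the usual compare-and-swap contract: it returns the value observed at `ptr`, and the store happened exactly when that value equals `o`. Below is a userspace analogue of the classic retry loop a caller would build on top of it, using the GCC `__atomic` builtins so it compiles and runs anywhere; `cmpxchg64_demo()` and the counter are invented for illustration and are not kernel code:

```c
#include <stdint.h>
#include <stdio.h>

/*
 * Userspace stand-in for cmpxchg64(): returns the value previously at
 * *ptr, which equals old exactly when the store succeeded.
 */
static uint64_t cmpxchg64_demo(uint64_t *ptr, uint64_t old, uint64_t new)
{
	__atomic_compare_exchange_n(ptr, &old, new, 0,
				    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return old;	/* updated to the observed value on failure */
}

int main(void)
{
	uint64_t counter = 0;

	/* Classic lockless read-modify-write retry loop. */
	for (int i = 0; i < 10; i++) {
		uint64_t seen, want;

		do {
			seen = __atomic_load_n(&counter, __ATOMIC_RELAXED);
			want = seen + 3;
		} while (cmpxchg64_demo(&counter, seen, want) != seen);
	}

	printf("counter = %llu\n", (unsigned long long)counter);	/* 30 */
	return 0;
}
```

This demo is single-threaded, so every exchange succeeds on the first try; under contention the loop re-reads and retries, which is the same shape the `lld`/`scd` sequence inside `__cmpxchg64()` gives a single attempt of.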