@@ -4,6 +4,8 @@
 #include <asm/unistd.h>
 
 #if defined(__i386__)
+#define mb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
+#define wmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
 #define rmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
 #define cpu_relax() asm volatile("rep; nop" ::: "memory");
 #define CPUINFO_PROC "model name"
@@ -13,6 +15,8 @@
 #endif
 
 #if defined(__x86_64__)
+#define mb() asm volatile("mfence" ::: "memory")
+#define wmb() asm volatile("sfence" ::: "memory")
 #define rmb() asm volatile("lfence" ::: "memory")
 #define cpu_relax() asm volatile("rep; nop" ::: "memory");
 #define CPUINFO_PROC "model name"
@@ -23,45 +27,61 @@
 
 #ifdef __powerpc__
 #include "../../arch/powerpc/include/uapi/asm/unistd.h"
+#define mb() asm volatile ("sync" ::: "memory")
+#define wmb() asm volatile ("sync" ::: "memory")
 #define rmb() asm volatile ("sync" ::: "memory")
-#define cpu_relax() asm volatile ("" ::: "memory");
 #define CPUINFO_PROC "cpu"
 #endif
 
 #ifdef __s390__
+#define mb() asm volatile("bcr 15,0" ::: "memory")
+#define wmb() asm volatile("bcr 15,0" ::: "memory")
 #define rmb() asm volatile("bcr 15,0" ::: "memory")
-#define cpu_relax() asm volatile("" ::: "memory");
 #endif
 
 #ifdef __sh__
 #if defined(__SH4A__) || defined(__SH5__)
+# define mb() asm volatile("synco" ::: "memory")
+# define wmb() asm volatile("synco" ::: "memory")
 # define rmb() asm volatile("synco" ::: "memory")
 #else
+# define mb() asm volatile("" ::: "memory")
+# define wmb() asm volatile("" ::: "memory")
 # define rmb() asm volatile("" ::: "memory")
 #endif
-#define cpu_relax() asm volatile("" ::: "memory")
 #define CPUINFO_PROC "cpu type"
 #endif
 
 #ifdef __hppa__
+#define mb() asm volatile("" ::: "memory")
+#define wmb() asm volatile("" ::: "memory")
 #define rmb() asm volatile("" ::: "memory")
-#define cpu_relax() asm volatile("" ::: "memory");
 #define CPUINFO_PROC "cpu"
 #endif
 
 #ifdef __sparc__
+#ifdef __LP64__
+#define mb() asm volatile("ba,pt %%xcc, 1f\n" \
+		"membar #StoreLoad\n" \
+		"1:\n":::"memory")
+#else
+#define mb() asm volatile("":::"memory")
+#endif
+#define wmb() asm volatile("":::"memory")
 #define rmb() asm volatile("":::"memory")
-#define cpu_relax() asm volatile("":::"memory")
 #define CPUINFO_PROC "cpu"
 #endif
 
 #ifdef __alpha__
+#define mb() asm volatile("mb" ::: "memory")
+#define wmb() asm volatile("wmb" ::: "memory")
 #define rmb() asm volatile("mb" ::: "memory")
-#define cpu_relax() asm volatile("" ::: "memory")
 #define CPUINFO_PROC "cpu model"
 #endif
 
 #ifdef __ia64__
+#define mb() asm volatile ("mf" ::: "memory")
+#define wmb() asm volatile ("mf" ::: "memory")
 #define rmb() asm volatile ("mf" ::: "memory")
 #define cpu_relax() asm volatile ("hint @pause" ::: "memory")
 #define CPUINFO_PROC "model name"
@@ -72,40 +92,55 @@
  * Use the __kuser_memory_barrier helper in the CPU helper page. See
  * arch/arm/kernel/entry-armv.S in the kernel source for details.
  */
+#define mb() ((void(*)(void))0xffff0fa0)()
+#define wmb() ((void(*)(void))0xffff0fa0)()
 #define rmb() ((void(*)(void))0xffff0fa0)()
-#define cpu_relax() asm volatile("":::"memory")
 #define CPUINFO_PROC "Processor"
 #endif
 
 #ifdef __aarch64__
-#define rmb() asm volatile("dmb ld" ::: "memory")
+#define mb() asm volatile("dmb ish" ::: "memory")
+#define wmb() asm volatile("dmb ishst" ::: "memory")
+#define rmb() asm volatile("dmb ishld" ::: "memory")
 #define cpu_relax() asm volatile("yield" ::: "memory")
 #endif
 
 #ifdef __mips__
-#define rmb() asm volatile( \
+#define mb() asm volatile( \
 		".set mips2\n\t" \
 		"sync\n\t" \
 		".set mips0" \
 		: /* no output */ \
 		: /* no input */ \
 		: "memory")
-#define cpu_relax() asm volatile("" ::: "memory")
+#define wmb() mb()
+#define rmb() mb()
 #define CPUINFO_PROC "cpu model"
 #endif
 
 #ifdef __arc__
+#define mb() asm volatile("" ::: "memory")
+#define wmb() asm volatile("" ::: "memory")
 #define rmb() asm volatile("" ::: "memory")
-#define cpu_relax() rmb()
 #define CPUINFO_PROC "Processor"
 #endif
 
 #ifdef __metag__
+#define mb() asm volatile("" ::: "memory")
+#define wmb() asm volatile("" ::: "memory")
 #define rmb() asm volatile("" ::: "memory")
-#define cpu_relax() asm volatile("" ::: "memory")
 #define CPUINFO_PROC "CPU"
 #endif
 
+#define barrier() asm volatile ("" ::: "memory")
+
+#ifndef cpu_relax
+#define cpu_relax() barrier()
+#endif
+
+#define ACCESS_ONCE(x) (*(volatile typeof(x) *)&(x))
+
+
 #include <time.h>
 #include <unistd.h>
 #include <sys/types.h>
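These barriers exist so that perf can read the kernel's mmap'ed event ring buffer safely from user space: the consumer must order its load of data_head before the loads of the records it covers, and must finish reading records before publishing a new data_tail. Below is a minimal consumer sketch, assuming the rmb()/mb()/ACCESS_ONCE() definitions from this header are in scope; the function and parameter names (consume_ring, data_size) are illustrative, and record wrap-around at the end of the data area is not handled:

#include <linux/perf_event.h>
#include <stdint.h>

/*
 * Drain every complete record currently in the ring buffer.
 * 'pc' is the control page at the start of the mmap'ed region,
 * 'data' points to the data pages that follow it, and 'data_size'
 * is the size of the data area (a power of two).
 */
static void consume_ring(struct perf_event_mmap_page *pc,
                         unsigned char *data, uint64_t data_size)
{
        uint64_t tail = pc->data_tail;
        uint64_t head = ACCESS_ONCE(pc->data_head);

        /* Order the load of data_head before the loads of the records. */
        rmb();

        while (tail != head) {
                struct perf_event_header *hdr = (struct perf_event_header *)
                        &data[tail & (data_size - 1)];

                /* ... process the record at hdr here ... */
                tail += hdr->size;
        }

        /* Finish reading before telling the kernel the space is free. */
        mb();
        pc->data_tail = tail;
}

The sketch assumes the buffer was mapped PROT_WRITE, since the kernel only honors data_tail, and thus only avoids overwriting unread records, when the mapping is writable.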