Commit 288e452

Peter Zijlstra authored and Ingo Molnar committed
x86/asm: 'Simplify' GEN_*_RMWcc() macros
Currently the GEN_*_RMWcc() macros include a return statement, which
pretty much mandates we directly wrap them in an (inline) function.

Macros with return statements are tricky and, as per the above, limit use,
so remove the return statement and make them statement-expressions. This
allows them to be used more widely.

Also, shuffle the arguments a bit. Place the @cc argument 3rd; this makes
it consistent between UNARY and BINARY, but more importantly, it makes the
@arg0 argument last.

Since the @arg0 argument is now last, we can do CPP trickery and make it
an optional argument, simplifying the users; 17 out of 18 occurrences do
not need this argument.

Finally, change to asm symbolic names, instead of the numeric ordering of
operands, which allows us to get rid of __BINARY_RMWcc_ARG and get cleaner
code overall.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: JBeulich@suse.com
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Denys Vlasenko <dvlasenk@redhat.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: hpa@linux.intel.com
Link: https://lkml.kernel.org/r/20181003130957.108960094@infradead.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
1 parent 756b1df · commit 288e452

7 files changed: +64 -53 lines

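The heart of the change is the move from return-statement macros to GNU C
statement-expressions. A minimal standalone sketch of the difference
(illustrative only, not kernel code; the *_DEC_AND_TEST names are invented):

#include <stdbool.h>
#include <stdio.h>

/* Old pattern: the embedded return means the macro cannot be an
 * expression; it can only serve as the whole body of a bool-returning
 * wrapper function. */
#define OLD_DEC_AND_TEST(var)		\
do {					\
	if (--(var) == 0)		\
		return true;		\
	return false;			\
} while (0)

static bool old_dec_and_test(int *v)
{
	OLD_DEC_AND_TEST(*v);	/* the mandatory wrapper */
}

/* New pattern: a statement-expression ({ ... }) evaluates to its last
 * statement, so the macro itself yields a bool and composes freely. */
#define NEW_DEC_AND_TEST(var)		\
({					\
	bool c = (--(var) == 0);	\
	c;				\
})

int main(void)
{
	int a = 1, b = 2;

	printf("%d\n", old_dec_and_test(&a));	/* prints 1 */
	if (!NEW_DEC_AND_TEST(b))		/* usable inside any expression */
		printf("b is now %d\n", b);	/* prints: b is now 1 */
	return 0;
}

Statement-expressions are a GNU C extension, but the kernel is built with
compilers that support them, which is why the call sites below can simply
prefix each macro invocation with return.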

arch/x86/include/asm/atomic.h

Lines changed: 4 additions & 4 deletions

@@ -82,7 +82,7 @@ static __always_inline void arch_atomic_sub(int i, atomic_t *v)
  */
 static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
+	return GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i);
 }
 #define arch_atomic_sub_and_test arch_atomic_sub_and_test

@@ -122,7 +122,7 @@ static __always_inline void arch_atomic_dec(atomic_t *v)
  */
 static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
 {
-	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);
+	return GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, e);
 }
 #define arch_atomic_dec_and_test arch_atomic_dec_and_test

@@ -136,7 +136,7 @@ static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
  */
 static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
 {
-	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);
+	return GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, e);
 }
 #define arch_atomic_inc_and_test arch_atomic_inc_and_test

@@ -151,7 +151,7 @@ static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
  */
 static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);
+	return GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, s, "er", i);
 }
 #define arch_atomic_add_negative arch_atomic_add_negative

arch/x86/include/asm/atomic64_64.h

Lines changed: 4 additions & 4 deletions

@@ -73,7 +73,7 @@ static inline void arch_atomic64_sub(long i, atomic64_t *v)
  */
 static inline bool arch_atomic64_sub_and_test(long i, atomic64_t *v)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
+	return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);
 }
 #define arch_atomic64_sub_and_test arch_atomic64_sub_and_test

@@ -115,7 +115,7 @@ static __always_inline void arch_atomic64_dec(atomic64_t *v)
  */
 static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
 {
-	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
+	return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);
 }
 #define arch_atomic64_dec_and_test arch_atomic64_dec_and_test

@@ -129,7 +129,7 @@ static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
  */
 static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
 {
-	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
+	return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);
 }
 #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test

@@ -144,7 +144,7 @@ static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
  */
 static inline bool arch_atomic64_add_negative(long i, atomic64_t *v)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
+	return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);
 }
 #define arch_atomic64_add_negative arch_atomic64_add_negative

arch/x86/include/asm/bitops.h

Lines changed: 3 additions & 6 deletions

@@ -217,8 +217,7 @@ static __always_inline void change_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts),
-			 *addr, "Ir", nr, "%0", c);
+	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts), *addr, c, "Ir", nr);
 }

 /**
@@ -264,8 +263,7 @@ static __always_inline bool __test_and_set_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr),
-			 *addr, "Ir", nr, "%0", c);
+	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr), *addr, c, "Ir", nr);
 }

 /**
@@ -318,8 +316,7 @@ static __always_inline bool __test_and_change_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc),
-			 *addr, "Ir", nr, "%0", c);
+	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc), *addr, c, "Ir", nr);
 }

 static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)

arch/x86/include/asm/local.h

Lines changed: 4 additions & 4 deletions

@@ -53,7 +53,7 @@ static inline void local_sub(long i, local_t *l)
  */
 static inline bool local_sub_and_test(long i, local_t *l)
 {
-	GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, "er", i, "%0", e);
+	return GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, e, "er", i);
 }

 /**
@@ -66,7 +66,7 @@ static inline bool local_sub_and_test(long i, local_t *l)
  */
 static inline bool local_dec_and_test(local_t *l)
 {
-	GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, "%0", e);
+	return GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, e);
 }

 /**
@@ -79,7 +79,7 @@ static inline bool local_dec_and_test(local_t *l)
  */
 static inline bool local_inc_and_test(local_t *l)
 {
-	GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, "%0", e);
+	return GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, e);
 }

 /**
@@ -93,7 +93,7 @@ static inline bool local_inc_and_test(local_t *l)
  */
 static inline bool local_add_negative(long i, local_t *l)
 {
-	GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, "er", i, "%0", s);
+	return GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, s, "er", i);
 }

 /**

arch/x86/include/asm/preempt.h

Lines changed: 1 addition & 1 deletion

@@ -88,7 +88,7 @@ static __always_inline void __preempt_count_sub(int val)
  */
 static __always_inline bool __preempt_count_dec_and_test(void)
 {
-	GEN_UNARY_RMWcc("decl", __preempt_count, __percpu_arg(0), e);
+	return GEN_UNARY_RMWcc("decl", __preempt_count, e, __percpu_arg([var]));
 }

 /*
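The hunk above also shows the switch from numeric to symbolic asm operand
names: __percpu_arg(0) becomes __percpu_arg([var]). A standalone sketch of
the two operand-reference styles (x86-only, GCC/Clang inline asm;
illustrative, not kernel code):

#include <stdio.h>

int main(void)
{
	int x = 41;

	/* Numeric: %0 names the first operand by position, so the asm
	 * template silently changes meaning if operands are reordered. */
	asm volatile ("incl %0" : "+m" (x));

	/* Symbolic: [var] labels the operand, decoupling the template
	 * from operand order -- this is what lets the new macros drop
	 * __BINARY_RMWcc_ARG entirely. */
	asm volatile ("incl %[var]" : [var] "+m" (x));

	printf("%d\n", x);	/* prints 43 */
	return 0;
}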

arch/x86/include/asm/refcount.h

Lines changed: 7 additions & 6 deletions

@@ -79,16 +79,17 @@ static __always_inline void refcount_dec(refcount_t *r)
 static __always_inline __must_check
 bool refcount_sub_and_test(unsigned int i, refcount_t *r)
 {
-	GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl",
-				  "REFCOUNT_CHECK_LT_ZERO counter=\"%0\"",
-				  r->refs.counter, "er", i, "%0", e, "cx");
+
+	return GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl",
+					 "REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+					 r->refs.counter, e, "er", i, "cx");
 }

 static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
 {
-	GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl",
-				 "REFCOUNT_CHECK_LT_ZERO counter=\"%0\"",
-				 r->refs.counter, "%0", e, "cx");
+	return GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl",
+					"REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+					r->refs.counter, e, "cx");
 }

 static __always_inline __must_check

arch/x86/include/asm/rmwcc.h

Lines changed: 41 additions & 28 deletions

@@ -2,56 +2,69 @@
 #ifndef _ASM_X86_RMWcc
 #define _ASM_X86_RMWcc

+/* This counts to 12. Any more, it will return 13th argument. */
+#define __RMWcc_ARGS(_0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _n, X...) _n
+#define RMWcc_ARGS(X...) __RMWcc_ARGS(, ##X, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0)
+
+#define __RMWcc_CONCAT(a, b) a ## b
+#define RMWcc_CONCAT(a, b) __RMWcc_CONCAT(a, b)
+
 #define __CLOBBERS_MEM(clb...)	"memory", ## clb

 #if !defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(CC_HAVE_ASM_GOTO)

 /* Use asm goto */

-#define __GEN_RMWcc(fullop, var, cc, clobbers, ...)			\
-do {									\
+#define __GEN_RMWcc(fullop, _var, cc, clobbers, ...)			\
+({									\
+	bool c = false;							\
 	asm_volatile_goto (fullop "; j" #cc " %l[cc_label]"		\
-			: : [counter] "m" (var), ## __VA_ARGS__		\
+			: : [var] "m" (_var), ## __VA_ARGS__		\
 			: clobbers : cc_label);				\
-	return 0;							\
-cc_label:								\
-	return 1;							\
-} while (0)
-
-#define __BINARY_RMWcc_ARG	" %1, "
-
+	if (0) {							\
+cc_label:	c = true;						\
+	}								\
+	c;								\
+})

 #else /* defined(__GCC_ASM_FLAG_OUTPUTS__) || !defined(CC_HAVE_ASM_GOTO) */

 /* Use flags output or a set instruction */

-#define __GEN_RMWcc(fullop, var, cc, clobbers, ...)			\
-do {									\
+#define __GEN_RMWcc(fullop, _var, cc, clobbers, ...)			\
+({									\
 	bool c;								\
 	asm volatile (fullop CC_SET(cc)					\
-			: [counter] "+m" (var), CC_OUT(cc) (c)		\
+			: [var] "+m" (_var), CC_OUT(cc) (c)		\
 			: __VA_ARGS__ : clobbers);			\
-	return c;							\
-} while (0)
-
-#define __BINARY_RMWcc_ARG	" %2, "
+	c;								\
+})

 #endif /* defined(__GCC_ASM_FLAG_OUTPUTS__) || !defined(CC_HAVE_ASM_GOTO) */

-#define GEN_UNARY_RMWcc(op, var, arg0, cc)				\
+#define GEN_UNARY_RMWcc_4(op, var, cc, arg0)				\
 	__GEN_RMWcc(op " " arg0, var, cc, __CLOBBERS_MEM())

-#define GEN_UNARY_SUFFIXED_RMWcc(op, suffix, var, arg0, cc, clobbers...)\
-	__GEN_RMWcc(op " " arg0 "\n\t" suffix, var, cc,			\
-		    __CLOBBERS_MEM(clobbers))
+#define GEN_UNARY_RMWcc_3(op, var, cc)					\
+	GEN_UNARY_RMWcc_4(op, var, cc, "%[var]")

-#define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc)			\
-	__GEN_RMWcc(op __BINARY_RMWcc_ARG arg0, var, cc,		\
-		    __CLOBBERS_MEM(), vcon (val))
+#define GEN_UNARY_RMWcc(X...) RMWcc_CONCAT(GEN_UNARY_RMWcc_, RMWcc_ARGS(X))(X)
+
+#define GEN_BINARY_RMWcc_6(op, var, cc, vcon, _val, arg0)		\
+	__GEN_RMWcc(op " %[val], " arg0, var, cc,			\
+		    __CLOBBERS_MEM(), [val] vcon (_val))
+
+#define GEN_BINARY_RMWcc_5(op, var, cc, vcon, val)			\
+	GEN_BINARY_RMWcc_6(op, var, cc, vcon, val, "%[var]")
+
+#define GEN_BINARY_RMWcc(X...) RMWcc_CONCAT(GEN_BINARY_RMWcc_, RMWcc_ARGS(X))(X)
+
+#define GEN_UNARY_SUFFIXED_RMWcc(op, suffix, var, cc, clobbers...)	\
+	__GEN_RMWcc(op " %[var]\n\t" suffix, var, cc,			\
+		    __CLOBBERS_MEM(clobbers))

-#define GEN_BINARY_SUFFIXED_RMWcc(op, suffix, var, vcon, val, arg0,	\
-				  clobbers...)				\
-	__GEN_RMWcc(op __BINARY_RMWcc_ARG arg0 "\n\t" suffix, var, cc,	\
-		    __CLOBBERS_MEM(clobbers), vcon (val))
+#define GEN_BINARY_SUFFIXED_RMWcc(op, suffix, var, cc, vcon, _val, clobbers...)\
+	__GEN_RMWcc(op " %[val], %[var]\n\t" suffix, var, cc,		\
+		    __CLOBBERS_MEM(clobbers), [val] vcon (_val))

 #endif /* _ASM_X86_RMWcc */
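The RMWcc_ARGS()/RMWcc_CONCAT() pair added above is the classic variadic
argument-counting idiom; it is what makes @arg0 optional by dispatching
GEN_*_RMWcc() to a _3/_4 (or _5/_6) worker based on how many arguments the
caller passed. A reduced standalone sketch (counts only to 2 instead of 12;
FOO and friends are invented names; the ", ##" comma deletion is a GNU
extension, as in the kernel):

#include <stdio.h>

/* Select the argument that lands in the _n slot: each user argument
 * shifts the trailing 2, 1, 0 list one position to the right. */
#define __ARGS(_0, _1, _2, _n, X...)	_n
#define ARGS(X...)			__ARGS(, ##X, 2, 1, 0)

#define CONCAT_(a, b)	a ## b
#define CONCAT(a, b)	CONCAT_(a, b)

/* Two workers; FOO() picks one by pasting the argument count onto the
 * name, exactly like GEN_UNARY_RMWcc() choosing _3 or _4. */
#define FOO_1(a)	printf("one arg: %d\n", a)
#define FOO_2(a, b)	printf("two args: %d, %d\n", a, b)
#define FOO(X...)	CONCAT(FOO_, ARGS(X))(X)

int main(void)
{
	FOO(10);	/* expands to FOO_1(10) */
	FOO(10, 20);	/* expands to FOO_2(10, 20) */
	return 0;
}

With this in place, GEN_UNARY_RMWcc(op, var, cc) resolves to the _3 variant
that supplies the default "%[var]" as @arg0, while the one caller that needs
a custom @arg0 (preempt.h above) simply passes a fourth argument.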
