@@ -4,41 +4,6 @@
  * x86-specific implementation of refcount_t. Based on PAX_REFCOUNT from
  * PaX/grsecurity.
  */
-
-#ifdef __ASSEMBLY__
-
-#include <asm/asm.h>
-#include <asm/bug.h>
-
-.macro REFCOUNT_EXCEPTION counter:req
-	.pushsection .text..refcount
-111:	lea \counter, %_ASM_CX
-112:	ud2
-	ASM_UNREACHABLE
-	.popsection
-113:	_ASM_EXTABLE_REFCOUNT(112b, 113b)
-.endm
-
-/* Trigger refcount exception if refcount result is negative. */
-.macro REFCOUNT_CHECK_LT_ZERO counter:req
-	js 111f
-	REFCOUNT_EXCEPTION counter="\counter"
-.endm
-
-/* Trigger refcount exception if refcount result is zero or negative. */
-.macro REFCOUNT_CHECK_LE_ZERO counter:req
-	jz 111f
-	REFCOUNT_CHECK_LT_ZERO counter="\counter"
-.endm
-
-/* Trigger refcount exception unconditionally. */
-.macro REFCOUNT_ERROR counter:req
-	jmp 111f
-	REFCOUNT_EXCEPTION counter="\counter"
-.endm
-
-#else /* __ASSEMBLY__ */
-
 #include <linux/refcount.h>
 #include <asm/bug.h>
 
@@ -50,45 +15,67 @@
  * central refcount exception. The fixup address for the exception points
  * back to the regular execution flow in .text.
  */
+#define _REFCOUNT_EXCEPTION				\
+	".pushsection .text..refcount\n"		\
+	"111:\tlea %[var], %%" _ASM_CX "\n"		\
+	"112:\t" ASM_UD2 "\n"				\
+	ASM_UNREACHABLE					\
+	".popsection\n"					\
+	"113:\n"					\
+	_ASM_EXTABLE_REFCOUNT(112b, 113b)
+
+/* Trigger refcount exception if refcount result is negative. */
+#define REFCOUNT_CHECK_LT_ZERO				\
+	"js 111f\n\t"					\
+	_REFCOUNT_EXCEPTION
+
+/* Trigger refcount exception if refcount result is zero or negative. */
+#define REFCOUNT_CHECK_LE_ZERO				\
+	"jz 111f\n\t"					\
+	REFCOUNT_CHECK_LT_ZERO
+
+/* Trigger refcount exception unconditionally. */
+#define REFCOUNT_ERROR					\
+	"jmp 111f\n\t"					\
+	_REFCOUNT_EXCEPTION
 
 static __always_inline void refcount_add(unsigned int i, refcount_t *r)
 {
 	asm volatile(LOCK_PREFIX "addl %1,%0\n\t"
-		"REFCOUNT_CHECK_LT_ZERO counter=\"%[counter]\""
-		: [counter] "+m" (r->refs.counter)
+		REFCOUNT_CHECK_LT_ZERO
+		: [var] "+m" (r->refs.counter)
 		: "ir" (i)
 		: "cc", "cx");
 }
 
 static __always_inline void refcount_inc(refcount_t *r)
 {
 	asm volatile(LOCK_PREFIX "incl %0\n\t"
-		"REFCOUNT_CHECK_LT_ZERO counter=\"%[counter]\""
-		: [counter] "+m" (r->refs.counter)
+		REFCOUNT_CHECK_LT_ZERO
+		: [var] "+m" (r->refs.counter)
 		: : "cc", "cx");
 }
 
 static __always_inline void refcount_dec(refcount_t *r)
 {
 	asm volatile(LOCK_PREFIX "decl %0\n\t"
-		"REFCOUNT_CHECK_LE_ZERO counter=\"%[counter]\""
-		: [counter] "+m" (r->refs.counter)
+		REFCOUNT_CHECK_LE_ZERO
+		: [var] "+m" (r->refs.counter)
 		: : "cc", "cx");
 }
 
 static __always_inline __must_check
 bool refcount_sub_and_test(unsigned int i, refcount_t *r)
 {
-
 	return GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl",
-		"REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+		REFCOUNT_CHECK_LT_ZERO,
 		r->refs.counter, e, "er", i, "cx");
 }
 
 static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
 {
 	return GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl",
-		"REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+		REFCOUNT_CHECK_LT_ZERO,
 		r->refs.counter, e, "cx");
 }
 
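The conversion above is easier to see in isolation. Below is a minimal userspace sketch (x86-64, GCC/Clang) of the same out-of-line trampoline trick: the hot path falls straight through, and the js only ever jumps forward into a cold text section. Userspace has no exception table, so a made-up error flag stands in for the kernel's ud2 plus _ASM_EXTABLE_REFCOUNT fixup; demo_inc and SATURATE_CHECK_LT_ZERO are invented names, not kernel API.

#include <stdio.h>

/* Same shape as REFCOUNT_CHECK_LT_ZERO: jump forward to an out-of-line
 * stub pushed into a cold section, then resume at a label back in the
 * regular flow. Setting %[err] replaces the kernel's ud2 + extable fixup. */
#define SATURATE_CHECK_LT_ZERO			\
	"js 111f\n\t"				\
	".pushsection .text.unlikely\n"		\
	"111:\tmovl $1, %[err]\n\t"		\
	"jmp 112f\n"				\
	".popsection\n"				\
	"112:\n"

static inline void demo_inc(int *counter, int *err)
{
	asm volatile("lock incl %[var]\n\t"
		     SATURATE_CHECK_LT_ZERO
		     : [var] "+m" (*counter), [err] "+m" (*err)
		     : : "cc");
}

int main(void)
{
	int ok = 0, okflag = 0;
	int bad = -2, badflag = 0;

	demo_inc(&ok, &okflag);		/* 0 -> 1: sign flag clear, stub skipped */
	demo_inc(&bad, &badflag);	/* -2 -> -1: sign flag set, stub taken */
	printf("ok=%d flag=%d  bad=%d flag=%d\n", ok, okflag, bad, badflag);
	return 0;
}

Built with plain gcc, the second call reports flag=1 because incrementing -2 leaves the sign flag set; the cost on the good path is a single never-taken branch, which is the point of keeping the error path out of line.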
@@ -106,8 +93,8 @@ bool refcount_add_not_zero(unsigned int i, refcount_t *r)
 
 	/* Did we try to increment from/to an undesirable state? */
 	if (unlikely(c < 0 || c == INT_MAX || result < c)) {
-		asm volatile("REFCOUNT_ERROR counter=\"%[counter]\""
-			: : [counter] "m" (r->refs.counter)
+		asm volatile(REFCOUNT_ERROR
+			: : [var] "m" (r->refs.counter)
 			: "cc", "cx");
 		break;
 	}
@@ -122,6 +109,4 @@ static __always_inline __must_check bool refcount_inc_not_zero(refcount_t *r)
 	return refcount_add_not_zero(1, r);
 }
 
-#endif /* __ASSEMBLY__ */
-
 #endif
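For reference, a typical caller of this header looks like the hypothetical sketch below (struct foo, foo_get and foo_put are invented names; refcount_inc, refcount_dec_and_test and kfree are the real kernel API). The value of the checks in this diff is that an extra foo_put, or an increment past INT_MAX, lands in the .text..refcount trampoline instead of silently wrapping the counter.

#include <linux/refcount.h>
#include <linux/slab.h>

struct foo {
	refcount_t refs;
	/* ... payload ... */
};

/* Take an extra reference. An increment whose result is negative
 * (e.g. past INT_MAX) triggers REFCOUNT_CHECK_LT_ZERO above. */
static struct foo *foo_get(struct foo *f)
{
	refcount_inc(&f->refs);
	return f;
}

/* Drop a reference and free on the 1 -> 0 transition. A further put
 * would drive the count negative and trigger the refcount exception. */
static void foo_put(struct foo *f)
{
	if (refcount_dec_and_test(&f->refs))
		kfree(f);
}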