Commit 6ffbe7d

Merge branch 'core-locking-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull core locking changes from Ingo Molnar:

 - futex performance increases: larger hashes, smarter wakeups

 - mutex debugging improvements

 - lots of SMP ordering documentation updates

 - introduce the smp_load_acquire(), smp_store_release() primitives.
   (There are WIP patches that make use of them - not yet merged)

 - lockdep micro-optimizations

 - lockdep improvement: better cover IRQ contexts

 - liblockdep at last. We'll continue to monitor how useful this is

* 'core-locking-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (34 commits)
  futexes: Fix futex_hashsize initialization
  arch: Re-sort some Kbuild files to hopefully help avoid some conflicts
  futexes: Avoid taking the hb->lock if there's nothing to wake up
  futexes: Document multiprocessor ordering guarantees
  futexes: Increase hash table size for better performance
  futexes: Clean up various details
  arch: Introduce smp_load_acquire(), smp_store_release()
  arch: Clean up asm/barrier.h implementations using asm-generic/barrier.h
  arch: Move smp_mb__{before,after}_atomic_{inc,dec}.h into asm/atomic.h
  locking/doc: Rename LOCK/UNLOCK to ACQUIRE/RELEASE
  mutexes: Give more informative mutex warning in the !lock->owner case
  powerpc: Full barrier for smp_mb__after_unlock_lock()
  rcu: Apply smp_mb__after_unlock_lock() to preserve grace periods
  Documentation/memory-barriers.txt: Downgrade UNLOCK+BLOCK
  locking: Add an smp_mb__after_unlock_lock() for UNLOCK+BLOCK barrier
  Documentation/memory-barriers.txt: Document ACCESS_ONCE()
  Documentation/memory-barriers.txt: Prohibit speculative writes
  Documentation/memory-barriers.txt: Add long atomic examples to memory-barriers.txt
  Documentation/memory-barriers.txt: Add needed ACCESS_ONCE() calls to memory-barriers.txt
  Revert "smp/cpumask: Make CONFIG_CPUMASK_OFFSTACK=y usable without debug dependency"
  ...
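For orientation, the new primitives implement one-way ordering for the classic message-passing pattern. A minimal sketch in kernel-style C (the payload/ready variables and the producer/consumer split are illustrative, not taken from the merged patches):

	static int payload;
	static int ready;

	/* CPU 0: publish the data, then the flag.  The release store
	 * guarantees the payload store is visible before 'ready' is. */
	static void producer(void)
	{
		payload = 42;
		smp_store_release(&ready, 1);
	}

	/* CPU 1: the acquire load orders the 'ready' read before all
	 * later reads, so a consumer that observes ready == 1 also
	 * observes payload == 42. */
	static void consumer(void)
	{
		if (smp_load_acquire(&ready))
			BUG_ON(payload != 42);
	}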
2 parents 897aea3 + 63b1a81 commit 6ffbe7d

105 files changed, 2825 insertions(+), 683 deletions(-)


Documentation/memory-barriers.txt

Lines changed: 716 additions & 206 deletions
Large diffs are not rendered by default.

Documentation/robust-futex-ABI.txt

Lines changed: 2 additions & 2 deletions
@@ -146,8 +146,8 @@ On removal:
  1) set the 'list_op_pending' word to the address of the 'lock entry'
     to be removed,
  2) remove the lock entry for this lock from the 'head' list,
- 2) release the futex lock, and
- 2) clear the 'lock_op_pending' word.
+ 3) release the futex lock, and
+ 4) clear the 'lock_op_pending' word.
 
 On exit, the kernel will consider the address stored in
 'list_op_pending' and the address of each 'lock word' found by walking
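The four-step removal sequence being renumbered above reads roughly like this on the user-space side (a sketch only: list_del_entry() and futex_unlock() are hypothetical helpers standing in for the application's own list and lock code, not part of the documented ABI):

	#include <linux/futex.h>	/* struct robust_list, struct robust_list_head */

	void robust_lock_remove(struct robust_list_head *head,
				struct robust_list *entry,
				unsigned int *lock_word)
	{
		head->list_op_pending = entry;	/* 1) mark the removal as pending */
		list_del_entry(head, entry);	/* 2) unlink from the 'head' list */
		futex_unlock(lock_word);	/* 3) release the futex lock */
		head->list_op_pending = NULL;	/* 4) clear the pending word */
	}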

MAINTAINERS

Lines changed: 5 additions & 0 deletions
@@ -5141,6 +5141,11 @@ F:	drivers/lguest/
 F:	include/linux/lguest*.h
 F:	tools/lguest/
 
+LIBLOCKDEP
+M:	Sasha Levin <sasha.levin@oracle.com>
+S:	Maintained
+F:	tools/lib/lockdep/
+
 LINUX FOR IBM pSERIES (RS/6000)
 M:	Paul Mackerras <paulus@au.ibm.com>
 W:	http://www.ibm.com/linux/ltc/projects/ppc

arch/alpha/include/asm/barrier.h

Lines changed: 5 additions & 20 deletions
@@ -3,33 +3,18 @@
 
 #include <asm/compiler.h>
 
-#define mb() \
-__asm__ __volatile__("mb": : :"memory")
+#define mb()	__asm__ __volatile__("mb": : :"memory")
+#define rmb()	__asm__ __volatile__("mb": : :"memory")
+#define wmb()	__asm__ __volatile__("wmb": : :"memory")
 
-#define rmb() \
-__asm__ __volatile__("mb": : :"memory")
-
-#define wmb() \
-__asm__ __volatile__("wmb": : :"memory")
-
-#define read_barrier_depends() \
-__asm__ __volatile__("mb": : :"memory")
+#define read_barrier_depends()	__asm__ __volatile__("mb": : :"memory")
 
 #ifdef CONFIG_SMP
 #define __ASM_SMP_MB	"\tmb\n"
-#define smp_mb()	mb()
-#define smp_rmb()	rmb()
-#define smp_wmb()	wmb()
-#define smp_read_barrier_depends()	read_barrier_depends()
 #else
 #define __ASM_SMP_MB
-#define smp_mb()	barrier()
-#define smp_rmb()	barrier()
-#define smp_wmb()	barrier()
-#define smp_read_barrier_depends()	do { } while (0)
 #endif
 
-#define set_mb(var, value) \
-do { var = value; mb(); } while (0)
+#include <asm-generic/barrier.h>
 
 #endif /* __BARRIER_H */

arch/arc/include/asm/Kbuild

Lines changed: 1 addition & 0 deletions
@@ -1,4 +1,5 @@
 generic-y += auxvec.h
+generic-y += barrier.h
 generic-y += bugs.h
 generic-y += bitsperlong.h
 generic-y += clkdev.h

arch/arc/include/asm/atomic.h

Lines changed: 5 additions & 0 deletions
@@ -190,6 +190,11 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 
 #endif /* !CONFIG_ARC_HAS_LLSC */
 
+#define smp_mb__before_atomic_dec()	barrier()
+#define smp_mb__after_atomic_dec()	barrier()
+#define smp_mb__before_atomic_inc()	barrier()
+#define smp_mb__after_atomic_inc()	barrier()
+
 /**
  * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t

arch/arc/include/asm/barrier.h

Lines changed: 0 additions & 5 deletions
@@ -30,11 +30,6 @@
 #define smp_wmb()	barrier()
 #endif
 
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
-
 #define smp_read_barrier_depends()	do { } while (0)
 
 #endif

arch/arm/include/asm/barrier.h

Lines changed: 15 additions & 0 deletions
@@ -59,6 +59,21 @@
 #define smp_wmb()	dmb(ishst)
 #endif
 
+#define smp_store_release(p, v)						\
+do {									\
+	compiletime_assert_atomic_type(*p);				\
+	smp_mb();							\
+	ACCESS_ONCE(*p) = (v);						\
+} while (0)
+
+#define smp_load_acquire(p)						\
+({									\
+	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
+	compiletime_assert_atomic_type(*p);				\
+	smp_mb();							\
+	___p1;								\
+})
+
 #define read_barrier_depends()		do { } while(0)
 #define smp_read_barrier_depends()	do { } while(0)

arch/arm64/include/asm/barrier.h

Lines changed: 50 additions & 0 deletions
@@ -35,10 +35,60 @@
 #define smp_mb()	barrier()
 #define smp_rmb()	barrier()
 #define smp_wmb()	barrier()
+
+#define smp_store_release(p, v)						\
+do {									\
+	compiletime_assert_atomic_type(*p);				\
+	smp_mb();							\
+	ACCESS_ONCE(*p) = (v);						\
+} while (0)
+
+#define smp_load_acquire(p)						\
+({									\
+	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
+	compiletime_assert_atomic_type(*p);				\
+	smp_mb();							\
+	___p1;								\
+})
+
 #else
+
 #define smp_mb()	asm volatile("dmb ish" : : : "memory")
 #define smp_rmb()	asm volatile("dmb ishld" : : : "memory")
 #define smp_wmb()	asm volatile("dmb ishst" : : : "memory")
+
+#define smp_store_release(p, v)						\
+do {									\
+	compiletime_assert_atomic_type(*p);				\
+	switch (sizeof(*p)) {						\
+	case 4:								\
+		asm volatile ("stlr %w1, %0"				\
+				: "=Q" (*p) : "r" (v) : "memory");	\
+		break;							\
+	case 8:								\
+		asm volatile ("stlr %1, %0"				\
+				: "=Q" (*p) : "r" (v) : "memory");	\
+		break;							\
+	}								\
+} while (0)
+
+#define smp_load_acquire(p)						\
+({									\
+	typeof(*p) ___p1;						\
+	compiletime_assert_atomic_type(*p);				\
+	switch (sizeof(*p)) {						\
+	case 4:								\
+		asm volatile ("ldar %w0, %1"				\
+			: "=r" (___p1) : "Q" (*p) : "memory");		\
+		break;							\
+	case 8:								\
+		asm volatile ("ldar %0, %1"				\
+			: "=r" (___p1) : "Q" (*p) : "memory");		\
+		break;							\
+	}								\
+	___p1;								\
+})
+
 #endif
 
 #define read_barrier_depends()	do { } while(0)
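A note on the design in this hunk: in the SMP case the release/acquire ordering comes directly from the stlr/ldar instructions, so no separate dmb is required, while the !SMP variants fall back to smp_mb() (just a compiler barrier there) around an ACCESS_ONCE() access. The switch on sizeof(*p) handles only 32-bit and 64-bit accesses, which is what compiletime_assert_atomic_type() enforces at build time.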

arch/avr32/include/asm/barrier.h

Lines changed: 5 additions & 12 deletions
@@ -8,22 +8,15 @@
 #ifndef __ASM_AVR32_BARRIER_H
 #define __ASM_AVR32_BARRIER_H
 
-#define nop()			asm volatile("nop")
-
-#define mb()			asm volatile("" : : : "memory")
-#define rmb()			mb()
-#define wmb()			asm volatile("sync 0" : : : "memory")
-#define read_barrier_depends()	do { } while(0)
-#define set_mb(var, value)	do { var = value; mb(); } while(0)
+/*
+ * Weirdest thing ever.. no full barrier, but it has a write barrier!
+ */
+#define wmb()	asm volatile("sync 0" : : : "memory")
 
 #ifdef CONFIG_SMP
 # error "The AVR32 port does not support SMP"
-#else
-# define smp_mb()		barrier()
-# define smp_rmb()		barrier()
-# define smp_wmb()		barrier()
-# define smp_read_barrier_depends() do { } while(0)
 #endif
 
+#include <asm-generic/barrier.h>
 
 #endif /* __ASM_AVR32_BARRIER_H */

arch/blackfin/include/asm/barrier.h

Lines changed: 1 addition & 17 deletions
@@ -23,26 +23,10 @@
 # define rmb()	do { barrier(); smp_check_barrier(); } while (0)
 # define wmb()	do { barrier(); smp_mark_barrier(); } while (0)
 # define read_barrier_depends()	do { barrier(); smp_check_barrier(); } while (0)
-#else
-# define mb()	barrier()
-# define rmb()	barrier()
-# define wmb()	barrier()
-# define read_barrier_depends()	do { } while (0)
 #endif
 
-#else /* !CONFIG_SMP */
-
-#define mb()	barrier()
-#define rmb()	barrier()
-#define wmb()	barrier()
-#define read_barrier_depends()	do { } while (0)
-
 #endif /* !CONFIG_SMP */
 
-#define smp_mb()	mb()
-#define smp_rmb()	rmb()
-#define smp_wmb()	wmb()
-#define set_mb(var, value) do { var = value; mb(); } while (0)
-#define smp_read_barrier_depends()	read_barrier_depends()
+#include <asm-generic/barrier.h>
 
 #endif /* _BLACKFIN_BARRIER_H */

arch/cris/include/asm/Kbuild

Lines changed: 1 addition & 0 deletions
@@ -3,6 +3,7 @@ header-y += arch-v10/
 header-y += arch-v32/
 
 
+generic-y += barrier.h
 generic-y += clkdev.h
 generic-y += exec.h
 generic-y += kvm_para.h

arch/cris/include/asm/barrier.h

Lines changed: 0 additions & 25 deletions
This file was deleted.

arch/frv/include/asm/barrier.h

Lines changed: 1 addition & 7 deletions
@@ -17,13 +17,7 @@
 #define mb()			asm volatile ("membar" : : :"memory")
 #define rmb()			asm volatile ("membar" : : :"memory")
 #define wmb()			asm volatile ("membar" : : :"memory")
-#define read_barrier_depends()	do { } while (0)
 
-#define smp_mb()			barrier()
-#define smp_rmb()			barrier()
-#define smp_wmb()			barrier()
-#define smp_read_barrier_depends()	do {} while(0)
-#define set_mb(var, value) \
-	do { var = (value); barrier(); } while (0)
+#include <asm-generic/barrier.h>
 
 #endif /* _ASM_BARRIER_H */

arch/hexagon/include/asm/Kbuild

Lines changed: 1 addition & 0 deletions
@@ -2,6 +2,7 @@
 header-y += ucontext.h
 
 generic-y += auxvec.h
+generic-y += barrier.h
 generic-y += bug.h
 generic-y += bugs.h
 generic-y += clkdev.h

arch/hexagon/include/asm/atomic.h

Lines changed: 5 additions & 1 deletion
@@ -160,8 +160,12 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 #define atomic_sub_and_test(i, v) (atomic_sub_return(i, (v)) == 0)
 #define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
 
-
 #define atomic_inc_return(v) (atomic_add_return(1, v))
 #define atomic_dec_return(v) (atomic_sub_return(1, v))
 
+#define smp_mb__before_atomic_dec()	barrier()
+#define smp_mb__after_atomic_dec()	barrier()
+#define smp_mb__before_atomic_inc()	barrier()
+#define smp_mb__after_atomic_inc()	barrier()
+
 #endif

arch/hexagon/include/asm/barrier.h

Lines changed: 0 additions & 4 deletions
@@ -29,10 +29,6 @@
 #define smp_read_barrier_depends()	barrier()
 #define smp_wmb()			barrier()
 #define smp_mb()			barrier()
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
 
 /* Set a value and use a memory barrier.  Used by the scheduler somewhere. */
 #define set_mb(var, value) \

arch/ia64/include/asm/barrier.h

Lines changed: 23 additions & 0 deletions
@@ -45,13 +45,36 @@
 # define smp_rmb()	rmb()
 # define smp_wmb()	wmb()
 # define smp_read_barrier_depends()	read_barrier_depends()
+
 #else
+
 # define smp_mb()	barrier()
 # define smp_rmb()	barrier()
 # define smp_wmb()	barrier()
 # define smp_read_barrier_depends()	do { } while(0)
+
 #endif
 
+/*
+ * IA64 GCC turns volatile stores into st.rel and volatile loads into ld.acq no
+ * need for asm trickery!
+ */
+
+#define smp_store_release(p, v)						\
+do {									\
+	compiletime_assert_atomic_type(*p);				\
+	barrier();							\
+	ACCESS_ONCE(*p) = (v);						\
+} while (0)
+
+#define smp_load_acquire(p)						\
+({									\
+	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
+	compiletime_assert_atomic_type(*p);				\
+	barrier();							\
+	___p1;								\
+})
+
 /*
  * XXX check on this ---I suspect what Linus really wants here is
  * acquire vs release semantics but we can't discuss this stuff with
