
Commit 88776c0

parisc: Fix alignment of pa_tlb_lock in assembly on 32-bit SMP kernel
Qemu for PARISC reported strange failures on a 32-bit SMP parisc kernel: "Not-handled unaligned insn 0x0e8011d6 and 0x0c2011c9". Those opcodes evaluate to the ldcw() assembly instruction, which (on 32-bit) requires an alignment of 16 bytes to ensure atomicity.

As it turns out, qemu is correct: in our assembly code in entry.S and pacache.S we don't pay attention to the required alignment. This patch fixes the problem by aligning the lock offset in assembly code in the same manner as we do in our C code.

Signed-off-by: Helge Deller <deller@gmx.de>
Cc: <stable@vger.kernel.org> # v4.0+
1 parent: 28df2f8
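For reference, the C code gets this right by padding the lock to 16 bytes and rounding the pointer up to the next 16-byte boundary. The userspace sketch below mirrors the shape of that __ldcw_align() round-up; the demo_ names and struct layout are illustrative stand-ins, not the kernel's exact definitions:

    #include <stdio.h>

    /* Illustrative constant mirroring __PA_LDCW_ALIGNMENT, which is
     * 16 on a 32-bit PA-RISC kernel. */
    #define LDCW_ALIGNMENT	16

    /* The lock word is padded to 16 bytes so that at least one
     * properly aligned word is guaranteed to lie inside it. */
    struct demo_spinlock {
    	volatile unsigned int lock[LDCW_ALIGNMENT / sizeof(unsigned int)];
    };

    /* Round the base address up to the next 16-byte boundary: the
     * classic add-then-mask idiom. */
    static volatile unsigned int *demo_ldcw_align(struct demo_spinlock *a)
    {
    	unsigned long addr = (unsigned long)&a->lock[0];

    	addr = (addr + LDCW_ALIGNMENT - 1) &
    	       ~(unsigned long)(LDCW_ALIGNMENT - 1);
    	return (volatile unsigned int *)addr;
    }

    int main(void)
    {
    	struct demo_spinlock s;

    	printf("base %p -> aligned ldcw word %p\n",
    	       (void *)&s.lock[0], (void *)demo_ldcw_align(&s));
    	return 0;
    }

The same add-then-mask computation is what the new load_pa_tlb_lock assembly macro in this patch performs with load32 and depi.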

File tree: 3 files changed (+20 -4 lines)

arch/parisc/include/asm/ldcw.h

Lines changed: 2 additions & 0 deletions

@@ -12,6 +12,7 @@
    for the semaphore.  */

 #define __PA_LDCW_ALIGNMENT	16
+#define __PA_LDCW_ALIGN_ORDER	4
 #define __ldcw_align(a) ({					\
 	unsigned long __ret = (unsigned long) &(a)->lock[0];	\
 	__ret = (__ret + __PA_LDCW_ALIGNMENT - 1)		\
@@ -29,6 +30,7 @@
    ldcd). */

 #define __PA_LDCW_ALIGNMENT	4
+#define __PA_LDCW_ALIGN_ORDER	2
 #define __ldcw_align(a) (&(a)->slock)
 #define __LDCW	"ldcw,co"

arch/parisc/kernel/entry.S

Lines changed: 11 additions & 2 deletions

@@ -35,6 +35,7 @@
 #include <asm/pgtable.h>
 #include <asm/signal.h>
 #include <asm/unistd.h>
+#include <asm/ldcw.h>
 #include <asm/thread_info.h>

 #include <linux/linkage.h>
@@ -46,6 +47,14 @@
 #endif

 	.import		pa_tlb_lock,data
+	.macro	load_pa_tlb_lock reg
+#if __PA_LDCW_ALIGNMENT > 4
+	load32	PA(pa_tlb_lock) + __PA_LDCW_ALIGNMENT-1, \reg
+	depi	0,31,__PA_LDCW_ALIGN_ORDER, \reg
+#else
+	load32	PA(pa_tlb_lock), \reg
+#endif
+	.endm

 	/* space_to_prot macro creates a prot id from a space id */

@@ -457,7 +466,7 @@
 	.macro		tlb_lock	spc,ptp,pte,tmp,tmp1,fault
 #ifdef CONFIG_SMP
 	cmpib,COND(=),n	0,\spc,2f
-	load32		PA(pa_tlb_lock),\tmp
+	load_pa_tlb_lock \tmp
1:	LDCW		0(\tmp),\tmp1
 	cmpib,COND(=)	0,\tmp1,1b
 	nop
@@ -480,7 +489,7 @@
 	/* Release pa_tlb_lock lock. */
 	.macro		tlb_unlock1	spc,tmp
 #ifdef CONFIG_SMP
-	load32		PA(pa_tlb_lock),\tmp
+	load_pa_tlb_lock \tmp
 	tlb_unlock0	\spc,\tmp
 #endif
 	.endm
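A note on the new macro, since PA-RISC bit numbering is unusual: bit 0 is the most significant bit, so depi 0,31,__PA_LDCW_ALIGN_ORDER,\reg deposits a zero field whose rightmost bit is bit 31 (the LSB), clearing the low four bits on the 32-bit kernel. Together with the preceding load32 of the address plus __PA_LDCW_ALIGNMENT-1, the macro computes (addr + 15) & ~15. A small C walk-through with a hypothetical address, not taken from the patch:

    #include <assert.h>

    int main(void)
    {
    	/* Hypothetical 32-bit lock address, deliberately unaligned. */
    	unsigned long addr = 0x12345678;

    	/* load32 PA(pa_tlb_lock) + __PA_LDCW_ALIGNMENT-1, \reg */
    	unsigned long reg = addr + 16 - 1;	/* 0x12345687 */

    	/* depi 0,31,4,\reg: zero the 4 least-significant bits. */
    	reg &= ~0xfUL;				/* 0x12345680 */

    	/* The result is 16-byte aligned and still lies within the
    	 * 16-byte padded lock. */
    	assert(reg % 16 == 0);
    	assert(reg >= addr && reg < addr + 16);
    	return 0;
    }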

arch/parisc/kernel/pacache.S

Lines changed: 7 additions & 2 deletions

@@ -36,6 +36,7 @@
 #include <asm/assembly.h>
 #include <asm/pgtable.h>
 #include <asm/cache.h>
+#include <asm/ldcw.h>
 #include <linux/linkage.h>

 	.text
@@ -333,8 +334,12 @@ ENDPROC_CFI(flush_data_cache_local)

 	.macro	tlb_lock	la,flags,tmp
 #ifdef CONFIG_SMP
-	ldil		L%pa_tlb_lock,%r1
-	ldo		R%pa_tlb_lock(%r1),\la
+#if __PA_LDCW_ALIGNMENT > 4
+	load32		pa_tlb_lock + __PA_LDCW_ALIGNMENT-1, \la
+	depi		0,31,__PA_LDCW_ALIGN_ORDER, \la
+#else
+	load32		pa_tlb_lock, \la
+#endif
 	rsm		PSW_SM_I,\flags
1:	LDCW		0(\la),\tmp
 	cmpib,<>,n	0,\tmp,3f
