Commit 39d668e

joergroedel authored and KAGA-KOKO committed
x86/mm/pti: Make pti_clone_kernel_text() compile on 32 bit
The pti_clone_kernel_text() function references __end_rodata_hpage_align, which is only present on x86-64. This makes sense as the end of the rodata section is not huge-page aligned on 32 bit.

Nevertheless a symbol is required for the function that points at the right address for both 32 and 64 bit. Introduce __end_rodata_aligned for that purpose and use it in pti_clone_kernel_text().

Signed-off-by: Joerg Roedel <jroedel@suse.de>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Tested-by: Pavel Machek <pavel@ucw.cz>
Cc: "H . Peter Anvin" <hpa@zytor.com>
Cc: linux-mm@kvack.org
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Andy Lutomirski <luto@kernel.org>
Cc: Dave Hansen <dave.hansen@intel.com>
Cc: Josh Poimboeuf <jpoimboe@redhat.com>
Cc: Juergen Gross <jgross@suse.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Jiri Kosina <jkosina@suse.cz>
Cc: Boris Ostrovsky <boris.ostrovsky@oracle.com>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: David Laight <David.Laight@aculab.com>
Cc: Denys Vlasenko <dvlasenk@redhat.com>
Cc: Eduardo Valentin <eduval@amazon.com>
Cc: Greg KH <gregkh@linuxfoundation.org>
Cc: Will Deacon <will.deacon@arm.com>
Cc: aliguori@amazon.com
Cc: daniel.gruss@iaik.tugraz.at
Cc: hughd@google.com
Cc: keescook@google.com
Cc: Andrea Arcangeli <aarcange@redhat.com>
Cc: Waiman Long <llong@redhat.com>
Cc: "David H . Gutteridge" <dhgutteridge@sympatico.ca>
Cc: joro@8bytes.org
Link: https://lkml.kernel.org/r/1531906876-13451-28-git-send-email-joro@8bytes.org
1 parent f94560c commit 39d668e
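As background for the hunks below, here is a minimal sketch of the mechanism the patch relies on: a symbol assigned in the linker script (such as __end_rodata_aligned) is visible to C code only as an address, so it is declared as an extern char array and cast when a numeric address is needed. The names __my_region_end and my_region_end() are hypothetical stand-ins for illustration; only the pattern matches the patch.

/* Sketch only: __my_region_end is a hypothetical stand-in for a symbol
 * such as __end_rodata_aligned that the linker script defines with
 * "__my_region_end = .;".  The array has no storage of its own; only
 * its address is meaningful. */
extern char __my_region_end[];

static inline unsigned long my_region_end(void)
{
	/* Cast the symbol's address to an integer, as
	 * pti_clone_kernel_text() does with __end_rodata_aligned. */
	return (unsigned long)__my_region_end;
}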

File tree: 3 files changed (+12, -8 lines)

arch/x86/include/asm/sections.h
Lines changed: 1 addition & 0 deletions

@@ -7,6 +7,7 @@
 
 extern char __brk_base[], __brk_limit[];
 extern struct exception_table_entry __stop___ex_table[];
+extern char __end_rodata_aligned[];
 
 #if defined(CONFIG_X86_64)
 extern char __end_rodata_hpage_align[];

arch/x86/kernel/vmlinux.lds.S
Lines changed: 10 additions & 7 deletions

@@ -55,19 +55,22 @@ jiffies_64 = jiffies;
  * so we can enable protection checks as well as retain 2MB large page
  * mappings for kernel text.
  */
-#define X64_ALIGN_RODATA_BEGIN	. = ALIGN(HPAGE_SIZE);
+#define X86_ALIGN_RODATA_BEGIN	. = ALIGN(HPAGE_SIZE);
 
-#define X64_ALIGN_RODATA_END			\
+#define X86_ALIGN_RODATA_END			\
 		. = ALIGN(HPAGE_SIZE);		\
-		__end_rodata_hpage_align = .;
+		__end_rodata_hpage_align = .;	\
+		__end_rodata_aligned = .;
 
 #define ALIGN_ENTRY_TEXT_BEGIN	. = ALIGN(PMD_SIZE);
 #define ALIGN_ENTRY_TEXT_END	. = ALIGN(PMD_SIZE);
 
 #else
 
-#define X64_ALIGN_RODATA_BEGIN
-#define X64_ALIGN_RODATA_END
+#define X86_ALIGN_RODATA_BEGIN
+#define X86_ALIGN_RODATA_END			\
+		. = ALIGN(PAGE_SIZE);		\
+		__end_rodata_aligned = .;
 
 #define ALIGN_ENTRY_TEXT_BEGIN
 #define ALIGN_ENTRY_TEXT_END

@@ -141,9 +144,9 @@ SECTIONS
 
 	/* .text should occupy whole number of pages */
 	. = ALIGN(PAGE_SIZE);
-	X64_ALIGN_RODATA_BEGIN
+	X86_ALIGN_RODATA_BEGIN
 	RO_DATA(PAGE_SIZE)
-	X64_ALIGN_RODATA_END
+	X86_ALIGN_RODATA_END
 
 	/* Data */
 	.data : AT(ADDR(.data) - LOAD_OFFSET) {

arch/x86/mm/pti.c
Lines changed: 1 addition & 1 deletion

@@ -470,7 +470,7 @@ void pti_clone_kernel_text(void)
 	 * clone the areas past rodata, they might contain secrets.
 	 */
 	unsigned long start = PFN_ALIGN(_text);
-	unsigned long end = (unsigned long)__end_rodata_hpage_align;
+	unsigned long end = (unsigned long)__end_rodata_aligned;
 
 	if (!pti_kernel_image_global_ok())
 		return;
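Taken together, the three hunks guarantee that __end_rodata_aligned exists in both configurations: on 64-bit it coincides with __end_rodata_hpage_align at a huge-page boundary, on 32-bit it marks the page-aligned end of rodata. The following sketch of an invariant check is hypothetical and not part of the patch (check_rodata_end_alignment() is an invented helper); it only spells out what the new symbol promises:

#include <linux/bug.h>
#include <asm/page.h>
#include <asm/sections.h>

/* Hypothetical sanity check, not part of this patch: verify the
 * alignment the linker script now guarantees for __end_rodata_aligned
 * on both 32- and 64-bit builds. */
static void check_rodata_end_alignment(void)
{
#ifdef CONFIG_X86_64
	/* Both symbols are assigned at the same huge-page-aligned spot. */
	WARN_ON((unsigned long)__end_rodata_aligned !=
		(unsigned long)__end_rodata_hpage_align);
	WARN_ON((unsigned long)__end_rodata_aligned & (HPAGE_SIZE - 1));
#else
	/* On 32 bit the symbol is only page aligned. */
	WARN_ON((unsigned long)__end_rodata_aligned & (PAGE_SIZE - 1));
#endif
}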
