
Commit 1e48ef7

Ard Biesheuvel authored and ctmarinas committed

arm64: add support for building vmlinux as a relocatable PIE binary

This implements CONFIG_RELOCATABLE, which links the final vmlinux image
with a dynamic relocation section, allowing the early boot code to
perform a relocation to a different virtual address at runtime. This is
a prerequisite for KASLR (CONFIG_RANDOMIZE_BASE).

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
1 parent 6c94f27 commit 1e48ef7

5 files changed: 65 additions, 0 deletions
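For context (not part of the commit): a RELA-format relocation table is an array of Elf64_Rela records, each naming a place to patch (r_offset), a relocation type in the low word of r_info, and an explicit addend. Relocating a PIE image by some delta then means rewriting every patched doubleword as its link-time value plus that delta. Below is a minimal C sketch of that idea, with illustrative names; the commit's actual loop in head.S applies the entries with an effective delta of zero, since a randomized base only arrives with CONFIG_RANDOMIZE_BASE.

#include <stdint.h>

/* One RELA entry, per the ELF-64 object file format. */
struct elf64_rela {
        uint64_t r_offset;      /* virtual address of the patch site */
        uint64_t r_info;        /* symbol index (high 32) | type (low 32) */
        int64_t  r_addend;      /* explicit addend */
};

#define R_AARCH64_RELATIVE 1027

/* Shift every R_AARCH64_RELATIVE patch site by 'delta', the difference
 * between the runtime base and the link-time base. */
static void relocate_image(struct elf64_rela *r, struct elf64_rela *end,
                           uint64_t delta)
{
        for (; r < end; r++)
                if ((uint32_t)r->r_info == R_AARCH64_RELATIVE)
                        *(uint64_t *)(r->r_offset + delta) =
                                (uint64_t)r->r_addend + delta;
}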

arch/arm64/Kconfig

Lines changed: 11 additions & 0 deletions

@@ -787,6 +787,17 @@ config ARM64_MODULE_PLTS
         select ARM64_MODULE_CMODEL_LARGE
         select HAVE_MOD_ARCH_SPECIFIC
 
+config RELOCATABLE
+        bool
+        help
+          This builds the kernel as a Position Independent Executable (PIE),
+          which retains all relocation metadata required to relocate the
+          kernel binary at runtime to a different virtual address than the
+          address it was linked at.
+          Since AArch64 uses the RELA relocation format, this requires a
+          relocation pass at runtime even if the kernel is loaded at the
+          same address it was linked at.
+
 endmenu
 
 menu "Boot options"

arch/arm64/Makefile

Lines changed: 4 additions & 0 deletions

@@ -15,6 +15,10 @@ CPPFLAGS_vmlinux.lds = -DTEXT_OFFSET=$(TEXT_OFFSET)
 OBJCOPYFLAGS    :=-O binary -R .note -R .note.gnu.build-id -R .comment -S
 GZFLAGS         :=-9
 
+ifneq ($(CONFIG_RELOCATABLE),)
+LDFLAGS_vmlinux         += -pie
+endif
+
 KBUILD_DEFCONFIG := defconfig
 
 # Check for binutils support for specific extensions
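A note on the hunk above: linking with -pie is what makes ld emit the dynamic relocation machinery in the first place, namely the .rela table and the .dynsym/.dynstr/.hash sections that the vmlinux.lds.S hunk below has to place (plus the .interp/.dynamic output it discards). Without the flag there would be no relocation records for head.S to iterate over.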

arch/arm64/include/asm/elf.h

Lines changed: 2 additions & 0 deletions

@@ -77,6 +77,8 @@
 #define R_AARCH64_MOVW_PREL_G2_NC       292
 #define R_AARCH64_MOVW_PREL_G3          293
 
+#define R_AARCH64_RELATIVE              1027
+
 /*
  * These are used to set parameters in the core dumps.
  */
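The value 1027 is not arbitrary: the AArch64 ELF ABI places R_AARCH64_RELATIVE in the dynamic-relocation range, well away from the static relocations (such as the MOVW/PREL codes up to 293) defined above it. The constant needs to live in <asm/elf.h> so that the head.S code below can compare each entry's type field against it from assembly.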

arch/arm64/kernel/head.S

Lines changed: 32 additions & 0 deletions

@@ -29,6 +29,7 @@
 #include <asm/asm-offsets.h>
 #include <asm/cache.h>
 #include <asm/cputype.h>
+#include <asm/elf.h>
 #include <asm/kernel-pgtable.h>
 #include <asm/memory.h>
 #include <asm/pgtable-hwdef.h>
@@ -432,6 +433,37 @@ __mmap_switched:
         bl      __pi_memset
         dsb     ishst                           // Make zero page visible to PTW
 
+#ifdef CONFIG_RELOCATABLE
+
+        /*
+         * Iterate over each entry in the relocation table, and apply the
+         * relocations in place.
+         */
+        adr_l   x8, __dynsym_start              // start of symbol table
+        adr_l   x9, __reloc_start               // start of reloc table
+        adr_l   x10, __reloc_end                // end of reloc table
+
+0:      cmp     x9, x10
+        b.hs    2f
+        ldp     x11, x12, [x9], #24
+        ldr     x13, [x9, #-8]
+        cmp     w12, #R_AARCH64_RELATIVE
+        b.ne    1f
+        str     x13, [x11]
+        b       0b
+
+1:      cmp     w12, #R_AARCH64_ABS64
+        b.ne    0b
+        add     x12, x12, x12, lsl #1           // symtab offset: 24x top word
+        add     x12, x8, x12, lsr #(32 - 3)     // ... shifted into bottom word
+        ldr     x15, [x12, #8]                  // Elf64_Sym::st_value
+        add     x15, x13, x15
+        str     x15, [x11]
+        b       0b
+
+2:
+#endif
+
         adr_l   sp, initial_sp, x4
         mov     x4, sp
         and     x4, x4, #~(THREAD_SIZE - 1)
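For readers who do not speak AArch64 assembly, here is a hypothetical C rendering of the loop above (not part of the patch). x11/x12/x13 hold r_offset, r_info and r_addend; the add/lsr pair computes 24 * (r_info >> 32), the byte offset of the referenced Elf64_Sym, by tripling r_info and shifting the top-word symbol index down by (32 - 3):

#include <stdint.h>

#define R_AARCH64_ABS64         257     /* from the list in <asm/elf.h> */
#define R_AARCH64_RELATIVE      1027    /* added by this commit */

typedef struct {
        uint64_t r_offset;      /* x11: where to write */
        uint64_t r_info;        /* x12: sym index (high 32) | type (low 32) */
        int64_t  r_addend;      /* x13 */
} Elf64_Rela;

typedef struct {                /* 24 bytes, matching the asm's scaling */
        uint32_t      st_name;
        unsigned char st_info;
        unsigned char st_other;
        uint16_t      st_shndx;
        uint64_t      st_value; /* read at offset 8: ldr x15, [x12, #8] */
        uint64_t      st_size;
} Elf64_Sym;

/* The bracketing symbols are defined in vmlinux.lds.S below. */
static void apply_relocations(Elf64_Rela *rela,    /* __reloc_start  */
                              Elf64_Rela *end,     /* __reloc_end    */
                              Elf64_Sym  *dynsym)  /* __dynsym_start */
{
        for (; rela < end; rela++) {
                uint64_t *place = (uint64_t *)rela->r_offset;

                switch ((uint32_t)rela->r_info) {
                case R_AARCH64_RELATIVE:        /* str x13, [x11] */
                        *place = (uint64_t)rela->r_addend;
                        break;
                case R_AARCH64_ABS64:           /* st_value + addend */
                        *place = dynsym[rela->r_info >> 32].st_value +
                                 (uint64_t)rela->r_addend;
                        break;
                }
        }
}

Note that no delta is applied: at this point the kernel still runs at its link-time addresses, so the pass simply materializes the link-time values that the RELA format leaves out of the image.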

arch/arm64/kernel/vmlinux.lds.S

Lines changed: 16 additions & 0 deletions

@@ -87,6 +87,7 @@ SECTIONS
                EXIT_CALL
                *(.discard)
                *(.discard.*)
+               *(.interp .dynamic)
        }
 
        . = KIMAGE_VADDR + TEXT_OFFSET;
@@ -149,6 +150,21 @@ SECTIONS
        .altinstr_replacement : {
                *(.altinstr_replacement)
        }
+       .rela : ALIGN(8) {
+               __reloc_start = .;
+               *(.rela .rela*)
+               __reloc_end = .;
+       }
+       .dynsym : ALIGN(8) {
+               __dynsym_start = .;
+               *(.dynsym)
+       }
+       .dynstr : {
+               *(.dynstr)
+       }
+       .hash : {
+               *(.hash)
+       }
 
        . = ALIGN(PAGE_SIZE);
        __init_end = .;
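Two details of this hunk are worth noting. First, *(.interp .dynamic) goes into the /DISCARD/ output section above: the kernel relocates itself, so it has no use for an ELF interpreter or a runtime .dynamic section even though the -pie link produces them. Second, __reloc_start, __reloc_end and __dynsym_start are linker-script symbols with an address but no storage; head.S reads them with adr_l. A hypothetical C-side view, for illustration only:

/* Markers defined in vmlinux.lds.S; incomplete array types make it
 * clear that only their addresses are meaningful. */
extern char __reloc_start[], __reloc_end[], __dynsym_start[];

static unsigned long num_relocs(void)
{
        return (__reloc_end - __reloc_start) / 24;  /* sizeof(Elf64_Rela) */
}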
