author     Ard Biesheuvel <ard.biesheuvel@linaro.org>   2016-01-26 09:13:44 +0100
committer  Alex Shi <alex.shi@linaro.org>               2016-05-11 23:18:45 +0800
commit     89328d41aa99df071dadb43c722cd88ffafc77e2 (patch)
tree       d8f3aa049954132710c199b24ea62de1ac4646c5 /arch/arm64/kernel
parent     525787eea48f8c6a1630e6dab07313896cfc6b8d (diff)
arm64: add support for building vmlinux as a relocatable PIE binary
This implements CONFIG_RELOCATABLE, which links the final vmlinux image with a dynamic relocation section, allowing the early boot code to perform a relocation to a different virtual address at runtime. This is a prerequisite for KASLR (CONFIG_RANDOMIZE_BASE).

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
(cherry picked from commit 1e48ef7fcc374051730381a2a05da77eb4eafdb0)
Signed-off-by: Alex Shi <alex.shi@linaro.org>
Diffstat (limited to 'arch/arm64/kernel')
-rw-r--r--  arch/arm64/kernel/head.S         | 32
-rw-r--r--  arch/arm64/kernel/vmlinux.lds.S  | 16
2 files changed, 48 insertions(+), 0 deletions(-)
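[Editorial note] Linking vmlinux as a PIE makes the linker emit a table of Elf64_Rela records, and the boot code added below walks that table and patches each relocated location in place. The following C fragment is only a sketch of the R_AARCH64_RELATIVE case using the standard <elf.h> types; the function name and the 'delta' parameter are hypothetical (in this commit the image still runs at its link-time address, so the displacement is zero; the follow-on KASLR patch is what supplies a non-zero offset).

    #include <elf.h>        /* Elf64_Rela, R_AARCH64_RELATIVE, ELF64_R_TYPE */
    #include <stdint.h>

    /*
     * Illustrative sketch only: apply R_AARCH64_RELATIVE relocations to an
     * image loaded 'delta' bytes away from its link-time address.  The
     * kernel does the equivalent in assembly in head.S, before a normal C
     * environment exists.
     */
    static void apply_relative_relocs(Elf64_Rela *rela, Elf64_Rela *rela_end,
                                      uint64_t delta)
    {
            for (; rela < rela_end; rela++) {
                    if (ELF64_R_TYPE(rela->r_info) != R_AARCH64_RELATIVE)
                            continue;
                    /* r_offset is the place to patch; add the displacement */
                    uint64_t *place = (uint64_t *)(rela->r_offset + delta);
                    /* new value = link-time value (the addend) + displacement */
                    *place = (uint64_t)rela->r_addend + delta;
            }
    }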
diff --git a/arch/arm64/kernel/head.S b/arch/arm64/kernel/head.S
index 4cad8f9f2268..4e69412a7323 100644
--- a/arch/arm64/kernel/head.S
+++ b/arch/arm64/kernel/head.S
@@ -29,6 +29,7 @@
#include <asm/asm-offsets.h>
#include <asm/cache.h>
#include <asm/cputype.h>
+#include <asm/elf.h>
#include <asm/kernel-pgtable.h>
#include <asm/memory.h>
#include <asm/pgtable-hwdef.h>
@@ -432,6 +433,37 @@ __mmap_switched:
bl __pi_memset
dsb ishst // Make zero page visible to PTW
+#ifdef CONFIG_RELOCATABLE
+
+ /*
+ * Iterate over each entry in the relocation table, and apply the
+ * relocations in place.
+ */
+ adr_l x8, __dynsym_start // start of symbol table
+ adr_l x9, __reloc_start // start of reloc table
+ adr_l x10, __reloc_end // end of reloc table
+
+0: cmp x9, x10
+ b.hs 2f
+ ldp x11, x12, [x9], #24
+ ldr x13, [x9, #-8]
+ cmp w12, #R_AARCH64_RELATIVE
+ b.ne 1f
+ str x13, [x11]
+ b 0b
+
+1: cmp w12, #R_AARCH64_ABS64
+ b.ne 0b
+ add x12, x12, x12, lsl #1 // symtab offset: 24x top word
+ add x12, x8, x12, lsr #(32 - 3) // ... shifted into bottom word
+ ldr x15, [x12, #8] // Elf64_Sym::st_value
+ add x15, x13, x15
+ str x15, [x11]
+ b 0b
+
+2:
+#endif
+
adr_l sp, initial_sp, x4
mov x4, sp
and x4, x4, #~(THREAD_SIZE - 1)
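[Editorial note] The R_AARCH64_ABS64 path above packs the symbol lookup into two instructions: the symbol index lives in the top 32 bits of r_info, and each Elf64_Sym entry is 24 bytes, so "add x12, x12, x12, lsl #1" followed by the "lsr #(32 - 3)" shift computes dynsym + index * 24, i.e. (r_info * 3) >> 29. A rough C equivalent, again with illustrative names only:

    #include <elf.h>        /* Elf64_Sym, Elf64_Rela, ELF64_R_SYM */
    #include <stdint.h>

    /*
     * Sketch of the ABS64 case: look up the referenced symbol in .dynsym
     * and add the relocation addend.  Elf64_Sym is 24 bytes with st_value
     * at offset 8, which is what the shift trick in the assembly relies on.
     */
    static uint64_t resolve_abs64(const Elf64_Sym *dynsym, const Elf64_Rela *r)
    {
            uint32_t idx = ELF64_R_SYM(r->r_info);  /* top 32 bits of r_info */
            return dynsym[idx].st_value + (uint64_t)r->r_addend;
    }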
diff --git a/arch/arm64/kernel/vmlinux.lds.S b/arch/arm64/kernel/vmlinux.lds.S
index 282e3e64a17e..e3f6cd740ea3 100644
--- a/arch/arm64/kernel/vmlinux.lds.S
+++ b/arch/arm64/kernel/vmlinux.lds.S
@@ -87,6 +87,7 @@ SECTIONS
EXIT_CALL
*(.discard)
*(.discard.*)
+ *(.interp .dynamic)
}
. = KIMAGE_VADDR + TEXT_OFFSET;
@@ -149,6 +150,21 @@ SECTIONS
.altinstr_replacement : {
*(.altinstr_replacement)
}
+ .rela : ALIGN(8) {
+ __reloc_start = .;
+ *(.rela .rela*)
+ __reloc_end = .;
+ }
+ .dynsym : ALIGN(8) {
+ __dynsym_start = .;
+ *(.dynsym)
+ }
+ .dynstr : {
+ *(.dynstr)
+ }
+ .hash : {
+ *(.hash)
+ }
. = ALIGN(PAGE_SIZE);
__init_end = .;