Diffstat (limited to 'arch/x86/kernel/vmlinux.lds.S')
-rw-r--r--  arch/x86/kernel/vmlinux.lds.S  32
1 file changed, 31 insertions(+), 1 deletion(-)
diff --git a/arch/x86/kernel/vmlinux.lds.S b/arch/x86/kernel/vmlinux.lds.S
index b36f5f42a754..3c8593cda3a1 100644
--- a/arch/x86/kernel/vmlinux.lds.S
+++ b/arch/x86/kernel/vmlinux.lds.S
@@ -133,7 +133,20 @@ SECTIONS
LOCK_TEXT
KPROBES_TEXT
ALIGN_ENTRY_TEXT_BEGIN
+#ifdef CONFIG_CPU_SRSO
+ *(.text.__x86.rethunk_untrain)
+#endif
+
ENTRY_TEXT
+
+#ifdef CONFIG_CPU_SRSO
+ /*
+ * See the comment above srso_untrain_ret_alias()'s
+ * definition.
+ */
+ . = srso_untrain_ret_alias | (1 << 2) | (1 << 8) | (1 << 14) | (1 << 20);
+ *(.text.__x86.rethunk_safe)
+#endif
ALIGN_ENTRY_TEXT_END
SOFTIRQENTRY_TEXT
STATIC_CALL_TEXT
@@ -142,13 +155,15 @@ SECTIONS
#ifdef CONFIG_RETPOLINE
__indirect_thunk_start = .;
- *(.text.__x86.*)
+ *(.text.__x86.indirect_thunk)
+ *(.text.__x86.return_thunk)
__indirect_thunk_end = .;
#endif
} :text =0xcccc
/* End of text section, which should occupy whole number of pages */
_etext = .;
+
. = ALIGN(PAGE_SIZE);
X86_ALIGN_RODATA_BEGIN
@@ -502,6 +517,21 @@ INIT_PER_CPU(irq_stack_backing_store);
"fixed_percpu_data is not at start of per-cpu area");
#endif
+#ifdef CONFIG_RETHUNK
+. = ASSERT((__ret & 0x3f) == 0, "__ret not cacheline-aligned");
+. = ASSERT((srso_safe_ret & 0x3f) == 0, "srso_safe_ret not cacheline-aligned");
+#endif
+
+#ifdef CONFIG_CPU_SRSO
+/*
+ * GNU ld cannot do XOR so do: (A | B) - (A & B) in order to compute the XOR
+ * of the two function addresses:
+ */
+. = ASSERT(((srso_untrain_ret_alias | srso_safe_ret_alias) -
+ (srso_untrain_ret_alias & srso_safe_ret_alias)) == ((1 << 2) | (1 << 8) | (1 << 14) | (1 << 20)),
+ "SRSO function pair won't alias");
+#endif
+
#endif /* CONFIG_X86_32 */
#ifdef CONFIG_KEXEC_CORE
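
A note on the SRSO alias arithmetic in this change: the location counter assignment in the first hunk places .text.__x86.rethunk_safe at srso_untrain_ret_alias with bits 2, 8, 14 and 20 OR-ed in, and the CONFIG_CPU_SRSO ASSERT in the last hunk verifies that srso_safe_ret_alias really ends up differing from srso_untrain_ret_alias in exactly those bits. Since GNU ld has no XOR operator, the script computes the XOR as (A | B) - (A & B), which is an identity because A | B = (A ^ B) + (A & B). The user-space C sketch below replays the same arithmetic; the two addresses are made-up stand-ins for the linker-resolved symbols, used purely for illustration.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Bits 2, 8, 14 and 20: the aliasing mask used in vmlinux.lds.S. */
#define SRSO_ALIAS_MASK ((1UL << 2) | (1UL << 8) | (1UL << 14) | (1UL << 20))

int main(void)
{
	/* Hypothetical addresses standing in for the linker-resolved symbols. */
	uint64_t srso_untrain_ret_alias = 0xffffffff81a00040ULL;
	uint64_t srso_safe_ret_alias    = srso_untrain_ret_alias | SRSO_ALIAS_MASK;

	/* GNU ld cannot do XOR, so the linker script uses (A | B) - (A & B). */
	uint64_t ld_xor = (srso_untrain_ret_alias | srso_safe_ret_alias) -
			  (srso_untrain_ret_alias & srso_safe_ret_alias);

	/* The workaround really computes the XOR of the two addresses... */
	assert(ld_xor == (srso_untrain_ret_alias ^ srso_safe_ret_alias));

	/* ...and the pair differs in exactly the aliasing bits. */
	assert(ld_xor == SRSO_ALIAS_MASK);

	printf("alias mask: 0x%llx\n", (unsigned long long)ld_xor);
	return 0;
}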
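
The CONFIG_RETHUNK assertions are a plain alignment check: AND-ing a symbol with 0x3f keeps only its low six bits, so the link now fails if __ret or srso_safe_ret does not start on a 64-byte cacheline boundary.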