diff options
| author | Peter Zijlstra <peterz@infradead.org> | 2021-12-04 14:43:40 +0100 |
|---|---|---|
| committer | Borislav Petkov <bp@suse.de> | 2021-12-08 12:25:37 +0100 |
| commit | f94909ceb1ed4bfdb2ada72f93236305e6d6951f (patch) | |
| tree | e7368c974b829530e82f34111f1f5ab183504921 /arch/x86/kernel/relocate_kernel_64.S | |
| parent | 22da5a07c75e1104caf6a42f189c97b83d070073 (diff) | |
| download | linux-f94909ceb1ed4bfdb2ada72f93236305e6d6951f.tar.gz linux-f94909ceb1ed4bfdb2ada72f93236305e6d6951f.tar.bz2 linux-f94909ceb1ed4bfdb2ada72f93236305e6d6951f.zip | |
x86: Prepare asm files for straight-line-speculation
Replace all ret/retq instructions with RET in preparation of making
RET a macro. Since AS is case insensitive it's a big no-op without
RET defined.
find arch/x86/ -name \*.S | while read file
do
sed -i 's/\<ret[q]*\>/RET/' $file
done
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211204134907.905503893@infradead.org
Diffstat (limited to 'arch/x86/kernel/relocate_kernel_64.S')
| -rw-r--r-- | arch/x86/kernel/relocate_kernel_64.S | 10 |
1 file changed, 5 insertions, 5 deletions
diff --git a/arch/x86/kernel/relocate_kernel_64.S b/arch/x86/kernel/relocate_kernel_64.S index c8fe74a28143..399f075ccdc4 100644 --- a/arch/x86/kernel/relocate_kernel_64.S +++ b/arch/x86/kernel/relocate_kernel_64.S @@ -104,7 +104,7 @@ SYM_CODE_START_NOALIGN(relocate_kernel) /* jump to identity mapped page */ addq $(identity_mapped - relocate_kernel), %r8 pushq %r8 - ret + RET SYM_CODE_END(relocate_kernel) SYM_CODE_START_LOCAL_NOALIGN(identity_mapped) @@ -191,7 +191,7 @@ SYM_CODE_START_LOCAL_NOALIGN(identity_mapped) xorl %r14d, %r14d xorl %r15d, %r15d - ret + RET 1: popq %rdx @@ -210,7 +210,7 @@ SYM_CODE_START_LOCAL_NOALIGN(identity_mapped) call swap_pages movq $virtual_mapped, %rax pushq %rax - ret + RET SYM_CODE_END(identity_mapped) SYM_CODE_START_LOCAL_NOALIGN(virtual_mapped) @@ -231,7 +231,7 @@ SYM_CODE_START_LOCAL_NOALIGN(virtual_mapped) popq %r12 popq %rbp popq %rbx - ret + RET SYM_CODE_END(virtual_mapped) /* Do the copies */ @@ -288,7 +288,7 @@ SYM_CODE_START_LOCAL_NOALIGN(swap_pages) lea PAGE_SIZE(%rax), %rsi jmp 0b 3: - ret + RET SYM_CODE_END(swap_pages) .globl kexec_control_code_size |
