Commit aa3d480

Peter Zijlstra authored and suryasaimadhu committed
x86: Use return-thunk in asm code
Use the return thunk in asm code. If the thunk isn't needed, it will
get patched into a RET instruction during boot by apply_returns().

Since alternatives can't handle relocations outside of the first
instruction, putting a 'jmp __x86_return_thunk' in one is not valid,
therefore carve out the memmove ERMS path into a separate label and jump
to it.

Signed-off-by: Peter Zijlstra (Intel) <[email protected]>
Signed-off-by: Borislav Petkov <[email protected]>
Reviewed-by: Josh Poimboeuf <[email protected]>
Signed-off-by: Borislav Petkov <[email protected]>
1 parent 0ee9073 commit aa3d480
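
To make the commit message's constraint concrete: once RET expands to a jump, a
multi-instruction ALTERNATIVE replacement ending in RET would need a relocation
past its first instruction. A schematic sketch, not taken from the patch (the
.Lerms_path label is hypothetical):

/* Invalid once RET == "jmp __x86_return_thunk": the jump is the third
 * instruction of the replacement, and apply_alternatives() can only
 * fix up a relocation in the first instruction.
 */
	ALTERNATIVE "", "movq %rdx, %rcx; rep movsb; jmp __x86_return_thunk", X86_FEATURE_ERMS

/* Valid: the replacement is a single jmp whose relocation is in the
 * first instruction; the real work moves out under a label.
 */
	ALTERNATIVE "", "jmp .Lerms_path", X86_FEATURE_ERMS

.Lerms_path:
	movq %rdx, %rcx
	rep movsb
	RET

This is exactly the shape of the memmove_64.S change in the last hunk below.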

File tree

arch/x86/entry/vdso/Makefile
arch/x86/include/asm/linkage.h
arch/x86/lib/memmove_64.S

3 files changed: +15 -1 lines changed

arch/x86/entry/vdso/Makefile

Lines changed: 1 addition & 0 deletions
@@ -92,6 +92,7 @@ endif
 endif
 
 $(vobjs): KBUILD_CFLAGS := $(filter-out $(CC_FLAGS_LTO) $(RANDSTRUCT_CFLAGS) $(GCC_PLUGINS_CFLAGS) $(RETPOLINE_CFLAGS),$(KBUILD_CFLAGS)) $(CFL)
+$(vobjs): KBUILD_AFLAGS += -DBUILD_VDSO
 
 #
 # vDSO code runs in userspace and -pg doesn't help with profiling anyway.
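
Why the new define matters: the vDSO runs in userspace while __x86_return_thunk
lives in kernel text, so vDSO asm must keep a plain ret. With -DBUILD_VDSO
visible to the assembler, the BUILD_VDSO guard added to linkage.h in the next
hunk selects the non-thunk RET. A hypothetical vDSO-style function as a sketch:

/* Hypothetical .S function assembled with -DBUILD_VDSO: here RET stays
 * a bare "ret" (or "ret; int3" with CONFIG_SLS) instead of a jump to
 * __x86_return_thunk, which userspace code could not reach.
 */
SYM_FUNC_START(__vdso_example)
	xorl %eax, %eax
	RET
SYM_FUNC_END(__vdso_example)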

arch/x86/include/asm/linkage.h

Lines changed: 8 additions & 0 deletions
@@ -19,19 +19,27 @@
 #define __ALIGN_STR	__stringify(__ALIGN)
 #endif
 
+#if defined(CONFIG_RETPOLINE) && !defined(__DISABLE_EXPORTS) && !defined(BUILD_VDSO)
+#define RET	jmp __x86_return_thunk
+#else /* CONFIG_RETPOLINE */
 #ifdef CONFIG_SLS
 #define RET	ret; int3
 #else
 #define RET	ret
 #endif
+#endif /* CONFIG_RETPOLINE */
 
 #else /* __ASSEMBLY__ */
 
+#if defined(CONFIG_RETPOLINE) && !defined(__DISABLE_EXPORTS) && !defined(BUILD_VDSO)
+#define ASM_RET	"jmp __x86_return_thunk\n\t"
+#else /* CONFIG_RETPOLINE */
 #ifdef CONFIG_SLS
 #define ASM_RET	"ret; int3\n\t"
 #else
 #define ASM_RET	"ret\n\t"
 #endif
+#endif /* CONFIG_RETPOLINE */
 
 #endif /* __ASSEMBLY__ */
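
ASM_RET is the counterpart for asm carried in C strings, so return sites
emitted from C files are patchable the same way. A minimal, hypothetical sketch
of that pattern (the stub name is made up, not from this commit):

/* Hypothetical sketch: a trivial function defined via file-scope
 * inline asm ends in ASM_RET, giving it the same boot-patchable
 * return site as functions written in .S files.
 */
#include <asm/linkage.h>

asm(".pushsection .text\n\t"
    ".globl example_asm_stub\n\t"
    "example_asm_stub:\n\t"
    "xorl %eax, %eax\n\t"
    ASM_RET
    ".popsection\n\t");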

arch/x86/lib/memmove_64.S

Lines changed: 6 additions & 1 deletion
@@ -39,7 +39,7 @@ SYM_FUNC_START(__memmove)
 	/* FSRM implies ERMS => no length checks, do the copy directly */
 .Lmemmove_begin_forward:
 	ALTERNATIVE "cmp $0x20, %rdx; jb 1f", "", X86_FEATURE_FSRM
-	ALTERNATIVE "", __stringify(movq %rdx, %rcx; rep movsb; RET), X86_FEATURE_ERMS
+	ALTERNATIVE "", "jmp .Lmemmove_erms", X86_FEATURE_ERMS
 
 	/*
 	 * movsq instruction have many startup latency
@@ -205,6 +205,11 @@ SYM_FUNC_START(__memmove)
 	movb %r11b, (%rdi)
 13:
 	RET
+
+.Lmemmove_erms:
+	movq %rdx, %rcx
+	rep movsb
+	RET
 SYM_FUNC_END(__memmove)
 EXPORT_SYMBOL(__memmove)
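
For reference, the ERMS path that moves under .Lmemmove_erms is just "copy
%rcx bytes from (%rsi) to (%rdi)". A standalone userspace sketch of the same
rep movsb copy (hypothetical demo code, not from the kernel):

#include <stdio.h>
#include <stddef.h>

/* Userspace illustration of the ERMS forward copy: rep movsb moves
 * %rcx bytes from (%rsi) to (%rdi), which is all the new
 * .Lmemmove_erms label does before its RET.
 */
static void erms_copy(void *dst, const void *src, size_t len)
{
	asm volatile("rep movsb"
		     : "+D" (dst), "+S" (src), "+c" (len)
		     :
		     : "memory");
}

int main(void)
{
	char src[32] = "patched at boot";
	char dst[32];

	erms_copy(dst, src, sizeof(src));
	puts(dst);	/* prints: patched at boot */
	return 0;
}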
