From d9a96cc18bec65c39822ee0a1672d7dc3fda150a Mon Sep 17 00:00:00 2001
From: "Peter Zijlstra (Intel)" <peterz@infradead.org>
Date: Tue, 18 Nov 2025 10:29:05 -0800
Subject: [PATCH] x86/asm: Introduce inline memcpy and memset

Provide inline memcpy and memset functions that can be used instead of
the GCC builtins when necessary.

The immediate use case is for the text poking functions to avoid the
standard memcpy()/memset() calls because objtool complains about such
dynamic calls within an AC=1 region. See
tools/objtool/Documentation/objtool.txt, warning #9, regarding function
calls with UACCESS enabled.

Some user copy functions such as copy_user_generic() and __clear_user()
have similar rep_{movs,stos} usages. But, those are highly specialized
and hard to combine or reuse for other things.

Define these new helpers for all other usages that need a completely
unoptimized, strictly inline version of memcpy() or memset().

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Sohil Mehta <sohil.mehta@intel.com>
Signed-off-by: Dave Hansen <dave.hansen@linux.intel.com>
Reviewed-by: Dave Hansen <dave.hansen@linux.intel.com>
Link: https://patch.msgid.link/20251118182911.2983253-4-sohil.mehta%40intel.com
---
 arch/x86/include/asm/string.h | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/arch/x86/include/asm/string.h b/arch/x86/include/asm/string.h
index c3c2c1914d657..9cb5aae7fba9f 100644
--- a/arch/x86/include/asm/string.h
+++ b/arch/x86/include/asm/string.h
@@ -1,6 +1,32 @@
 /* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ASM_X86_STRING_H
+#define _ASM_X86_STRING_H
+
 #ifdef CONFIG_X86_32
 # include <asm/string_32.h>
 #else
 # include <asm/string_64.h>
 #endif
+
+static __always_inline void *__inline_memcpy(void *to, const void *from, size_t len)
+{
+	void *ret = to;
+
+	asm volatile("rep movsb"
+		     : "+D" (to), "+S" (from), "+c" (len)
+		     : : "memory");
+	return ret;
+}
+
+static __always_inline void *__inline_memset(void *s, int v, size_t n)
+{
+	void *ret = s;
+
+	asm volatile("rep stosb"
+		     : "+D" (s), "+c" (n)
+		     : "a" ((uint8_t)v)
+		     : "memory");
+	return ret;
+}
+
+#endif /* _ASM_X86_STRING_H */
-- 
2.47.3
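
Note (not part of the patch): a minimal usage sketch of why a strictly
inline helper matters inside an AC=1 region. The function name
example_poke_copy() is hypothetical; stac()/clac() are the existing SMAP
helpers from <asm/smap.h>. While EFLAGS.AC is set, objtool flags
out-of-line function calls (warning #9), so the copy has to expand
inline rather than call memcpy():

	#include <asm/smap.h>
	#include <asm/string.h>

	/* Hypothetical caller: copy while UACCESS (EFLAGS.AC) is enabled. */
	static void example_poke_copy(void *dst, const void *src, size_t len)
	{
		stac();				/* enter the AC=1 region */
		__inline_memcpy(dst, src, len);	/* expands to "rep movsb"; no call */
		clac();				/* leave the AC=1 region */
	}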