From: Uros Bizjak
Date: Mon, 25 Aug 2025 07:50:37 +0000 (+0200)
Subject: x32: Fix, optimize and cleanup RSEQ_* accessors
X-Git-Url: http://git.ipfire.org/gitweb.cgi?a=commitdiff_plain;h=9a1cb8f783cb98d4c5fd180c43855fdbb74fbe71;p=thirdparty%2Fglibc.git

x32: Fix, optimize and cleanup RSEQ_* accessors

Add missing "memory" clobber to accessors.  The "memory" clobber tells
the compiler that the assembly code performs memory reads or writes to
items other than those listed in the input and output operands (for
example, accessing the memory pointed to by one of the input
parameters).

Use MOVZBL instead of MOVB when reading a 1-byte memory location into
a register.  MOVB to a register inserts the byte only into the low 8
bits of the word-sized register and leaves the upper bits unchanged,
making the result dependent on the previous register value.  MOVZBL
zero-extends the byte and avoids this issue.

Change %P asm operand modifiers to %c.  The 'c' modifier is a generic
asm operand modifier that requires a constant operand and prints the
constant expression without punctuation.

Replace %b asm operand modifiers with explicit casts.  Explicit casts
inform the compiler which part of the register value is used, allowing
it to perform additional optimizations (e.g. narrowing the preceding
operation).

Remove %q asm operand modifiers.  Since the value is already cast to
'long long int', the compiler will emit a 64-bit register name in the
assembly without needing %q.

No functional changes intended.

Tested-by: H.J. Lu
Co-Authored-By: H.J. Lu
Signed-off-by: H.J. Lu
Signed-off-by: Uros Bizjak
Cc: Florian Weimer
Cc: Carlos O'Donell
Reviewed-by: H.J. Lu
---

diff --git a/sysdeps/x86_64/x32/nptl/rseq-access.h b/sysdeps/x86_64/x32/nptl/rseq-access.h
index 8386ebd4bf..d9bb215c35 100644
--- a/sysdeps/x86_64/x32/nptl/rseq-access.h
+++ b/sysdeps/x86_64/x32/nptl/rseq-access.h
@@ -18,29 +18,32 @@
 /* Read member of the RSEQ area directly, with single-copy atomicity
    semantics.  */
 #define RSEQ_GETMEM_ONCE(member) \
-  ({ __typeof (RSEQ_SELF()->member) __value; \
+  ({ \
+     __typeof (RSEQ_SELF()->member) __value; \
     _Static_assert (sizeof (__value) == 1 \
                     || sizeof (__value) == 4 \
                     || sizeof (__value) == 8, \
-                    "size of rseq data"); \
+                    "size of rseq data"); \
     if (sizeof (__value) == 1) \
-      asm volatile ("movb %%fs:%P2(%q3),%b0" \
-                    : "=q" (__value) \
-                    : "0" (0), "i" (offsetof (struct rseq_area, member)), \
-                      "r" ((long long int) __rseq_offset)); \
+      asm volatile ("movzbl %%fs:%c1(%2),%k0" \
+                    : "=r" (__value) \
+                    : "i" (offsetof (struct rseq_area, member)), \
+                      "r" ((long long int) __rseq_offset) \
+                    : "memory" ); \
     else if (sizeof (__value) == 4) \
-      asm volatile ("movl %%fs:%P1(%q2),%0" \
+      asm volatile ("movl %%fs:%c1(%2),%0" \
                     : "=r" (__value) \
                     : "i" (offsetof (struct rseq_area, member)), \
-                      "r" ((long long int) __rseq_offset)); \
+                      "r" ((long long int) __rseq_offset) \
+                    : "memory"); \
     else /* 8 */ \
-      { \
-        asm volatile ("movq %%fs:%P1(%q2),%q0" \
-                      : "=r" (__value) \
-                      : "i" (offsetof (struct rseq_area, member)), \
-                        "r" ((long long int) __rseq_offset)); \
-      } \
-    __value; })
+      asm volatile ("movq %%fs:%c1(%2),%0" \
+                    : "=r" (__value) \
+                    : "i" (offsetof (struct rseq_area, member)), \
+                      "r" ((long long int) __rseq_offset) \
+                    : "memory"); \
+    __value; \
+  })
 
 /* Read member of the RSEQ area directly.  */
 #define RSEQ_GETMEM(member) RSEQ_GETMEM_ONCE(member)
@@ -59,27 +62,32 @@
    _Static_assert (sizeof (RSEQ_SELF()->member) == 1 \
                    || sizeof (RSEQ_SELF()->member) == 4 \
                    || sizeof (RSEQ_SELF()->member) == 8, \
-                   "size of rseq data"); \
+                   "size of rseq data"); \
    if (sizeof (RSEQ_SELF()->member) == 1) \
-     asm volatile ("movb %b0,%%fs:%P1(%q2)" : \
-                   : "iq" (value), \
+     asm volatile ("movb %0,%%fs:%c1(%2)" \
+                   : \
+                   : "iq" ((uint8_t) cast_to_integer (value)), \
                      "i" (offsetof (struct rseq_area, member)), \
-                     "r" ((long long int) __rseq_offset)); \
+                     "r" ((long long int) __rseq_offset) \
+                   : "memory"); \
    else if (sizeof (RSEQ_SELF()->member) == 4) \
-     asm volatile ("movl %0,%%fs:%P1(%q2)" : \
-                   : IMM_MODE (value), \
+     asm volatile ("movl %0,%%fs:%c1(%2)" \
+                   : \
+                   : IMM_MODE ((uint32_t) cast_to_integer (value)), \
                      "i" (offsetof (struct rseq_area, member)), \
-                     "r" ((long long int) __rseq_offset)); \
+                     "r" ((long long int) __rseq_offset) \
+                   : "memory"); \
    else /* 8 */ \
-     { \
-       /* Since movq takes a signed 32-bit immediate or a register source \
-          operand, use "er" constraint for 32-bit signed integer constant \
-          or register. */ \
-       asm volatile ("movq %q0,%%fs:%P1(%q2)" : \
-                     : "er" ((uint64_t) cast_to_integer (value)), \
-                       "i" (offsetof (struct rseq_area, member)), \
-                       "r" ((long long int) __rseq_offset)); \
-     }})
+     /* Since movq takes a signed 32-bit immediate or a register source \
+        operand, use "er" constraint for 32-bit signed integer constant \
+        or register. */ \
+     asm volatile ("movq %0,%%fs:%c1(%2)" \
+                   : \
+                   : "er" ((uint64_t) cast_to_integer (value)), \
+                     "i" (offsetof (struct rseq_area, member)), \
+                     "r" ((long long int) __rseq_offset) \
+                   : "memory"); \
+  })
 
 /* Set member of the RSEQ area directly.  */
 #define RSEQ_SETMEM(member, value) RSEQ_SETMEM_ONCE(member, value)
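
As background for the load-side changes, the following is a minimal,
self-contained sketch.  It is not taken from the patch or from glibc:
the struct, field and function names are invented, and an ordinary
pointer stands in for the %fs-relative rseq area.  It shows the same
idioms the new RSEQ_GETMEM_ONCE uses: MOVZBL for a 1-byte load, the %c
operand modifier to print a constant offset without immediate
punctuation, the %k modifier for the 32-bit destination register, and
a "memory" clobber.  It assumes GCC or Clang on x86-64.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the rseq area; not the glibc layout.  */
struct area
{
  uint8_t flag;
  uint32_t count;
};

static inline uint32_t
load_flag_once (const struct area *a)
{
  uint32_t value;
  /* MOVZBL zero-extends the byte, so the result does not depend on the
     previous contents of the destination register.  %c1 prints the
     constant offset with no immediate punctuation, %k0 prints the
     32-bit register name, and the "memory" clobber tells the compiler
     that the asm accesses memory not named in the operand list.  */
  asm volatile ("movzbl %c1(%2),%k0"
                : "=r" (value)
                : "i" (offsetof (struct area, flag)),
                  "r" (a)
                : "memory");
  return value;
}

int
main (void)
{
  struct area a = { .flag = 0xff, .count = 7 };
  printf ("%u\n", load_flag_once (&a));   /* prints 255 */
  return 0;
}

Without the "memory" clobber the compiler may assume the asm does not
touch the pointed-to object and cache or reorder surrounding accesses;
with it, the asm is treated as reading and writing arbitrary memory.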
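
The store side's replacement of %b with an explicit cast can be
sketched the same way; the names are again hypothetical and this is
not the RSEQ_SETMEM_ONCE implementation.  Casting the value to uint8_t
gives the operand QImode, so a plain %0 already prints a byte register
or an 8-bit immediate, and the compiler also learns that only the low
byte of the value matters.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the rseq area; not the glibc layout.  */
struct area
{
  uint8_t flag;
  uint32_t count;
};

static inline void
store_flag_once (struct area *a, int v)
{
  /* The (uint8_t) cast makes the operand QImode, so %0 is printed as a
     byte register (e.g. %al) or an 8-bit immediate without needing the
     %b modifier; "iq" accepts an immediate or a byte-addressable
     register.  The "memory" clobber tells the compiler the asm writes
     memory it cannot otherwise see.  */
  asm volatile ("movb %0,%c1(%2)"
                :
                : "iq" ((uint8_t) v),
                  "i" (offsetof (struct area, flag)),
                  "r" (a)
                : "memory");
}

int
main (void)
{
  struct area a = { .flag = 0, .count = 0 };
  store_flag_once (&a, 0x17f);            /* only the low byte, 0x7f, is stored */
  printf ("%u\n", (unsigned) a.flag);     /* prints 127 */
  return 0;
}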