git.ipfire.org Git - thirdparty/valgrind.git/commitdiff
Add ppc64le linux hardwire for ld64.so.2 strcmp VALGRIND_3_25_BRANCH
author     Mark Wielaard <mark@klomp.org>
           Mon, 18 Aug 2025 13:30:47 +0000 (15:30 +0200)
committer  Mark Wielaard <mark@klomp.org>
           Mon, 15 Sep 2025 23:07:23 +0000 (01:07 +0200)
When dlopen is used we might end up in an assembly powerpc/strcmp.S
variant that is optimized in a way memcheck cannot prove correct. We
try to intercept strcmp in ld.so, but might fail when strcmp is called
before our interception code is loaded. Having a hardwire for ld.so
strcmp (an earlier intercept) solves this.
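
Note: the replacement provided by the new trampoline below is a
deliberately simple byte-at-a-time strcmp, roughly equivalent to the
following C sketch (the helper name hardwired_strcmp is illustrative
only and does not appear in the sources):

   /* Compare two NUL-terminated strings one byte at a time, mirroring
      the ppc64 trampoline: stop at the first NUL in s1 or at the first
      mismatching byte, and return the difference of those bytes. */
   static int hardwired_strcmp ( const unsigned char* s1,
                                 const unsigned char* s2 )
   {
      unsigned long i = 0;
      while (s1[i] != 0 && s1[i] == s2[i])
         i++;
      return (int)s1[i] - (int)s2[i];
   }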

https://bugs.kde.org/show_bug.cgi?id=508145

(cherry picked from commit 78fe3625f6b8ed4de28527d71c4a98d70e5b3035)

NEWS
coregrind/m_redir.c
coregrind/m_trampoline.S
coregrind/pub_core_trampoline.h

diff --git a/NEWS b/NEWS
index 3eea6bb1b709469aaa5ac2bf5d9b16ec455edda5..7d7a1dd579a5b68f7a24bb0f0e6d3984b9f01d1f 100644 (file)
--- a/NEWS
+++ b/NEWS
@@ -7,6 +7,7 @@ The following bugs have been fixed or resolved on this branch.
 
 503241  s390x: Support z17 changes to the NNPA instruction
 508030  Add several missing syscall hooks to ppc64-linux
+508145  ppc64le needs ld.so hardwire for strcmp
 
 To see details of a given bug, visit
   https://bugs.kde.org/show_bug.cgi?id=XXXXXX
diff --git a/coregrind/m_redir.c b/coregrind/m_redir.c
index 63172b97178eb9c67ad51ef2e3fd9c61459c62a5..857f910cb1e85ec2acd827e393a48aef92aa4213 100644 (file)
--- a/coregrind/m_redir.c
+++ b/coregrind/m_redir.c
@@ -1508,6 +1508,12 @@ void VG_(redir_initialise) ( void )
          NULL /* not mandatory - so why bother at all? */
          /* glibc-2.5 (FC6, ppc64) seems fine without it */
       );
+
+      add_hardwired_spec(
+         "ld64.so.2", "strcmp",
+         (Addr)&VG_(ppc64_linux_REDIR_FOR_strcmp),
+         NULL
+      );
    }
 
 #  elif defined(VGP_arm_linux)
diff --git a/coregrind/m_trampoline.S b/coregrind/m_trampoline.S
index 2c2cc0dc2a3d2f0e017bc670a677d884a41242c0..af25c8f7a1d4320259a3fb65a18fdf61204b6479 100644 (file)
--- a/coregrind/m_trampoline.S
+++ b/coregrind/m_trampoline.S
@@ -604,6 +604,51 @@ VG_(ppc64_linux_REDIR_FOR_strchr):
 .L1end:
 
        
+       /* this function is written using the "dotless" ABI convention */
+       .align 2
+       .globl VG_(ppc64_linux_REDIR_FOR_strcmp)
+#if !defined VGP_ppc64be_linux || _CALL_ELF == 2
+        /* Little Endian uses ELF version 2 */
+        .type VG_(ppc64_linux_REDIR_FOR_strcmp),@function
+VG_(ppc64_linux_REDIR_FOR_strcmp):
+#else
+        /* Big Endian uses ELF version 1 */
+       .section        ".opd","aw"
+       .align 3
+VG_(ppc64_linux_REDIR_FOR_strcmp):
+       .quad   .L.VG_(ppc64_linux_REDIR_FOR_strcmp),.TOC.@tocbase,0
+       .previous
+       .size   VG_(ppc64_linux_REDIR_FOR_strcmp), \
+                       .LFE0-.L.VG_(ppc64_linux_REDIR_FOR_strcmp)
+       .type   VG_(ppc64_linux_REDIR_FOR_strcmp), @function
+
+.L.VG_(ppc64_linux_REDIR_FOR_strcmp):
+#endif
+#if _CALL_ELF == 2
+0:      addis        2,12,.TOC.-0b@ha
+        addi         2,2,.TOC.-0b@l
+        .localentry  VG_(ppc64_linux_REDIR_FOR_strcmp), .-VG_(ppc64_linux_REDIR_FOR_strcmp)
+#endif
+.LFB0:
+        .cfi_startproc
+        li 10,0
+.L3:
+        lbzx 8,3,10
+        lbzx 9,4,10
+        cmpwi 0,8,0
+        beq 0,.L2
+        cmpw 0,8,9
+        addi 10,10,1
+        beq 0,.L3
+.L2:
+        subf 3,9,8
+        extsw 3,3
+        blr
+        .long 0
+        .byte 0,0,0,0,0,0,0,0
+        .cfi_endproc
+.LFE0:
+
 .global VG_(trampoline_stuff_end)
 VG_(trampoline_stuff_end):
 
diff --git a/coregrind/pub_core_trampoline.h b/coregrind/pub_core_trampoline.h
index 11d791df7c89e4414124137cddd32cc9244d120c..92b4fc67bf355b2c1bd0911817784228632107f0 100644 (file)
--- a/coregrind/pub_core_trampoline.h
+++ b/coregrind/pub_core_trampoline.h
@@ -97,6 +97,7 @@ extern void* VG_(ppc32_linux_REDIR_FOR_strchr)( void*, Int );
 #if defined(VGP_ppc64be_linux) || defined(VGP_ppc64le_linux)
 extern Addr  VG_(ppc64_linux_SUBST_FOR_rt_sigreturn);
 extern UInt  VG_(ppc64_linux_REDIR_FOR_strlen)( void* );
+extern UInt  VG_(ppc64_linux_REDIR_FOR_strcmp)( void*, void* );
 extern void* VG_(ppc64_linux_REDIR_FOR_strchr)( void*, Int );
 /* A label (sans dot) marking the ultra-magical return stub via which
    all redirected and wrapped functions are made to "return" on