DELOUSE was added to the asm code to make it compatible with non-LP64
ABIs, but it is an unfortunate name and the code was not compatible
with ABIs where the sizes of pointer and size_t differ. Glibc
currently only supports the LP64 ABI, so these macros are not really
needed or tested, but for now the name is changed to be more
meaningful instead of removing them completely.
Some DELOUSE macros were dropped: the clone, strlen and strnlen
implementations used them unnecessarily.
The out of tree ILP32 patches are currently not maintained and will
likely need a rework to rebase them on top of the time64 changes.
(cherry picked from commit 45b1e17e9150dbd9ac2d578579063fbfa8e1b327)
cfi_offset(d14, JB_D14<<3)
cfi_offset(d15, JB_D15<<3)
- DELOUSE (0)
+ PTR_ARG (0)
ldp x19, x20, [x0, #JB_X19<<3]
ldp x21, x22, [x0, #JB_X21<<3]
.align 2
_dl_tlsdesc_return:
BTI_C
- DELOUSE (0)
+ PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE]
RET
cfi_endproc
BTI_C
str x1, [sp, #-16]!
cfi_adjust_cfa_offset (16)
- DELOUSE (0)
+ PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE]
mrs x1, tpidr_el0
sub PTR_REG (0), PTR_REG (0), PTR_REG (1)
.align 2
_dl_tlsdesc_dynamic:
BTI_C
- DELOUSE (0)
+ PTR_ARG (0)
/* Save just enough registers to support fast path, if we fall
into slow path we will save additional registers. */
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (MEMCHR)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
bic src, srcin, 15
cbz cntin, L(nomatch)
ld1 {vdata.16b}, [src]
#define tmp2 x8
ENTRY_ALIGN (memcmp, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
subs limit, limit, 16
b.lo L(less16)
*/
ENTRY_ALIGN (MEMCPY, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
libc_hidden_builtin_def (MEMCPY)
ENTRY_ALIGN (MEMMOVE, 4)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (__memrchr)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
add end, srcin, cntin
sub endm1, end, 1
bic src, endm1, 15
ENTRY_ALIGN (MEMSET, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
dup v0.16B, valw
add dstend, dstin, count
ENTRY_ALIGN (MEMCHR, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
/* Do not dereference srcin if no bytes to compare. */
cbz cntin, L(none_chr)
from the end. */
ENTRY (__memcpy_simd)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
ENTRY (__memmove_simd)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
#if IS_IN (libc)
ENTRY_ALIGN (__memcpy_falkor, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
cmp count, 32
add srcend, src, count
ENTRY_ALIGN (__memmove_falkor, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
cmp count, 32
add srcend, src, count
ENTRY_ALIGN (MEMMOVE, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
sub tmp1, dstin, src
cmp count, 96
libc_hidden_builtin_def (MEMMOVE)
ENTRY (MEMCPY)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
prfm PLDL1KEEP, [src]
add srcend, src, count
ENTRY_ALIGN (MEMMOVE, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
cmp count, 16
.p2align 4
ENTRY (MEMCPY)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
cmp count, 16
ENTRY_ALIGN (MEMSET, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
bfi valw, valw, 8, 8
bfi valw, valw, 16, 16
ENTRY_ALIGN (MEMSET, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
dup v0.16B, valw
add dstend, dstin, count
character, return the length, if not, continue in the main loop. */
ENTRY (__strlen_asimd)
- DELOUSE (0)
+ PTR_ARG (0)
and tmp1, srcin, MIN_PAGE_SIZE - 1
cmp tmp1, MIN_PAGE_SIZE - 32
libc_hidden_def (_setjmp)
ENTRY (__sigsetjmp)
- DELOUSE (0)
+ PTR_ARG (0)
1:
stp x19, x20, [x0, #JB_X19<<3]
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (strchr)
- DELOUSE (0)
+ PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src]
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (__strchrnul)
- DELOUSE (0)
+ PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src]
NUL too in big-endian, byte-reverse the data before the NUL check. */
ENTRY(strcmp)
- DELOUSE (0)
- DELOUSE (1)
+ PTR_ARG (0)
+ PTR_ARG (1)
sub off2, src2, src1
mov zeroones, REP8_01
and tmp, src1, 7
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (STRCPY)
- DELOUSE (0)
- DELOUSE (1)
+ PTR_ARG (0)
+ PTR_ARG (1)
bic src, srcin, 15
mov wtmp, 0xf00f
ld1 {vdata.16b}, [src]
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (STRLEN)
- DELOUSE (0)
- DELOUSE (1)
+ PTR_ARG (0)
bic src, srcin, 15
mov wtmp, 0xf00f
ld1 {vdata.16b}, [src]
#define REP8_80 0x8080808080808080
ENTRY_ALIGN_AND_PAD (__strnlen, 6, 9)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (1)
cbz limit, L(hit_limit)
mov zeroones, #REP8_01
bic src, srcin, #15
if the relevant byte matched the NUL end of string. */
ENTRY(strrchr)
- DELOUSE (0)
+ PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
mov wtmp, 0x3003
# define AARCH64_R(NAME) R_AARCH64_ ## NAME
# define PTR_REG(n) x##n
# define PTR_LOG_SIZE 3
-# define DELOUSE(n)
+# define PTR_ARG(n)
+# define SIZE_ARG(n)
#else
# define AARCH64_R(NAME) R_AARCH64_P32_ ## NAME
# define PTR_REG(n) w##n
# define PTR_LOG_SIZE 2
-# define DELOUSE(n) mov w##n, w##n
+# define PTR_ARG(n) mov w##n, w##n
+# define SIZE_ARG(n) mov w##n, w##n
#endif
#define PTR_SIZE (1<<PTR_LOG_SIZE)
*/
.text
ENTRY(__clone)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
- DELOUSE (3)
- DELOUSE (4)
- DELOUSE (5)
- DELOUSE (6)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ PTR_ARG (3)
+ PTR_ARG (4)
+ PTR_ARG (5)
+ PTR_ARG (6)
/* Save args for the child. */
mov x10, x0
mov x11, x2
.text
ENTRY(__getcontext)
- DELOUSE (0)
+ PTR_ARG (0)
/* The saved context will return to the getcontext() call point
with a return value of 0 */
str xzr, [x0, oX0 + 0 * SZREG]
.text
ENTRY (__setcontext)
- DELOUSE (0)
+ PTR_ARG (0)
/* Save a copy of UCP. */
mov x9, x0
.text
ENTRY(__swapcontext)
- DELOUSE (0)
+ PTR_ARG (0)
/* Set the value returned when swapcontext() returns in this context.
And set up x1 to become the return address of the caller, so we
can return there with a normal RET instead of an indirect jump. */