cfi_offset(d14, JB_D14<<3)
cfi_offset(d15, JB_D15<<3)
- PTR_ARG (0)
-
#if IS_IN(libc)
/* Disable ZA state of SME in libc.a and libc.so, but not in ld.so. */
# if HAVE_AARCH64_PAC_RET
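For context on the lines being deleted throughout this patch: PTR_ARG and SIZE_ARG expand to nothing on LP64 AArch64. They exist only as hooks for a 32-bit-pointer ABI (ILP32, analogous to x32 on x86_64), where an incoming pointer or size argument occupies the low 32 bits of a register and would need an explicit zero-extension on entry. A minimal sketch of the no-op fallback, assuming the usual arrangement in sysdeps/generic/sysdep.h (not quoted verbatim from the tree):

    #ifndef PTR_ARG
    # define PTR_ARG(n)     /* no-op: pointer arguments are already 64-bit */
    #endif
    #ifndef SIZE_ARG
    # define SIZE_ARG(n)    /* no-op: size_t arguments are already 64-bit */
    #endif

Since AArch64 ILP32 support was never merged into glibc, none of the removals in this patch change the generated code in any supported configuration.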
#define zva_val x4
ENTRY (__libc_mtag_tag_region)
- PTR_ARG (0)
- SIZE_ARG (1)
-
add dstend, dstin, count
cmp count, 96
#define zva_val x4
ENTRY (__libc_mtag_tag_zero_region)
- PTR_ARG (0)
- SIZE_ARG (1)
-
add dstend, dstin, count
cmp count, 96
/* Load and relocate all library dependencies. */
mov x0, sp
- PTR_ARG (0)
bl _dl_start
/* Returns user entry point in x0. */
mov PTR_REG (21), PTR_REG (0)
.align 2
_dl_tlsdesc_return:
BTI_C
- PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE]
RET
cfi_endproc
BTI_C
str x1, [sp, #-16]!
cfi_adjust_cfa_offset (16)
- PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE]
mrs x1, tpidr_el0
sub PTR_REG (0), PTR_REG (0), PTR_REG (1)
.align 2
_dl_tlsdesc_dynamic:
BTI_C
- PTR_ARG (0)
/* Save just enough registers to support fast path, if we fall
into slow path we will save additional registers. */
exactly which byte matched. */
ENTRY (MEMCHR)
- PTR_ARG (0)
- SIZE_ARG (2)
bic src, srcin, 15
cbz cntin, L(nomatch)
ld1 {vdata.16b}, [src]
ENTRY (memcmp)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
cmp limit, 16
b.lo L(less16)
ldp data1, data3, [src1]
from the end. */
ENTRY (MEMCPY)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
add srcend, src, count
add dstend, dstin, count
cmp count, 128
ENTRY (MEMMOVE)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
add srcend, src, count
add dstend, dstin, count
cmp count, 128
exactly which byte matched. */
ENTRY (__memrchr)
- PTR_ARG (0)
- SIZE_ARG (2)
add end, srcin, cntin
sub endm1, end, 1
bic src, endm1, 15
#define dstend2 x5
ENTRY (MEMSET)
- PTR_ARG (0)
- SIZE_ARG (2)
-
dup v0.16B, valw
cmp count, 16
b.lo L(set_small)
ENTRY (__memchr_nosimd)
- PTR_ARG (0)
- SIZE_ARG (2)
-
/* Do not dereference srcin if no bytes to compare. */
cbz cntin, L(none_chr)
ENTRY (__memcpy_a64fx)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
cntb vlen
cmp n, vlen, lsl 1
b.hi L(copy_small)
ENTRY_ALIGN (__memmove_a64fx, 4)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
/* Fast case for up to 2 vectors. */
cntb vlen
cmp n, vlen, lsl 1
*/
ENTRY (__memcpy_mops)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
mov x3, x0
.inst 0x19010443 /* cpyfp [x3]!, [x1]!, x2! */
.inst 0x19410443 /* cpyfm [x3]!, [x1]!, x2! */
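An aside on the unchanged context above: the MOPS variants emit raw .inst words rather than mnemonics, most likely so the files still assemble with toolchains that lack FEAT_MOPS support; the trailing comments record the intended instruction. Shown only as an illustration (the prologue/main split is architectural; the epilogue instruction lies outside the quoted hunk), the mnemonic form of the two encodings above would be:

    cpyfp   [x3]!, [x1]!, x2!    /* prologue: set up and copy the first portion */
    cpyfm   [x3]!, [x1]!, x2!    /* main: copy the bulk of the buffer */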
ENTRY (__memmove_oryon1)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
sub tmp1, dstin, src
cmp count, 96
ccmp tmp1, count, 2, hi
ENTRY (__memcpy_oryon1)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
add srcend, src, count
add dstend, dstin, count
cmp count, 16
.arch armv8.2-a+sve
ENTRY (__memcpy_sve)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
cmp count, 128
b.hi L(copy_long)
cntb vlen
ENTRY (__memmove_sve)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
cmp count, 128
b.hi L(move_long)
cntb vlen
*/
ENTRY (__memmove_mops)
- PTR_ARG (0)
- PTR_ARG (1)
- SIZE_ARG (2)
-
mov x3, x0
.inst 0x1d010443 /* cpyp [x3]!, [x1]!, x2! */
.inst 0x1d410443 /* cpym [x3]!, [x1]!, x2! */
#define BTI_C
ENTRY (__memset_a64fx)
- PTR_ARG (0)
- SIZE_ARG (2)
cntb vector_length
dup z0.b, valw
ENTRY (__memset_emag)
- PTR_ARG (0)
- SIZE_ARG (2)
-
bfi valw, valw, 8, 8
bfi valw, valw, 16, 16
bfi val, val, 32, 32
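Another aside on unchanged context: the bfi sequence above broadcasts the set byte across the whole register before the stores. A worked illustration, assuming the caller passed c = 0xAB (any junk in the upper bits of the value register is overwritten along the way):

    bfi     valw, valw, 8, 8      /* low 16 bits become 0xABAB */
    bfi     valw, valw, 16, 16    /* 32-bit register: 0xABABABAB */
    bfi     val, val, 32, 32      /* 64-bit register: 0xABABABABABABABAB */

The same three-instruction broadcast appears again in the __memset_oryon1 hunk below.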
ENTRY (__memset_kunpeng)
- PTR_ARG (0)
- SIZE_ARG (2)
-
dup v0.16B, valw
add dstend, dstin, count
*/
ENTRY (__memset_mops)
- PTR_ARG (0)
- SIZE_ARG (2)
-
mov x3, x0
.inst 0x19c10443 /* setp [x3]!, x2!, x1 */
.inst 0x19c14443 /* setm [x3]!, x2!, x1 */
ENTRY (__memset_oryon1)
- PTR_ARG (0)
- SIZE_ARG (2)
-
bfi valw, valw, 8, 8
bfi valw, valw, 16, 16
bfi val, val, 32, 32
character, return the length, if not, continue in the main loop. */
ENTRY (__strlen_asimd)
- PTR_ARG (0)
and tmp1, srcin, MIN_PAGE_SIZE - 1
cmp tmp1, MIN_PAGE_SIZE - 32
b.hi L(page_cross)
libc_hidden_def (_setjmp)
ENTRY (__sigsetjmp)
- PTR_ARG (0)
-
1:
stp x19, x20, [x0, #JB_X19<<3]
stp x21, x22, [x0, #JB_X21<<3]
If it is not a multiple of 4, there was no match. */
ENTRY (strchr)
- PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src]
exactly which byte matched. */
ENTRY (__strchrnul)
- PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src]
NUL too in big-endian, byte-reverse the data before the NUL check. */
ENTRY(strcmp)
- PTR_ARG (0)
- PTR_ARG (1)
sub off2, src2, src1
mov zeroones, REP8_01
and tmp, src1, 7
exactly which byte matched. */
ENTRY (STRCPY)
- PTR_ARG (0)
- PTR_ARG (1)
bic src, srcin, 15
ld1 {vdata.16b}, [src]
cmeq vhas_nul.16b, vdata.16b, 0
identifies the first zero byte. */
ENTRY (STRLEN)
- PTR_ARG (0)
bic src, srcin, 15
ld1 {vdata.16b}, [src]
cmeq vhas_nul.16b, vdata.16b, 0
identifies the first zero byte. */
ENTRY (__strnlen)
- PTR_ARG (0)
- SIZE_ARG (1)
bic src, srcin, 15
cbz cntin, L(nomatch)
ld1 {vdata.16b}, [src]
if the relevant byte matched the NUL end of string. */
ENTRY (strrchr)
- PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
movi vrepmask.16b, 0x33
*/
.text
ENTRY(__clone)
- PTR_ARG (0)
- PTR_ARG (1)
- PTR_ARG (3)
- PTR_ARG (4)
- PTR_ARG (5)
- PTR_ARG (6)
/* Save args for the child. */
mov x10, x0
mov x11, x2
.text
ENTRY(__clone3)
- PTR_ARG (0)
- PTR_ARG (1)
- PTR_ARG (3)
- PTR_ARG (4)
/* Save args for the child. */
mov x10, x0 /* cl_args */
mov x11, x2 /* func */
.text
ENTRY(__getcontext)
- PTR_ARG (0)
/* The saved context will return to the getcontext() call point
with a return value of 0 */
str xzr, [x0, oX0 + 0 * SZREG]
.text
ENTRY (__setcontext)
- PTR_ARG (0)
/* Save a copy of UCP. */
mov x9, x0
.text
ENTRY(__swapcontext)
- PTR_ARG (0)
/* Set the value returned when swapcontext() returns in this context.
And set up x1 to become the return address of the caller, so we
can return there with a normal RET instead of an indirect jump. */