@@ -46,7 +46,7 @@ ENTRY (__longjmp)
cfi_offset(d14, JB_D14<<3)
cfi_offset(d15, JB_D15<<3)
- DELOUSE (0)
+ PTR_ARG (0)
ldp x19, x20, [x0, #JB_X19<<3]
ldp x21, x22, [x0, #JB_X21<<3]
@@ -75,7 +75,7 @@
.align 2
_dl_tlsdesc_return:
BTI_C
- DELOUSE (0)
+ PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE]
RET
cfi_endproc
@@ -99,7 +99,7 @@ _dl_tlsdesc_undefweak:
BTI_C
str x1, [sp, #-16]!
cfi_adjust_cfa_offset (16)
- DELOUSE (0)
+ PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE]
mrs x1, tpidr_el0
sub PTR_REG (0), PTR_REG (0), PTR_REG (1)
@@ -145,7 +145,7 @@ _dl_tlsdesc_undefweak:
.align 2
_dl_tlsdesc_dynamic:
BTI_C
- DELOUSE (0)
+ PTR_ARG (0)
/* Save just enough registers to support fast path, if we fall
into slow path we will save additional registers. */
@@ -61,8 +61,8 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (MEMCHR)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
bic src, srcin, 15
cbz cntin, L(nomatch)
ld1 {vdata.16b}, [src]
@@ -42,9 +42,9 @@
#define tmp2 x8
ENTRY_ALIGN (memcmp, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
subs limit, limit, 16
b.lo L(less16)
@@ -73,9 +73,9 @@
*/
ENTRY_ALIGN (MEMCPY, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
@@ -209,9 +209,9 @@ END (MEMCPY)
libc_hidden_builtin_def (MEMCPY)
ENTRY_ALIGN (MEMMOVE, 4)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
@@ -59,8 +59,8 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (__memrchr)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
add end, srcin, cntin
sub endm1, end, 1
bic src, endm1, 15
@@ -31,8 +31,8 @@
ENTRY_ALIGN (MEMSET, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
dup v0.16B, valw
add dstend, dstin, count
@@ -64,8 +64,8 @@
ENTRY_ALIGN (MEMCHR, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
/* Do not dereference srcin if no bytes to compare. */
cbz cntin, L(none_chr)
@@ -64,9 +64,9 @@
from the end. */
ENTRY (__memcpy_simd)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
@@ -181,9 +181,9 @@ libc_hidden_builtin_def (__memcpy_simd)
ENTRY (__memmove_simd)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
@@ -73,9 +73,9 @@
#if IS_IN (libc)
ENTRY_ALIGN (__memcpy_falkor, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
cmp count, 32
add srcend, src, count
@@ -218,9 +218,9 @@ libc_hidden_builtin_def (__memcpy_falkor)
ENTRY_ALIGN (__memmove_falkor, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
cmp count, 32
add srcend, src, count
@@ -81,9 +81,9 @@
ENTRY_ALIGN (MEMMOVE, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
sub tmp1, dstin, src
cmp count, 96
@@ -95,9 +95,9 @@ END (MEMMOVE)
libc_hidden_builtin_def (MEMMOVE)
ENTRY (MEMCPY)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
prfm PLDL1KEEP, [src]
add srcend, src, count
@@ -97,9 +97,9 @@
ENTRY_ALIGN (MEMMOVE, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
cmp count, 16
@@ -127,9 +127,9 @@ libc_hidden_builtin_def (MEMMOVE)
.p2align 4
ENTRY (MEMCPY)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
cmp count, 16
@@ -36,8 +36,8 @@
ENTRY_ALIGN (MEMSET, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
bfi valw, valw, 8, 8
bfi valw, valw, 16, 16
@@ -31,8 +31,8 @@
ENTRY_ALIGN (MEMSET, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
dup v0.16B, valw
add dstend, dstin, count
@@ -86,7 +86,7 @@
character, return the length, if not, continue in the main loop. */
ENTRY (__strlen_asimd)
- DELOUSE (0)
+ PTR_ARG (0)
and tmp1, srcin, MIN_PAGE_SIZE - 1
cmp tmp1, MIN_PAGE_SIZE - 32
@@ -33,7 +33,7 @@ END (_setjmp)
libc_hidden_def (_setjmp)
ENTRY (__sigsetjmp)
- DELOUSE (0)
+ PTR_ARG (0)
1:
stp x19, x20, [x0, #JB_X19<<3]
@@ -56,7 +56,7 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (strchr)
- DELOUSE (0)
+ PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src]
@@ -54,7 +54,7 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (__strchrnul)
- DELOUSE (0)
+ PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src]
@@ -62,8 +62,8 @@
NUL too in big-endian, byte-reverse the data before the NUL check. */
ENTRY(strcmp)
- DELOUSE (0)
- DELOUSE (1)
+ PTR_ARG (0)
+ PTR_ARG (1)
sub off2, src2, src1
mov zeroones, REP8_01
and tmp, src1, 7
@@ -73,8 +73,8 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (STRCPY)
- DELOUSE (0)
- DELOUSE (1)
+ PTR_ARG (0)
+ PTR_ARG (1)
bic src, srcin, 15
mov wtmp, 0xf00f
ld1 {vdata.16b}, [src]
@@ -54,8 +54,7 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (STRLEN)
- DELOUSE (0)
- DELOUSE (1)
+ PTR_ARG (0)
bic src, srcin, 15
mov wtmp, 0xf00f
ld1 {vdata.16b}, [src]
@@ -55,9 +55,8 @@
#define REP8_80 0x8080808080808080
ENTRY_ALIGN_AND_PAD (__strnlen, 6, 9)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (1)
cbz limit, L(hit_limit)
mov zeroones, #REP8_01
bic src, srcin, #15
@@ -59,7 +59,7 @@
if the relevant byte matched the NUL end of string. */
ENTRY(strrchr)
- DELOUSE (0)
+ PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
mov wtmp, 0x3003
@@ -25,12 +25,14 @@
# define AARCH64_R(NAME) R_AARCH64_ ## NAME
# define PTR_REG(n) x##n
# define PTR_LOG_SIZE 3
-# define DELOUSE(n)
+# define PTR_ARG(n)
+# define SIZE_ARG(n)
#else
# define AARCH64_R(NAME) R_AARCH64_P32_ ## NAME
# define PTR_REG(n) w##n
# define PTR_LOG_SIZE 2
-# define DELOUSE(n) mov w##n, w##n
+# define PTR_ARG(n) mov w##n, w##n
+# define SIZE_ARG(n) mov w##n, w##n
#endif
#define PTR_SIZE (1<<PTR_LOG_SIZE)
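Note on the hunk above, which introduces the new names: under LP64 both PTR_ARG and SIZE_ARG expand to nothing, since pointer and size arguments already arrive as full 64-bit values in the x registers. Under ILP32 they expand to a self-move of the w register; writing a w register clears bits 63:32 of the corresponding x register, so a 32-bit pointer or size argument is zero-extended and can then be used safely as a 64-bit operand. A minimal illustrative sketch of the intended use, not part of the patch (the ldrb line and register choices are only an example):

	PTR_ARG (0)		/* ILP32: mov w0, w0 -- zero-extend pointer arg in x0 */
	SIZE_ARG (2)		/* ILP32: mov w2, w2 -- zero-extend length arg in x2 */
	ldrb	w3, [x0, x2]	/* x0 and x2 are now safe as 64-bit address operands */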
@@ -33,13 +33,12 @@
*/
.text
ENTRY(__clone)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
- DELOUSE (3)
- DELOUSE (4)
- DELOUSE (5)
- DELOUSE (6)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ PTR_ARG (3)
+ PTR_ARG (4)
+ PTR_ARG (5)
+ PTR_ARG (6)
/* Save args for the child. */
mov x10, x0
mov x11, x2
@@ -30,7 +30,7 @@
.text
ENTRY(__getcontext)
- DELOUSE (0)
+ PTR_ARG (0)
/* The saved context will return to the getcontext() call point
with a return value of 0 */
str xzr, [x0, oX0 + 0 * SZREG]
@@ -34,7 +34,7 @@
.text
ENTRY (__setcontext)
- DELOUSE (0)
+ PTR_ARG (0)
/* Save a copy of UCP. */
mov x9, x0
@@ -27,7 +27,7 @@
.text
ENTRY(__swapcontext)
- DELOUSE (0)
+ PTR_ARG (0)
/* Set the value returned when swapcontext() returns in this context.
And set up x1 to become the return address of the caller, so we
can return there with a normal RET instead of an indirect jump. */