path: root/sysdeps/aarch64/strlen.S
author     Wilco Dijkstra <wdijkstr@arm.com>    2016-06-20 17:48:20 +0100
committer  Wilco Dijkstra <wdijkstr@arm.com>    2016-06-20 17:48:20 +0100
commit     58ec4fb881719d0b69989f9a4955290fca531831 (patch)
tree       6dd1655ed1d33543036be0e7a18ac2998dd79b2f /sysdeps/aarch64/strlen.S
parent     b998e16e71c8617746b7c39500e925d28ff22ed8 (diff)
download   glibc-58ec4fb881719d0b69989f9a4955290fca531831.tar.gz
Add a simple rawmemchr implementation.

Use strlen for rawmemchr(s, '\0') as it is the fastest way to search for
'\0'.  Otherwise use memchr with an infinite size.  This is 3x faster on
benchtests for large sizes.  Passes GLIBC tests.

	* sysdeps/aarch64/rawmemchr.S (__rawmemchr): New file.
	* sysdeps/aarch64/strlen.S (__strlen): Rename from strlen to avoid
	the PLT.
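
As a rough illustration of the approach described above, a C-level sketch of
the dispatch might look like the following (the real rawmemchr.S is
hand-written AArch64 assembly; the name rawmemchr_sketch is made up for this
example):

#include <string.h>
#include <stdint.h>

/* Sketch only: for a '\0' search, strlen is the fastest way to locate the
   terminator, so return s + strlen (s).  For any other byte, fall back to
   memchr with an effectively unbounded size (SIZE_MAX stands in for the
   "infinite size" mentioned above; rawmemchr assumes the byte is present,
   so the bound is never reached).  */
static void *
rawmemchr_sketch (const void *s, int c)
{
  if ((unsigned char) c == '\0')
    return (char *) s + strlen (s);
  return memchr (s, c, SIZE_MAX);
}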
Diffstat (limited to 'sysdeps/aarch64/strlen.S')
-rw-r--r--  sysdeps/aarch64/strlen.S  5
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/sysdeps/aarch64/strlen.S b/sysdeps/aarch64/strlen.S
index 9b4d1da60c..a07834bf96 100644
--- a/sysdeps/aarch64/strlen.S
+++ b/sysdeps/aarch64/strlen.S
@@ -84,7 +84,7 @@
whether the first fetch, which may be misaligned, crosses a page
boundary. */
-ENTRY_ALIGN (strlen, 6)
+ENTRY_ALIGN (__strlen, 6)
and tmp1, srcin, MIN_PAGE_SIZE - 1
mov zeroones, REP8_01
cmp tmp1, MIN_PAGE_SIZE - 16
@@ -213,5 +213,6 @@ L(page_cross):
csel data1, data1, tmp4, eq
csel data2, data2, tmp2, eq
b L(page_cross_entry)
-END (strlen)
+END (__strlen)
+weak_alias (__strlen, strlen)
libc_hidden_builtin_def (strlen)
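
The rename to __strlen plus the weak alias follows the usual glibc pattern for
letting in-libc callers (here the new rawmemchr) refer to the function by its
internal name rather than through the PLT, while keeping strlen exported.  A
rough C analogue of what this does, under the assumption that weak_alias
expands to a weak attribute alias as in glibc's libc-symbols.h:

#include <stddef.h>

/* The real definition lives under the internal name ...  */
size_t __strlen (const char *s);

/* ... and weak_alias (__strlen, strlen) exports the public name as a weak
   alias of it, roughly equivalent to: */
extern __typeof (__strlen) strlen __attribute__ ((weak, alias ("__strlen")));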