author | Ulrich Drepper <drepper@redhat.com> | 1999-07-27 04:43:32 +0000 |
---|---|---|
committer | Ulrich Drepper <drepper@redhat.com> | 1999-07-27 04:43:32 +0000 |
commit | 8cb079d41b2108d7a6db4c91a51156464912548b (patch) | |
tree | 6cbfca0ae13331d50e1559c50c9a128dec6082a0 /sysdeps/sparc/sparc64/strspn.S | |
parent | f05f5ca3857fbf83460003f12e81667c2f60851e (diff) | |
download | glibc-8cb079d41b2108d7a6db4c91a51156464912548b.tar.gz |
Update.
1999-07-25  Jakub Jelinek  <jj@ultra.linux.cz>
* sysdeps/sparc/sparc64/add_n.S: Avoid using %g2, %g3, %g7 registers
as much as possible. Declare them using .register pseudo-op if they
are still used.
* sysdeps/sparc/sparc64/lshift.S: Likewise.
* sysdeps/sparc/sparc64/memchr.S: Likewise.
* sysdeps/sparc/sparc64/memcmp.S: Likewise.
* sysdeps/sparc/sparc64/memcpy.S: Likewise.
* sysdeps/sparc/sparc64/memset.S: Likewise.
* sysdeps/sparc/sparc64/rawmemchr.S: Likewise.
* sysdeps/sparc/sparc64/rshift.S: Likewise.
* sysdeps/sparc/sparc64/stpcpy.S: Likewise.
* sysdeps/sparc/sparc64/stpncpy.S: Likewise.
* sysdeps/sparc/sparc64/strcat.S: Likewise.
* sysdeps/sparc/sparc64/strchr.S: Likewise.
* sysdeps/sparc/sparc64/strcmp.S: Likewise.
* sysdeps/sparc/sparc64/strcpy.S: Likewise.
* sysdeps/sparc/sparc64/strcspn.S: Likewise.
* sysdeps/sparc/sparc64/strlen.S: Likewise.
* sysdeps/sparc/sparc64/strncmp.S: Likewise.
* sysdeps/sparc/sparc64/strncpy.S: Likewise.
* sysdeps/sparc/sparc64/strpbrk.S: Likewise.
* sysdeps/sparc/sparc64/strspn.S: Likewise.
* sysdeps/sparc/sparc64/sub_n.S: Likewise.
* sysdeps/sparc/sparc64/dl-machine.h: Likewise.
Optimize trampoline code for .plt4-.plt32767.
Fix trampolines for .plt32768+.
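A brief note on the pattern behind these entries (an editorial sketch, not text from the commit): under the SPARC V9 ABI, %g2 and %g3 are application registers and %g7 is reserved for the system, so the 64-bit routines listed above move such values into free globals like %g4/%g5 where possible and declare any remaining use of %g2 to the assembler with the .register pseudo-op, as the strspn.S hunks below show. A minimal, hypothetical fragment of that pattern (ENTRY/END are glibc's sysdep.h macros; the routine name and body are made up):

	.register	%g2, #scratch		/* %g2 is still referenced below, so declare it */

ENTRY(example)					/* hypothetical routine, not part of this commit */
	mov	%o0, %g4			/* remember the start pointer in %g4, not %g7 */
	ldx	[%o0], %g2			/* scratch work may still use the declared %g2 */
	add	%o0, 8, %o0			/* pretend one 8-byte word was consumed */
	retl
	 sub	%o0, %g4, %o0			/* delay slot: return current - start */
END(example)

Without such a declaration a 64-bit assembler may reject stray references to %g2/%g3, which is presumably why the .register line is added only in files where those registers survive.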
Diffstat (limited to 'sysdeps/sparc/sparc64/strspn.S')
-rw-r--r-- | sysdeps/sparc/sparc64/strspn.S | 71 |
1 file changed, 36 insertions, 35 deletions
diff --git a/sysdeps/sparc/sparc64/strspn.S b/sysdeps/sparc/sparc64/strspn.S
index de440c0755..69e82d17b7 100644
--- a/sysdeps/sparc/sparc64/strspn.S
+++ b/sysdeps/sparc/sparc64/strspn.S
@@ -26,6 +26,7 @@
 #define XCC xcc
 #define STACK_SIZE	128
 #define STACK_OFFSET	128+0x7ff
+	.register	%g2, #scratch
 #else
 #define STACK_SIZE	64
 #define STACK_OFFSET	64
@@ -37,7 +38,7 @@ ENTRY(strspn)
 	sub	%sp, STACK_SIZE+32, %sp		/* IEU0 Group		*/
 	mov	1, %o4				/* IEU1			*/
 	stx	%g0, [%sp + STACK_OFFSET]	/* Store Group		*/
-	mov	%o0, %g7			/* IEU0			*/
+	mov	%o0, %g4			/* IEU0			*/
 	stx	%g0, [%sp + STACK_OFFSET + 8]	/* Store Group		*/
 	add	%sp, STACK_OFFSET, %o5		/* IEU0			*/
@@ -79,74 +80,74 @@
 	ldx	[%o0], %o2			/* Load Group		*/
 4:	srlx	%o2, 59, %o3			/* IEU0 Group		*/
-	srlx	%o2, 56, %g3			/* IEU0 Group		*/
+	srlx	%o2, 56, %g5			/* IEU0 Group		*/
 5:	and	%o3, 0x18, %o3			/* IEU1			*/
-	andcc	%g3, 0x3f, %g3			/* IEU1 Group		*/
+	andcc	%g5, 0x3f, %g5			/* IEU1 Group		*/
 	ldx	[%o5 + %o3], %g2		/* Load			*/
 	srlx	%o2, 51, %o3			/* IEU0			*/
-	sllx	%o4, %g3, %g1			/* IEU0 Group		*/
-	srlx	%o2, 48, %g3			/* IEU0 Group		*/
+	sllx	%o4, %g5, %g1			/* IEU0 Group		*/
+	srlx	%o2, 48, %g5			/* IEU0 Group		*/
 	andcc	%g2, %g1, %g2			/* IEU1			*/
 	be,pn	%xcc, 13f			/* CTI			*/
 	and	%o3, 0x18, %o3			/* IEU0 Group		*/
-	and	%g3, 0x3f, %g3			/* IEU1			*/
+	and	%g5, 0x3f, %g5			/* IEU1			*/
 	ldx	[%o5 + %o3], %g2		/* Load Group		*/
 	srlx	%o2, 43, %o3			/* IEU0			*/
-	sllx	%o4, %g3, %g1			/* IEU0 Group		*/
-	srlx	%o2, 40, %g3			/* IEU0 Group		*/
+	sllx	%o4, %g5, %g1			/* IEU0 Group		*/
+	srlx	%o2, 40, %g5			/* IEU0 Group		*/
 	andcc	%g2, %g1, %g2			/* IEU1			*/
 	be,pn	%xcc, 14f			/* CTI			*/
 	and	%o3, 0x18, %o3			/* IEU0 Group		*/
-	and	%g3, 0x3f, %g3			/* IEU1			*/
+	and	%g5, 0x3f, %g5			/* IEU1			*/
 	ldx	[%o5 + %o3], %g2		/* Load Group		*/
 	srlx	%o2, 35, %o3			/* IEU0			*/
-	sllx	%o4, %g3, %g1			/* IEU0 Group		*/
-	srlx	%o2, 32, %g3			/* IEU0 Group		*/
+	sllx	%o4, %g5, %g1			/* IEU0 Group		*/
+	srlx	%o2, 32, %g5			/* IEU0 Group		*/
 	andcc	%g2, %g1, %g2			/* IEU1			*/
 	be,pn	%xcc, 15f			/* CTI			*/
 	and	%o3, 0x18, %o3			/* IEU0 Group		*/
-	and	%g3, 0x3f, %g3			/* IEU1			*/
+	and	%g5, 0x3f, %g5			/* IEU1			*/
 	ldx	[%o5 + %o3], %g2		/* Load Group		*/
 	srlx	%o2, 27, %o3			/* IEU0			*/
-	sllx	%o4, %g3, %g1			/* IEU0 Group		*/
-	srlx	%o2, 24, %g3			/* IEU0 Group		*/
+	sllx	%o4, %g5, %g1			/* IEU0 Group		*/
+	srlx	%o2, 24, %g5			/* IEU0 Group		*/
 	andcc	%g2, %g1, %g2			/* IEU1			*/
 	be,pn	%xcc, 16f			/* CTI			*/
 	and	%o3, 0x18, %o3			/* IEU0 Group		*/
-	and	%g3, 0x3f, %g3			/* IEU1			*/
+	and	%g5, 0x3f, %g5			/* IEU1			*/
 	ldx	[%o5 + %o3], %g2		/* Load Group		*/
 	srlx	%o2, 19, %o3			/* IEU0			*/
-	sllx	%o4, %g3, %g1			/* IEU0 Group		*/
-	srlx	%o2, 16, %g3			/* IEU0 Group		*/
+	sllx	%o4, %g5, %g1			/* IEU0 Group		*/
+	srlx	%o2, 16, %g5			/* IEU0 Group		*/
 	andcc	%g2, %g1, %g2			/* IEU1			*/
 	be,pn	%xcc, 17f			/* CTI			*/
 	and	%o3, 0x18, %o3			/* IEU0 Group		*/
-	and	%g3, 0x3f, %g3			/* IEU1			*/
+	and	%g5, 0x3f, %g5			/* IEU1			*/
 	ldx	[%o5 + %o3], %g2		/* Load Group		*/
 	srlx	%o2, 11, %o3			/* IEU0			*/
-	sllx	%o4, %g3, %g1			/* IEU0 Group		*/
+	sllx	%o4, %g5, %g1			/* IEU0 Group		*/
 	add	%o0, 8, %o0			/* IEU1			*/
-	srlx	%o2, 8, %g3			/* IEU0 Group		*/
+	srlx	%o2, 8, %g5			/* IEU0 Group		*/
 	andcc	%g2, %g1, %g2			/* IEU1			*/
 	be,pn	%xcc, 18f			/* CTI			*/
 	and	%o3, 0x18, %o3			/* IEU0 Group		*/
-	and	%g3, 0x3f, %g3			/* IEU1			*/
+	and	%g5, 0x3f, %g5			/* IEU1			*/
 	ldx	[%o5 + %o3], %g2		/* Load Group		*/
-	sllx	%o4, %g3, %g1			/* IEU0			*/
-	mov	%o2, %g3			/* IEU1			*/
+	sllx	%o4, %g5, %g1			/* IEU0			*/
+	mov	%o2, %g5			/* IEU1			*/
 	srlx	%o2, 3, %o3			/* IEU0 Group		*/
 	ldxa	[%o0] ASI_PNF, %o2		/* Load			*/
@@ -154,58 +155,58 @@ ENTRY(strspn)
 	be,pn	%xcc, 19f			/* CTI			*/
 	and	%o3, 0x18, %o3			/* IEU0 Group		*/
-	and	%g3, 0x3f, %g3			/* IEU1			*/
+	and	%g5, 0x3f, %g5			/* IEU1			*/
 	ldx	[%o5 + %o3], %g2		/* Load Group		*/
-	sllx	%o4, %g3, %g1			/* IEU0			*/
+	sllx	%o4, %g5, %g1			/* IEU0			*/
 	srlx	%o2, 59, %o3			/* IEU0 Group		*/
 	andcc	%g2, %g1, %g2			/* IEU1 Group		*/
 	bne,pt	%xcc, 5b			/* CTI			*/
-	srlx	%o2, 56, %g3			/* IEU0 Group		*/
+	srlx	%o2, 56, %g5			/* IEU0 Group		*/
 	sub	%o0, 1, %o0			/* IEU1			*/
 	add	%sp, STACK_SIZE+32, %sp		/* IEU0 Group		*/
 	retl					/* CTI+IEU1 Group	*/
-	sub	%o0, %g7, %o0			/* IEU0			*/
+	sub	%o0, %g4, %o0			/* IEU0			*/
 	.align	16
 19:	sub	%o0, 2, %o0			/* IEU1			*/
 	add	%sp, STACK_SIZE+32, %sp		/* IEU0 Group		*/
 	retl					/* CTI+IEU1 Group	*/
-	sub	%o0, %g7, %o0			/* IEU0			*/
+	sub	%o0, %g4, %o0			/* IEU0			*/
 18:	sub	%o0, 3, %o0			/* IEU1			*/
 	add	%sp, STACK_SIZE+32, %sp		/* IEU0 Group		*/
 	retl					/* CTI+IEU1 Group	*/
-	sub	%o0, %g7, %o0			/* IEU0			*/
+	sub	%o0, %g4, %o0			/* IEU0			*/
 17:	add	%o0, 4, %o0			/* IEU1			*/
 	add	%sp, STACK_SIZE+32, %sp		/* IEU0 Group		*/
 	retl					/* CTI+IEU1 Group	*/
-	sub	%o0, %g7, %o0			/* IEU0			*/
+	sub	%o0, %g4, %o0			/* IEU0			*/
 16:	add	%o0, 3, %o0			/* IEU1			*/
 	add	%sp, STACK_SIZE+32, %sp		/* IEU0 Group		*/
 	retl					/* CTI+IEU1 Group	*/
-	sub	%o0, %g7, %o0			/* IEU0			*/
+	sub	%o0, %g4, %o0			/* IEU0			*/
 15:	add	%o0, 2, %o0			/* IEU1			*/
 	add	%sp, STACK_SIZE+32, %sp		/* IEU0 Group		*/
 	retl					/* CTI+IEU1 Group	*/
-	sub	%o0, %g7, %o0			/* IEU0			*/
+	sub	%o0, %g4, %o0			/* IEU0			*/
 14:	add	%o0, 1, %o0			/* IEU1			*/
 	add	%sp, STACK_SIZE+32, %sp		/* IEU0 Group		*/
 	retl					/* CTI+IEU1 Group	*/
-	sub	%o0, %g7, %o0			/* IEU0			*/
+	sub	%o0, %g4, %o0			/* IEU0			*/
 13:	add	%sp, STACK_SIZE+32, %sp		/* IEU1			*/
 	retl					/* CTI+IEU1 Group	*/
-	sub	%o0, %g7, %o0			/* IEU0			*/
+	sub	%o0, %g4, %o0			/* IEU0			*/
 	.align	16
 12:	sub	%o0, 1, %o0			/* IEU0 Group		*/
 	add	%sp, STACK_SIZE+32, %sp		/* IEU1			*/
 	retl					/* CTI+IEU1 Group	*/
-	sub	%o0, %g7, %o0			/* IEU0			*/
+	sub	%o0, %g4, %o0			/* IEU0			*/
 END(strspn)