summaryrefslogtreecommitdiff
path: root/sysdeps/powerpc/add_n.S
diff options
context:
space:
mode:
authorUlrich Drepper <drepper@redhat.com>2000-02-28 22:36:31 +0000
committerUlrich Drepper <drepper@redhat.com>2000-02-28 22:36:31 +0000
commit4e141d64a5e933013a479f4e3695f5e9a5a86059 (patch)
tree99ddae8e5664d52ef5999f20e50833f8537e8cde /sysdeps/powerpc/add_n.S
parentb041ebc280681f391cd0a3b4d70e08ec72179952 (diff)
downloadglibc-4e141d64a5e933013a479f4e3695f5e9a5a86059.tar.gz
Update.
* sysdeps/powerpc/fpu/s_copysign.S: Use L() instead of local labels.
* sysdeps/powerpc/submul_1.S: Likewise.
* sysdeps/powerpc/sub_n.S: Likewise.
* sysdeps/powerpc/strcpy.S: Likewise.
* sysdeps/powerpc/strcmp.S: Likewise.
* sysdeps/powerpc/stpcpy.S: Likewise.
* sysdeps/powerpc/rshift.S: Likewise.
* sysdeps/powerpc/mul_1.S: Likewise.
* sysdeps/powerpc/memset.S: Likewise.
* sysdeps/powerpc/lshift.S: Likewise.
* sysdeps/powerpc/addmul_1.S: Likewise.
* sysdeps/powerpc/add_n.S: Likewise.
Diffstat (limited to 'sysdeps/powerpc/add_n.S')
-rw-r--r--sysdeps/powerpc/add_n.S16
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/sysdeps/powerpc/add_n.S b/sysdeps/powerpc/add_n.S
index 88f5b9cf7a..7b683b7be4 100644
--- a/sysdeps/powerpc/add_n.S
+++ b/sysdeps/powerpc/add_n.S
@@ -1,5 +1,5 @@
/* Add two limb vectors of equal, non-zero length for PowerPC.
- Copyright (C) 1997, 1999 Free Software Foundation, Inc.
+ Copyright (C) 1997, 1999, 2000 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
@@ -33,7 +33,7 @@ EALIGN(__mpn_add_n,3,0)
srwi. r7,r6,1
li r10,0
mtctr r7
- bt 31,2f
+ bt 31,L(2)
/* Clear the carry. */
addic r0,r0,0
@@ -41,19 +41,19 @@ EALIGN(__mpn_add_n,3,0)
addi r3,r3,-4
addi r4,r4,-4
addi r5,r5,-4
- b 0f
+ b L(0)
-2: lwz r7,0(r5)
+L(2): lwz r7,0(r5)
lwz r6,0(r4)
addc r6,r6,r7
stw r6,0(r3)
- beq 1f
+ beq L(1)
/* The loop. */
/* Align start of loop to an odd word boundary to guarantee that the
last two words can be fetched in one access (for 601). */
-0: lwz r9,4(r4)
+L(0): lwz r9,4(r4)
lwz r8,4(r5)
lwzu r6,8(r4)
lwzu r7,8(r5)
@@ -61,8 +61,8 @@ EALIGN(__mpn_add_n,3,0)
stw r8,4(r3)
adde r6,r6,r7
stwu r6,8(r3)
- bdnz 0b
+ bdnz L(0)
/* Return the carry. */
-1: addze r3,r10
+L(1): addze r3,r10
blr
END(__mpn_add_n)