diff options
Diffstat (limited to 'sysdeps/i386/i586')
-rw-r--r-- | sysdeps/i386/i586/add_n.S | 20 | ||||
-rw-r--r-- | sysdeps/i386/i586/addmul_1.S | 18 | ||||
-rw-r--r-- | sysdeps/i386/i586/lshift.S | 37 | ||||
-rw-r--r-- | sysdeps/i386/i586/memcpy.S | 10 | ||||
-rw-r--r-- | sysdeps/i386/i586/memset.S | 6 | ||||
-rw-r--r-- | sysdeps/i386/i586/mul_1.S | 18 | ||||
-rw-r--r-- | sysdeps/i386/i586/rshift.S | 37 | ||||
-rw-r--r-- | sysdeps/i386/i586/strchr.S | 35 | ||||
-rw-r--r-- | sysdeps/i386/i586/strcpy.S | 16 | ||||
-rw-r--r-- | sysdeps/i386/i586/sub_n.S | 20 | ||||
-rw-r--r-- | sysdeps/i386/i586/submul_1.S | 18 |
11 files changed, 215 insertions, 20 deletions
diff --git a/sysdeps/i386/i586/add_n.S b/sysdeps/i386/i586/add_n.S index c2afc37ee3..57706b23b4 100644 --- a/sysdeps/i386/i586/add_n.S +++ b/sysdeps/i386/i586/add_n.S @@ -1,6 +1,6 @@ /* Pentium __mpn_add_n -- Add two limb vectors of the same length > 0 and store sum in a third limb vector. - Copyright (C) 1992, 94, 95, 96, 97, 98, 2000 Free Software Foundation, Inc. + Copyright (C) 1992,94,95,96,97,98,2000,2005 Free Software Foundation, Inc. This file is part of the GNU MP Library. The GNU MP Library is free software; you can redistribute it and/or modify @@ -34,13 +34,20 @@ ENTRY (BP_SYM (__mpn_add_n)) ENTER pushl %edi + cfi_adjust_cfa_offset (4) pushl %esi + cfi_adjust_cfa_offset (4) pushl %ebp + cfi_adjust_cfa_offset (4) pushl %ebx + cfi_adjust_cfa_offset (4) movl RES(%esp),%edi + cfi_rel_offset (edi, 12) movl S1(%esp),%esi + cfi_rel_offset (esi, 8) movl S2(%esp),%ebx + cfi_rel_offset (ebx, 0) movl SIZE(%esp),%ecx #if __BOUNDED_POINTERS__ shll $2, %ecx /* convert limbs to bytes */ @@ -50,6 +57,7 @@ ENTRY (BP_SYM (__mpn_add_n)) shrl $2, %ecx #endif movl (%ebx),%ebp + cfi_rel_offset (ebp, 4) decl %ecx movl %ecx,%edx @@ -58,6 +66,7 @@ ENTRY (BP_SYM (__mpn_add_n)) testl %ecx,%ecx /* zero carry flag */ jz L(end) pushl %edx + cfi_adjust_cfa_offset (4) ALIGN (3) L(oop): movl 28(%edi),%eax /* fetch destination cache line */ @@ -105,6 +114,7 @@ L(4): movl 24(%esi),%eax jnz L(oop) popl %edx + cfi_adjust_cfa_offset (-4) L(end): decl %edx /* test %edx w/o clobbering carry */ js L(end2) @@ -128,9 +138,17 @@ L(end2): negl %eax popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %ebp + cfi_adjust_cfa_offset (-4) + cfi_restore (ebp) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE ret diff --git a/sysdeps/i386/i586/addmul_1.S b/sysdeps/i386/i586/addmul_1.S index 9329637fe2..18955e36d9 100644 --- a/sysdeps/i386/i586/addmul_1.S +++ b/sysdeps/i386/i586/addmul_1.S @@ -1,6 +1,6 @@ /* Pentium 
__mpn_addmul_1 -- Multiply a limb vector with a limb and add the result to a second limb vector. - Copyright (C) 1992, 94, 96, 97, 98, 00 Free Software Foundation, Inc. + Copyright (C) 1992, 94, 96, 97, 98, 00, 2005 Free Software Foundation, Inc. This file is part of the GNU MP Library. The GNU MP Library is free software; you can redistribute it and/or modify @@ -39,14 +39,21 @@ ENTRY (BP_SYM (__mpn_addmul_1)) ENTER pushl %edi + cfi_adjust_cfa_offset (4) pushl %esi + cfi_adjust_cfa_offset (4) pushl %ebp + cfi_adjust_cfa_offset (4) pushl %ebx + cfi_adjust_cfa_offset (4) movl RES(%esp), %res_ptr + cfi_rel_offset (res_ptr, 12) movl S1(%esp), %s1_ptr + cfi_rel_offset (s1_ptr, 8) movl SIZE(%esp), %size movl S2LIMB(%esp), %s2_limb + cfi_rel_offset (s2_limb, 0) #if __BOUNDED_POINTERS__ shll $2, %size /* convert limbs to bytes */ CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %size) @@ -57,6 +64,7 @@ ENTRY (BP_SYM (__mpn_addmul_1)) leal (%s1_ptr,%size,4), %s1_ptr negl %size xorl %ebp, %ebp + cfi_rel_offset (ebp, 4) ALIGN (3) L(oop): adcl $0, %ebp @@ -79,9 +87,17 @@ L(oop): adcl $0, %ebp adcl $0, %ebp movl %ebp, %eax popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %ebp + cfi_adjust_cfa_offset (-4) + cfi_restore (ebp) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE ret diff --git a/sysdeps/i386/i586/lshift.S b/sysdeps/i386/i586/lshift.S index 59d587934e..bc73ee6f3c 100644 --- a/sysdeps/i386/i586/lshift.S +++ b/sysdeps/i386/i586/lshift.S @@ -1,5 +1,5 @@ /* Pentium optimized __mpn_lshift -- - Copyright (C) 1992, 94, 95, 96, 97, 98, 2000 Free Software Foundation, Inc. + Copyright (C) 1992,94,95,96,97,98,2000,2005 Free Software Foundation, Inc. This file is part of the GNU C Library. 
The GNU C Library is free software; you can redistribute it and/or @@ -33,13 +33,21 @@ ENTRY (BP_SYM (__mpn_lshift)) ENTER pushl %edi + cfi_adjust_cfa_offset (4) pushl %esi + cfi_adjust_cfa_offset (4) pushl %ebp + cfi_adjust_cfa_offset (4) + cfi_rel_offset (ebp, 0) pushl %ebx + cfi_adjust_cfa_offset (4) movl RES(%esp),%edi + cfi_rel_offset (edi, 12) movl S(%esp),%esi + cfi_rel_offset (esi, 8) movl SIZE(%esp),%ebx + cfi_rel_offset (ebx, 0) movl CNT(%esp),%ecx #if __BOUNDED_POINTERS__ shll $2, %ebx /* convert limbs to bytes */ @@ -67,9 +75,11 @@ L(normal): xorl %eax,%eax shldl %cl,%edx,%eax /* compute carry limb */ pushl %eax /* push carry limb onto stack */ + cfi_adjust_cfa_offset (4) decl %ebx pushl %ebx + cfi_adjust_cfa_offset (4) shrl $3,%ebx jz L(end) @@ -113,6 +123,7 @@ L(oop): movl -28(%edi),%eax /* fetch destination cache line */ jnz L(oop) L(end): popl %ebx + cfi_adjust_cfa_offset (-4) andl $7,%ebx jz L(end2) L(oop2): @@ -130,11 +141,20 @@ L(end2): movl %edx,(%edi) /* store it */ popl %eax /* pop carry limb */ + cfi_adjust_cfa_offset (-4) popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %ebp + cfi_adjust_cfa_offset (-4) + cfi_restore (ebp) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE ret @@ -144,12 +164,18 @@ L(end2): function is documented to work for overlapping source and destination. 
*/ + cfi_adjust_cfa_offset (16) + cfi_rel_offset (edi, 12) + cfi_rel_offset (esi, 8) + cfi_rel_offset (ebp, 4) + cfi_rel_offset (ebx, 0) L(special): movl (%esi),%edx addl $4,%esi decl %ebx pushl %ebx + cfi_adjust_cfa_offset (4) shrl $3,%ebx addl %edx,%edx @@ -199,6 +225,7 @@ L(Loop): L(Lend): popl %ebx + cfi_adjust_cfa_offset (-4) sbbl %eax,%eax /* save carry in %eax */ andl $7,%ebx jz L(Lend2) @@ -223,9 +250,17 @@ L(L1): movl %edx,(%edi) /* store last limb */ negl %eax popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %ebp + cfi_adjust_cfa_offset (-4) + cfi_restore (ebp) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE ret diff --git a/sysdeps/i386/i586/memcpy.S b/sysdeps/i386/i586/memcpy.S index d8181ffa1b..1fa267ef50 100644 --- a/sysdeps/i386/i586/memcpy.S +++ b/sysdeps/i386/i586/memcpy.S @@ -1,5 +1,5 @@ /* Highly optimized version for i586. - Copyright (C) 1997, 2000, 2003 Free Software Foundation, Inc. + Copyright (C) 1997, 2000, 2003, 2005 Free Software Foundation, Inc. This file is part of the GNU C Library. Contributed by Ulrich Drepper <drepper@cygnus.com>, 1997. @@ -39,10 +39,14 @@ ENTRY (BP_SYM (memcpy)) ENTER pushl %edi + cfi_adjust_cfa_offset (4) pushl %esi + cfi_adjust_cfa_offset (4) movl DEST(%esp), %edi + cfi_rel_offset (edi, 4) movl SRC(%esp), %esi + cfi_rel_offset (esi, 0) movl LEN(%esp), %ecx CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %ecx) CHECK_BOUNDS_BOTH_WIDE (%esi, SRC(%esp), %ecx) @@ -111,7 +115,11 @@ L(1): rep; movsb #endif popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE RET_PTR diff --git a/sysdeps/i386/i586/memset.S b/sysdeps/i386/i586/memset.S index 0b59849f9e..c21e9f7a71 100644 --- a/sysdeps/i386/i586/memset.S +++ b/sysdeps/i386/i586/memset.S @@ -1,6 +1,6 @@ /* memset/bzero -- set memory area to CH/0 Highly optimized version for ix86, x>=5. 
- Copyright (C) 1996, 1997, 2000, 2003 Free Software Foundation, Inc. + Copyright (C) 1996, 1997, 2000, 2003, 2005 Free Software Foundation, Inc. This file is part of the GNU C Library. Contributed by Torbjorn Granlund, <tege@matematik.su.se> @@ -42,8 +42,10 @@ ENTRY (BP_SYM (memset)) ENTER pushl %edi + cfi_adjust_cfa_offset (4) movl DEST(%esp), %edi + cfi_rel_offset (edi, 0) movl LEN(%esp), %edx CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %edx) #if BZERO_P @@ -106,6 +108,8 @@ L(2): shrl $2, %ecx /* convert byte count to longword count */ RETURN_BOUNDED_POINTER (DEST(%esp)) #endif popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE #if BZERO_P diff --git a/sysdeps/i386/i586/mul_1.S b/sysdeps/i386/i586/mul_1.S index f7865697e6..b537c709a3 100644 --- a/sysdeps/i386/i586/mul_1.S +++ b/sysdeps/i386/i586/mul_1.S @@ -1,6 +1,6 @@ /* Pentium __mpn_mul_1 -- Multiply a limb vector with a limb and store the result in a second limb vector. - Copyright (C) 1992, 94, 96, 97, 98, 00 Free Software Foundation, Inc. + Copyright (C) 1992, 94, 96, 97, 98, 00, 2005 Free Software Foundation, Inc. This file is part of the GNU MP Library. 
The GNU MP Library is free software; you can redistribute it and/or modify @@ -39,14 +39,21 @@ ENTRY (BP_SYM (__mpn_mul_1)) ENTER pushl %edi + cfi_adjust_cfa_offset (4) pushl %esi + cfi_adjust_cfa_offset (4) pushl %ebp + cfi_adjust_cfa_offset (4) pushl %ebx + cfi_adjust_cfa_offset (4) movl RES(%esp), %res_ptr + cfi_rel_offset (res_ptr, 12) movl S1(%esp), %s1_ptr + cfi_rel_offset (s1_ptr, 8) movl SIZE(%esp), %size movl S2LIMB(%esp), %s2_limb + cfi_rel_offset (s2_limb, 0) #if __BOUNDED_POINTERS__ shll $2, %size /* convert limbs to bytes */ CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %size) @@ -57,6 +64,7 @@ ENTRY (BP_SYM (__mpn_mul_1)) leal (%s1_ptr,%size,4), %s1_ptr negl %size xorl %ebp, %ebp + cfi_rel_offset (ebp, 4) ALIGN (3) L(oop): adcl $0, %ebp @@ -75,9 +83,17 @@ L(oop): adcl $0, %ebp adcl $0, %ebp movl %ebp, %eax popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %ebp + cfi_adjust_cfa_offset (-4) + cfi_restore (ebp) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE ret diff --git a/sysdeps/i386/i586/rshift.S b/sysdeps/i386/i586/rshift.S index db9326a442..29eded369b 100644 --- a/sysdeps/i386/i586/rshift.S +++ b/sysdeps/i386/i586/rshift.S @@ -1,5 +1,5 @@ /* Pentium optimized __mpn_rshift -- - Copyright (C) 1992, 94, 95, 96, 97, 98, 2000 Free Software Foundation, Inc. + Copyright (C) 1992,94,95,96,97,98,2000,2005 Free Software Foundation, Inc. This file is part of the GNU MP Library. 
The GNU MP Library is free software; you can redistribute it and/or modify @@ -33,13 +33,21 @@ ENTRY (BP_SYM (__mpn_rshift)) ENTER pushl %edi + cfi_adjust_cfa_offset (4) pushl %esi + cfi_adjust_cfa_offset (4) pushl %ebp + cfi_adjust_cfa_offset (4) + cfi_rel_offset (ebp, 0) pushl %ebx + cfi_adjust_cfa_offset (4) movl RES(%esp),%edi + cfi_rel_offset (edi, 12) movl S(%esp),%esi + cfi_rel_offset (esi, 8) movl SIZE(%esp),%ebx + cfi_rel_offset (ebx, 0) movl CNT(%esp),%ecx #if __BOUNDED_POINTERS__ shll $2, %ebx /* convert limbs to bytes */ @@ -64,9 +72,11 @@ L(normal): xorl %eax,%eax shrdl %cl,%edx,%eax /* compute carry limb */ pushl %eax /* push carry limb onto stack */ + cfi_adjust_cfa_offset (4) decl %ebx pushl %ebx + cfi_adjust_cfa_offset (4) shrl $3,%ebx jz L(end) @@ -110,6 +120,7 @@ L(oop): movl 28(%edi),%eax /* fetch destination cache line */ jnz L(oop) L(end): popl %ebx + cfi_adjust_cfa_offset (-4) andl $7,%ebx jz L(end2) L(oop2): @@ -127,11 +138,20 @@ L(end2): movl %edx,(%edi) /* store it */ popl %eax /* pop carry limb */ + cfi_adjust_cfa_offset (-4) popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %ebp + cfi_adjust_cfa_offset (-4) + cfi_restore (ebp) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE ret @@ -141,6 +161,11 @@ L(end2): function is documented to work for overlapping source and destination. 
*/ + cfi_adjust_cfa_offset (16) + cfi_rel_offset (edi, 12) + cfi_rel_offset (esi, 8) + cfi_rel_offset (ebp, 4) + cfi_rel_offset (ebx, 0) L(special): leal -4(%edi,%ebx,4),%edi leal -4(%esi,%ebx,4),%esi @@ -150,6 +175,7 @@ L(special): decl %ebx pushl %ebx + cfi_adjust_cfa_offset (4) shrl $3,%ebx shrl $1,%edx @@ -199,6 +225,7 @@ L(Loop): L(Lend): popl %ebx + cfi_adjust_cfa_offset (-4) sbbl %eax,%eax /* save carry in %eax */ andl $7,%ebx jz L(Lend2) @@ -223,9 +250,17 @@ L(L1): movl %edx,(%edi) /* store last limb */ rcrl $1,%eax popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %ebp + cfi_adjust_cfa_offset (-4) + cfi_restore (ebp) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE ret diff --git a/sysdeps/i386/i586/strchr.S b/sysdeps/i386/i586/strchr.S index 9df504d335..136b19a3f3 100644 --- a/sysdeps/i386/i586/strchr.S +++ b/sysdeps/i386/i586/strchr.S @@ -1,6 +1,6 @@ /* Find character CH in a NUL terminated string. Highly optimized version for ix85, x>=5. - Copyright (C) 1995, 1996, 1997, 2000, 2003 Free Software Foundation, Inc. + Copyright (C) 1995,1996,1997,2000,2003,2005 Free Software Foundation, Inc. This file is part of the GNU C Library. Contributed by Ulrich Drepper, <drepper@gnu.ai.mit.edu>. @@ -47,16 +47,21 @@ ENTRY (BP_SYM (strchr)) ENTER pushl %edi /* Save callee-safe registers. */ + cfi_adjust_cfa_offset (4) pushl %esi + cfi_adjust_cfa_offset (4) pushl %ebx + cfi_adjust_cfa_offset (4) pushl %ebp + cfi_adjust_cfa_offset (4) movl STR(%esp), %eax movl CHR(%esp), %edx CHECK_BOUNDS_LOW (%eax, STR(%esp)) movl %eax, %edi /* duplicate string pointer for later */ + cfi_rel_offset (edi, 12) xorl %ecx, %ecx /* clear %ecx */ /* At the moment %edx contains C. 
What we need for the @@ -107,6 +112,10 @@ L(0): movb (%eax), %cl /* load single byte */ incl %eax /* increment pointer */ + cfi_rel_offset (esi, 8) + cfi_rel_offset (ebx, 4) + cfi_rel_offset (ebp, 0) + /* The following code is the preparation for the loop. The four instruction up to `L1' will not be executed in the loop because the same code is found at the end of the loop, but @@ -283,15 +292,28 @@ L(5): subl $4, %eax /* adjust pointer */ L(2): CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb) RETURN_BOUNDED_POINTER (STR(%esp)) - popl %ebp /* restore saved registers */ +L(out): popl %ebp /* restore saved registers */ + cfi_adjust_cfa_offset (-4) + cfi_restore (ebp) popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE RET_PTR + cfi_adjust_cfa_offset (16) + cfi_rel_offset (edi, 12) + cfi_rel_offset (esi, 8) + cfi_rel_offset (ebx, 4) + cfi_rel_offset (ebp, 0) /* We know there is a NUL byte in the word. But we have to test whether there is an C byte before it in the word. */ L(4): subl $4, %eax /* adjust pointer */ @@ -327,14 +349,7 @@ L(4): subl $4, %eax /* adjust pointer */ L(3): xorl %eax, %eax RETURN_NULL_BOUNDED_POINTER - popl %ebp /* restore saved registers */ - popl %ebx - - popl %esi - popl %edi - - LEAVE - RET_PTR + jmp L(out) END (BP_SYM (strchr)) #undef index diff --git a/sysdeps/i386/i586/strcpy.S b/sysdeps/i386/i586/strcpy.S index f7c1986b4b..5426e59749 100644 --- a/sysdeps/i386/i586/strcpy.S +++ b/sysdeps/i386/i586/strcpy.S @@ -1,5 +1,5 @@ /* strcpy/stpcpy implementation for i586. - Copyright (C) 1997, 2000, 2003 Free Software Foundation, Inc. + Copyright (C) 1997, 2000, 2003, 2005 Free Software Foundation, Inc. This file is part of the GNU C Library. Contributed by Ulrich Drepper <drepper@cygnus.com>, 1997. 
@@ -39,11 +39,16 @@ ENTRY (BP_SYM (STRCPY)) ENTER pushl %edi + cfi_adjust_cfa_offset (4) pushl %esi + cfi_adjust_cfa_offset (4) pushl %ebx + cfi_adjust_cfa_offset (4) movl DEST(%esp), %edi + cfi_rel_offset (edi, 8) movl SRC(%esp), %esi + cfi_rel_offset (esi, 4) CHECK_BOUNDS_LOW (%edi, DEST(%esp)) CHECK_BOUNDS_LOW (%esi, SRC(%esp)) @@ -51,11 +56,14 @@ ENTRY (BP_SYM (STRCPY)) leal -1(%esi), %ecx movl $magic, %ebx + cfi_rel_offset (ebx, 0) andl $3, %ecx #ifdef PIC call 2f + cfi_adjust_cfa_offset (4) 2: popl %edx + cfi_adjust_cfa_offset (-4) /* 0xb is the distance between 2: and 1: but we avoid writing 1f-2b because the assembler generates worse code. */ leal 0xb(%edx,%ecx,8), %ecx @@ -153,8 +161,14 @@ L(end2): #endif RETURN_BOUNDED_POINTER (DEST(%esp)) popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE RET_PTR diff --git a/sysdeps/i386/i586/sub_n.S b/sysdeps/i386/i586/sub_n.S index fcc9cba4ad..1c40a80082 100644 --- a/sysdeps/i386/i586/sub_n.S +++ b/sysdeps/i386/i586/sub_n.S @@ -1,6 +1,6 @@ /* Pentium __mpn_sub_n -- Subtract two limb vectors of the same length > 0 and store difference in a third limb vector. - Copyright (C) 1992, 94, 95, 96, 97, 98, 2000 Free Software Foundation, Inc. + Copyright (C) 1992,94,95,96,97,98,2000,2005 Free Software Foundation, Inc. This file is part of the GNU MP Library. 
The GNU MP Library is free software; you can redistribute it and/or modify @@ -34,13 +34,20 @@ ENTRY (BP_SYM (__mpn_sub_n)) ENTER pushl %edi + cfi_adjust_cfa_offset (4) pushl %esi + cfi_adjust_cfa_offset (4) pushl %ebp + cfi_adjust_cfa_offset (4) pushl %ebx + cfi_adjust_cfa_offset (4) movl RES(%esp),%edi + cfi_rel_offset (edi, 12) movl S1(%esp),%esi + cfi_rel_offset (esi, 8) movl S2(%esp),%ebx + cfi_rel_offset (ebx, 0) movl SIZE(%esp),%ecx #if __BOUNDED_POINTERS__ shll $2, %ecx /* convert limbs to bytes */ @@ -49,6 +56,7 @@ ENTRY (BP_SYM (__mpn_sub_n)) CHECK_BOUNDS_BOTH_WIDE (%ebx, S2(%esp), %ecx) shrl $2, %ecx #endif + cfi_rel_offset (ebp, 4) movl (%ebx),%ebp decl %ecx @@ -58,6 +66,7 @@ ENTRY (BP_SYM (__mpn_sub_n)) testl %ecx,%ecx /* zero carry flag */ jz L(end) pushl %edx + cfi_adjust_cfa_offset (4) ALIGN (3) L(oop): movl 28(%edi),%eax /* fetch destination cache line */ @@ -105,6 +114,7 @@ L(4): movl 24(%esi),%eax jnz L(oop) popl %edx + cfi_adjust_cfa_offset (-4) L(end): decl %edx /* test %edx w/o clobbering carry */ js L(end2) @@ -128,9 +138,17 @@ L(end2): negl %eax popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %ebp + cfi_adjust_cfa_offset (-4) + cfi_restore (ebp) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE ret diff --git a/sysdeps/i386/i586/submul_1.S b/sysdeps/i386/i586/submul_1.S index 542200110f..b9e578fde4 100644 --- a/sysdeps/i386/i586/submul_1.S +++ b/sysdeps/i386/i586/submul_1.S @@ -1,6 +1,6 @@ /* Pentium __mpn_submul_1 -- Multiply a limb vector with a limb and subtract the result from a second limb vector. - Copyright (C) 1992, 94, 96, 97, 98, 00 Free Software Foundation, Inc. + Copyright (C) 1992, 94, 96, 97, 98, 00, 2005 Free Software Foundation, Inc. This file is part of the GNU MP Library. 
The GNU MP Library is free software; you can redistribute it and/or modify @@ -39,14 +39,21 @@ ENTRY (BP_SYM (__mpn_submul_1)) ENTER pushl %edi + cfi_adjust_cfa_offset (4) pushl %esi + cfi_adjust_cfa_offset (4) pushl %ebp + cfi_adjust_cfa_offset (4) pushl %ebx + cfi_adjust_cfa_offset (4) movl RES(%esp), %res_ptr + cfi_rel_offset (res_ptr, 12) movl S1(%esp), %s1_ptr + cfi_rel_offset (s1_ptr, 8) movl SIZE(%esp), %size movl S2LIMB(%esp), %s2_limb + cfi_rel_offset (s2_limb, 0) #if __BOUNDED_POINTERS__ shll $2, %sizeP /* convert limbs to bytes */ CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %sizeP) @@ -57,6 +64,7 @@ ENTRY (BP_SYM (__mpn_submul_1)) leal (%s1_ptr,%size,4), %s1_ptr negl %size xorl %ebp, %ebp + cfi_rel_offset (ebp, 4) ALIGN (3) L(oop): adcl $0, %ebp @@ -79,9 +87,17 @@ L(oop): adcl $0, %ebp adcl $0, %ebp movl %ebp, %eax popl %ebx + cfi_adjust_cfa_offset (-4) + cfi_restore (ebx) popl %ebp + cfi_adjust_cfa_offset (-4) + cfi_restore (ebp) popl %esi + cfi_adjust_cfa_offset (-4) + cfi_restore (esi) popl %edi + cfi_adjust_cfa_offset (-4) + cfi_restore (edi) LEAVE ret |