path: root/sysdeps/powerpc/powerpc32/fpu/setjmp-common.S
author     Ulrich Drepper <drepper@redhat.com>  2004-02-14 05:16:57 +0000
committer  Ulrich Drepper <drepper@redhat.com>  2004-02-14 05:16:57 +0000
commit     5c76ff279fa8fd1425b86a39fe75507660cc0b5c (patch)
tree       420bed38e7d88b534f57f82497cc0dd77a26b936 /sysdeps/powerpc/powerpc32/fpu/setjmp-common.S
parent     9f8765bc46dbb4991dc20905c5d10d867956d489 (diff)
download   glibc-5c76ff279fa8fd1425b86a39fe75507660cc0b5c.tar.gz
Update.
2004-02-13  Steven Munroe  <sjmunroe@us.ibm.com>

	* sysdeps/powerpc/powerpc64/__longjmp-common.S: New file.
	* sysdeps/powerpc/powerpc64/__longjmp.S [NOT_IN_libc] (__longjmp):
	Non-versioned __longjmp for rtld-__longjmp.
	[!NOT_IN_libc] (__vmx__longjmp): Add VMX reg support and define as
	default version of __longjmp.
	[SHARED && SHLIB_COMPAT (libc, GLIBC_2_3, GLIBC_2_3_4)]
	(__novmx__longjmp): Original version of __longjmp.
	* sysdeps/powerpc/powerpc64/bsd-_setjmp.S [NOT_IN_libc] (_setjmp):
	Non-versioned _setjmp for rtld-_setjmp.
	[!NOT_IN_libc] (__vmx_setjmp): Branch to __vmx__sigsetjmp and
	define as default version of _setjmp.
	[SHARED && SHLIB_COMPAT (libc, GLIBC_2_3, GLIBC_2_3_4)]
	(__novmx_setjmp): Original version of _setjmp.
	* sysdeps/powerpc/powerpc64/bsd-setjmp.S (__vmxsetjmp): Branch to
	__vmx__sigsetjmp and define as default version of setjmp.
	[SHARED && SHLIB_COMPAT (libc, GLIBC_2_3, GLIBC_2_3_4)]
	(__novmxsetjmp): Original version of setjmp.
	* sysdeps/powerpc/powerpc64/setjmp-common.S: New file.
	* sysdeps/powerpc/powerpc64/setjmp.S [NOT_IN_libc] (__setjmp):
	Non-versioned __sigsetjmp for rtld-setjmp.
	[!NOT_IN_libc] (__vmx__sigsetjmp): Add VMX reg support and define
	as default version of __sigsetjmp.
	[SHARED && SHLIB_COMPAT (libc, GLIBC_2_3, GLIBC_2_3_4)]
	(__novmx__sigsetjmp): Original version of __sigsetjmp.
	* sysdeps/powerpc/powerpc32/__longjmp-common.S: New file.
	* sysdeps/powerpc/powerpc32/__longjmp.S [NOT_IN_libc] (__longjmp):
	Non-versioned __longjmp for rtld-__longjmp.
	[!NOT_IN_libc] (__vmx__longjmp): Add VMX reg support and define as
	default version of __longjmp.
	[SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)]
	(__novmx__longjmp): Original version of __longjmp.
	* sysdeps/powerpc/powerpc32/bsd-_setjmp.S [NOT_IN_libc] (_setjmp):
	Non-versioned _setjmp for rtld-_setjmp.
	[!NOT_IN_libc] (__vmx_setjmp): Branch to __vmx__sigsetjmp and
	define as default version of _setjmp.
	[SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)]
	(__novmx_setjmp): Original version of _setjmp.
	* sysdeps/powerpc/powerpc32/bsd-setjmp.S (__vmxsetjmp): Branch to
	__vmx__sigsetjmp and define as default version of setjmp.
	[SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)]
	(__novmxsetjmp): Original version of setjmp.
	* sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S: New file.
	* sysdeps/powerpc/powerpc32/fpu/__longjmp.S [NOT_IN_libc]
	(__longjmp): Non-versioned __longjmp for rtld-__longjmp.
	[!NOT_IN_libc] (__vmx__longjmp): Add VMX reg support and define as
	default version of __longjmp.
	[SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)]
	(__novmx__longjmp): Original version of __longjmp.
	* sysdeps/powerpc/powerpc32/fpu/setjmp-common.S: New file.
	* sysdeps/powerpc/powerpc32/fpu/setjmp.S [NOT_IN_libc] (__setjmp):
	Non-versioned __sigsetjmp for rtld-setjmp.
	[!NOT_IN_libc] (__vmx__sigsetjmp): Add VMX reg support and define
	as default version of __sigsetjmp.
	[SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)]
	(__novmx__sigsetjmp): Original version of __sigsetjmp.
	* sysdeps/powerpc/powerpc32/setjmp-common.S: New file.
	* sysdeps/powerpc/powerpc32/setjmp.S [NOT_IN_libc] (__setjmp):
	Non-versioned __sigsetjmp for rtld-setjmp.
	[!NOT_IN_libc] (__vmx__sigsetjmp): Add VMX reg support and define
	as default version of __sigsetjmp.
	[SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)]
	(__novmx__sigsetjmp): Original version of __sigsetjmp.
	* sysdeps/powerpc/Dist: New file.
	* sysdeps/powerpc/Makefile (sysdep_routines): Add novmx-longjmp.c
	and novmx-sigjmp.c.
	* sysdeps/powerpc/Versions (libc): To GLIBC_2.3.4 add _longjmp,
	__sigsetjmp, _setjmp, longjmp, and setjmp.  To GLIBC_PRIVATE add
	__novmx__libc_longjmp, __novmx__libc_siglongjmp,
	__vmx__libc_longjmp, and __vmx__libc_siglongjmp.
	* sysdeps/powerpc/bits/setjmp.h: Define JB_VRSAVE, JB_VRS, and
	adjust JB_SIZE to add VMX regs to __jmp_buf.
	* sysdeps/powerpc/longjmp.c: New file.
	* sysdeps/powerpc/novmxsetjmp.h: New file.
	* sysdeps/powerpc/novmx-longjmp.c: New file.
	* sysdeps/powerpc/novmx-sigjmp.c: New file.
	* sysdeps/powerpc/sigjmp.c: New file.
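The versioning scheme the entry above describes follows glibc's usual symbol-versioning pattern: the VMX-aware implementation becomes the default version of each symbol at GLIBC_2.3.4, while the original implementation is retained as a compatibility version for binaries linked against the older baseline. A minimal sketch of that wiring for __sigsetjmp on powerpc32, assuming the standard shlib-compat.h macros (the actual wrapper files in this commit may arrange the details differently):

    #include <shlib-compat.h>

    /* Export the VMX-aware entry point as the default __sigsetjmp.  */
    versioned_symbol (libc, __vmx__sigsetjmp, __sigsetjmp, GLIBC_2_3_4);

    #if defined SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)
    /* Keep the pre-VMX implementation reachable for old binaries.  */
    compat_symbol (libc, __novmx__sigsetjmp, __sigsetjmp, GLIBC_2_0);
    #endif

The same pattern applies on powerpc64, with GLIBC_2_3 as the compatibility baseline, per the powerpc64 entries above.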
Diffstat (limited to 'sysdeps/powerpc/powerpc32/fpu/setjmp-common.S')
-rw-r--r--  sysdeps/powerpc/powerpc32/fpu/setjmp-common.S  168
1 file changed, 168 insertions(+), 0 deletions(-)
diff --git a/sysdeps/powerpc/powerpc32/fpu/setjmp-common.S b/sysdeps/powerpc/powerpc32/fpu/setjmp-common.S
new file mode 100644
index 0000000000..b69ce33755
--- /dev/null
+++ b/sysdeps/powerpc/powerpc32/fpu/setjmp-common.S
@@ -0,0 +1,168 @@
+/* setjmp for PowerPC.
+ Copyright (C) 1995-99, 2000, 2003, 2004 Free Software Foundation, Inc.
+ This file is part of the GNU C Library.
+
+ The GNU C Library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ The GNU C Library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with the GNU C Library; if not, write to the Free
+ Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+ 02111-1307 USA. */
+
+#include <sysdep.h>
+#define _ASM
+#define _SETJMP_H
+#ifdef __NO_VMX__
+# include <novmxsetjmp.h>
+#else
+# include <bits/setjmp.h>
+#endif
+#include <bp-sym.h>
+#include <bp-asm.h>
+
+
+ENTRY (BP_SYM (__sigsetjmp))
+ CHECK_BOUNDS_BOTH_WIDE_LIT (r3, r8, r9, JB_SIZE)
+
+ stw r1,(JB_GPR1*4)(3)
+ mflr r0
+ stw r14,((JB_GPRS+0)*4)(3)
+ stfd fp14,((JB_FPRS+0*2)*4)(3)
+ stw r0,(JB_LR*4)(3)
+ stw r15,((JB_GPRS+1)*4)(3)
+ stfd fp15,((JB_FPRS+1*2)*4)(3)
+ mfcr r0
+ stw r16,((JB_GPRS+2)*4)(3)
+ stfd fp16,((JB_FPRS+2*2)*4)(3)
+ stw r0,(JB_CR*4)(3)
+ stw r17,((JB_GPRS+3)*4)(3)
+ stfd fp17,((JB_FPRS+3*2)*4)(3)
+ stw r18,((JB_GPRS+4)*4)(3)
+ stfd fp18,((JB_FPRS+4*2)*4)(3)
+ stw r19,((JB_GPRS+5)*4)(3)
+ stfd fp19,((JB_FPRS+5*2)*4)(3)
+ stw r20,((JB_GPRS+6)*4)(3)
+ stfd fp20,((JB_FPRS+6*2)*4)(3)
+ stw r21,((JB_GPRS+7)*4)(3)
+ stfd fp21,((JB_FPRS+7*2)*4)(3)
+ stw r22,((JB_GPRS+8)*4)(3)
+ stfd fp22,((JB_FPRS+8*2)*4)(3)
+ stw r23,((JB_GPRS+9)*4)(3)
+ stfd fp23,((JB_FPRS+9*2)*4)(3)
+ stw r24,((JB_GPRS+10)*4)(3)
+ stfd fp24,((JB_FPRS+10*2)*4)(3)
+ stw r25,((JB_GPRS+11)*4)(3)
+ stfd fp25,((JB_FPRS+11*2)*4)(3)
+ stw r26,((JB_GPRS+12)*4)(3)
+ stfd fp26,((JB_FPRS+12*2)*4)(3)
+ stw r27,((JB_GPRS+13)*4)(3)
+ stfd fp27,((JB_FPRS+13*2)*4)(3)
+ stw r28,((JB_GPRS+14)*4)(3)
+ stfd fp28,((JB_FPRS+14*2)*4)(3)
+ stw r29,((JB_GPRS+15)*4)(3)
+ stfd fp29,((JB_FPRS+15*2)*4)(3)
+ stw r30,((JB_GPRS+16)*4)(3)
+ stfd fp30,((JB_FPRS+16*2)*4)(3)
+ stw r31,((JB_GPRS+17)*4)(3)
+ stfd fp31,((JB_FPRS+17*2)*4)(3)
+#ifndef __NO_VMX__
+#ifdef PIC
+ mflr r6
+ bl _GLOBAL_OFFSET_TABLE_@local-4
+ mflr r5
+#ifdef SHARED
+ lwz r5,_rtld_global@got(r5)
+ mtlr r6
+ lwz r5,RTLD_GLOBAL_DL_HWCAP_OFFSET(r5)
+#else
+ lwz r5,_rtld_global@got(r5)
+ mtlr r6
+ lwz r5,0(r5)
+#endif
+#else
+ lis r5,_dl_hwcap@ha
+ lwz r5,_dl_hwcap@l(r5)
+#endif
+ andis. r5,r5,(PPC_FEATURE_HAS_ALTIVEC >> 16)
+ beq no_vmx
+ la r5,((JB_VRS)*4)(3)
+ andi. r6,r5,0xf
+ mfspr r0,VRSAVE
+ stw r0,((JB_VRSAVE)*4)(3)
+ addi r6,r5,16
+ beq+ aligned_save_vmx
+ lvsr v0,0,r5
+ vspltisb v1,-1 /* set v1 to all 1's */
+ vspltisb v2,0 /* set v2 to all 0's */
+ vperm v3,v2,v1,v0 /* v3 contains shift mask with num all 1 bytes on left = misalignment */
+
+
+ /* Special case for v20: we need to preserve what is in the save area below v20 before obliterating it.  */
+ lvx v5,0,r5
+ vperm v20,v20,v20,v0
+ vsel v5,v5,v20,v3
+ vsel v20,v20,v2,v3
+ stvx v5,0,r5
+
+#define save_2vmx_partial(savevr,prev_savevr,hivr,shiftvr,maskvr,savegpr,addgpr) \
+ addi addgpr,addgpr,32; \
+ vperm savevr,savevr,savevr,shiftvr; \
+ vsel hivr,prev_savevr,savevr,maskvr; \
+ stvx hivr,0,savegpr;
+
+ save_2vmx_partial(v21,v20,v5,v0,v3,r6,r5)
+ save_2vmx_partial(v22,v21,v5,v0,v3,r5,r6)
+ save_2vmx_partial(v23,v22,v5,v0,v3,r6,r5)
+ save_2vmx_partial(v24,v23,v5,v0,v3,r5,r6)
+ save_2vmx_partial(v25,v24,v5,v0,v3,r6,r5)
+ save_2vmx_partial(v26,v25,v5,v0,v3,r5,r6)
+ save_2vmx_partial(v27,v26,v5,v0,v3,r6,r5)
+ save_2vmx_partial(v28,v27,v5,v0,v3,r5,r6)
+ save_2vmx_partial(v29,v28,v5,v0,v3,r6,r5)
+ save_2vmx_partial(v30,v29,v5,v0,v3,r5,r6)
+
+ /* Special case for v31: we need to preserve what is in the save area above v31 before obliterating it.  */
+ addi r5,r5,32
+ vperm v31,v31,v31,v0
+ lvx v4,0,r5
+ vsel v5,v30,v31,v3
+ stvx v5,0,r6
+ vsel v4,v31,v4,v3
+ stvx v4,0,r5
+ b no_vmx
+
+aligned_save_vmx:
+ stvx 20,0,r5
+ addi r5,r5,32
+ stvx 21,0,r6
+ addi r6,r6,32
+ stvx 22,0,r5
+ addi r5,r5,32
+ stvx 23,0,r6
+ addi r6,r6,32
+ stvx 24,0,r5
+ addi r5,r5,32
+ stvx 25,0,r6
+ addi r6,r6,32
+ stvx 26,0,r5
+ addi r5,r5,32
+ stvx 27,0,r6
+ addi r6,r6,32
+ stvx 28,0,r5
+ addi r5,r5,32
+ stvx 29,0,r6
+ addi r6,r6,32
+ stvx 30,0,r5
+ stvx 31,0,r6
+no_vmx:
+#endif
+ b JUMPTARGET (BP_SYM (__sigjmp_save))
+END (BP_SYM (__sigsetjmp))