path: root/longlong.h
author     tege <tege@gmplib.org>  2003-06-09 15:46:55 +0200
committer  tege <tege@gmplib.org>  2003-06-09 15:46:55 +0200
commit     975b4a821842e3f6ac9a66dbd7522d64c265436d (patch)
tree       faa823a5f952369449c32e6be42613d06567699f /longlong.h
parent     53cc3b936b97c1771e7e5d87ea541f72fff5456e (diff)
download   gmp-975b4a821842e3f6ac9a66dbd7522d64c265436d.tar.gz
Tabify.
Diffstat (limited to 'longlong.h')
-rw-r--r--  longlong.h  60
1 file changed, 30 insertions, 30 deletions
diff --git a/longlong.h b/longlong.h
index 3baf7b94f..0369f279f 100644
--- a/longlong.h
+++ b/longlong.h
@@ -217,7 +217,7 @@ long __MPN(count_leading_zeros) _PROTO ((UDItype));
#if defined (__GNUC__)
/* Do both product parts in assembly, since that gives better code with
all gcc versions. Some callers will just use the upper part, and in
- that situation we waste an instruction, but not any cycles. */
+ that situation we waste an instruction, but not any cycles. */
#define umul_ppmm(ph, pl, m0, m1) \
__asm__ ("xma.hu %0 = %2, %3, f0\n\txma.l %1 = %2, %3, f0" \
: "=&f" (ph), "=f" (pl) \
@@ -239,12 +239,12 @@ long __MPN(count_leading_zeros) _PROTO ((UDItype));
} while (0)
/* similar to what gcc does for __builtin_ffs, but 0 based rather than 1
based, and we don't need a special case for x==0 here */
-#define count_trailing_zeros(count, x) \
+#define count_trailing_zeros(count, x) \
do { \
UWtype __ctz_x = (x); \
- __asm__ ("popcnt %0 = %1" \
- : "=r" (count) \
- : "r" ((__ctz_x-1) & ~__ctz_x)); \
+ __asm__ ("popcnt %0 = %1" \
+ : "=r" (count) \
+ : "r" ((__ctz_x-1) & ~__ctz_x)); \
} while (0)
#endif
#ifndef LONGLONG_STANDALONE
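
The comment above refers to the classic mask trick: for x != 0, (x-1) & ~x has ones exactly in the positions of the trailing zero bits of x, so a population count (the ia64 popcnt above) yields the trailing-zero count directly; for x == 0 the mask is all ones and the result is the word width, which is why no special case is needed. A portable sketch of the same idea (not part of longlong.h), assuming a 64-bit word:

#include <stdint.h>

static int ref_count_trailing_zeros (uint64_t x)
{
  uint64_t mask = (x - 1) & ~x;   /* ones exactly where x has trailing zeros */
  int count = 0;
  while (mask != 0)               /* portable stand-in for the popcnt instruction */
    {
      count += (int) (mask & 1);
      mask >>= 1;
    }
  return count;                   /* 64 when x == 0 */
}
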
@@ -645,10 +645,10 @@ extern UWtype __MPN(udiv_qrnnd) _PROTO ((UWtype *, UWtype, UWtype, UWtype));
cost of one extra instruction. Do this for "i386" too, since that means
generic x86. */
#if __GNUC__ < 3 \
- && (HAVE_HOST_CPU_i386 \
- || HAVE_HOST_CPU_i686 \
- || HAVE_HOST_CPU_pentiumpro \
- || HAVE_HOST_CPU_pentium2 \
+ && (HAVE_HOST_CPU_i386 \
+ || HAVE_HOST_CPU_i686 \
+ || HAVE_HOST_CPU_pentiumpro \
+ || HAVE_HOST_CPU_pentium2 \
|| HAVE_HOST_CPU_pentium3)
#define count_leading_zeros(count, x) \
do { \
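
The body of this count_leading_zeros macro falls outside the hunk, but its contract is the usual one in longlong.h: leave in count the number of zero bits above the most significant set bit of x. A reference sketch of that contract only (not GMP's implementation), assuming a 32-bit word and x != 0:

#include <stdint.h>

static int ref_count_leading_zeros (uint32_t x)
{
  int msb = 0;
  while (x >>= 1)   /* index of the most significant set bit */
    msb++;
  return 31 - msb;  /* zero bits above that position */
}
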
@@ -1396,11 +1396,11 @@ extern UWtype __MPN(udiv_qrnnd) _PROTO ((UWtype *, UWtype, UWtype, UWtype));
#if 0
/* FIXME: This instruction appears to be unimplemented on some systems (vax
8800 maybe). */
-#define count_trailing_zeros(count,x) \
+#define count_trailing_zeros(count,x) \
do { \
- __asm__ ("ffs 0, 31, %1, %0" \
+ __asm__ ("ffs 0, 31, %1, %0" \
: "=g" ((USItype) (count)) \
- : "g" ((USItype) (x))); \
+ : "g" ((USItype) (x))); \
} while (0)
#endif
#endif /* __vax__ */
@@ -1463,37 +1463,37 @@ extern UWtype mpn_umul_ppmm _PROTO ((UWtype *, UWtype, UWtype));
#if ! defined (umul_ppmm) && HAVE_NATIVE_mpn_umul_ppmm \
&& ! defined (LONGLONG_STANDALONE)
-#define umul_ppmm(wh, wl, u, v) \
- do { \
- UWtype __umul_ppmm__p0; \
+#define umul_ppmm(wh, wl, u, v) \
+ do { \
+ UWtype __umul_ppmm__p0; \
(wh) = mpn_umul_ppmm (&__umul_ppmm__p0, (UWtype) (u), (UWtype) (v)); \
- (wl) = __umul_ppmm__p0; \
+ (wl) = __umul_ppmm__p0; \
} while (0)
#endif
#define mpn_umul_ppmm_r __MPN(umul_ppmm_r)
extern UWtype mpn_umul_ppmm_r _PROTO ((UWtype, UWtype, UWtype *));
-#if ! defined (umul_ppmm) && HAVE_NATIVE_mpn_umul_ppmm_r \
+#if ! defined (umul_ppmm) && HAVE_NATIVE_mpn_umul_ppmm_r \
&& ! defined (LONGLONG_STANDALONE)
-#define umul_ppmm(wh, wl, u, v) \
- do { \
- UWtype __umul_ppmm__p0; \
+#define umul_ppmm(wh, wl, u, v) \
+ do { \
+ UWtype __umul_ppmm__p0; \
(wh) = mpn_umul_ppmm_r ((UWtype) (u), (UWtype) (v), &__umul_ppmm__p0); \
- (wl) = __umul_ppmm__p0; \
+ (wl) = __umul_ppmm__p0; \
} while (0)
#endif
#define mpn_udiv_qrnnd __MPN(udiv_qrnnd)
extern UWtype mpn_udiv_qrnnd _PROTO ((UWtype *, UWtype, UWtype, UWtype));
-#if ! defined (udiv_qrnnd) && HAVE_NATIVE_mpn_udiv_qrnnd \
+#if ! defined (udiv_qrnnd) && HAVE_NATIVE_mpn_udiv_qrnnd \
&& ! defined (LONGLONG_STANDALONE)
#define udiv_qrnnd(q, r, n1, n0, d) \
do { \
UWtype __udiv_qrnnd__r; \
(q) = mpn_udiv_qrnnd (&__udiv_qrnnd__r, \
- (UWtype) (n1), (UWtype) (n0), (UWtype) d); \
+ (UWtype) (n1), (UWtype) (n0), (UWtype) d); \
(r) = __udiv_qrnnd__r; \
} while (0)
#endif
@@ -1501,14 +1501,14 @@ extern UWtype mpn_udiv_qrnnd _PROTO ((UWtype *, UWtype, UWtype, UWtype));
#define mpn_udiv_qrnnd_r __MPN(udiv_qrnnd_r)
extern UWtype mpn_udiv_qrnnd_r _PROTO ((UWtype, UWtype, UWtype, UWtype *));
-#if ! defined (udiv_qrnnd) && HAVE_NATIVE_mpn_udiv_qrnnd_r \
+#if ! defined (udiv_qrnnd) && HAVE_NATIVE_mpn_udiv_qrnnd_r \
&& ! defined (LONGLONG_STANDALONE)
-#define udiv_qrnnd(q, r, n1, n0, d) \
- do { \
- UWtype __udiv_qrnnd__r; \
- (q) = mpn_udiv_qrnnd_r ((UWtype) (n1), (UWtype) (n0), (UWtype) d, \
- &__udiv_qrnnd__r); \
- (r) = __udiv_qrnnd__r; \
+#define udiv_qrnnd(q, r, n1, n0, d) \
+ do { \
+ UWtype __udiv_qrnnd__r; \
+ (q) = mpn_udiv_qrnnd_r ((UWtype) (n1), (UWtype) (n0), (UWtype) d, \
+ &__udiv_qrnnd__r); \
+ (r) = __udiv_qrnnd__r; \
} while (0)
#endif
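
Finally, udiv_qrnnd(q, r, n1, n0, d) divides the two-word value n1*2^W + n0 by the single word d, storing the quotient in q and the remainder in r; the caller is expected to keep n1 < d so the quotient fits in one word. A reference sketch (not part of longlong.h), assuming a 64-bit UWtype and unsigned __int128:

#include <stdint.h>

typedef uint64_t UWtype;   /* assumed 64-bit word, for this sketch only */

static void ref_udiv_qrnnd (UWtype *q, UWtype *r, UWtype n1, UWtype n0, UWtype d)
{
  unsigned __int128 n = ((unsigned __int128) n1 << 64) | n0;   /* n1*2^64 + n0 */
  *q = (UWtype) (n / d);   /* quotient fits in one word when n1 < d */
  *r = (UWtype) (n % d);   /* remainder is always < d */
}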