author    Sanjay Patel <spatel@rotateright.com>    2014-05-28 20:26:57 +0000
committer Sanjay Patel <spatel@rotateright.com>    2014-05-28 20:26:57 +0000
commit    65a963d4021375aaf6791377a41b5b83b9b20708 (patch)
tree      84345e30dbb7fe6347cecb8c989af53995f2952b /lib
parent    f722606af2c9dbc2d24705254bdf89bcc48a7a79 (diff)
download  clang-65a963d4021375aaf6791377a41b5b83b9b20708.tar.gz
added Intel's BMI intrinsic variants
(fixes PR19431 - http://llvm.org/bugs/show_bug.cgi?id=19431)

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@209769 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib')
-rw-r--r--  lib/Headers/bmiintrin.h  35
1 file changed, 34 insertions(+), 1 deletion(-)
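The key change below is the Intel-specified, three-operand form of BEXTR: _bextr_u32(__X, __Y, __Z) takes the start bit and the field length as separate arguments and packs them into the single control operand ((__Y & 0xff) | ((__Z & 0xff) << 8)) that the AMD-specified two-operand __bextr_u32 already passes straight to __builtin_ia32_bextr_u32. A minimal sketch of what that packing means, using a portable stand-in (bextr_u32_ref is hypothetical, not the builtin, and its edge-case handling is only illustrative):

#include <stdio.h>

/* Hypothetical reference model of BEXTR: extract 'len' bits of 'x'
   starting at bit 'start', where start sits in control bits [7:0]
   and len in control bits [15:8]. */
static unsigned int bextr_u32_ref(unsigned int x, unsigned int ctrl)
{
    unsigned int start = ctrl & 0xff;
    unsigned int len   = (ctrl >> 8) & 0xff;
    if (len == 0 || start >= 32)
        return 0;
    if (len >= 32 || start + len >= 32)
        return x >> start;                   /* field runs to the top bit */
    return (x >> start) & ((1u << len) - 1);
}

int main(void)
{
    unsigned int x = 0x12345678u;
    /* Intel form: start = 8, length = 12; the wrapper builds the
       control word exactly as in the diff below. */
    unsigned int ctrl = (8u & 0xff) | ((12u & 0xff) << 8);
    printf("0x%x\n", bextr_u32_ref(x, ctrl)); /* bits [8..19] -> 0x456 */
    return 0;
}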
diff --git a/lib/Headers/bmiintrin.h b/lib/Headers/bmiintrin.h
index 8cb00f51d3..43c4a5e5de 100644
--- a/lib/Headers/bmiintrin.h
+++ b/lib/Headers/bmiintrin.h
@@ -32,6 +32,14 @@
#ifndef __BMIINTRIN_H
#define __BMIINTRIN_H
+#define _tzcnt_u16(a) (__tzcnt_u16((a)))
+#define _andn_u32(a, b) (__andn_u32((a), (b)))
+/* _bextr_u32 != __bextr_u32 */
+#define _blsi_u32(a) (__blsi_u32((a)))
+#define _blsmsk_u32(a) (__blsmsk_u32((a)))
+#define _blsr_u32(a) (__blsr_u32((a)))
+#define _tzcnt_u32(a) (__tzcnt_u32((a)))
+
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
__tzcnt_u16(unsigned short __X)
{
@@ -44,12 +52,20 @@ __andn_u32(unsigned int __X, unsigned int __Y)
return ~__X & __Y;
}
+/* AMD-specified, double-leading-underscore version of BEXTR */
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__bextr_u32(unsigned int __X, unsigned int __Y)
{
return __builtin_ia32_bextr_u32(__X, __Y);
}
+/* Intel-specified, single-leading-underscore version of BEXTR */
+static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
+_bextr_u32(unsigned int __X, unsigned int __Y, unsigned int __Z)
+{
+ return __builtin_ia32_bextr_u32 (__X, ((__Y & 0xff) | ((__Z & 0xff) << 8)));
+}
+
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__blsi_u32(unsigned int __X)
{
@@ -75,18 +91,34 @@ __tzcnt_u32(unsigned int __X)
}
#ifdef __x86_64__
+
+#define _andn_u64(a, b) (__andn_u64((a), (b)))
+/* _bextr_u64 != __bextr_u64 */
+#define _blsi_u64(a) (__blsi_u64((a)))
+#define _blsmsk_u64(a) (__blsmsk_u64((a)))
+#define _blsr_u64(a) (__blsr_u64((a)))
+#define _tzcnt_u64(a) (__tzcnt_u64((a)))
+
static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
__andn_u64 (unsigned long long __X, unsigned long long __Y)
{
return ~__X & __Y;
}
+/* AMD-specified, double-leading-underscore version of BEXTR */
static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
__bextr_u64(unsigned long long __X, unsigned long long __Y)
{
return __builtin_ia32_bextr_u64(__X, __Y);
}
+/* Intel-specified, single-leading-underscore version of BEXTR */
+static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
+_bextr_u64(unsigned long long __X, unsigned int __Y, unsigned int __Z)
+{
+ return __builtin_ia32_bextr_u64 (__X, ((__Y & 0xff) | ((__Z & 0xff) << 8)));
+}
+
static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
__blsi_u64(unsigned long long __X)
{
@@ -110,6 +142,7 @@ __tzcnt_u64(unsigned long long __X)
{
return __builtin_ctzll(__X);
}
-#endif
+
+#endif /* __x86_64__ */
#endif /* __BMIINTRIN_H */
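
After this patch the Intel-style, single-leading-underscore names resolve to the existing AMD-style implementations, so user code can use either spelling. A small usage sketch, assuming a BMI-capable target and a compile such as clang -mbmi (the result comments are for x = 0x12345678):

#include <stdio.h>
#include <x86intrin.h>

int main(void)
{
    unsigned int x = 0x12345678u;

    printf("%u\n",   _tzcnt_u32(x));              /* trailing zero count: 3 */
    printf("0x%x\n", _blsi_u32(x));               /* isolate lowest set bit: 0x8 */
    printf("0x%x\n", _blsr_u32(x));               /* clear lowest set bit */
    printf("0x%x\n", _blsmsk_u32(x));             /* mask up to lowest set bit */
    printf("0x%x\n", _andn_u32(0xf0f0f0f0u, x));  /* ~a & b */
    printf("0x%x\n", _bextr_u32(x, 8, 12));       /* start 8, length 12: 0x456 */
    return 0;
}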