path: root/includes/stg/SMP.h
author     Erik de Castro Lopo <erikd@mega-nerd.com>   2016-10-23 08:30:19 +1100
committer  Erik de Castro Lopo <erikd@mega-nerd.com>   2017-03-04 17:16:12 +1100
commit     8bc809b7d3bc416d63eba96013553498c95ecc6f (patch)
tree       685963d18a1e9eed2d8ee98484646b192435a8f8   /includes/stg/SMP.h
parent     96f5656d2ae885fa4b0227c4650b1c375e16dd00 (diff)
download   haskell-wip/erikd/rts.tar.gz
Enable new warning for bad CPP #if usage (wip/erikd/rts)
The C code in the RTS now gets built with `-Wundef` and the Haskell code (stages 1 and 2 only) with `-Wcpp-undef`. We now get warnings wherever `#if` is used on undefined identifiers.
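
For context, `-Wundef` fires whenever a plain `#if` evaluates an identifier that is not defined (the preprocessor silently substitutes 0 for it), whereas the `defined` operator never triggers the warning; `-Wcpp-undef` is the corresponding GHC check for CPP-processed Haskell sources. A minimal sketch, using a hypothetical FOO_SUPPORTED macro that is not part of this patch:

/* Hypothetical example: FOO_SUPPORTED may or may not be set by the
 * build system. */

/* With -Wundef, this warns that FOO_SUPPORTED is not defined when the
 * macro is missing, because CPP quietly treats the unknown name as 0. */
#if FOO_SUPPORTED
static int foo_enabled = 1;
#endif

/* The explicit form is the style this patch converts SMP.h to use;
 * it never warns and states the intent directly. */
#if defined(FOO_SUPPORTED)
static int foo_enabled_explicit = 1;
#endif
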
Diffstat (limited to 'includes/stg/SMP.h')
-rw-r--r--   includes/stg/SMP.h   37
1 file changed, 20 insertions, 17 deletions
diff --git a/includes/stg/SMP.h b/includes/stg/SMP.h
index 0e806b6716..424de89872 100644
--- a/includes/stg/SMP.h
+++ b/includes/stg/SMP.h
@@ -14,7 +14,7 @@
#ifndef SMP_H
#define SMP_H
-#if arm_HOST_ARCH && defined(arm_HOST_ARCH_PRE_ARMv6)
+#if defined arm_HOST_ARCH && defined arm_HOST_ARCH_PRE_ARMv6
void arm_atomic_spin_lock(void);
void arm_atomic_spin_unlock(void);
#endif
@@ -187,14 +187,15 @@ EXTERN_INLINE void
write_barrier(void) {
#if defined(NOSMP)
return;
-#elif i386_HOST_ARCH || x86_64_HOST_ARCH
+#elif defined i386_HOST_ARCH || defined x86_64_HOST_ARCH
__asm__ __volatile__ ("" : : : "memory");
-#elif powerpc_HOST_ARCH || powerpc64_HOST_ARCH || powerpc64le_HOST_ARCH
+#elif defined powerpc_HOST_ARCH || defined powerpc64_HOST_ARCH \
+ || defined powerpc64le_HOST_ARCH
__asm__ __volatile__ ("lwsync" : : : "memory");
-#elif sparc_HOST_ARCH
+#elif defined sparc_HOST_ARCH
/* Sparc in TSO mode does not require store/store barriers. */
__asm__ __volatile__ ("" : : : "memory");
-#elif (arm_HOST_ARCH) || aarch64_HOST_ARCH
+#elif defined arm_HOST_ARCH || defined aarch64_HOST_ARCH
__asm__ __volatile__ ("dmb st" : : : "memory");
#else
#error memory barriers unimplemented on this architecture
@@ -205,17 +206,18 @@ EXTERN_INLINE void
store_load_barrier(void) {
#if defined(NOSMP)
return;
-#elif i386_HOST_ARCH
+#elif defined i386_HOST_ARCH
__asm__ __volatile__ ("lock; addl $0,0(%%esp)" : : : "memory");
-#elif x86_64_HOST_ARCH
+#elif defined x86_64_HOST_ARCH
__asm__ __volatile__ ("lock; addq $0,0(%%rsp)" : : : "memory");
-#elif powerpc_HOST_ARCH || powerpc64_HOST_ARCH || powerpc64le_HOST_ARCH
+#elif defined powerpc_HOST_ARCH || defined powerpc64_HOST_ARCH \
+ || defined powerpc64le_HOST_ARCH
__asm__ __volatile__ ("sync" : : : "memory");
-#elif sparc_HOST_ARCH
+#elif defined sparc_HOST_ARCH
__asm__ __volatile__ ("membar #StoreLoad" : : : "memory");
-#elif arm_HOST_ARCH
+#elif defined arm_HOST_ARCH
__asm__ __volatile__ ("dmb" : : : "memory");
-#elif aarch64_HOST_ARCH
+#elif defined aarch64_HOST_ARCH
__asm__ __volatile__ ("dmb sy" : : : "memory");
#else
#error memory barriers unimplemented on this architecture
@@ -226,18 +228,19 @@ EXTERN_INLINE void
load_load_barrier(void) {
#if defined(NOSMP)
return;
-#elif i386_HOST_ARCH
+#elif defined i386_HOST_ARCH
__asm__ __volatile__ ("" : : : "memory");
-#elif x86_64_HOST_ARCH
+#elif defined x86_64_HOST_ARCH
__asm__ __volatile__ ("" : : : "memory");
-#elif powerpc_HOST_ARCH || powerpc64_HOST_ARCH || powerpc64le_HOST_ARCH
+#elif defined powerpc_HOST_ARCH || defined powerpc64_HOST_ARCH \
+ || defined powerpc64le_HOST_ARCH
__asm__ __volatile__ ("lwsync" : : : "memory");
-#elif sparc_HOST_ARCH
+#elif defined sparc_HOST_ARCH
/* Sparc in TSO mode does not require load/load barriers. */
__asm__ __volatile__ ("" : : : "memory");
-#elif arm_HOST_ARCH
+#elif defined arm_HOST_ARCH
__asm__ __volatile__ ("dmb" : : : "memory");
-#elif aarch64_HOST_ARCH
+#elif defined aarch64_HOST_ARCH
__asm__ __volatile__ ("dmb sy" : : : "memory");
#else
#error memory barriers unimplemented on this architecture