55 *
66 * Note that we actually require a 486 upwards because the 386 doesn't have
77 * support for xadd and cmpxchg. Given that the 386 isn't supported anywhere
8- * anymore that's not much of restriction luckily.
8+ * anymore that's not much of a restriction luckily.
99 *
1010 * Portions Copyright (c) 1996-2015, PostgreSQL Global Development Group
1111 * Portions Copyright (c) 1994, Regents of the University of California
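To make the xadd/cmpxchg remark above concrete: both instructions appear with the 486, and they are what the fetch-add and compare-exchange implementations further down in this file boil down to. A minimal standalone sketch in GCC-style inline asm (the helper names are illustrative only, not part of this patch):

    /* atomically add 'add_' to *ptr and return the previous value (lock; xadd) */
    static inline uint32
    fetch_add_u32_sketch(volatile uint32 *ptr, uint32 add_)
    {
        uint32  res = add_;

        __asm__ __volatile__(
            "lock; xaddl %0, %1"
            : "+r"(res), "+m"(*ptr)
            :
            : "memory", "cc");
        return res;
    }

    /* compare-and-swap built on lock; cmpxchg; *expected receives the old value */
    static inline bool
    compare_exchange_u32_sketch(volatile uint32 *ptr, uint32 *expected, uint32 newval)
    {
        char    ret;

        __asm__ __volatile__(
            "lock; cmpxchgl %3, %1\n\t"
            "setz %2"
            : "+a"(*expected), "+m"(*ptr), "=q"(ret)
            : "r"(newval)
            : "memory", "cc");
        return (bool) ret;
    }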
2828 * do those things, a compiler barrier should be enough.
2929 *
3030 * "lock; addl" has worked for longer than "mfence". It's also rumored to be
31- * faster in many scenarios
31+ * faster in many scenarios.
3232 */
3333
34- #if defined(__INTEL_COMPILER)
35- #define pg_memory_barrier_impl() _mm_mfence()
36- #elif defined(__GNUC__) && (defined(__i386__) || defined(__i386))
34+ #if defined(__GNUC__) || defined(__INTEL_COMPILER)
35+ #if defined(__i386__) || defined(__i386)
3736#define pg_memory_barrier_impl() \
3837 __asm__ __volatile__ ("lock; addl $0,0(%%esp)" : : : "memory", "cc")
39- #elif defined(__GNUC__) && defined(__x86_64__)
38+ #elif defined(__x86_64__)
4039#define pg_memory_barrier_impl() \
4140 __asm__ __volatile__ ("lock; addl $0,0(%%rsp)" : : : "memory", "cc")
4241#endif
42+ #endif /* defined(__GNUC__) || defined(__INTEL_COMPILER) */
4343
4444#define pg_read_barrier_impl() pg_compiler_barrier_impl()
4545#define pg_write_barrier_impl() pg_compiler_barrier_impl()
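The reason the read and write barriers can degrade to plain compiler barriers is x86's strong ordering: stores are not reordered with other stores, and loads are not reordered with other loads, so only the compiler needs restraining. A caller-side sketch using the pg_write_barrier()/pg_read_barrier() wrappers from port/atomics.h (the variables here are made up for illustration):

    static int          payload;
    static volatile int ready;

    static void
    producer(void)
    {
        payload = 42;
        pg_write_barrier();     /* order the two stores; compiler-only on x86 */
        ready = 1;
    }

    static void
    consumer(void)
    {
        if (ready)
        {
            pg_read_barrier();  /* order the two loads; compiler-only on x86 */
            Assert(payload == 42);
        }
    }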
5151 */
5252#if defined(HAVE_ATOMICS)
5353
54- #if defined(__GNUC__) && !defined(__INTEL_COMPILER)
54+ #if defined(__GNUC__) || defined(__INTEL_COMPILER)
5555
5656#define PG_HAVE_ATOMIC_FLAG_SUPPORT
5757typedef struct pg_atomic_flag
@@ -67,7 +67,7 @@ typedef struct pg_atomic_uint32
6767
6868/*
6969 * It's too complicated to write inline asm for 64bit types on 32bit and the
70- * 468 can't do it.
70+ * 486 can't do it anyway.
7171 */
7272#ifdef __x86_64__
7373#define PG_HAVE_ATOMIC_U64_SUPPORT
@@ -76,11 +76,11 @@ typedef struct pg_atomic_uint64
7676 /* alignment guaranteed due to being on a 64bit platform */
7777 volatile uint64 value;
7878} pg_atomic_uint64;
79- #endif
79+ #endif /* __x86_64__ */
8080
81- #endif /* defined(HAVE_ATOMICS) */
81+ #endif /* defined(__GNUC__) || defined(__INTEL_COMPILER) */
8282
83- #endif /* defined(__GNUC__) && !defined(__INTEL_COMPILER) */
83+ #endif /* defined(HAVE_ATOMICS) */
8484
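As for the 64bit comment above: on x86-64 the same lock-prefixed instructions simply operate on 64bit operands, which is why PG_HAVE_ATOMIC_U64_SUPPORT is only advertised there; a 32bit build would have to emulate this with cmpxchg8b loops, and the 486 has no 64bit primitive at all. A sketch of the 64bit fetch-add, roughly what the pg_atomic_fetch_add_u64_impl() touched later in this diff amounts to (the helper name is illustrative):

    static inline uint64
    fetch_add_u64_sketch(volatile uint64 *ptr, int64 add_)
    {
        uint64  res = add_;

        __asm__ __volatile__(
            "lock; xaddq %0, %1"    /* 64bit xadd only exists on x86-64 */
            : "+r"(res), "+m"(*ptr)
            :
            : "memory", "cc");
        return res;
    }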
8585#if !defined(PG_HAVE_SPIN_DELAY)
8686/*
@@ -106,20 +106,12 @@ typedef struct pg_atomic_uint64
106106 * de-pipelines the spin-wait loop to prevent it from
107107 * consuming execution resources excessively.
108108 */
109- #if defined(__INTEL_COMPILER)
110- #define PG_HAVE_SPIN_DELAY
111- static inline
112- pg_spin_delay_impl(void)
113- {
114- _mm_pause();
115- }
116- #elif defined(__GNUC__)
109+ #if defined(__GNUC__) || defined(__INTEL_COMPILER)
117110#define PG_HAVE_SPIN_DELAY
118111static __inline__ void
119112pg_spin_delay_impl(void)
120113{
121- __asm__ __volatile__(
122- " rep; nop \n" );
114+ __asm__ __volatile__(" rep; nop \n" );
123115}
124116#elif defined(WIN32_ONLY_COMPILER) && defined(__x86_64__)
125117#define PG_HAVE_SPIN_DELAY
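For context on the PAUSE ("rep; nop") hint: the delay primitive is meant to sit inside a spin-wait poll loop, roughly like the hypothetical sketch below (the names and retry limit are made up, not part of this patch):

    #define MAX_POLL_SPINS 1000

    static bool
    poll_until_free(volatile bool *lock_taken)
    {
        int     i;

        for (i = 0; i < MAX_POLL_SPINS; i++)
        {
            if (!*lock_taken)
                return true;        /* looks free; caller retries the real test-and-set */
            pg_spin_delay_impl();   /* PAUSE: cheaper spinning, yields to the sibling hyperthread */
        }
        return false;               /* give up and let the caller back off or sleep */
    }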
@@ -142,8 +134,7 @@ pg_spin_delay_impl(void)
142134
143135#if defined(HAVE_ATOMICS)
144136
145- /* inline assembly implementation for gcc */
146- #if defined(__GNUC__) && !defined(__INTEL_COMPILER)
137+ #if defined(__GNUC__) || defined(__INTEL_COMPILER)
147138
148139#define PG_HAVE_ATOMIC_TEST_SET_FLAG
149140static inline bool
@@ -246,6 +237,6 @@ pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
246237
247238#endif /* __x86_64__ */
248239
249- #endif /* defined(__GNUC__) && !defined(__INTEL_COMPILER) */
240+ #endif /* defined(__GNUC__) || defined(__INTEL_COMPILER) */
250241
251242#endif /* HAVE_ATOMICS */
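The *_impl symbols defined in this file are normally consumed through the generic wrappers in port/atomics.h rather than called directly. A minimal caller-side usage sketch, assuming those wrapper names:

    #include "port/atomics.h"

    static pg_atomic_uint32 counter;
    static pg_atomic_flag   busy;

    static void
    atomics_usage_sketch(void)
    {
        pg_atomic_init_u32(&counter, 0);
        pg_atomic_init_flag(&busy);

        /* maps to the lock; xadd based fetch-add implementation above */
        (void) pg_atomic_fetch_add_u32(&counter, 1);

        /* test-and-set via the flag support declared above */
        if (pg_atomic_test_set_flag(&busy))
        {
            /* ... do exclusive work ... */
            pg_atomic_clear_flag(&busy);
        }
    }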