7 #if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 0)
/* Atomically set the flag to 1 and return its previous value, honoring the
 * requested memory_order __x__.
 * NOTE(review): this span is an elided extract — the original's lines
 * 14-15, 17, 19 and 21-25 (opening brace, fence calls, the lock-based
 * fallback tail and the closing brace) are missing here, so the visible
 * code is kept verbatim rather than reconstructed. */
12 bool atomic_flag_test_and_set_explicit
13 ( volatile atomic_flag* __a__, memory_order __x__ )
/* acq_rel or stronger needs an extra barrier before the RMW, because GCC's
 * __sync_lock_test_and_set is documented as an acquire barrier only. */
16 if ( __x__ >= memory_order_acq_rel )
18 return __sync_lock_test_and_set( &(__a__->__f__), 1 );
/* Fallback path: read the current flag value (presumably under the
 * address-hashed spin lock — elided lines; TODO confirm against the
 * unelided source). */
20 bool result = __a__->__f__;
26 bool atomic_flag_test_and_set( volatile atomic_flag* __a__ )
27 { return atomic_flag_test_and_set_explicit( __a__, memory_order_seq_cst ); }
/* Clear the flag (set it to 0) with the requested memory_order __x__.
 * GCC's __sync_lock_release is documented as a release barrier; the branch
 * below presumably issues a full barrier for acq_rel/seq_cst on the elided
 * following line — TODO confirm against the unelided source.
 * NOTE(review): elided extract — original lines 31-32 and 35-40 (braces,
 * barrier call, lock-based fallback) are missing; code kept verbatim. */
29 void atomic_flag_clear_explicit
30 ( volatile atomic_flag* __a__, memory_order __x__ )
33 __sync_lock_release( &(__a__->__f__) );
34 if ( __x__ >= memory_order_acq_rel )
41 void atomic_flag_clear( volatile atomic_flag* __a__ )
42 { atomic_flag_clear_explicit( __a__, memory_order_seq_cst ); }
/* Fence associated with a particular flag (per the C++0x N2427-style flag
 * API).  NOTE(review): only the signature is visible in this extract — the
 * body (original lines 45-50) is elided. */
44 void atomic_flag_fence( const volatile atomic_flag* __a__, memory_order __x__ )
51 void __atomic_flag_wait__( volatile atomic_flag* __a__ )
52 { while ( atomic_flag_test_and_set( __a__ ) ); }
/* Spin until the flag is acquired, using the caller-supplied memory order
 * for each test-and-set attempt.
 * NOTE(review): the parameter line declaring `memory_order __x__` (original
 * line 55) is elided from this extract; code kept verbatim. */
54 void __atomic_flag_wait_explicit__( volatile atomic_flag* __a__,
56 { while ( atomic_flag_test_and_set_explicit( __a__, __x__ ) ); }
/* Table of 2^LOGSIZE anonymous flags used as address-hashed spin locks by
 * the lock-based fallback paths; indexed via __atomic_flag_for_address__.
 * The 16 initializers visible here imply LOGSIZE == 4 — TODO confirm; the
 * LOGSIZE definition and the initializer's braces (original lines 61 and
 * 66) are elided from this extract. */
60 static atomic_flag volatile __atomic_flag_anon_table__[ 1 << LOGSIZE ] =
62 ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT,
63 ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT,
64 ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT,
65 ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT, ATOMIC_FLAG_INIT,
68 volatile atomic_flag* __atomic_flag_for_address__( const volatile void* __z__ )
70 uintptr_t __u__ = (uintptr_t)__z__;
71 __u__ += (__u__ >> 2) + (__u__ << 4);
72 __u__ += (__u__ >> 7) + (__u__ << 5);
73 __u__ += (__u__ >> 17) + (__u__ << 13);
74 if ( sizeof(uintptr_t) > 4 ) __u__ += (__u__ >> 31);
75 __u__ &= ~((~(uintptr_t)0) << LOGSIZE);
76 return __atomic_flag_anon_table__ + __u__;