/**
 * @brief Common header for C11/C++11 atomics
 *
 * Note that some features are unavailable, as they require support from a true
 * C11/C++11 compiler.
 */
10 #ifndef __IMPATOMIC_H__
11 #define __IMPATOMIC_H__
13 #include "memoryorder.h"
14 #include "cmodelint.h"
/* CPP0X(feature) expands to nothing: C++11-only syntax ("= default",
   "= delete", constexpr constructors) is compiled out of the declarations
   below.  NOTE(review): this macro is normally defined conditionally on
   __cplusplus; the conditional arms are not visible in this chunk --
   confirm against the full header. */
22 #define CPP0X( feature )
/* atomic_flag: the C11/C++11 lock-free boolean flag.  test_and_set/clear are
   defined out-of-line later in this header and forward to the *_explicit free
   functions.  ATOMIC_FLAG_INIT initializes the flag to the clear (false)
   state.  NOTE(review): the struct's braces and data member are not visible
   in this chunk (lines appear elided) -- do not edit without the full file. */
24 typedef struct atomic_flag
27 bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
28 void clear( memory_order = memory_order_seq_cst ) volatile;
30 CPP0X( atomic_flag() = default; )
31 CPP0X( atomic_flag( const atomic_flag& ) = delete; )
32 atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
39 #define ATOMIC_FLAG_INIT { false }
/* C-style atomic_flag operations (the non-_explicit forms take no
   memory_order).  The double-underscore __atomic_flag_wait__ helpers are
   internal extensions; presumably they block until the flag is cleared --
   confirm against the implementation. */
45 extern bool atomic_flag_test_and_set( volatile atomic_flag* );
46 extern bool atomic_flag_test_and_set_explicit
47 ( volatile atomic_flag*, memory_order );
48 extern void atomic_flag_clear( volatile atomic_flag* );
49 extern void atomic_flag_clear_explicit
50 ( volatile atomic_flag*, memory_order );
51 extern void __atomic_flag_wait__
52 ( volatile atomic_flag* );
53 extern void __atomic_flag_wait_explicit__
54 ( volatile atomic_flag*, memory_order );
62 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
63 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
65 inline void atomic_flag::clear( memory_order __x__ ) volatile
66 { atomic_flag_clear_explicit( this, __x__ ); }
/*
The remainder of the example implementation uses the following
macros. These macros exploit GNU extensions for value-returning
blocks (AKA statement expressions) and __typeof__.

The macros rely on data fields of atomic structs being named __f__.
Other symbols used are __a__=atomic, __e__=expected, __f__=field,
__g__=flag, __m__=modified, __o__=operation, __r__=result,
__p__=pointer to field, __v__=value (for single evaluation),
__x__=memory-ordering, and __y__=memory-ordering.
*/
/* Atomic load of (__a__)->__f__: reports the read to the model checker via
   model_read_action and yields the value read, cast back to the field's type.
   __x__ is the memory_order.  NOTE(review): the statement expression's
   closing "__r__; })" is not visible in this chunk -- lines appear elided. */
83 #define _ATOMIC_LOAD_( __a__, __x__ ) \
84 ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
85 __typeof__((__a__)->__f__)__r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__); \
/* Atomic store: evaluates __m__ exactly once into __v__, then reports the
   write (value widened to uint64_t) via model_write_action.  NOTE(review):
   the closing "__v__; })" of the statement expression is not visible in this
   chunk. */
88 #define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
89 ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
90 __typeof__(__m__)__v__ = (__m__); \
91 model_write_action((void *) __p__, __x__, (uint64_t) __v__); \
92 __v__ = __v__; /* Silence clang (-Wunused-value) */ \
/* Non-ordered initialization write: like _ATOMIC_STORE_ but reported via
   model_init_action and taking no memory_order argument.  NOTE(review): the
   closing "__v__; })" is not visible in this chunk. */
96 #define _ATOMIC_INIT_( __a__, __m__ ) \
97 ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
98 __typeof__(__m__)__v__ = (__m__); \
99 model_init_action((void *) __p__, (uint64_t) __v__); \
100 __v__ = __v__; /* Silence clang (-Wunused-value) */ \
/* Atomic read-modify-write with fetch-op semantics: reads the old value via
   model_rmwr_action, evaluates __m__ once, applies "__copy__ __o__ __v__"
   (__o__ is a compound operator such as +=), publishes the result via
   model_rmw_action, and evaluates to the OLD value.  NOTE(review): the
   closing "__old__; })" is not visible in this chunk. */
103 #define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
104 ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
105 __typeof__((__a__)->__f__)__old__=(__typeof__((__a__)->__f__))model_rmwr_action((void *)__p__, __x__); \
106 __typeof__(__m__)__v__ = (__m__); \
107 __typeof__((__a__)->__f__)__copy__= __old__; \
108 __copy__ __o__ __v__; \
109 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__); \
110 __old__ = __old__; /* Silence clang (-Wunused-value) */ \
/* Weak CAS is modeled identically to strong CAS: spurious failure is not
   simulated by the model checker. */
113 /* No spurious failure for now */
114 #define _ATOMIC_CMPSWP_WEAK_ _ATOMIC_CMPSWP_
/* Strong compare-exchange: model_rmwrcas_action reads the current value; on
   a match the desired value __v__ is written (model_rmw_action) and the
   result is true; on a mismatch the RMW is cancelled (model_rmwc_action),
   the observed value is written back through the expected pointer __q__, and
   the result is false.  Note the single order __x__ is used for both the
   success and failure paths.  NOTE(review): the "bool __r__;" declaration
   and closing "__r__; })" are not visible in this chunk. */
116 #define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
117 ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
118 __typeof__(__e__)__q__ = (__e__); \
119 __typeof__(__m__)__v__ = (__m__); \
121 __typeof__((__a__)->__f__)__t__=(__typeof__((__a__)->__f__))model_rmwrcas_action((void *)__p__, __x__, (uint64_t) *__q__, sizeof((__a__)->__f__)); \
122 if (__t__ == *__q__ ) {; \
123 model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
124 else { model_rmwc_action((void *)__p__, __x__); *__q__ = __t__; __r__ = false;} \
/* Memory fence with order __x__, reported to the model checker via
   model_fence_action. */
127 #define _ATOMIC_FENCE_( __x__ ) \
128 ({ model_fence_action(__x__);})
/* C11 lock-free property macros: 0 = never, 1 = sometimes, 2 = always
   lock-free.  The conservative "sometimes" answer is reported for every
   type. */
131 #define ATOMIC_CHAR_LOCK_FREE 1
132 #define ATOMIC_CHAR16_T_LOCK_FREE 1
133 #define ATOMIC_CHAR32_T_LOCK_FREE 1
134 #define ATOMIC_WCHAR_T_LOCK_FREE 1
135 #define ATOMIC_SHORT_LOCK_FREE 1
136 #define ATOMIC_INT_LOCK_FREE 1
137 #define ATOMIC_LONG_LOCK_FREE 1
138 #define ATOMIC_LLONG_LOCK_FREE 1
139 #define ATOMIC_ADDRESS_LOCK_FREE 1
/* atomic_bool: C++-style atomic boolean.  Member functions delegate to the
   atomic_*_explicit friends; operator=(bool) stores and returns the assigned
   value.  NOTE(review): the default constructor is "= delete" here, unlike
   the other atomic types below which use "= default" -- confirm intended.
   NOTE(review): braces, the __f__ field and several "memory_order );"
   continuation lines are not visible in this chunk (lines appear elided). */
141 typedef struct atomic_bool
144 bool is_lock_free() const volatile;
145 void store( bool, memory_order = memory_order_seq_cst ) volatile;
146 bool load( memory_order = memory_order_seq_cst ) volatile;
147 bool exchange( bool, memory_order = memory_order_seq_cst ) volatile;
148 bool compare_exchange_weak ( bool&, bool, memory_order, memory_order ) volatile;
149 bool compare_exchange_strong ( bool&, bool, memory_order, memory_order ) volatile;
150 bool compare_exchange_weak ( bool&, bool,
151 memory_order = memory_order_seq_cst) volatile;
152 bool compare_exchange_strong ( bool&, bool,
153 memory_order = memory_order_seq_cst) volatile;
155 CPP0X( atomic_bool() = delete; )
156 CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) {
158 CPP0X( atomic_bool( const atomic_bool& ) = delete; )
159 atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
161 bool operator =( bool __v__ ) volatile
162 { store( __v__ ); return __v__; }
164 friend void atomic_store_explicit( volatile atomic_bool*, bool,
166 friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
167 friend bool atomic_exchange_explicit( volatile atomic_bool*, bool,
169 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_bool*, bool*, bool,
170 memory_order, memory_order );
171 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_bool*, bool*, bool,
172 memory_order, memory_order );
/* atomic_address: atomic void* with pointer arithmetic via ptrdiff_t.
   NOTE(review): operator+= / operator-= return the value fetch_add/fetch_sub
   yield (the PRE-modification pointer), unlike the integer atomics below
   whose compound operators return the updated value -- confirm intended.
   NOTE(review): braces, the __f__ field and several continuation lines are
   not visible in this chunk (lines appear elided). */
180 typedef struct atomic_address
183 bool is_lock_free() const volatile;
184 void store( void*, memory_order = memory_order_seq_cst ) volatile;
185 void* load( memory_order = memory_order_seq_cst ) volatile;
186 void* exchange( void*, memory_order = memory_order_seq_cst ) volatile;
187 bool compare_exchange_weak( void*&, void*, memory_order, memory_order ) volatile;
188 bool compare_exchange_strong( void*&, void*, memory_order, memory_order ) volatile;
189 bool compare_exchange_weak( void*&, void*,
190 memory_order = memory_order_seq_cst ) volatile;
191 bool compare_exchange_strong( void*&, void*,
192 memory_order = memory_order_seq_cst ) volatile;
193 void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
194 void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
196 CPP0X( atomic_address() = default; )
197 CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) {
199 CPP0X( atomic_address( const atomic_address& ) = delete; )
200 atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
202 void* operator =( void* __v__ ) volatile
203 { store( __v__ ); return __v__; }
205 void* operator +=( ptrdiff_t __v__ ) volatile
206 { return fetch_add( __v__ ); }
208 void* operator -=( ptrdiff_t __v__ ) volatile
209 { return fetch_sub( __v__ ); }
211 friend void atomic_store_explicit( volatile atomic_address*, void*,
213 friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
214 friend void* atomic_exchange_explicit( volatile atomic_address*, void*,
216 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_address*,
217 void**, void*, memory_order, memory_order );
218 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_address*,
219 void**, void*, memory_order, memory_order );
220 friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
222 friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
/* atomic_char: atomic char with the full fetch_add/sub/and/or/xor RMW set.
   Postfix ++/-- return the OLD value (plain fetch_*); prefix and compound
   operators (+=, -=, &=, |=, ^=) return the UPDATED value.  NOTE(review):
   braces, the __f__ field and several continuation lines are not visible in
   this chunk (lines appear elided). */
231 typedef struct atomic_char
234 bool is_lock_free() const volatile;
236 memory_order = memory_order_seq_cst ) volatile;
237 char load( memory_order = memory_order_seq_cst ) volatile;
239 memory_order = memory_order_seq_cst ) volatile;
240 bool compare_exchange_weak( char&, char,
241 memory_order, memory_order ) volatile;
242 bool compare_exchange_strong( char&, char,
243 memory_order, memory_order ) volatile;
244 bool compare_exchange_weak( char&, char,
245 memory_order = memory_order_seq_cst ) volatile;
246 bool compare_exchange_strong( char&, char,
247 memory_order = memory_order_seq_cst ) volatile;
248 char fetch_add( char,
249 memory_order = memory_order_seq_cst ) volatile;
250 char fetch_sub( char,
251 memory_order = memory_order_seq_cst ) volatile;
252 char fetch_and( char,
253 memory_order = memory_order_seq_cst ) volatile;
255 memory_order = memory_order_seq_cst ) volatile;
256 char fetch_xor( char,
257 memory_order = memory_order_seq_cst ) volatile;
259 CPP0X( atomic_char() = default; )
260 CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) {
262 CPP0X( atomic_char( const atomic_char& ) = delete; )
263 atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
265 char operator =( char __v__ ) volatile
266 { store( __v__ ); return __v__; }
268 char operator ++( int ) volatile
269 { return fetch_add( 1 ); }
271 char operator --( int ) volatile
272 { return fetch_sub( 1 ); }
274 char operator ++() volatile
275 { return fetch_add( 1 ) + 1; }
277 char operator --() volatile
278 { return fetch_sub( 1 ) - 1; }
280 char operator +=( char __v__ ) volatile
281 { return fetch_add( __v__ ) + __v__; }
283 char operator -=( char __v__ ) volatile
284 { return fetch_sub( __v__ ) - __v__; }
286 char operator &=( char __v__ ) volatile
287 { return fetch_and( __v__ ) & __v__; }
289 char operator |=( char __v__ ) volatile
290 { return fetch_or( __v__ ) | __v__; }
292 char operator ^=( char __v__ ) volatile
293 { return fetch_xor( __v__ ) ^ __v__; }
295 friend void atomic_store_explicit( volatile atomic_char*, char,
297 friend char atomic_load_explicit( volatile atomic_char*,
299 friend char atomic_exchange_explicit( volatile atomic_char*,
300 char, memory_order );
301 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_char*,
302 char*, char, memory_order, memory_order );
303 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_char*,
304 char*, char, memory_order, memory_order );
305 friend char atomic_fetch_add_explicit( volatile atomic_char*,
306 char, memory_order );
307 friend char atomic_fetch_sub_explicit( volatile atomic_char*,
308 char, memory_order );
309 friend char atomic_fetch_and_explicit( volatile atomic_char*,
310 char, memory_order );
311 friend char atomic_fetch_or_explicit( volatile atomic_char*,
312 char, memory_order );
313 friend char atomic_fetch_xor_explicit( volatile atomic_char*,
314 char, memory_order );
/* atomic_schar: same shape as atomic_char but for signed char.  Postfix
   ++/-- return the old value; prefix and compound operators return the
   updated value.  NOTE(review): braces, the __f__ field and continuation
   lines are not visible in this chunk (lines appear elided). */
322 typedef struct atomic_schar
325 bool is_lock_free() const volatile;
326 void store( signed char,
327 memory_order = memory_order_seq_cst ) volatile;
328 signed char load( memory_order = memory_order_seq_cst ) volatile;
329 signed char exchange( signed char,
330 memory_order = memory_order_seq_cst ) volatile;
331 bool compare_exchange_weak( signed char&, signed char,
332 memory_order, memory_order ) volatile;
333 bool compare_exchange_strong( signed char&, signed char,
334 memory_order, memory_order ) volatile;
335 bool compare_exchange_weak( signed char&, signed char,
336 memory_order = memory_order_seq_cst ) volatile;
337 bool compare_exchange_strong( signed char&, signed char,
338 memory_order = memory_order_seq_cst ) volatile;
339 signed char fetch_add( signed char,
340 memory_order = memory_order_seq_cst ) volatile;
341 signed char fetch_sub( signed char,
342 memory_order = memory_order_seq_cst ) volatile;
343 signed char fetch_and( signed char,
344 memory_order = memory_order_seq_cst ) volatile;
345 signed char fetch_or( signed char,
346 memory_order = memory_order_seq_cst ) volatile;
347 signed char fetch_xor( signed char,
348 memory_order = memory_order_seq_cst ) volatile;
350 CPP0X( atomic_schar() = default; )
351 CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) {
353 CPP0X( atomic_schar( const atomic_schar& ) = delete; )
354 atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
356 signed char operator =( signed char __v__ ) volatile
357 { store( __v__ ); return __v__; }
359 signed char operator ++( int ) volatile
360 { return fetch_add( 1 ); }
362 signed char operator --( int ) volatile
363 { return fetch_sub( 1 ); }
365 signed char operator ++() volatile
366 { return fetch_add( 1 ) + 1; }
368 signed char operator --() volatile
369 { return fetch_sub( 1 ) - 1; }
371 signed char operator +=( signed char __v__ ) volatile
372 { return fetch_add( __v__ ) + __v__; }
374 signed char operator -=( signed char __v__ ) volatile
375 { return fetch_sub( __v__ ) - __v__; }
377 signed char operator &=( signed char __v__ ) volatile
378 { return fetch_and( __v__ ) & __v__; }
380 signed char operator |=( signed char __v__ ) volatile
381 { return fetch_or( __v__ ) | __v__; }
383 signed char operator ^=( signed char __v__ ) volatile
384 { return fetch_xor( __v__ ) ^ __v__; }
386 friend void atomic_store_explicit( volatile atomic_schar*, signed char,
388 friend signed char atomic_load_explicit( volatile atomic_schar*,
390 friend signed char atomic_exchange_explicit( volatile atomic_schar*,
391 signed char, memory_order );
392 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_schar*,
393 signed char*, signed char, memory_order, memory_order );
394 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_schar*,
395 signed char*, signed char, memory_order, memory_order );
396 friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
397 signed char, memory_order );
398 friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
399 signed char, memory_order );
400 friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
401 signed char, memory_order );
402 friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
403 signed char, memory_order );
404 friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
405 signed char, memory_order );
/* atomic_uchar: unsigned char variant of the same template-free pattern.
   Postfix ++/-- return the old value; prefix and compound operators return
   the updated value.  NOTE(review): braces, the __f__ field and continuation
   lines are not visible in this chunk (lines appear elided). */
413 typedef struct atomic_uchar
416 bool is_lock_free() const volatile;
417 void store( unsigned char,
418 memory_order = memory_order_seq_cst ) volatile;
419 unsigned char load( memory_order = memory_order_seq_cst ) volatile;
420 unsigned char exchange( unsigned char,
421 memory_order = memory_order_seq_cst ) volatile;
422 bool compare_exchange_weak( unsigned char&, unsigned char,
423 memory_order, memory_order ) volatile;
424 bool compare_exchange_strong( unsigned char&, unsigned char,
425 memory_order, memory_order ) volatile;
426 bool compare_exchange_weak( unsigned char&, unsigned char,
427 memory_order = memory_order_seq_cst ) volatile;
428 bool compare_exchange_strong( unsigned char&, unsigned char,
429 memory_order = memory_order_seq_cst ) volatile;
430 unsigned char fetch_add( unsigned char,
431 memory_order = memory_order_seq_cst ) volatile;
432 unsigned char fetch_sub( unsigned char,
433 memory_order = memory_order_seq_cst ) volatile;
434 unsigned char fetch_and( unsigned char,
435 memory_order = memory_order_seq_cst ) volatile;
436 unsigned char fetch_or( unsigned char,
437 memory_order = memory_order_seq_cst ) volatile;
438 unsigned char fetch_xor( unsigned char,
439 memory_order = memory_order_seq_cst ) volatile;
441 CPP0X( atomic_uchar() = default; )
442 CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) {
444 CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
445 atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
447 unsigned char operator =( unsigned char __v__ ) volatile
448 { store( __v__ ); return __v__; }
450 unsigned char operator ++( int ) volatile
451 { return fetch_add( 1 ); }
453 unsigned char operator --( int ) volatile
454 { return fetch_sub( 1 ); }
456 unsigned char operator ++() volatile
457 { return fetch_add( 1 ) + 1; }
459 unsigned char operator --() volatile
460 { return fetch_sub( 1 ) - 1; }
462 unsigned char operator +=( unsigned char __v__ ) volatile
463 { return fetch_add( __v__ ) + __v__; }
465 unsigned char operator -=( unsigned char __v__ ) volatile
466 { return fetch_sub( __v__ ) - __v__; }
468 unsigned char operator &=( unsigned char __v__ ) volatile
469 { return fetch_and( __v__ ) & __v__; }
471 unsigned char operator |=( unsigned char __v__ ) volatile
472 { return fetch_or( __v__ ) | __v__; }
474 unsigned char operator ^=( unsigned char __v__ ) volatile
475 { return fetch_xor( __v__ ) ^ __v__; }
477 friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
479 friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
481 friend unsigned char atomic_exchange_explicit( volatile atomic_uchar*,
482 unsigned char, memory_order );
483 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uchar*,
484 unsigned char*, unsigned char, memory_order, memory_order );
485 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uchar*,
486 unsigned char*, unsigned char, memory_order, memory_order );
487 friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
488 unsigned char, memory_order );
489 friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
490 unsigned char, memory_order );
491 friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
492 unsigned char, memory_order );
493 friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
494 unsigned char, memory_order );
495 friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
496 unsigned char, memory_order );
/* atomic_short: short variant of the integer-atomic pattern.  Postfix ++/--
   return the old value; prefix and compound operators return the updated
   value.  NOTE(review): braces, the __f__ field and continuation lines are
   not visible in this chunk (lines appear elided). */
504 typedef struct atomic_short
507 bool is_lock_free() const volatile;
509 memory_order = memory_order_seq_cst ) volatile;
510 short load( memory_order = memory_order_seq_cst ) volatile;
511 short exchange( short,
512 memory_order = memory_order_seq_cst ) volatile;
513 bool compare_exchange_weak( short&, short,
514 memory_order, memory_order ) volatile;
515 bool compare_exchange_strong( short&, short,
516 memory_order, memory_order ) volatile;
517 bool compare_exchange_weak( short&, short,
518 memory_order = memory_order_seq_cst ) volatile;
519 bool compare_exchange_strong( short&, short,
520 memory_order = memory_order_seq_cst ) volatile;
521 short fetch_add( short,
522 memory_order = memory_order_seq_cst ) volatile;
523 short fetch_sub( short,
524 memory_order = memory_order_seq_cst ) volatile;
525 short fetch_and( short,
526 memory_order = memory_order_seq_cst ) volatile;
527 short fetch_or( short,
528 memory_order = memory_order_seq_cst ) volatile;
529 short fetch_xor( short,
530 memory_order = memory_order_seq_cst ) volatile;
532 CPP0X( atomic_short() = default; )
533 CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) {
535 CPP0X( atomic_short( const atomic_short& ) = delete; )
536 atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
538 short operator =( short __v__ ) volatile
539 { store( __v__ ); return __v__; }
541 short operator ++( int ) volatile
542 { return fetch_add( 1 ); }
544 short operator --( int ) volatile
545 { return fetch_sub( 1 ); }
547 short operator ++() volatile
548 { return fetch_add( 1 ) + 1; }
550 short operator --() volatile
551 { return fetch_sub( 1 ) - 1; }
553 short operator +=( short __v__ ) volatile
554 { return fetch_add( __v__ ) + __v__; }
556 short operator -=( short __v__ ) volatile
557 { return fetch_sub( __v__ ) - __v__; }
559 short operator &=( short __v__ ) volatile
560 { return fetch_and( __v__ ) & __v__; }
562 short operator |=( short __v__ ) volatile
563 { return fetch_or( __v__ ) | __v__; }
565 short operator ^=( short __v__ ) volatile
566 { return fetch_xor( __v__ ) ^ __v__; }
568 friend void atomic_store_explicit( volatile atomic_short*, short,
570 friend short atomic_load_explicit( volatile atomic_short*,
572 friend short atomic_exchange_explicit( volatile atomic_short*,
573 short, memory_order );
574 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_short*,
575 short*, short, memory_order, memory_order );
576 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_short*,
577 short*, short, memory_order, memory_order );
578 friend short atomic_fetch_add_explicit( volatile atomic_short*,
579 short, memory_order );
580 friend short atomic_fetch_sub_explicit( volatile atomic_short*,
581 short, memory_order );
582 friend short atomic_fetch_and_explicit( volatile atomic_short*,
583 short, memory_order );
584 friend short atomic_fetch_or_explicit( volatile atomic_short*,
585 short, memory_order );
586 friend short atomic_fetch_xor_explicit( volatile atomic_short*,
587 short, memory_order );
/* atomic_ushort: unsigned short variant.  Postfix ++/-- return the old
   value; prefix and compound operators return the updated value.  The data
   member __f__ is visible at the end of this span; braces and some
   continuation lines are not (lines appear elided) -- NOTE(review). */
595 typedef struct atomic_ushort
598 bool is_lock_free() const volatile;
599 void store( unsigned short,
600 memory_order = memory_order_seq_cst ) volatile;
601 unsigned short load( memory_order = memory_order_seq_cst ) volatile;
602 unsigned short exchange( unsigned short,
603 memory_order = memory_order_seq_cst ) volatile;
604 bool compare_exchange_weak( unsigned short&, unsigned short,
605 memory_order, memory_order ) volatile;
606 bool compare_exchange_strong( unsigned short&, unsigned short,
607 memory_order, memory_order ) volatile;
608 bool compare_exchange_weak( unsigned short&, unsigned short,
609 memory_order = memory_order_seq_cst ) volatile;
610 bool compare_exchange_strong( unsigned short&, unsigned short,
611 memory_order = memory_order_seq_cst ) volatile;
612 unsigned short fetch_add( unsigned short,
613 memory_order = memory_order_seq_cst ) volatile;
614 unsigned short fetch_sub( unsigned short,
615 memory_order = memory_order_seq_cst ) volatile;
616 unsigned short fetch_and( unsigned short,
617 memory_order = memory_order_seq_cst ) volatile;
618 unsigned short fetch_or( unsigned short,
619 memory_order = memory_order_seq_cst ) volatile;
620 unsigned short fetch_xor( unsigned short,
621 memory_order = memory_order_seq_cst ) volatile;
623 CPP0X( atomic_ushort() = default; )
624 CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) {
626 CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
627 atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
629 unsigned short operator =( unsigned short __v__ ) volatile
630 { store( __v__ ); return __v__; }
632 unsigned short operator ++( int ) volatile
633 { return fetch_add( 1 ); }
635 unsigned short operator --( int ) volatile
636 { return fetch_sub( 1 ); }
638 unsigned short operator ++() volatile
639 { return fetch_add( 1 ) + 1; }
641 unsigned short operator --() volatile
642 { return fetch_sub( 1 ) - 1; }
644 unsigned short operator +=( unsigned short __v__ ) volatile
645 { return fetch_add( __v__ ) + __v__; }
647 unsigned short operator -=( unsigned short __v__ ) volatile
648 { return fetch_sub( __v__ ) - __v__; }
650 unsigned short operator &=( unsigned short __v__ ) volatile
651 { return fetch_and( __v__ ) & __v__; }
653 unsigned short operator |=( unsigned short __v__ ) volatile
654 { return fetch_or( __v__ ) | __v__; }
656 unsigned short operator ^=( unsigned short __v__ ) volatile
657 { return fetch_xor( __v__ ) ^ __v__; }
659 friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
661 friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
663 friend unsigned short atomic_exchange_explicit( volatile atomic_ushort*,
664 unsigned short, memory_order );
665 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ushort*,
666 unsigned short*, unsigned short, memory_order, memory_order );
667 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ushort*,
668 unsigned short*, unsigned short, memory_order, memory_order );
669 friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
670 unsigned short, memory_order );
671 friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
672 unsigned short, memory_order );
673 friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
674 unsigned short, memory_order );
675 friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
676 unsigned short, memory_order );
677 friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
678 unsigned short, memory_order );
682 unsigned short __f__;
/* atomic_int: int variant.  Postfix ++/-- return the old value; prefix and
   compound operators return the updated value.  NOTE(review): several
   declaration lines (store/exchange/fetch_* first lines, braces, __f__) are
   not visible in this chunk (lines appear elided). */
686 typedef struct atomic_int
689 bool is_lock_free() const volatile;
691 memory_order = memory_order_seq_cst ) volatile;
692 int load( memory_order = memory_order_seq_cst ) volatile;
694 memory_order = memory_order_seq_cst ) volatile;
695 bool compare_exchange_weak( int&, int,
696 memory_order, memory_order ) volatile;
697 bool compare_exchange_strong( int&, int,
698 memory_order, memory_order ) volatile;
699 bool compare_exchange_weak( int&, int,
700 memory_order = memory_order_seq_cst ) volatile;
701 bool compare_exchange_strong( int&, int,
702 memory_order = memory_order_seq_cst ) volatile;
704 memory_order = memory_order_seq_cst ) volatile;
706 memory_order = memory_order_seq_cst ) volatile;
708 memory_order = memory_order_seq_cst ) volatile;
710 memory_order = memory_order_seq_cst ) volatile;
712 memory_order = memory_order_seq_cst ) volatile;
714 CPP0X( atomic_int() = default; )
715 CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) {
717 CPP0X( atomic_int( const atomic_int& ) = delete; )
718 atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
720 int operator =( int __v__ ) volatile
721 { store( __v__ ); return __v__; }
723 int operator ++( int ) volatile
724 { return fetch_add( 1 ); }
726 int operator --( int ) volatile
727 { return fetch_sub( 1 ); }
729 int operator ++() volatile
730 { return fetch_add( 1 ) + 1; }
732 int operator --() volatile
733 { return fetch_sub( 1 ) - 1; }
735 int operator +=( int __v__ ) volatile
736 { return fetch_add( __v__ ) + __v__; }
738 int operator -=( int __v__ ) volatile
739 { return fetch_sub( __v__ ) - __v__; }
741 int operator &=( int __v__ ) volatile
742 { return fetch_and( __v__ ) & __v__; }
744 int operator |=( int __v__ ) volatile
745 { return fetch_or( __v__ ) | __v__; }
747 int operator ^=( int __v__ ) volatile
748 { return fetch_xor( __v__ ) ^ __v__; }
750 friend void atomic_store_explicit( volatile atomic_int*, int,
752 friend int atomic_load_explicit( volatile atomic_int*,
754 friend int atomic_exchange_explicit( volatile atomic_int*,
756 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_int*,
757 int*, int, memory_order, memory_order );
758 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_int*,
759 int*, int, memory_order, memory_order );
760 friend int atomic_fetch_add_explicit( volatile atomic_int*,
762 friend int atomic_fetch_sub_explicit( volatile atomic_int*,
764 friend int atomic_fetch_and_explicit( volatile atomic_int*,
766 friend int atomic_fetch_or_explicit( volatile atomic_int*,
768 friend int atomic_fetch_xor_explicit( volatile atomic_int*,
/* atomic_uint: unsigned int variant.  Postfix ++/-- return the old value;
   prefix and compound operators return the updated value.  NOTE(review):
   braces, the __f__ field and continuation lines are not visible in this
   chunk (lines appear elided). */
777 typedef struct atomic_uint
780 bool is_lock_free() const volatile;
781 void store( unsigned int,
782 memory_order = memory_order_seq_cst ) volatile;
783 unsigned int load( memory_order = memory_order_seq_cst ) volatile;
784 unsigned int exchange( unsigned int,
785 memory_order = memory_order_seq_cst ) volatile;
786 bool compare_exchange_weak( unsigned int&, unsigned int,
787 memory_order, memory_order ) volatile;
788 bool compare_exchange_strong( unsigned int&, unsigned int,
789 memory_order, memory_order ) volatile;
790 bool compare_exchange_weak( unsigned int&, unsigned int,
791 memory_order = memory_order_seq_cst ) volatile;
792 bool compare_exchange_strong( unsigned int&, unsigned int,
793 memory_order = memory_order_seq_cst ) volatile;
794 unsigned int fetch_add( unsigned int,
795 memory_order = memory_order_seq_cst ) volatile;
796 unsigned int fetch_sub( unsigned int,
797 memory_order = memory_order_seq_cst ) volatile;
798 unsigned int fetch_and( unsigned int,
799 memory_order = memory_order_seq_cst ) volatile;
800 unsigned int fetch_or( unsigned int,
801 memory_order = memory_order_seq_cst ) volatile;
802 unsigned int fetch_xor( unsigned int,
803 memory_order = memory_order_seq_cst ) volatile;
805 CPP0X( atomic_uint() = default; )
806 CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) {
808 CPP0X( atomic_uint( const atomic_uint& ) = delete; )
809 atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
811 unsigned int operator =( unsigned int __v__ ) volatile
812 { store( __v__ ); return __v__; }
814 unsigned int operator ++( int ) volatile
815 { return fetch_add( 1 ); }
817 unsigned int operator --( int ) volatile
818 { return fetch_sub( 1 ); }
820 unsigned int operator ++() volatile
821 { return fetch_add( 1 ) + 1; }
823 unsigned int operator --() volatile
824 { return fetch_sub( 1 ) - 1; }
826 unsigned int operator +=( unsigned int __v__ ) volatile
827 { return fetch_add( __v__ ) + __v__; }
829 unsigned int operator -=( unsigned int __v__ ) volatile
830 { return fetch_sub( __v__ ) - __v__; }
832 unsigned int operator &=( unsigned int __v__ ) volatile
833 { return fetch_and( __v__ ) & __v__; }
835 unsigned int operator |=( unsigned int __v__ ) volatile
836 { return fetch_or( __v__ ) | __v__; }
838 unsigned int operator ^=( unsigned int __v__ ) volatile
839 { return fetch_xor( __v__ ) ^ __v__; }
841 friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
843 friend unsigned int atomic_load_explicit( volatile atomic_uint*,
845 friend unsigned int atomic_exchange_explicit( volatile atomic_uint*,
846 unsigned int, memory_order );
847 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uint*,
848 unsigned int*, unsigned int, memory_order, memory_order );
849 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uint*,
850 unsigned int*, unsigned int, memory_order, memory_order );
851 friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
852 unsigned int, memory_order );
853 friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
854 unsigned int, memory_order );
855 friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
856 unsigned int, memory_order );
857 friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
858 unsigned int, memory_order );
859 friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
860 unsigned int, memory_order );
/* atomic_long: long variant.  Postfix ++/-- return the old value; prefix and
   compound operators return the updated value.  NOTE(review): braces, the
   __f__ field and several first-declaration lines (store/exchange/fetch_or)
   are not visible in this chunk (lines appear elided). */
868 typedef struct atomic_long
871 bool is_lock_free() const volatile;
873 memory_order = memory_order_seq_cst ) volatile;
874 long load( memory_order = memory_order_seq_cst ) volatile;
876 memory_order = memory_order_seq_cst ) volatile;
877 bool compare_exchange_weak( long&, long,
878 memory_order, memory_order ) volatile;
879 bool compare_exchange_strong( long&, long,
880 memory_order, memory_order ) volatile;
881 bool compare_exchange_weak( long&, long,
882 memory_order = memory_order_seq_cst ) volatile;
883 bool compare_exchange_strong( long&, long,
884 memory_order = memory_order_seq_cst ) volatile;
885 long fetch_add( long,
886 memory_order = memory_order_seq_cst ) volatile;
887 long fetch_sub( long,
888 memory_order = memory_order_seq_cst ) volatile;
889 long fetch_and( long,
890 memory_order = memory_order_seq_cst ) volatile;
892 memory_order = memory_order_seq_cst ) volatile;
893 long fetch_xor( long,
894 memory_order = memory_order_seq_cst ) volatile;
896 CPP0X( atomic_long() = default; )
897 CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) {
899 CPP0X( atomic_long( const atomic_long& ) = delete; )
900 atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
902 long operator =( long __v__ ) volatile
903 { store( __v__ ); return __v__; }
905 long operator ++( int ) volatile
906 { return fetch_add( 1 ); }
908 long operator --( int ) volatile
909 { return fetch_sub( 1 ); }
911 long operator ++() volatile
912 { return fetch_add( 1 ) + 1; }
914 long operator --() volatile
915 { return fetch_sub( 1 ) - 1; }
917 long operator +=( long __v__ ) volatile
918 { return fetch_add( __v__ ) + __v__; }
920 long operator -=( long __v__ ) volatile
921 { return fetch_sub( __v__ ) - __v__; }
923 long operator &=( long __v__ ) volatile
924 { return fetch_and( __v__ ) & __v__; }
926 long operator |=( long __v__ ) volatile
927 { return fetch_or( __v__ ) | __v__; }
929 long operator ^=( long __v__ ) volatile
930 { return fetch_xor( __v__ ) ^ __v__; }
932 friend void atomic_store_explicit( volatile atomic_long*, long,
934 friend long atomic_load_explicit( volatile atomic_long*,
936 friend long atomic_exchange_explicit( volatile atomic_long*,
937 long, memory_order );
938 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_long*,
939 long*, long, memory_order, memory_order );
940 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_long*,
941 long*, long, memory_order, memory_order );
942 friend long atomic_fetch_add_explicit( volatile atomic_long*,
943 long, memory_order );
944 friend long atomic_fetch_sub_explicit( volatile atomic_long*,
945 long, memory_order );
946 friend long atomic_fetch_and_explicit( volatile atomic_long*,
947 long, memory_order );
948 friend long atomic_fetch_or_explicit( volatile atomic_long*,
949 long, memory_order );
950 friend long atomic_fetch_xor_explicit( volatile atomic_long*,
951 long, memory_order );
/* atomic_ulong: atomic wrapper for `unsigned long`; same shape as
   atomic_long above (member ops, operator sugar, C-interface friends).
   NOTE(review): extraction dropped the opening `{`, the `CPP0X` closing
   `} )` lines, and the trailing `} atomic_ulong;` — restore before use. */
959 typedef struct atomic_ulong
962 bool is_lock_free() const volatile;
963 void store( unsigned long,
964 memory_order = memory_order_seq_cst ) volatile;
965 unsigned long load( memory_order = memory_order_seq_cst ) volatile;
966 unsigned long exchange( unsigned long,
967 memory_order = memory_order_seq_cst ) volatile;
968 bool compare_exchange_weak( unsigned long&, unsigned long,
969 memory_order, memory_order ) volatile;
970 bool compare_exchange_strong( unsigned long&, unsigned long,
971 memory_order, memory_order ) volatile;
972 bool compare_exchange_weak( unsigned long&, unsigned long,
973 memory_order = memory_order_seq_cst ) volatile;
974 bool compare_exchange_strong( unsigned long&, unsigned long,
975 memory_order = memory_order_seq_cst ) volatile;
976 unsigned long fetch_add( unsigned long,
977 memory_order = memory_order_seq_cst ) volatile;
978 unsigned long fetch_sub( unsigned long,
979 memory_order = memory_order_seq_cst ) volatile;
980 unsigned long fetch_and( unsigned long,
981 memory_order = memory_order_seq_cst ) volatile;
982 unsigned long fetch_or( unsigned long,
983 memory_order = memory_order_seq_cst ) volatile;
984 unsigned long fetch_xor( unsigned long,
985 memory_order = memory_order_seq_cst ) volatile;
987 CPP0X( atomic_ulong() = default; )
988 CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) {
990 CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
991 atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
993 unsigned long operator =( unsigned long __v__ ) volatile
994 { store( __v__ ); return __v__; }
/* Post-forms return the old value; pre-forms and op= return the new value. */
996 unsigned long operator ++( int ) volatile
997 { return fetch_add( 1 ); }
999 unsigned long operator --( int ) volatile
1000 { return fetch_sub( 1 ); }
1002 unsigned long operator ++() volatile
1003 { return fetch_add( 1 ) + 1; }
1005 unsigned long operator --() volatile
1006 { return fetch_sub( 1 ) - 1; }
1008 unsigned long operator +=( unsigned long __v__ ) volatile
1009 { return fetch_add( __v__ ) + __v__; }
1011 unsigned long operator -=( unsigned long __v__ ) volatile
1012 { return fetch_sub( __v__ ) - __v__; }
1014 unsigned long operator &=( unsigned long __v__ ) volatile
1015 { return fetch_and( __v__ ) & __v__; }
1017 unsigned long operator |=( unsigned long __v__ ) volatile
1018 { return fetch_or( __v__ ) | __v__; }
1020 unsigned long operator ^=( unsigned long __v__ ) volatile
1021 { return fetch_xor( __v__ ) ^ __v__; }
/* C-interface friends; note the free functions access the __f__ field. */
1023 friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
1025 friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
1027 friend unsigned long atomic_exchange_explicit( volatile atomic_ulong*,
1028 unsigned long, memory_order );
1029 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ulong*,
1030 unsigned long*, unsigned long, memory_order, memory_order );
1031 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ulong*,
1032 unsigned long*, unsigned long, memory_order, memory_order );
1033 friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
1034 unsigned long, memory_order );
1035 friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
1036 unsigned long, memory_order );
1037 friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
1038 unsigned long, memory_order );
1039 friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
1040 unsigned long, memory_order );
1041 friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
1042 unsigned long, memory_order );
/* Backing storage read/written by all operations above. */
1046 unsigned long __f__;
/* atomic_llong: atomic wrapper for `long long`; same pattern as the
   other integral atomics in this header.
   NOTE(review): extraction dropped the opening `{`, the `CPP0X` closing
   `} )`, the `long long __f__;` field, and `} atomic_llong;`. */
1050 typedef struct atomic_llong
1053 bool is_lock_free() const volatile;
1054 void store( long long,
1055 memory_order = memory_order_seq_cst ) volatile;
1056 long long load( memory_order = memory_order_seq_cst ) volatile;
1057 long long exchange( long long,
1058 memory_order = memory_order_seq_cst ) volatile;
1059 bool compare_exchange_weak( long long&, long long,
1060 memory_order, memory_order ) volatile;
1061 bool compare_exchange_strong( long long&, long long,
1062 memory_order, memory_order ) volatile;
1063 bool compare_exchange_weak( long long&, long long,
1064 memory_order = memory_order_seq_cst ) volatile;
1065 bool compare_exchange_strong( long long&, long long,
1066 memory_order = memory_order_seq_cst ) volatile;
1067 long long fetch_add( long long,
1068 memory_order = memory_order_seq_cst ) volatile;
1069 long long fetch_sub( long long,
1070 memory_order = memory_order_seq_cst ) volatile;
1071 long long fetch_and( long long,
1072 memory_order = memory_order_seq_cst ) volatile;
1073 long long fetch_or( long long,
1074 memory_order = memory_order_seq_cst ) volatile;
1075 long long fetch_xor( long long,
1076 memory_order = memory_order_seq_cst ) volatile;
1078 CPP0X( atomic_llong() = default; )
1079 CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) {
1081 CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1082 atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
1084 long long operator =( long long __v__ ) volatile
1085 { store( __v__ ); return __v__; }
/* Post-forms return the old value; pre-forms and op= return the new value. */
1087 long long operator ++( int ) volatile
1088 { return fetch_add( 1 ); }
1090 long long operator --( int ) volatile
1091 { return fetch_sub( 1 ); }
1093 long long operator ++() volatile
1094 { return fetch_add( 1 ) + 1; }
1096 long long operator --() volatile
1097 { return fetch_sub( 1 ) - 1; }
1099 long long operator +=( long long __v__ ) volatile
1100 { return fetch_add( __v__ ) + __v__; }
1102 long long operator -=( long long __v__ ) volatile
1103 { return fetch_sub( __v__ ) - __v__; }
1105 long long operator &=( long long __v__ ) volatile
1106 { return fetch_and( __v__ ) & __v__; }
1108 long long operator |=( long long __v__ ) volatile
1109 { return fetch_or( __v__ ) | __v__; }
1111 long long operator ^=( long long __v__ ) volatile
1112 { return fetch_xor( __v__ ) ^ __v__; }
/* C-interface friends. */
1114 friend void atomic_store_explicit( volatile atomic_llong*, long long,
1116 friend long long atomic_load_explicit( volatile atomic_llong*,
1118 friend long long atomic_exchange_explicit( volatile atomic_llong*,
1119 long long, memory_order );
1120 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_llong*,
1121 long long*, long long, memory_order, memory_order );
1122 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_llong*,
1123 long long*, long long, memory_order, memory_order );
1124 friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1125 long long, memory_order );
1126 friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1127 long long, memory_order );
1128 friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1129 long long, memory_order );
1130 friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1131 long long, memory_order );
1132 friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1133 long long, memory_order );
/* atomic_ullong: atomic wrapper for `unsigned long long`; same pattern
   as the other integral atomics in this header.
   NOTE(review): extraction dropped the opening `{`, the `CPP0X` closing
   `} )`, and the trailing `} atomic_ullong;`. */
1141 typedef struct atomic_ullong
1144 bool is_lock_free() const volatile;
1145 void store( unsigned long long,
1146 memory_order = memory_order_seq_cst ) volatile;
1147 unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1148 unsigned long long exchange( unsigned long long,
1149 memory_order = memory_order_seq_cst ) volatile;
1150 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1151 memory_order, memory_order ) volatile;
1152 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1153 memory_order, memory_order ) volatile;
1154 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1155 memory_order = memory_order_seq_cst ) volatile;
1156 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1157 memory_order = memory_order_seq_cst ) volatile;
1158 unsigned long long fetch_add( unsigned long long,
1159 memory_order = memory_order_seq_cst ) volatile;
1160 unsigned long long fetch_sub( unsigned long long,
1161 memory_order = memory_order_seq_cst ) volatile;
1162 unsigned long long fetch_and( unsigned long long,
1163 memory_order = memory_order_seq_cst ) volatile;
1164 unsigned long long fetch_or( unsigned long long,
1165 memory_order = memory_order_seq_cst ) volatile;
1166 unsigned long long fetch_xor( unsigned long long,
1167 memory_order = memory_order_seq_cst ) volatile;
1169 CPP0X( atomic_ullong() = default; )
1170 CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) {
1172 CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1173 atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
1175 unsigned long long operator =( unsigned long long __v__ ) volatile
1176 { store( __v__ ); return __v__; }
/* Post-forms return the old value; pre-forms and op= return the new value. */
1178 unsigned long long operator ++( int ) volatile
1179 { return fetch_add( 1 ); }
1181 unsigned long long operator --( int ) volatile
1182 { return fetch_sub( 1 ); }
1184 unsigned long long operator ++() volatile
1185 { return fetch_add( 1 ) + 1; }
1187 unsigned long long operator --() volatile
1188 { return fetch_sub( 1 ) - 1; }
1190 unsigned long long operator +=( unsigned long long __v__ ) volatile
1191 { return fetch_add( __v__ ) + __v__; }
1193 unsigned long long operator -=( unsigned long long __v__ ) volatile
1194 { return fetch_sub( __v__ ) - __v__; }
1196 unsigned long long operator &=( unsigned long long __v__ ) volatile
1197 { return fetch_and( __v__ ) & __v__; }
1199 unsigned long long operator |=( unsigned long long __v__ ) volatile
1200 { return fetch_or( __v__ ) | __v__; }
1202 unsigned long long operator ^=( unsigned long long __v__ ) volatile
1203 { return fetch_xor( __v__ ) ^ __v__; }
/* C-interface friends. */
1205 friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1207 friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1209 friend unsigned long long atomic_exchange_explicit( volatile atomic_ullong*,
1210 unsigned long long, memory_order );
1211 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ullong*,
1212 unsigned long long*, unsigned long long, memory_order, memory_order );
1213 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ullong*,
1214 unsigned long long*, unsigned long long, memory_order, memory_order );
1215 friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1216 unsigned long long, memory_order );
1217 friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1218 unsigned long long, memory_order );
1219 friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1220 unsigned long long, memory_order );
1221 friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1222 unsigned long long, memory_order );
1223 friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1224 unsigned long long, memory_order );
/* Backing storage read/written by all operations above. */
1228 unsigned long long __f__;
/* Aliases mapping the <stdint.h>-style atomic names onto the concrete
   atomic structs defined above (least/fast-width, pointer-sized, size,
   ptrdiff and max-width families).
   NOTE(review): intptr/size/ptrdiff map to atomic_long/atomic_ulong and
   the 64-bit families to atomic_llong/atomic_ullong — these widths are
   platform assumptions baked into this header; confirm for the target. */
1232 typedef atomic_schar atomic_int_least8_t;
1233 typedef atomic_uchar atomic_uint_least8_t;
1234 typedef atomic_short atomic_int_least16_t;
1235 typedef atomic_ushort atomic_uint_least16_t;
1236 typedef atomic_int atomic_int_least32_t;
1237 typedef atomic_uint atomic_uint_least32_t;
1238 typedef atomic_llong atomic_int_least64_t;
1239 typedef atomic_ullong atomic_uint_least64_t;
1241 typedef atomic_schar atomic_int_fast8_t;
1242 typedef atomic_uchar atomic_uint_fast8_t;
1243 typedef atomic_short atomic_int_fast16_t;
1244 typedef atomic_ushort atomic_uint_fast16_t;
1245 typedef atomic_int atomic_int_fast32_t;
1246 typedef atomic_uint atomic_uint_fast32_t;
1247 typedef atomic_llong atomic_int_fast64_t;
1248 typedef atomic_ullong atomic_uint_fast64_t;
1250 typedef atomic_long atomic_intptr_t;
1251 typedef atomic_ulong atomic_uintptr_t;
1253 typedef atomic_long atomic_ssize_t;
1254 typedef atomic_ulong atomic_size_t;
1256 typedef atomic_long atomic_ptrdiff_t;
1258 typedef atomic_llong atomic_intmax_t;
1259 typedef atomic_ullong atomic_uintmax_t;
/* atomic_wchar_t: atomic wrapper for `wchar_t`; same pattern as the
   integral atomics above.
   NOTE(review): extraction dropped the opening `{`, the `CPP0X` closing
   `} )`, the `wchar_t __f__;` field, and `} atomic_wchar_t;`. See also
   the conflicting typedef of the same name just below this struct. */
1265 typedef struct atomic_wchar_t
1268 bool is_lock_free() const volatile;
1269 void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1270 wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1271 wchar_t exchange( wchar_t,
1272 memory_order = memory_order_seq_cst ) volatile;
1273 bool compare_exchange_weak( wchar_t&, wchar_t,
1274 memory_order, memory_order ) volatile;
1275 bool compare_exchange_strong( wchar_t&, wchar_t,
1276 memory_order, memory_order ) volatile;
1277 bool compare_exchange_weak( wchar_t&, wchar_t,
1278 memory_order = memory_order_seq_cst ) volatile;
1279 bool compare_exchange_strong( wchar_t&, wchar_t,
1280 memory_order = memory_order_seq_cst ) volatile;
1281 wchar_t fetch_add( wchar_t,
1282 memory_order = memory_order_seq_cst ) volatile;
1283 wchar_t fetch_sub( wchar_t,
1284 memory_order = memory_order_seq_cst ) volatile;
1285 wchar_t fetch_and( wchar_t,
1286 memory_order = memory_order_seq_cst ) volatile;
1287 wchar_t fetch_or( wchar_t,
1288 memory_order = memory_order_seq_cst ) volatile;
1289 wchar_t fetch_xor( wchar_t,
1290 memory_order = memory_order_seq_cst ) volatile;
1292 CPP0X( atomic_wchar_t() = default; )
1293 CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) {
1295 CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1296 atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
1298 wchar_t operator =( wchar_t __v__ ) volatile
1299 { store( __v__ ); return __v__; }
/* Post-forms return the old value; pre-forms and op= return the new value. */
1301 wchar_t operator ++( int ) volatile
1302 { return fetch_add( 1 ); }
1304 wchar_t operator --( int ) volatile
1305 { return fetch_sub( 1 ); }
1307 wchar_t operator ++() volatile
1308 { return fetch_add( 1 ) + 1; }
1310 wchar_t operator --() volatile
1311 { return fetch_sub( 1 ) - 1; }
1313 wchar_t operator +=( wchar_t __v__ ) volatile
1314 { return fetch_add( __v__ ) + __v__; }
1316 wchar_t operator -=( wchar_t __v__ ) volatile
1317 { return fetch_sub( __v__ ) - __v__; }
1319 wchar_t operator &=( wchar_t __v__ ) volatile
1320 { return fetch_and( __v__ ) & __v__; }
1322 wchar_t operator |=( wchar_t __v__ ) volatile
1323 { return fetch_or( __v__ ) | __v__; }
1325 wchar_t operator ^=( wchar_t __v__ ) volatile
1326 { return fetch_xor( __v__ ) ^ __v__; }
/* C-interface friends. */
1328 friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1330 friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1332 friend wchar_t atomic_exchange_explicit( volatile atomic_wchar_t*,
1333 wchar_t, memory_order );
1334 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_wchar_t*,
1335 wchar_t*, wchar_t, memory_order, memory_order );
1336 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_wchar_t*,
1337 wchar_t*, wchar_t, memory_order, memory_order );
1338 friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1339 wchar_t, memory_order );
1340 friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1341 wchar_t, memory_order );
1342 friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1343 wchar_t, memory_order );
1344 friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1345 wchar_t, memory_order );
1346 friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1347 wchar_t, memory_order );
/* Aliases for the char16_t/char32_t/wchar_t atomic names.
   NOTE(review): `atomic_wchar_t` here collides with the struct of the
   same name defined above — presumably these typedefs sit in a different
   preprocessor branch (e.g. plain-C or no-native-wchar builds) whose
   #if/#else lines were dropped by this extraction; confirm the guards. */
1357 typedef atomic_int_least16_t atomic_char16_t;
1358 typedef atomic_int_least32_t atomic_char32_t;
1359 typedef atomic_int_least32_t atomic_wchar_t;
/* Generic atomic<T> template: load/store/exchange/CAS only — no
   arithmetic fetch ops, since T may not be an integral type.
   NOTE(review): the `struct atomic` / `{` lines after the template header
   and the `T __f__;` field plus closing `};` were dropped by extraction. */
template< typename T >
1371 bool is_lock_free() const volatile;
1372 void store( T, memory_order = memory_order_seq_cst ) volatile;
1373 T load( memory_order = memory_order_seq_cst ) volatile;
1374 T exchange( T __v__, memory_order = memory_order_seq_cst ) volatile;
1375 bool compare_exchange_weak( T&, T, memory_order, memory_order ) volatile;
1376 bool compare_exchange_strong( T&, T, memory_order, memory_order ) volatile;
1377 bool compare_exchange_weak( T&, T, memory_order = memory_order_seq_cst ) volatile;
1378 bool compare_exchange_strong( T&, T, memory_order = memory_order_seq_cst ) volatile;
1380 CPP0X( atomic() = default; )
1381 CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) {
1383 CPP0X( atomic( const atomic& ) = delete; )
1384 atomic& operator =( const atomic& ) CPP0X(=delete);
1386 T operator =( T __v__ ) volatile
1387 { store( __v__ ); return __v__; }
/* Partial specialization atomic<T*>: layered on atomic_address, with
   typed load/exchange/CAS plus pointer arithmetic via fetch_add/fetch_sub
   (which take a ptrdiff_t element count).
   NOTE(review): extraction dropped the opening `{`, the `CPP0X` closing
   `} )`, and the closing `};`. Also suspicious: operator+= / operator-=
   take `T* __v__` and hand it to fetch_add/fetch_sub(ptrdiff_t) — looks
   like the parameter should be ptrdiff_t; compare against the upstream
   header before changing. */
template<typename T> struct atomic< T* > : atomic_address
1400 T* load( memory_order = memory_order_seq_cst ) volatile;
1401 T* exchange( T*, memory_order = memory_order_seq_cst ) volatile;
1402 bool compare_exchange_weak( T*&, T*, memory_order, memory_order ) volatile;
1403 bool compare_exchange_strong( T*&, T*, memory_order, memory_order ) volatile;
1404 bool compare_exchange_weak( T*&, T*,
1405 memory_order = memory_order_seq_cst ) volatile;
1406 bool compare_exchange_strong( T*&, T*,
1407 memory_order = memory_order_seq_cst ) volatile;
1408 T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1409 T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1411 CPP0X( atomic() = default; )
1412 CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) {
1414 CPP0X( atomic( const atomic& ) = delete; )
1415 atomic& operator =( const atomic& ) CPP0X(=delete);
1417 T* operator =( T* __v__ ) volatile
1418 { store( __v__ ); return __v__; }
/* Post-forms return the old pointer; pre-forms return the new pointer. */
1420 T* operator ++( int ) volatile
1421 { return fetch_add( 1 ); }
1423 T* operator --( int ) volatile
1424 { return fetch_sub( 1 ); }
1426 T* operator ++() volatile
1427 { return fetch_add( 1 ) + 1; }
1429 T* operator --() volatile
1430 { return fetch_sub( 1 ) - 1; }
1432 T* operator +=( T* __v__ ) volatile
1433 { return fetch_add( __v__ ) + __v__; }
1435 T* operator -=( T* __v__ ) volatile
1436 { return fetch_sub( __v__ ) - __v__; }
/* atomic<bool>: thin wrapper over atomic_bool; adds only assignment.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1444 template<> struct atomic< bool > : atomic_bool
1446 CPP0X( atomic() = default; )
1447 CPP0X( constexpr explicit atomic( bool __v__ )
1448 : atomic_bool( __v__ ) {
1450 CPP0X( atomic( const atomic& ) = delete; )
1451 atomic& operator =( const atomic& ) CPP0X(=delete);
1453 bool operator =( bool __v__ ) volatile
1454 { store( __v__ ); return __v__; }
/* atomic<void*>: thin wrapper over atomic_address; adds only assignment.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1458 template<> struct atomic< void* > : atomic_address
1460 CPP0X( atomic() = default; )
1461 CPP0X( constexpr explicit atomic( void* __v__ )
1462 : atomic_address( __v__ ) {
1464 CPP0X( atomic( const atomic& ) = delete; )
1465 atomic& operator =( const atomic& ) CPP0X(=delete);
1467 void* operator =( void* __v__ ) volatile
1468 { store( __v__ ); return __v__; }
/* atomic<char>: thin wrapper over atomic_char; adds only assignment.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1472 template<> struct atomic< char > : atomic_char
1474 CPP0X( atomic() = default; )
1475 CPP0X( constexpr explicit atomic( char __v__ )
1476 : atomic_char( __v__ ) {
1478 CPP0X( atomic( const atomic& ) = delete; )
1479 atomic& operator =( const atomic& ) CPP0X(=delete);
1481 char operator =( char __v__ ) volatile
1482 { store( __v__ ); return __v__; }
/* atomic<signed char>: thin wrapper over atomic_schar.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1486 template<> struct atomic< signed char > : atomic_schar
1488 CPP0X( atomic() = default; )
1489 CPP0X( constexpr explicit atomic( signed char __v__ )
1490 : atomic_schar( __v__ ) {
1492 CPP0X( atomic( const atomic& ) = delete; )
1493 atomic& operator =( const atomic& ) CPP0X(=delete);
1495 signed char operator =( signed char __v__ ) volatile
1496 { store( __v__ ); return __v__; }
/* atomic<unsigned char>: thin wrapper over atomic_uchar.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1500 template<> struct atomic< unsigned char > : atomic_uchar
1502 CPP0X( atomic() = default; )
1503 CPP0X( constexpr explicit atomic( unsigned char __v__ )
1504 : atomic_uchar( __v__ ) {
1506 CPP0X( atomic( const atomic& ) = delete; )
1507 atomic& operator =( const atomic& ) CPP0X(=delete);
1509 unsigned char operator =( unsigned char __v__ ) volatile
1510 { store( __v__ ); return __v__; }
/* atomic<short>: thin wrapper over atomic_short.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1514 template<> struct atomic< short > : atomic_short
1516 CPP0X( atomic() = default; )
1517 CPP0X( constexpr explicit atomic( short __v__ )
1518 : atomic_short( __v__ ) {
1520 CPP0X( atomic( const atomic& ) = delete; )
1521 atomic& operator =( const atomic& ) CPP0X(=delete);
1523 short operator =( short __v__ ) volatile
1524 { store( __v__ ); return __v__; }
/* atomic<unsigned short>: thin wrapper over atomic_ushort.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1528 template<> struct atomic< unsigned short > : atomic_ushort
1530 CPP0X( atomic() = default; )
1531 CPP0X( constexpr explicit atomic( unsigned short __v__ )
1532 : atomic_ushort( __v__ ) {
1534 CPP0X( atomic( const atomic& ) = delete; )
1535 atomic& operator =( const atomic& ) CPP0X(=delete);
1537 unsigned short operator =( unsigned short __v__ ) volatile
1538 { store( __v__ ); return __v__; }
/* atomic<int>: thin wrapper over atomic_int.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1542 template<> struct atomic< int > : atomic_int
1544 CPP0X( atomic() = default; )
1545 CPP0X( constexpr explicit atomic( int __v__ )
1546 : atomic_int( __v__ ) {
1548 CPP0X( atomic( const atomic& ) = delete; )
1549 atomic& operator =( const atomic& ) CPP0X(=delete);
1551 int operator =( int __v__ ) volatile
1552 { store( __v__ ); return __v__; }
/* atomic<unsigned int>: thin wrapper over atomic_uint.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1556 template<> struct atomic< unsigned int > : atomic_uint
1558 CPP0X( atomic() = default; )
1559 CPP0X( constexpr explicit atomic( unsigned int __v__ )
1560 : atomic_uint( __v__ ) {
1562 CPP0X( atomic( const atomic& ) = delete; )
1563 atomic& operator =( const atomic& ) CPP0X(=delete);
1565 unsigned int operator =( unsigned int __v__ ) volatile
1566 { store( __v__ ); return __v__; }
/* atomic<long>: thin wrapper over atomic_long.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1570 template<> struct atomic< long > : atomic_long
1572 CPP0X( atomic() = default; )
1573 CPP0X( constexpr explicit atomic( long __v__ )
1574 : atomic_long( __v__ ) {
1576 CPP0X( atomic( const atomic& ) = delete; )
1577 atomic& operator =( const atomic& ) CPP0X(=delete);
1579 long operator =( long __v__ ) volatile
1580 { store( __v__ ); return __v__; }
/* atomic<unsigned long>: thin wrapper over atomic_ulong.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1584 template<> struct atomic< unsigned long > : atomic_ulong
1586 CPP0X( atomic() = default; )
1587 CPP0X( constexpr explicit atomic( unsigned long __v__ )
1588 : atomic_ulong( __v__ ) {
1590 CPP0X( atomic( const atomic& ) = delete; )
1591 atomic& operator =( const atomic& ) CPP0X(=delete);
1593 unsigned long operator =( unsigned long __v__ ) volatile
1594 { store( __v__ ); return __v__; }
/* atomic<long long>: thin wrapper over atomic_llong.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1598 template<> struct atomic< long long > : atomic_llong
1600 CPP0X( atomic() = default; )
1601 CPP0X( constexpr explicit atomic( long long __v__ )
1602 : atomic_llong( __v__ ) {
1604 CPP0X( atomic( const atomic& ) = delete; )
1605 atomic& operator =( const atomic& ) CPP0X(=delete);
1607 long long operator =( long long __v__ ) volatile
1608 { store( __v__ ); return __v__; }
/* atomic<unsigned long long>: thin wrapper over atomic_ullong.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1612 template<> struct atomic< unsigned long long > : atomic_ullong
1614 CPP0X( atomic() = default; )
1615 CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1616 : atomic_ullong( __v__ ) {
1618 CPP0X( atomic( const atomic& ) = delete; )
1619 atomic& operator =( const atomic& ) CPP0X(=delete);
1621 unsigned long long operator =( unsigned long long __v__ ) volatile
1622 { store( __v__ ); return __v__; }
/* atomic<wchar_t>: thin wrapper over atomic_wchar_t.
   NOTE(review): opening `{`, `CPP0X` closing `} )` and closing `};`
   were dropped by this extraction. */
1626 template<> struct atomic< wchar_t > : atomic_wchar_t
1628 CPP0X( atomic() = default; )
1629 CPP0X( constexpr explicit atomic( wchar_t __v__ )
1630 : atomic_wchar_t( __v__ ) {
1632 CPP0X( atomic( const atomic& ) = delete; )
1633 atomic& operator =( const atomic& ) CPP0X(=delete);
1635 wchar_t operator =( wchar_t __v__ ) volatile
1636 { store( __v__ ); return __v__; }
/* C-interface free functions for atomic_bool. The *_explicit forms
   expand the model-checker macros (_ATOMIC_LOAD_/_STORE_/_MODIFY_/
   _CMPSWP_); the unsuffixed forms forward with memory_order_seq_cst.
   Grounded caveat: the __y__ (failure) memory order accepted by the
   compare_exchange *_explicit functions is never passed to the macros —
   only __x__ is used.
   NOTE(review): the bodies of atomic_is_lock_free and the closing `}`
   of the last two functions were dropped by this extraction. */
1646 inline bool atomic_is_lock_free
1647 ( const volatile atomic_bool* __a__ )
1650 inline bool atomic_load_explicit
1651 ( volatile atomic_bool* __a__, memory_order __x__ )
1652 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1654 inline bool atomic_load
1655 ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1657 inline void atomic_init
1658 ( volatile atomic_bool* __a__, bool __m__ )
1659 { _ATOMIC_INIT_( __a__, __m__ ); }
1661 inline void atomic_store_explicit
1662 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1663 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1665 inline void atomic_store
1666 ( volatile atomic_bool* __a__, bool __m__ )
1667 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1669 inline bool atomic_exchange_explicit
1670 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1671 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1673 inline bool atomic_exchange
1674 ( volatile atomic_bool* __a__, bool __m__ )
1675 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1677 inline bool atomic_compare_exchange_weak_explicit
1678 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1679 memory_order __x__, memory_order __y__ )
1680 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1682 inline bool atomic_compare_exchange_strong_explicit
1683 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1684 memory_order __x__, memory_order __y__ )
1685 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1687 inline bool atomic_compare_exchange_weak
1688 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1690 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1691 memory_order_seq_cst, memory_order_seq_cst );
1694 inline bool atomic_compare_exchange_strong
1695 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1697 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1698 memory_order_seq_cst, memory_order_seq_cst );
/* C-interface free functions for atomic_address (void*). Same pattern
   as the atomic_bool group: *_explicit forms expand the macros, the
   unsuffixed forms default to memory_order_seq_cst, and the failure
   order __y__ is accepted but not forwarded to _ATOMIC_CMPSWP_*.
   NOTE(review): the atomic_is_lock_free body and some closing braces
   were dropped by this extraction. */
1702 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1705 inline void* atomic_load_explicit
1706 ( volatile atomic_address* __a__, memory_order __x__ )
1707 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1709 inline void* atomic_load( volatile atomic_address* __a__ )
1710 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1712 inline void atomic_init
1713 ( volatile atomic_address* __a__, void* __m__ )
1714 { _ATOMIC_INIT_( __a__, __m__ ); }
1716 inline void atomic_store_explicit
1717 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1718 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1720 inline void atomic_store
1721 ( volatile atomic_address* __a__, void* __m__ )
1722 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1724 inline void* atomic_exchange_explicit
1725 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1726 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1728 inline void* atomic_exchange
1729 ( volatile atomic_address* __a__, void* __m__ )
1730 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1732 inline bool atomic_compare_exchange_weak_explicit
1733 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1734 memory_order __x__, memory_order __y__ )
1735 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1737 inline bool atomic_compare_exchange_strong_explicit
1738 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1739 memory_order __x__, memory_order __y__ )
1740 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1742 inline bool atomic_compare_exchange_weak
1743 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1745 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1746 memory_order_seq_cst, memory_order_seq_cst );
1749 inline bool atomic_compare_exchange_strong
1750 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1752 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1753 memory_order_seq_cst, memory_order_seq_cst );
/* C-interface free functions for atomic_char. Same pattern as the
   groups above; failure order __y__ accepted but not forwarded to the
   _ATOMIC_CMPSWP_* macros.
   NOTE(review): the atomic_is_lock_free body and some closing braces
   were dropped by this extraction. */
1757 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1760 inline char atomic_load_explicit
1761 ( volatile atomic_char* __a__, memory_order __x__ )
1762 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1764 inline char atomic_load( volatile atomic_char* __a__ )
1765 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1767 inline void atomic_init
1768 ( volatile atomic_char* __a__, char __m__ )
1769 { _ATOMIC_INIT_( __a__, __m__ ); }
1771 inline void atomic_store_explicit
1772 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1773 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1775 inline void atomic_store
1776 ( volatile atomic_char* __a__, char __m__ )
1777 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1779 inline char atomic_exchange_explicit
1780 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1781 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1783 inline char atomic_exchange
1784 ( volatile atomic_char* __a__, char __m__ )
1785 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1787 inline bool atomic_compare_exchange_weak_explicit
1788 ( volatile atomic_char* __a__, char* __e__, char __m__,
1789 memory_order __x__, memory_order __y__ )
1790 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1792 inline bool atomic_compare_exchange_strong_explicit
1793 ( volatile atomic_char* __a__, char* __e__, char __m__,
1794 memory_order __x__, memory_order __y__ )
1795 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1797 inline bool atomic_compare_exchange_weak
1798 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1800 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1801 memory_order_seq_cst, memory_order_seq_cst );
1804 inline bool atomic_compare_exchange_strong
1805 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1807 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1808 memory_order_seq_cst, memory_order_seq_cst );
/* C-interface free functions for atomic_schar (signed char). Same
   pattern as the groups above; failure order __y__ accepted but not
   forwarded to the _ATOMIC_CMPSWP_* macros.
   NOTE(review): the atomic_is_lock_free body and some closing braces
   were dropped by this extraction. */
1812 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1815 inline signed char atomic_load_explicit
1816 ( volatile atomic_schar* __a__, memory_order __x__ )
1817 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1819 inline signed char atomic_load( volatile atomic_schar* __a__ )
1820 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1822 inline void atomic_init
1823 ( volatile atomic_schar* __a__, signed char __m__ )
1824 { _ATOMIC_INIT_( __a__, __m__ ); }
1826 inline void atomic_store_explicit
1827 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1828 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1830 inline void atomic_store
1831 ( volatile atomic_schar* __a__, signed char __m__ )
1832 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1834 inline signed char atomic_exchange_explicit
1835 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1836 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1838 inline signed char atomic_exchange
1839 ( volatile atomic_schar* __a__, signed char __m__ )
1840 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1842 inline bool atomic_compare_exchange_weak_explicit
1843 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1844 memory_order __x__, memory_order __y__ )
1845 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1847 inline bool atomic_compare_exchange_strong_explicit
1848 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1849 memory_order __x__, memory_order __y__ )
1850 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1852 inline bool atomic_compare_exchange_weak
1853 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1855 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1856 memory_order_seq_cst, memory_order_seq_cst );
1859 inline bool atomic_compare_exchange_strong
1860 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1862 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1863 memory_order_seq_cst, memory_order_seq_cst );
1867 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1870 inline unsigned char atomic_load_explicit
1871 ( volatile atomic_uchar* __a__, memory_order __x__ )
1872 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1874 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1875 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1877 inline void atomic_init
1878 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1879 { _ATOMIC_INIT_( __a__, __m__ ); }
1881 inline void atomic_store_explicit
1882 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1883 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1885 inline void atomic_store
1886 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1887 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1889 inline unsigned char atomic_exchange_explicit
1890 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1891 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1893 inline unsigned char atomic_exchange
1894 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1895 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1897 inline bool atomic_compare_exchange_weak_explicit
1898 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1899 memory_order __x__, memory_order __y__ )
1900 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1902 inline bool atomic_compare_exchange_strong_explicit
1903 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1904 memory_order __x__, memory_order __y__ )
1905 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1907 inline bool atomic_compare_exchange_weak
1908 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1910 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1911 memory_order_seq_cst, memory_order_seq_cst );
1914 inline bool atomic_compare_exchange_strong
1915 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1917 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1918 memory_order_seq_cst, memory_order_seq_cst );
1922 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1925 inline short atomic_load_explicit
1926 ( volatile atomic_short* __a__, memory_order __x__ )
1927 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1929 inline short atomic_load( volatile atomic_short* __a__ )
1930 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1932 inline void atomic_init
1933 ( volatile atomic_short* __a__, short __m__ )
1934 { _ATOMIC_INIT_( __a__, __m__ ); }
1936 inline void atomic_store_explicit
1937 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1938 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1940 inline void atomic_store
1941 ( volatile atomic_short* __a__, short __m__ )
1942 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1944 inline short atomic_exchange_explicit
1945 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1946 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1948 inline short atomic_exchange
1949 ( volatile atomic_short* __a__, short __m__ )
1950 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1952 inline bool atomic_compare_exchange_weak_explicit
1953 ( volatile atomic_short* __a__, short* __e__, short __m__,
1954 memory_order __x__, memory_order __y__ )
1955 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1957 inline bool atomic_compare_exchange_strong_explicit
1958 ( volatile atomic_short* __a__, short* __e__, short __m__,
1959 memory_order __x__, memory_order __y__ )
1960 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1962 inline bool atomic_compare_exchange_weak
1963 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1965 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1966 memory_order_seq_cst, memory_order_seq_cst );
1969 inline bool atomic_compare_exchange_strong
1970 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1972 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1973 memory_order_seq_cst, memory_order_seq_cst );
1977 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1980 inline unsigned short atomic_load_explicit
1981 ( volatile atomic_ushort* __a__, memory_order __x__ )
1982 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1984 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1985 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1987 inline void atomic_init
1988 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1989 { _ATOMIC_INIT_( __a__, __m__ ); }
1991 inline void atomic_store_explicit
1992 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1993 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1995 inline void atomic_store
1996 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1997 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1999 inline unsigned short atomic_exchange_explicit
2000 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2001 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2003 inline unsigned short atomic_exchange
2004 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2005 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2007 inline bool atomic_compare_exchange_weak_explicit
2008 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
2009 memory_order __x__, memory_order __y__ )
2010 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2012 inline bool atomic_compare_exchange_strong_explicit
2013 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
2014 memory_order __x__, memory_order __y__ )
2015 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2017 inline bool atomic_compare_exchange_weak
2018 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
2020 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2021 memory_order_seq_cst, memory_order_seq_cst );
2024 inline bool atomic_compare_exchange_strong
2025 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
2027 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2028 memory_order_seq_cst, memory_order_seq_cst );
2032 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
2035 inline int atomic_load_explicit
2036 ( volatile atomic_int* __a__, memory_order __x__ )
2037 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2039 inline int atomic_load( volatile atomic_int* __a__ )
2040 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2042 inline void atomic_init
2043 ( volatile atomic_int* __a__, int __m__ )
2044 { _ATOMIC_INIT_( __a__, __m__ ); }
2046 inline void atomic_store_explicit
2047 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2048 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2050 inline void atomic_store
2051 ( volatile atomic_int* __a__, int __m__ )
2052 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2054 inline int atomic_exchange_explicit
2055 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2056 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2058 inline int atomic_exchange
2059 ( volatile atomic_int* __a__, int __m__ )
2060 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2062 inline bool atomic_compare_exchange_weak_explicit
2063 ( volatile atomic_int* __a__, int* __e__, int __m__,
2064 memory_order __x__, memory_order __y__ )
2065 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2067 inline bool atomic_compare_exchange_strong_explicit
2068 ( volatile atomic_int* __a__, int* __e__, int __m__,
2069 memory_order __x__, memory_order __y__ )
2070 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2072 inline bool atomic_compare_exchange_weak
2073 ( volatile atomic_int* __a__, int* __e__, int __m__ )
2075 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2076 memory_order_seq_cst, memory_order_seq_cst );
2079 inline bool atomic_compare_exchange_strong
2080 ( volatile atomic_int* __a__, int* __e__, int __m__ )
2082 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2083 memory_order_seq_cst, memory_order_seq_cst );
2087 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
2090 inline unsigned int atomic_load_explicit
2091 ( volatile atomic_uint* __a__, memory_order __x__ )
2092 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2094 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
2095 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2097 inline void atomic_init
2098 ( volatile atomic_uint* __a__, unsigned int __m__ )
2099 { _ATOMIC_INIT_( __a__, __m__ ); }
2101 inline void atomic_store_explicit
2102 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2103 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2105 inline void atomic_store
2106 ( volatile atomic_uint* __a__, unsigned int __m__ )
2107 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2109 inline unsigned int atomic_exchange_explicit
2110 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2111 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2113 inline unsigned int atomic_exchange
2114 ( volatile atomic_uint* __a__, unsigned int __m__ )
2115 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2117 inline bool atomic_compare_exchange_weak_explicit
2118 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2119 memory_order __x__, memory_order __y__ )
2120 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2122 inline bool atomic_compare_exchange_strong_explicit
2123 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2124 memory_order __x__, memory_order __y__ )
2125 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2127 inline bool atomic_compare_exchange_weak
2128 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2130 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2131 memory_order_seq_cst, memory_order_seq_cst );
2134 inline bool atomic_compare_exchange_strong
2135 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2137 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2138 memory_order_seq_cst, memory_order_seq_cst );
2142 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
2145 inline long atomic_load_explicit
2146 ( volatile atomic_long* __a__, memory_order __x__ )
2147 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2149 inline long atomic_load( volatile atomic_long* __a__ )
2150 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2152 inline void atomic_init
2153 ( volatile atomic_long* __a__, long __m__ )
2154 { _ATOMIC_INIT_( __a__, __m__ ); }
2156 inline void atomic_store_explicit
2157 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2158 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2160 inline void atomic_store
2161 ( volatile atomic_long* __a__, long __m__ )
2162 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2164 inline long atomic_exchange_explicit
2165 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2166 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2168 inline long atomic_exchange
2169 ( volatile atomic_long* __a__, long __m__ )
2170 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2172 inline bool atomic_compare_exchange_weak_explicit
2173 ( volatile atomic_long* __a__, long* __e__, long __m__,
2174 memory_order __x__, memory_order __y__ )
2175 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2177 inline bool atomic_compare_exchange_strong_explicit
2178 ( volatile atomic_long* __a__, long* __e__, long __m__,
2179 memory_order __x__, memory_order __y__ )
2180 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2182 inline bool atomic_compare_exchange_weak
2183 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2185 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2186 memory_order_seq_cst, memory_order_seq_cst );
2189 inline bool atomic_compare_exchange_strong
2190 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2192 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2193 memory_order_seq_cst, memory_order_seq_cst );
2197 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
2200 inline unsigned long atomic_load_explicit
2201 ( volatile atomic_ulong* __a__, memory_order __x__ )
2202 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2204 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
2205 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2207 inline void atomic_init
2208 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2209 { _ATOMIC_INIT_( __a__, __m__ ); }
2211 inline void atomic_store_explicit
2212 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2213 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2215 inline void atomic_store
2216 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2217 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2219 inline unsigned long atomic_exchange_explicit
2220 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2221 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2223 inline unsigned long atomic_exchange
2224 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2225 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2227 inline bool atomic_compare_exchange_weak_explicit
2228 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2229 memory_order __x__, memory_order __y__ )
2230 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2232 inline bool atomic_compare_exchange_strong_explicit
2233 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2234 memory_order __x__, memory_order __y__ )
2235 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2237 inline bool atomic_compare_exchange_weak
2238 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2240 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2241 memory_order_seq_cst, memory_order_seq_cst );
2244 inline bool atomic_compare_exchange_strong
2245 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2247 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2248 memory_order_seq_cst, memory_order_seq_cst );
2252 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
2255 inline long long atomic_load_explicit
2256 ( volatile atomic_llong* __a__, memory_order __x__ )
2257 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2259 inline long long atomic_load( volatile atomic_llong* __a__ )
2260 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2262 inline void atomic_init
2263 ( volatile atomic_llong* __a__, long long __m__ )
2264 { _ATOMIC_INIT_( __a__, __m__ ); }
2266 inline void atomic_store_explicit
2267 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2268 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2270 inline void atomic_store
2271 ( volatile atomic_llong* __a__, long long __m__ )
2272 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2274 inline long long atomic_exchange_explicit
2275 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2276 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2278 inline long long atomic_exchange
2279 ( volatile atomic_llong* __a__, long long __m__ )
2280 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2282 inline bool atomic_compare_exchange_weak_explicit
2283 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2284 memory_order __x__, memory_order __y__ )
2285 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2287 inline bool atomic_compare_exchange_strong_explicit
2288 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2289 memory_order __x__, memory_order __y__ )
2290 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2292 inline bool atomic_compare_exchange_weak
2293 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2295 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2296 memory_order_seq_cst, memory_order_seq_cst );
2299 inline bool atomic_compare_exchange_strong
2300 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2302 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2303 memory_order_seq_cst, memory_order_seq_cst );
2307 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
2310 inline unsigned long long atomic_load_explicit
2311 ( volatile atomic_ullong* __a__, memory_order __x__ )
2312 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2314 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2315 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2317 inline void atomic_init
2318 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2319 { _ATOMIC_INIT_( __a__, __m__ ); }
2321 inline void atomic_store_explicit
2322 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2323 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2325 inline void atomic_store
2326 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2327 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2329 inline unsigned long long atomic_exchange_explicit
2330 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2331 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2333 inline unsigned long long atomic_exchange
2334 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2335 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2337 inline bool atomic_compare_exchange_weak_explicit
2338 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2339 memory_order __x__, memory_order __y__ )
2340 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2342 inline bool atomic_compare_exchange_strong_explicit
2343 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2344 memory_order __x__, memory_order __y__ )
2345 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2347 inline bool atomic_compare_exchange_weak
2348 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2350 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2351 memory_order_seq_cst, memory_order_seq_cst );
2354 inline bool atomic_compare_exchange_strong
2355 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2357 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2358 memory_order_seq_cst, memory_order_seq_cst );
2362 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
2365 inline wchar_t atomic_load_explicit
2366 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2367 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2369 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2370 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2372 inline void atomic_init
2373 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2374 { _ATOMIC_INIT_( __a__, __m__ ); }
2376 inline void atomic_store_explicit
2377 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2378 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2380 inline void atomic_store
2381 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2382 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2384 inline wchar_t atomic_exchange_explicit
2385 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2386 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2388 inline wchar_t atomic_exchange
2389 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2390 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2392 inline bool atomic_compare_exchange_weak_explicit
2393 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2394 memory_order __x__, memory_order __y__ )
2395 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2397 inline bool atomic_compare_exchange_strong_explicit
2398 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2399 memory_order __x__, memory_order __y__ )
2400 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2402 inline bool atomic_compare_exchange_weak
2403 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2405 return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2406 memory_order_seq_cst, memory_order_seq_cst );
2409 inline bool atomic_compare_exchange_strong
2410 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2412 return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2413 memory_order_seq_cst, memory_order_seq_cst );
2417 inline void* atomic_fetch_add_explicit
2418 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2420 volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__);
2421 __typeof__((__a__)->__f__)__old__=(__typeof__((__a__)->__f__))model_rmwr_action((void *)__p__, __x__);
2422 __typeof__((__a__)->__f__)__copy__= __old__;
2423 __copy__ = (void *) (((char *)__copy__) + __m__);
2424 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__);
2428 inline void* atomic_fetch_add
2429 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2430 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2433 inline void* atomic_fetch_sub_explicit
2434 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2436 volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__);
2437 __typeof__((__a__)->__f__)__old__=(__typeof__((__a__)->__f__))model_rmwr_action((void *)__p__, __x__);
2438 __typeof__((__a__)->__f__)__copy__= __old__;
2439 __copy__ = (void *) (((char *)__copy__) - __m__);
2440 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__);
2444 inline void* atomic_fetch_sub
2445 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2446 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2448 inline char atomic_fetch_add_explicit
2449 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2450 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2452 inline char atomic_fetch_add
2453 ( volatile atomic_char* __a__, char __m__ )
2454 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2457 inline char atomic_fetch_sub_explicit
2458 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2459 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2461 inline char atomic_fetch_sub
2462 ( volatile atomic_char* __a__, char __m__ )
2463 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2466 inline char atomic_fetch_and_explicit
2467 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2468 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2470 inline char atomic_fetch_and
2471 ( volatile atomic_char* __a__, char __m__ )
2472 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2475 inline char atomic_fetch_or_explicit
2476 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2477 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2479 inline char atomic_fetch_or
2480 ( volatile atomic_char* __a__, char __m__ )
2481 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2484 inline char atomic_fetch_xor_explicit
2485 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2486 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2488 inline char atomic_fetch_xor
2489 ( volatile atomic_char* __a__, char __m__ )
2490 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2493 inline signed char atomic_fetch_add_explicit
2494 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2495 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2497 inline signed char atomic_fetch_add
2498 ( volatile atomic_schar* __a__, signed char __m__ )
2499 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2502 inline signed char atomic_fetch_sub_explicit
2503 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2504 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2506 inline signed char atomic_fetch_sub
2507 ( volatile atomic_schar* __a__, signed char __m__ )
2508 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2511 inline signed char atomic_fetch_and_explicit
2512 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2513 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2515 inline signed char atomic_fetch_and
2516 ( volatile atomic_schar* __a__, signed char __m__ )
2517 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2520 inline signed char atomic_fetch_or_explicit
2521 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2522 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2524 inline signed char atomic_fetch_or
2525 ( volatile atomic_schar* __a__, signed char __m__ )
2526 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2529 inline signed char atomic_fetch_xor_explicit
2530 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2531 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2533 inline signed char atomic_fetch_xor
2534 ( volatile atomic_schar* __a__, signed char __m__ )
2535 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2538 inline unsigned char atomic_fetch_add_explicit
2539 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2540 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2542 inline unsigned char atomic_fetch_add
2543 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2544 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2547 inline unsigned char atomic_fetch_sub_explicit
2548 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2549 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2551 inline unsigned char atomic_fetch_sub
2552 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2553 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2556 inline unsigned char atomic_fetch_and_explicit
2557 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2558 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2560 inline unsigned char atomic_fetch_and
2561 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2562 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2565 inline unsigned char atomic_fetch_or_explicit
2566 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2567 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2569 inline unsigned char atomic_fetch_or
2570 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2571 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2574 inline unsigned char atomic_fetch_xor_explicit
2575 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2576 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2578 inline unsigned char atomic_fetch_xor
2579 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2580 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2583 inline short atomic_fetch_add_explicit
2584 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2585 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2587 inline short atomic_fetch_add
2588 ( volatile atomic_short* __a__, short __m__ )
2589 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2592 inline short atomic_fetch_sub_explicit
2593 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2594 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2596 inline short atomic_fetch_sub
2597 ( volatile atomic_short* __a__, short __m__ )
2598 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2601 inline short atomic_fetch_and_explicit
2602 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2603 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2605 inline short atomic_fetch_and
2606 ( volatile atomic_short* __a__, short __m__ )
2607 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2610 inline short atomic_fetch_or_explicit
2611 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2612 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2614 inline short atomic_fetch_or
2615 ( volatile atomic_short* __a__, short __m__ )
2616 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2619 inline short atomic_fetch_xor_explicit
2620 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2621 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2623 inline short atomic_fetch_xor
2624 ( volatile atomic_short* __a__, short __m__ )
2625 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2628 inline unsigned short atomic_fetch_add_explicit
2629 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2630 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2632 inline unsigned short atomic_fetch_add
2633 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2634 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2637 inline unsigned short atomic_fetch_sub_explicit
2638 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2639 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2641 inline unsigned short atomic_fetch_sub
2642 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2643 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2646 inline unsigned short atomic_fetch_and_explicit
2647 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2648 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2650 inline unsigned short atomic_fetch_and
2651 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2652 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2655 inline unsigned short atomic_fetch_or_explicit
2656 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2657 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2659 inline unsigned short atomic_fetch_or
2660 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2661 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2664 inline unsigned short atomic_fetch_xor_explicit
2665 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2666 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2668 inline unsigned short atomic_fetch_xor
2669 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2670 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_int fetch-and-modify operations.  *_explicit forms apply the
 * read-modify-write via _ATOMIC_MODIFY_ with the given memory order;
 * plain forms delegate with memory_order_seq_cst. */

inline int atomic_fetch_add_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline int atomic_fetch_add
( volatile atomic_int* __a__, int __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline int atomic_fetch_sub_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline int atomic_fetch_sub
( volatile atomic_int* __a__, int __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline int atomic_fetch_and_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline int atomic_fetch_and
( volatile atomic_int* __a__, int __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline int atomic_fetch_or_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline int atomic_fetch_or
( volatile atomic_int* __a__, int __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline int atomic_fetch_xor_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline int atomic_fetch_xor
( volatile atomic_int* __a__, int __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_uint fetch-and-modify operations.  *_explicit forms apply the
 * read-modify-write via _ATOMIC_MODIFY_ with the given memory order;
 * plain forms delegate with memory_order_seq_cst. */

inline unsigned int atomic_fetch_add_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline unsigned int atomic_fetch_add
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned int atomic_fetch_sub_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline unsigned int atomic_fetch_sub
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned int atomic_fetch_and_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline unsigned int atomic_fetch_and
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned int atomic_fetch_or_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline unsigned int atomic_fetch_or
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned int atomic_fetch_xor_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline unsigned int atomic_fetch_xor
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_long fetch-and-modify operations.  *_explicit forms apply the
 * read-modify-write via _ATOMIC_MODIFY_ with the given memory order;
 * plain forms delegate with memory_order_seq_cst. */

inline long atomic_fetch_add_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline long atomic_fetch_add
( volatile atomic_long* __a__, long __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline long atomic_fetch_sub_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline long atomic_fetch_sub
( volatile atomic_long* __a__, long __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline long atomic_fetch_and_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline long atomic_fetch_and
( volatile atomic_long* __a__, long __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline long atomic_fetch_or_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline long atomic_fetch_or
( volatile atomic_long* __a__, long __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline long atomic_fetch_xor_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline long atomic_fetch_xor
( volatile atomic_long* __a__, long __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_ulong fetch-and-modify operations.  *_explicit forms apply the
 * read-modify-write via _ATOMIC_MODIFY_ with the given memory order;
 * plain forms delegate with memory_order_seq_cst. */

inline unsigned long atomic_fetch_add_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline unsigned long atomic_fetch_add
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned long atomic_fetch_sub_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline unsigned long atomic_fetch_sub
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned long atomic_fetch_and_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline unsigned long atomic_fetch_and
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned long atomic_fetch_or_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline unsigned long atomic_fetch_or
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned long atomic_fetch_xor_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline unsigned long atomic_fetch_xor
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_llong fetch-and-modify operations.  *_explicit forms apply the
 * read-modify-write via _ATOMIC_MODIFY_ with the given memory order;
 * plain forms delegate with memory_order_seq_cst. */

inline long long atomic_fetch_add_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline long long atomic_fetch_add
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline long long atomic_fetch_sub_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline long long atomic_fetch_sub
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline long long atomic_fetch_and_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline long long atomic_fetch_and
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline long long atomic_fetch_or_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline long long atomic_fetch_or
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline long long atomic_fetch_xor_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline long long atomic_fetch_xor
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_ullong fetch-and-modify operations.  *_explicit forms apply the
 * read-modify-write via _ATOMIC_MODIFY_ with the given memory order;
 * plain forms delegate with memory_order_seq_cst. */

inline unsigned long long atomic_fetch_add_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline unsigned long long atomic_fetch_add
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned long long atomic_fetch_sub_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline unsigned long long atomic_fetch_sub
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned long long atomic_fetch_and_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline unsigned long long atomic_fetch_and
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned long long atomic_fetch_or_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline unsigned long long atomic_fetch_or
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned long long atomic_fetch_xor_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline unsigned long long atomic_fetch_xor
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_wchar_t fetch-and-modify operations.  *_explicit forms apply the
 * read-modify-write via _ATOMIC_MODIFY_ with the given memory order;
 * plain forms delegate with memory_order_seq_cst. */

inline wchar_t atomic_fetch_add_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline wchar_t atomic_fetch_add
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline wchar_t atomic_fetch_sub_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline wchar_t atomic_fetch_sub
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline wchar_t atomic_fetch_and_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline wchar_t atomic_fetch_and
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline wchar_t atomic_fetch_or_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline wchar_t atomic_fetch_or
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline wchar_t atomic_fetch_xor_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline wchar_t atomic_fetch_xor
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2991 #define atomic_is_lock_free( __a__ ) \
/* Generic, type-agnostic forms of the atomic operations (the interface
 * used from C).  Each macro forwards to the corresponding _ATOMIC_*_
 * implementation macro defined earlier in this header; the non-_explicit
 * forms supply memory_order_seq_cst. */

#define atomic_load( __a__ ) \
_ATOMIC_LOAD_( __a__, memory_order_seq_cst )

#define atomic_load_explicit( __a__, __x__ ) \
_ATOMIC_LOAD_( __a__, __x__ )

#define atomic_init( __a__, __m__ ) \
_ATOMIC_INIT_( __a__, __m__ )

#define atomic_store( __a__, __m__ ) \
_ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )

#define atomic_store_explicit( __a__, __m__, __x__ ) \
_ATOMIC_STORE_( __a__, __m__, __x__ )

/* Exchange is modeled as plain assignment (=) applied atomically. */
#define atomic_exchange( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )

#define atomic_exchange_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, __x__ )

#define atomic_compare_exchange_weak( __a__, __e__, __m__ ) \
_ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, memory_order_seq_cst )

#define atomic_compare_exchange_strong( __a__, __e__, __m__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )

/* NOTE: the failure memory order __y__ is accepted but discarded here --
 * only the success order __x__ reaches the implementation macro. */
#define atomic_compare_exchange_weak_explicit( __a__, __e__, __m__, __x__, __y__ ) \
_ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ )

#define atomic_compare_exchange_strong_explicit( __a__, __e__, __m__, __x__, __y__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )

/* Fetch-and-op macros: read-modify-write with the given compound
 * assignment operator. */
#define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )

#define atomic_fetch_add( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )

#define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )

#define atomic_fetch_sub( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )

#define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )

#define atomic_fetch_and( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )

#define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )

#define atomic_fetch_or( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )

#define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )

#define atomic_fetch_xor( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
3069 inline bool atomic_bool::is_lock_free() const volatile
3072 inline void atomic_bool::store
3073 ( bool __m__, memory_order __x__ ) volatile
3074 { atomic_store_explicit( this, __m__, __x__ ); }
3076 inline bool atomic_bool::load
3077 ( memory_order __x__ ) volatile
3078 { return atomic_load_explicit( this, __x__ ); }
3080 inline bool atomic_bool::exchange
3081 ( bool __m__, memory_order __x__ ) volatile
3082 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3084 inline bool atomic_bool::compare_exchange_weak
3085 ( bool& __e__, bool __m__,
3086 memory_order __x__, memory_order __y__ ) volatile
3087 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3089 inline bool atomic_bool::compare_exchange_strong
3090 ( bool& __e__, bool __m__,
3091 memory_order __x__, memory_order __y__ ) volatile
3092 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3094 inline bool atomic_bool::compare_exchange_weak
3095 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3097 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3098 __x__ == memory_order_acq_rel ? memory_order_acquire :
3099 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3102 inline bool atomic_bool::compare_exchange_strong
3103 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3105 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3106 __x__ == memory_order_acq_rel ? memory_order_acquire :
3107 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3111 inline bool atomic_address::is_lock_free() const volatile
3114 inline void atomic_address::store
3115 ( void* __m__, memory_order __x__ ) volatile
3116 { atomic_store_explicit( this, __m__, __x__ ); }
3118 inline void* atomic_address::load
3119 ( memory_order __x__ ) volatile
3120 { return atomic_load_explicit( this, __x__ ); }
3122 inline void* atomic_address::exchange
3123 ( void* __m__, memory_order __x__ ) volatile
3124 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3126 inline bool atomic_address::compare_exchange_weak
3127 ( void*& __e__, void* __m__,
3128 memory_order __x__, memory_order __y__ ) volatile
3129 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3131 inline bool atomic_address::compare_exchange_strong
3132 ( void*& __e__, void* __m__,
3133 memory_order __x__, memory_order __y__ ) volatile
3134 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3136 inline bool atomic_address::compare_exchange_weak
3137 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3139 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3140 __x__ == memory_order_acq_rel ? memory_order_acquire :
3141 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3144 inline bool atomic_address::compare_exchange_strong
3145 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3147 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3148 __x__ == memory_order_acq_rel ? memory_order_acquire :
3149 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3153 inline bool atomic_char::is_lock_free() const volatile
3156 inline void atomic_char::store
3157 ( char __m__, memory_order __x__ ) volatile
3158 { atomic_store_explicit( this, __m__, __x__ ); }
3160 inline char atomic_char::load
3161 ( memory_order __x__ ) volatile
3162 { return atomic_load_explicit( this, __x__ ); }
3164 inline char atomic_char::exchange
3165 ( char __m__, memory_order __x__ ) volatile
3166 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3168 inline bool atomic_char::compare_exchange_weak
3169 ( char& __e__, char __m__,
3170 memory_order __x__, memory_order __y__ ) volatile
3171 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3173 inline bool atomic_char::compare_exchange_strong
3174 ( char& __e__, char __m__,
3175 memory_order __x__, memory_order __y__ ) volatile
3176 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3178 inline bool atomic_char::compare_exchange_weak
3179 ( char& __e__, char __m__, memory_order __x__ ) volatile
3181 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3182 __x__ == memory_order_acq_rel ? memory_order_acquire :
3183 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3186 inline bool atomic_char::compare_exchange_strong
3187 ( char& __e__, char __m__, memory_order __x__ ) volatile
3189 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3190 __x__ == memory_order_acq_rel ? memory_order_acquire :
3191 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3195 inline bool atomic_schar::is_lock_free() const volatile
3198 inline void atomic_schar::store
3199 ( signed char __m__, memory_order __x__ ) volatile
3200 { atomic_store_explicit( this, __m__, __x__ ); }
3202 inline signed char atomic_schar::load
3203 ( memory_order __x__ ) volatile
3204 { return atomic_load_explicit( this, __x__ ); }
3206 inline signed char atomic_schar::exchange
3207 ( signed char __m__, memory_order __x__ ) volatile
3208 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3210 inline bool atomic_schar::compare_exchange_weak
3211 ( signed char& __e__, signed char __m__,
3212 memory_order __x__, memory_order __y__ ) volatile
3213 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3215 inline bool atomic_schar::compare_exchange_strong
3216 ( signed char& __e__, signed char __m__,
3217 memory_order __x__, memory_order __y__ ) volatile
3218 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3220 inline bool atomic_schar::compare_exchange_weak
3221 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3223 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3224 __x__ == memory_order_acq_rel ? memory_order_acquire :
3225 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3228 inline bool atomic_schar::compare_exchange_strong
3229 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3231 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3232 __x__ == memory_order_acq_rel ? memory_order_acquire :
3233 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3237 inline bool atomic_uchar::is_lock_free() const volatile
3240 inline void atomic_uchar::store
3241 ( unsigned char __m__, memory_order __x__ ) volatile
3242 { atomic_store_explicit( this, __m__, __x__ ); }
3244 inline unsigned char atomic_uchar::load
3245 ( memory_order __x__ ) volatile
3246 { return atomic_load_explicit( this, __x__ ); }
3248 inline unsigned char atomic_uchar::exchange
3249 ( unsigned char __m__, memory_order __x__ ) volatile
3250 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3252 inline bool atomic_uchar::compare_exchange_weak
3253 ( unsigned char& __e__, unsigned char __m__,
3254 memory_order __x__, memory_order __y__ ) volatile
3255 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3257 inline bool atomic_uchar::compare_exchange_strong
3258 ( unsigned char& __e__, unsigned char __m__,
3259 memory_order __x__, memory_order __y__ ) volatile
3260 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3262 inline bool atomic_uchar::compare_exchange_weak
3263 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3265 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3266 __x__ == memory_order_acq_rel ? memory_order_acquire :
3267 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3270 inline bool atomic_uchar::compare_exchange_strong
3271 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3273 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3274 __x__ == memory_order_acq_rel ? memory_order_acquire :
3275 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3279 inline bool atomic_short::is_lock_free() const volatile
3282 inline void atomic_short::store
3283 ( short __m__, memory_order __x__ ) volatile
3284 { atomic_store_explicit( this, __m__, __x__ ); }
3286 inline short atomic_short::load
3287 ( memory_order __x__ ) volatile
3288 { return atomic_load_explicit( this, __x__ ); }
3290 inline short atomic_short::exchange
3291 ( short __m__, memory_order __x__ ) volatile
3292 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3294 inline bool atomic_short::compare_exchange_weak
3295 ( short& __e__, short __m__,
3296 memory_order __x__, memory_order __y__ ) volatile
3297 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3299 inline bool atomic_short::compare_exchange_strong
3300 ( short& __e__, short __m__,
3301 memory_order __x__, memory_order __y__ ) volatile
3302 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3304 inline bool atomic_short::compare_exchange_weak
3305 ( short& __e__, short __m__, memory_order __x__ ) volatile
3307 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3308 __x__ == memory_order_acq_rel ? memory_order_acquire :
3309 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3312 inline bool atomic_short::compare_exchange_strong
3313 ( short& __e__, short __m__, memory_order __x__ ) volatile
3315 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3316 __x__ == memory_order_acq_rel ? memory_order_acquire :
3317 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3321 inline bool atomic_ushort::is_lock_free() const volatile
3324 inline void atomic_ushort::store
3325 ( unsigned short __m__, memory_order __x__ ) volatile
3326 { atomic_store_explicit( this, __m__, __x__ ); }
3328 inline unsigned short atomic_ushort::load
3329 ( memory_order __x__ ) volatile
3330 { return atomic_load_explicit( this, __x__ ); }
3332 inline unsigned short atomic_ushort::exchange
3333 ( unsigned short __m__, memory_order __x__ ) volatile
3334 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3336 inline bool atomic_ushort::compare_exchange_weak
3337 ( unsigned short& __e__, unsigned short __m__,
3338 memory_order __x__, memory_order __y__ ) volatile
3339 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3341 inline bool atomic_ushort::compare_exchange_strong
3342 ( unsigned short& __e__, unsigned short __m__,
3343 memory_order __x__, memory_order __y__ ) volatile
3344 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3346 inline bool atomic_ushort::compare_exchange_weak
3347 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3349 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3350 __x__ == memory_order_acq_rel ? memory_order_acquire :
3351 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3354 inline bool atomic_ushort::compare_exchange_strong
3355 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3357 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3358 __x__ == memory_order_acq_rel ? memory_order_acquire :
3359 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3363 inline bool atomic_int::is_lock_free() const volatile
3366 inline void atomic_int::store
3367 ( int __m__, memory_order __x__ ) volatile
3368 { atomic_store_explicit( this, __m__, __x__ ); }
3370 inline int atomic_int::load
3371 ( memory_order __x__ ) volatile
3372 { return atomic_load_explicit( this, __x__ ); }
3374 inline int atomic_int::exchange
3375 ( int __m__, memory_order __x__ ) volatile
3376 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3378 inline bool atomic_int::compare_exchange_weak
3379 ( int& __e__, int __m__,
3380 memory_order __x__, memory_order __y__ ) volatile
3381 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3383 inline bool atomic_int::compare_exchange_strong
3384 ( int& __e__, int __m__,
3385 memory_order __x__, memory_order __y__ ) volatile
3386 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3388 inline bool atomic_int::compare_exchange_weak
3389 ( int& __e__, int __m__, memory_order __x__ ) volatile
3391 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3392 __x__ == memory_order_acq_rel ? memory_order_acquire :
3393 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3396 inline bool atomic_int::compare_exchange_strong
3397 ( int& __e__, int __m__, memory_order __x__ ) volatile
3399 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3400 __x__ == memory_order_acq_rel ? memory_order_acquire :
3401 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3405 inline bool atomic_uint::is_lock_free() const volatile
3408 inline void atomic_uint::store
3409 ( unsigned int __m__, memory_order __x__ ) volatile
3410 { atomic_store_explicit( this, __m__, __x__ ); }
3412 inline unsigned int atomic_uint::load
3413 ( memory_order __x__ ) volatile
3414 { return atomic_load_explicit( this, __x__ ); }
3416 inline unsigned int atomic_uint::exchange
3417 ( unsigned int __m__, memory_order __x__ ) volatile
3418 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3420 inline bool atomic_uint::compare_exchange_weak
3421 ( unsigned int& __e__, unsigned int __m__,
3422 memory_order __x__, memory_order __y__ ) volatile
3423 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3425 inline bool atomic_uint::compare_exchange_strong
3426 ( unsigned int& __e__, unsigned int __m__,
3427 memory_order __x__, memory_order __y__ ) volatile
3428 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3430 inline bool atomic_uint::compare_exchange_weak
3431 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3433 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3434 __x__ == memory_order_acq_rel ? memory_order_acquire :
3435 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3438 inline bool atomic_uint::compare_exchange_strong
3439 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3441 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3442 __x__ == memory_order_acq_rel ? memory_order_acquire :
3443 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3447 inline bool atomic_long::is_lock_free() const volatile
3450 inline void atomic_long::store
3451 ( long __m__, memory_order __x__ ) volatile
3452 { atomic_store_explicit( this, __m__, __x__ ); }
3454 inline long atomic_long::load
3455 ( memory_order __x__ ) volatile
3456 { return atomic_load_explicit( this, __x__ ); }
3458 inline long atomic_long::exchange
3459 ( long __m__, memory_order __x__ ) volatile
3460 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3462 inline bool atomic_long::compare_exchange_weak
3463 ( long& __e__, long __m__,
3464 memory_order __x__, memory_order __y__ ) volatile
3465 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3467 inline bool atomic_long::compare_exchange_strong
3468 ( long& __e__, long __m__,
3469 memory_order __x__, memory_order __y__ ) volatile
3470 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3472 inline bool atomic_long::compare_exchange_weak
3473 ( long& __e__, long __m__, memory_order __x__ ) volatile
3475 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3476 __x__ == memory_order_acq_rel ? memory_order_acquire :
3477 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3480 inline bool atomic_long::compare_exchange_strong
3481 ( long& __e__, long __m__, memory_order __x__ ) volatile
3483 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3484 __x__ == memory_order_acq_rel ? memory_order_acquire :
3485 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3489 inline bool atomic_ulong::is_lock_free() const volatile
3492 inline void atomic_ulong::store
3493 ( unsigned long __m__, memory_order __x__ ) volatile
3494 { atomic_store_explicit( this, __m__, __x__ ); }
3496 inline unsigned long atomic_ulong::load
3497 ( memory_order __x__ ) volatile
3498 { return atomic_load_explicit( this, __x__ ); }
3500 inline unsigned long atomic_ulong::exchange
3501 ( unsigned long __m__, memory_order __x__ ) volatile
3502 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3504 inline bool atomic_ulong::compare_exchange_weak
3505 ( unsigned long& __e__, unsigned long __m__,
3506 memory_order __x__, memory_order __y__ ) volatile
3507 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3509 inline bool atomic_ulong::compare_exchange_strong
3510 ( unsigned long& __e__, unsigned long __m__,
3511 memory_order __x__, memory_order __y__ ) volatile
3512 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3514 inline bool atomic_ulong::compare_exchange_weak
3515 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3517 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3518 __x__ == memory_order_acq_rel ? memory_order_acquire :
3519 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3522 inline bool atomic_ulong::compare_exchange_strong
3523 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3525 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3526 __x__ == memory_order_acq_rel ? memory_order_acquire :
3527 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3531 inline bool atomic_llong::is_lock_free() const volatile
3534 inline void atomic_llong::store
3535 ( long long __m__, memory_order __x__ ) volatile
3536 { atomic_store_explicit( this, __m__, __x__ ); }
3538 inline long long atomic_llong::load
3539 ( memory_order __x__ ) volatile
3540 { return atomic_load_explicit( this, __x__ ); }
3542 inline long long atomic_llong::exchange
3543 ( long long __m__, memory_order __x__ ) volatile
3544 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3546 inline bool atomic_llong::compare_exchange_weak
3547 ( long long& __e__, long long __m__,
3548 memory_order __x__, memory_order __y__ ) volatile
3549 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3551 inline bool atomic_llong::compare_exchange_strong
3552 ( long long& __e__, long long __m__,
3553 memory_order __x__, memory_order __y__ ) volatile
3554 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3556 inline bool atomic_llong::compare_exchange_weak
3557 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3559 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3560 __x__ == memory_order_acq_rel ? memory_order_acquire :
3561 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3564 inline bool atomic_llong::compare_exchange_strong
3565 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3567 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3568 __x__ == memory_order_acq_rel ? memory_order_acquire :
3569 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3573 inline bool atomic_ullong::is_lock_free() const volatile
3576 inline void atomic_ullong::store
3577 ( unsigned long long __m__, memory_order __x__ ) volatile
3578 { atomic_store_explicit( this, __m__, __x__ ); }
3580 inline unsigned long long atomic_ullong::load
3581 ( memory_order __x__ ) volatile
3582 { return atomic_load_explicit( this, __x__ ); }
3584 inline unsigned long long atomic_ullong::exchange
3585 ( unsigned long long __m__, memory_order __x__ ) volatile
3586 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3588 inline bool atomic_ullong::compare_exchange_weak
3589 ( unsigned long long& __e__, unsigned long long __m__,
3590 memory_order __x__, memory_order __y__ ) volatile
3591 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3593 inline bool atomic_ullong::compare_exchange_strong
3594 ( unsigned long long& __e__, unsigned long long __m__,
3595 memory_order __x__, memory_order __y__ ) volatile
3596 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3598 inline bool atomic_ullong::compare_exchange_weak
3599 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3601 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3602 __x__ == memory_order_acq_rel ? memory_order_acquire :
3603 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3606 inline bool atomic_ullong::compare_exchange_strong
3607 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3609 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3610 __x__ == memory_order_acq_rel ? memory_order_acquire :
3611 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3615 inline bool atomic_wchar_t::is_lock_free() const volatile
3618 inline void atomic_wchar_t::store
3619 ( wchar_t __m__, memory_order __x__ ) volatile
3620 { atomic_store_explicit( this, __m__, __x__ ); }
3622 inline wchar_t atomic_wchar_t::load
3623 ( memory_order __x__ ) volatile
3624 { return atomic_load_explicit( this, __x__ ); }
3626 inline wchar_t atomic_wchar_t::exchange
3627 ( wchar_t __m__, memory_order __x__ ) volatile
3628 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3630 inline bool atomic_wchar_t::compare_exchange_weak
3631 ( wchar_t& __e__, wchar_t __m__,
3632 memory_order __x__, memory_order __y__ ) volatile
3633 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3635 inline bool atomic_wchar_t::compare_exchange_strong
3636 ( wchar_t& __e__, wchar_t __m__,
3637 memory_order __x__, memory_order __y__ ) volatile
3638 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3640 inline bool atomic_wchar_t::compare_exchange_weak
3641 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3643 return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3644 __x__ == memory_order_acq_rel ? memory_order_acquire :
3645 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3648 inline bool atomic_wchar_t::compare_exchange_strong
3649 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3651 return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3652 __x__ == memory_order_acq_rel ? memory_order_acquire :
3653 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3657 template< typename T >
3658 inline bool atomic<T>::is_lock_free() const volatile
3661 template< typename T >
3662 inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
3663 { _ATOMIC_STORE_( this, __v__, __x__ ); }
3665 template< typename T >
3666 inline T atomic<T>::load( memory_order __x__ ) volatile
3667 { return _ATOMIC_LOAD_( this, __x__ ); }
3669 template< typename T >
3670 inline T atomic<T>::exchange( T __v__, memory_order __x__ ) volatile
3671 { return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }
3673 template< typename T >
3674 inline bool atomic<T>::compare_exchange_weak
3675 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3676 { return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3678 template< typename T >
3679 inline bool atomic<T>::compare_exchange_strong
3680 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3681 { return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
3683 template< typename T >
3684 inline bool atomic<T>::compare_exchange_weak
3685 ( T& __r__, T __v__, memory_order __x__ ) volatile
3687 return compare_exchange_weak( __r__, __v__, __x__,
3688 __x__ == memory_order_acq_rel ? memory_order_acquire :
3689 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3692 template< typename T >
3693 inline bool atomic<T>::compare_exchange_strong
3694 ( T& __r__, T __v__, memory_order __x__ ) volatile
3696 return compare_exchange_strong( __r__, __v__, __x__,
3697 __x__ == memory_order_acq_rel ? memory_order_acquire :
3698 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3702 inline void* atomic_address::fetch_add
3703 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3704 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3706 inline void* atomic_address::fetch_sub
3707 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3708 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3711 inline char atomic_char::fetch_add
3712 ( char __m__, memory_order __x__ ) volatile
3713 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3716 inline char atomic_char::fetch_sub
3717 ( char __m__, memory_order __x__ ) volatile
3718 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3721 inline char atomic_char::fetch_and
3722 ( char __m__, memory_order __x__ ) volatile
3723 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3726 inline char atomic_char::fetch_or
3727 ( char __m__, memory_order __x__ ) volatile
3728 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3731 inline char atomic_char::fetch_xor
3732 ( char __m__, memory_order __x__ ) volatile
3733 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3736 inline signed char atomic_schar::fetch_add
3737 ( signed char __m__, memory_order __x__ ) volatile
3738 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3741 inline signed char atomic_schar::fetch_sub
3742 ( signed char __m__, memory_order __x__ ) volatile
3743 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3746 inline signed char atomic_schar::fetch_and
3747 ( signed char __m__, memory_order __x__ ) volatile
3748 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3751 inline signed char atomic_schar::fetch_or
3752 ( signed char __m__, memory_order __x__ ) volatile
3753 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3756 inline signed char atomic_schar::fetch_xor
3757 ( signed char __m__, memory_order __x__ ) volatile
3758 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3761 inline unsigned char atomic_uchar::fetch_add
3762 ( unsigned char __m__, memory_order __x__ ) volatile
3763 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3766 inline unsigned char atomic_uchar::fetch_sub
3767 ( unsigned char __m__, memory_order __x__ ) volatile
3768 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3771 inline unsigned char atomic_uchar::fetch_and
3772 ( unsigned char __m__, memory_order __x__ ) volatile
3773 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3776 inline unsigned char atomic_uchar::fetch_or
3777 ( unsigned char __m__, memory_order __x__ ) volatile
3778 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3781 inline unsigned char atomic_uchar::fetch_xor
3782 ( unsigned char __m__, memory_order __x__ ) volatile
3783 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3786 inline short atomic_short::fetch_add
3787 ( short __m__, memory_order __x__ ) volatile
3788 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3791 inline short atomic_short::fetch_sub
3792 ( short __m__, memory_order __x__ ) volatile
3793 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3796 inline short atomic_short::fetch_and
3797 ( short __m__, memory_order __x__ ) volatile
3798 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3801 inline short atomic_short::fetch_or
3802 ( short __m__, memory_order __x__ ) volatile
3803 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3806 inline short atomic_short::fetch_xor
3807 ( short __m__, memory_order __x__ ) volatile
3808 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3811 inline unsigned short atomic_ushort::fetch_add
3812 ( unsigned short __m__, memory_order __x__ ) volatile
3813 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3816 inline unsigned short atomic_ushort::fetch_sub
3817 ( unsigned short __m__, memory_order __x__ ) volatile
3818 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3821 inline unsigned short atomic_ushort::fetch_and
3822 ( unsigned short __m__, memory_order __x__ ) volatile
3823 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3826 inline unsigned short atomic_ushort::fetch_or
3827 ( unsigned short __m__, memory_order __x__ ) volatile
3828 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3831 inline unsigned short atomic_ushort::fetch_xor
3832 ( unsigned short __m__, memory_order __x__ ) volatile
3833 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3836 inline int atomic_int::fetch_add
3837 ( int __m__, memory_order __x__ ) volatile
3838 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3841 inline int atomic_int::fetch_sub
3842 ( int __m__, memory_order __x__ ) volatile
3843 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3846 inline int atomic_int::fetch_and
3847 ( int __m__, memory_order __x__ ) volatile
3848 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3851 inline int atomic_int::fetch_or
3852 ( int __m__, memory_order __x__ ) volatile
3853 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3856 inline int atomic_int::fetch_xor
3857 ( int __m__, memory_order __x__ ) volatile
3858 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3861 inline unsigned int atomic_uint::fetch_add
3862 ( unsigned int __m__, memory_order __x__ ) volatile
3863 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3866 inline unsigned int atomic_uint::fetch_sub
3867 ( unsigned int __m__, memory_order __x__ ) volatile
3868 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3871 inline unsigned int atomic_uint::fetch_and
3872 ( unsigned int __m__, memory_order __x__ ) volatile
3873 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3876 inline unsigned int atomic_uint::fetch_or
3877 ( unsigned int __m__, memory_order __x__ ) volatile
3878 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3881 inline unsigned int atomic_uint::fetch_xor
3882 ( unsigned int __m__, memory_order __x__ ) volatile
3883 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3886 inline long atomic_long::fetch_add
3887 ( long __m__, memory_order __x__ ) volatile
3888 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3891 inline long atomic_long::fetch_sub
3892 ( long __m__, memory_order __x__ ) volatile
3893 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3896 inline long atomic_long::fetch_and
3897 ( long __m__, memory_order __x__ ) volatile
3898 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3901 inline long atomic_long::fetch_or
3902 ( long __m__, memory_order __x__ ) volatile
3903 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3906 inline long atomic_long::fetch_xor
3907 ( long __m__, memory_order __x__ ) volatile
3908 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3911 inline unsigned long atomic_ulong::fetch_add
3912 ( unsigned long __m__, memory_order __x__ ) volatile
3913 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3916 inline unsigned long atomic_ulong::fetch_sub
3917 ( unsigned long __m__, memory_order __x__ ) volatile
3918 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3921 inline unsigned long atomic_ulong::fetch_and
3922 ( unsigned long __m__, memory_order __x__ ) volatile
3923 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3926 inline unsigned long atomic_ulong::fetch_or
3927 ( unsigned long __m__, memory_order __x__ ) volatile
3928 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3931 inline unsigned long atomic_ulong::fetch_xor
3932 ( unsigned long __m__, memory_order __x__ ) volatile
3933 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3936 inline long long atomic_llong::fetch_add
3937 ( long long __m__, memory_order __x__ ) volatile
3938 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3941 inline long long atomic_llong::fetch_sub
3942 ( long long __m__, memory_order __x__ ) volatile
3943 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3946 inline long long atomic_llong::fetch_and
3947 ( long long __m__, memory_order __x__ ) volatile
3948 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3951 inline long long atomic_llong::fetch_or
3952 ( long long __m__, memory_order __x__ ) volatile
3953 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3956 inline long long atomic_llong::fetch_xor
3957 ( long long __m__, memory_order __x__ ) volatile
3958 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3961 inline unsigned long long atomic_ullong::fetch_add
3962 ( unsigned long long __m__, memory_order __x__ ) volatile
3963 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3966 inline unsigned long long atomic_ullong::fetch_sub
3967 ( unsigned long long __m__, memory_order __x__ ) volatile
3968 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3971 inline unsigned long long atomic_ullong::fetch_and
3972 ( unsigned long long __m__, memory_order __x__ ) volatile
3973 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3976 inline unsigned long long atomic_ullong::fetch_or
3977 ( unsigned long long __m__, memory_order __x__ ) volatile
3978 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3981 inline unsigned long long atomic_ullong::fetch_xor
3982 ( unsigned long long __m__, memory_order __x__ ) volatile
3983 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3986 inline wchar_t atomic_wchar_t::fetch_add
3987 ( wchar_t __m__, memory_order __x__ ) volatile
3988 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3991 inline wchar_t atomic_wchar_t::fetch_sub
3992 ( wchar_t __m__, memory_order __x__ ) volatile
3993 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3996 inline wchar_t atomic_wchar_t::fetch_and
3997 ( wchar_t __m__, memory_order __x__ ) volatile
3998 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
4001 inline wchar_t atomic_wchar_t::fetch_or
4002 ( wchar_t __m__, memory_order __x__ ) volatile
4003 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
4006 inline wchar_t atomic_wchar_t::fetch_xor
4007 ( wchar_t __m__, memory_order __x__ ) volatile
4008 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
4011 template< typename T >
4012 T* atomic<T*>::load( memory_order __x__ ) volatile
4013 { return static_cast<T*>( atomic_address::load( __x__ ) ); }
4015 template< typename T >
4016 T* atomic<T*>::exchange( T* __v__, memory_order __x__ ) volatile
4017 { return static_cast<T*>( atomic_address::exchange( __v__, __x__ ) ); }
4019 template< typename T >
4020 bool atomic<T*>::compare_exchange_weak
4021 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
4023 return atomic_address::compare_exchange_weak( *reinterpret_cast<void**>( &__r__ ),
4024 static_cast<void*>( __v__ ), __x__, __y__ );
4026 //{ return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
4028 template< typename T >
4029 bool atomic<T*>::compare_exchange_strong
4030 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
4032 return atomic_address::compare_exchange_strong( *reinterpret_cast<void**>( &__r__ ),
4033 static_cast<void*>( __v__ ), __x__, __y__ );
4035 //{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
4037 template< typename T >
4038 bool atomic<T*>::compare_exchange_weak
4039 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
4041 return compare_exchange_weak( __r__, __v__, __x__,
4042 __x__ == memory_order_acq_rel ? memory_order_acquire :
4043 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
4046 template< typename T >
4047 bool atomic<T*>::compare_exchange_strong
4048 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
4050 return compare_exchange_strong( __r__, __v__, __x__,
4051 __x__ == memory_order_acq_rel ? memory_order_acquire :
4052 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
4055 template< typename T >
4056 T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
4057 { return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }
4059 template< typename T >
4060 T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
4061 { return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }
4069 static inline void atomic_thread_fence(memory_order order)
4070 { _ATOMIC_FENCE_(order); }
4072 /** @todo Do we want to try to support a user's signal-handler? */
4073 static inline void atomic_signal_fence(memory_order order)
4084 #endif /* __IMPATOMIC_H__ */