 * @brief Common header for C11/C++11 atomics
 * Note that some features are unavailable, as they require support from a true
9 #ifndef __IMPATOMIC_H__
10 #define __IMPATOMIC_H__
12 #include "memoryorder.h"
13 #include "cmodelint.h"
/* CPP0X(feature): conditionally compile C++0x/C++11-only constructs.
 * In this configuration the macro discards its argument entirely, so
 * e.g. CPP0X( atomic_flag() = default; ) expands to nothing.
 * (Degarbled: the extract had a line number fused onto the directive.) */
#define CPP0X( feature )
// struct atomic_flag: the minimal lock-free boolean flag type of C11/C++11,
// modeled by the checker.  The member functions below forward to the
// atomic_flag_* free functions declared later in this file.
// NOTE(review): this extract is garbled — the struct's opening brace, data
// member, and closing lines are not visible here; do not assume their shape.
21 typedef struct atomic_flag
24 bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
25 void clear( memory_order = memory_order_seq_cst ) volatile;
// C++11 only: default-constructible, non-copyable (compiled out otherwise).
27 CPP0X( atomic_flag() = default; )
28 CPP0X( atomic_flag( const atomic_flag& ) = delete; )
29 atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
/* Static initializer for atomic_flag: the flag starts in the clear (false)
 * state, as required by the C11/C++11 ATOMIC_FLAG_INIT contract.
 * (Degarbled: the extract had a line number fused onto the directive.) */
#define ATOMIC_FLAG_INIT { false }
// C-level entry points for atomic_flag operations, implemented by the model
// checker's runtime.  The *_explicit variants take an explicit memory_order;
// the plain forms presumably apply seq_cst — confirm in the implementation.
42 extern bool atomic_flag_test_and_set( volatile atomic_flag* );
43 extern bool atomic_flag_test_and_set_explicit
44 ( volatile atomic_flag*, memory_order );
45 extern void atomic_flag_clear( volatile atomic_flag* );
46 extern void atomic_flag_clear_explicit
47 ( volatile atomic_flag*, memory_order );
// Internal (double-underscore) helpers.  NOTE(review): semantics inferred
// from the names only — they appear to model waiting on the flag; verify
// against the runtime implementation.
48 extern void __atomic_flag_wait__
49 ( volatile atomic_flag* );
50 extern void __atomic_flag_wait_explicit__
51 ( volatile atomic_flag*, memory_order );
59 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
60 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
62 inline void atomic_flag::clear( memory_order __x__ ) volatile
63 { atomic_flag_clear_explicit( this, __x__ ); }
The remainder of the example implementation uses the following
macros. These macros exploit GNU extensions for value-returning
blocks (AKA statement expressions) and __typeof__.

The macros rely on data fields of atomic structs being named __f__.
Other symbols used are __a__=atomic, __e__=expected, __f__=field,
__g__=flag, __m__=modified, __o__=operation, __r__=result,
__p__=pointer to field, __v__=value (for single evaluation),
__x__=memory-ordering, and __y__=memory-ordering.
// _ATOMIC_LOAD_: GNU statement expression performing an atomic load of
// (__a__)->__f__ with memory order __x__, routed through the checker's
// model_read_action.  NOTE(review): the expression's closing lines
// (yielding __r__) are missing from this garbled extract.
80 #define _ATOMIC_LOAD_( __a__, __x__ ) \
81 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
82 __typeof__((__a__)->__f__) __r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__); \
// _ATOMIC_STORE_: atomically store __m__ into (__a__)->__f__ with memory
// order __x__ via model_write_action.  __v__ captures __m__ once so the
// argument is evaluated a single time.  NOTE(review): the statement
// expression's closing lines are missing from this garbled extract.
85 #define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
86 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
87 __typeof__(__m__) __v__ = (__m__); \
88 model_write_action((void *) __p__, __x__, (uint64_t) __v__); \
// _ATOMIC_INIT_: non-atomic initializing store of __m__ into (__a__)->__f__
// via model_init_action; takes no memory order (initialization is not an
// atomic operation in the C11/C++11 model).  NOTE(review): the statement
// expression's closing lines are missing from this garbled extract.
92 #define _ATOMIC_INIT_( __a__, __m__ ) \
93 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
94 __typeof__(__m__) __v__ = (__m__); \
95 model_init_action((void *) __p__, (uint64_t) __v__); \
// _ATOMIC_MODIFY_: generic atomic read-modify-write.  Reads the old value
// through model_rmwr_action, applies the compound operator __o__ (e.g. +=)
// with operand __m__ to a copy, and writes the result back through
// model_rmw_action.  Fetch-and-op semantics: the expression yields the OLD
// value.  NOTE(review): the closing lines (yielding __old__) are missing
// from this garbled extract.
98 #define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
99 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
100 __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
101 __typeof__(__m__) __v__ = (__m__); \
102 __typeof__((__a__)->__f__) __copy__= __old__; \
103 __copy__ __o__ __v__; \
104 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__); \
/* No spurious failure for now: a "weak" compare-exchange is modeled
 * identically to the strong form (C++11 permits, but does not require,
 * spurious failure in the weak variant).
 * (Degarbled: the extract had line numbers fused onto these lines.) */
#define _ATOMIC_CMPSWP_WEAK_ _ATOMIC_CMPSWP_
// _ATOMIC_CMPSWP_: atomic compare-exchange.  Reads the current value via
// model_rmwr_action; if it equals the expected value *__q__, writes __v__
// (model_rmw_action) and yields true.  Otherwise records the failed RMW via
// model_rmwc_action, stores the observed value back through __q__ (per the
// C++11 contract), and yields false.  NOTE(review): the declaration of the
// result variable __r__ and the closing lines of the statement expression
// are missing from this garbled extract.
110 #define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
111 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
112 __typeof__(__e__) __q__ = (__e__); \
113 __typeof__(__m__) __v__ = (__m__); \
115 __typeof__((__a__)->__f__) __t__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
116 if (__t__ == * __q__ ) { \
117 model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
118 else { model_rmwc_action((void *)__p__, __x__); *__q__ = __t__; __r__ = false;} \
/* _ATOMIC_FENCE_: issue a memory fence with ordering __x__, routed through
 * the model checker's model_fence_action.
 * (Degarbled: the extract had line numbers fused onto these lines.) */
#define _ATOMIC_FENCE_( __x__ ) \
({ model_fence_action(__x__);})
/* Lock-free property macros (C11/C++11 [atomics.lockfree]).  Value 1 means
 * "the type is sometimes lock-free"; since every operation here is mediated
 * by the model checker, the non-committal answer is advertised uniformly.
 * (Degarbled: the extract had line numbers fused onto these directives.) */
#define ATOMIC_CHAR_LOCK_FREE 1
#define ATOMIC_CHAR16_T_LOCK_FREE 1
#define ATOMIC_CHAR32_T_LOCK_FREE 1
#define ATOMIC_WCHAR_T_LOCK_FREE 1
#define ATOMIC_SHORT_LOCK_FREE 1
#define ATOMIC_INT_LOCK_FREE 1
#define ATOMIC_LONG_LOCK_FREE 1
#define ATOMIC_LLONG_LOCK_FREE 1
#define ATOMIC_ADDRESS_LOCK_FREE 1
// struct atomic_bool: C11/C++11 atomic boolean.  C++ member functions plus
// friend declarations for the C-level *_explicit free functions that
// implement them.  NOTE(review): this extract is garbled — opening brace,
// data member (__f__), some continuation lines, and the struct closer are
// not visible here.
135 typedef struct atomic_bool
138 bool is_lock_free() const volatile;
139 void store( bool, memory_order = memory_order_seq_cst ) volatile;
140 bool load( memory_order = memory_order_seq_cst ) volatile;
141 bool exchange( bool, memory_order = memory_order_seq_cst ) volatile;
142 bool compare_exchange_weak ( bool&, bool, memory_order, memory_order ) volatile;
143 bool compare_exchange_strong ( bool&, bool, memory_order, memory_order ) volatile;
144 bool compare_exchange_weak ( bool&, bool,
145 memory_order = memory_order_seq_cst) volatile;
146 bool compare_exchange_strong ( bool&, bool,
147 memory_order = memory_order_seq_cst) volatile;
// C++11 only: value constructor; copying disabled.
149 CPP0X( atomic_bool() = delete; )
150 CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
151 CPP0X( atomic_bool( const atomic_bool& ) = delete; )
152 atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
// Assignment from bool is a seq_cst store; returns the stored value.
154 bool operator =( bool __v__ ) volatile
155 { store( __v__ ); return __v__; }
// Friends: the C-level implementations need access to __f__.
157 friend void atomic_store_explicit( volatile atomic_bool*, bool,
159 friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
160 friend bool atomic_exchange_explicit( volatile atomic_bool*, bool,
162 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_bool*, bool*, bool,
163 memory_order, memory_order );
164 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_bool*, bool*, bool,
165 memory_order, memory_order );
// struct atomic_address: C11/C++11 atomic void* with pointer arithmetic in
// byte units (fetch_add/fetch_sub take ptrdiff_t).  NOTE(review): this
// extract is garbled — opening brace, data member, some continuation lines,
// and the struct closer are not visible here.
173 typedef struct atomic_address
176 bool is_lock_free() const volatile;
177 void store( void*, memory_order = memory_order_seq_cst ) volatile;
178 void* load( memory_order = memory_order_seq_cst ) volatile;
179 void* exchange( void*, memory_order = memory_order_seq_cst ) volatile;
180 bool compare_exchange_weak( void*&, void*, memory_order, memory_order ) volatile;
181 bool compare_exchange_strong( void*&, void*, memory_order, memory_order ) volatile;
182 bool compare_exchange_weak( void*&, void*,
183 memory_order = memory_order_seq_cst ) volatile;
184 bool compare_exchange_strong( void*&, void*,
185 memory_order = memory_order_seq_cst ) volatile;
186 void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
187 void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
// C++11 only: default/value constructors; copying disabled.
189 CPP0X( atomic_address() = default; )
190 CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
191 CPP0X( atomic_address( const atomic_address& ) = delete; )
192 atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
// Assignment stores and returns the new pointer; += / -= return the
// UPDATED value (fetch result adjusted by the operand is done elsewhere
// for integral types; here fetch_add's own return is used directly).
194 void* operator =( void* __v__ ) volatile
195 { store( __v__ ); return __v__; }
197 void* operator +=( ptrdiff_t __v__ ) volatile
198 { return fetch_add( __v__ ); }
200 void* operator -=( ptrdiff_t __v__ ) volatile
201 { return fetch_sub( __v__ ); }
// Friends: the C-level implementations need access to __f__.
203 friend void atomic_store_explicit( volatile atomic_address*, void*,
205 friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
206 friend void* atomic_exchange_explicit( volatile atomic_address*, void*,
208 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_address*,
209 void**, void*, memory_order, memory_order );
210 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_address*,
211 void**, void*, memory_order, memory_order );
212 friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
214 friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
// struct atomic_char: C11/C++11 atomic char with the full integral
// operation set (load/store/exchange/CAS/fetch-ops and operator sugar).
// NOTE(review): this extract is garbled — opening brace, data member, some
// declaration lead lines, and the struct closer are not visible here.
223 typedef struct atomic_char
226 bool is_lock_free() const volatile;
228 memory_order = memory_order_seq_cst ) volatile;
229 char load( memory_order = memory_order_seq_cst ) volatile;
231 memory_order = memory_order_seq_cst ) volatile;
232 bool compare_exchange_weak( char&, char,
233 memory_order, memory_order ) volatile;
234 bool compare_exchange_strong( char&, char,
235 memory_order, memory_order ) volatile;
236 bool compare_exchange_weak( char&, char,
237 memory_order = memory_order_seq_cst ) volatile;
238 bool compare_exchange_strong( char&, char,
239 memory_order = memory_order_seq_cst ) volatile;
240 char fetch_add( char,
241 memory_order = memory_order_seq_cst ) volatile;
242 char fetch_sub( char,
243 memory_order = memory_order_seq_cst ) volatile;
244 char fetch_and( char,
245 memory_order = memory_order_seq_cst ) volatile;
247 memory_order = memory_order_seq_cst ) volatile;
248 char fetch_xor( char,
249 memory_order = memory_order_seq_cst ) volatile;
// C++11 only: default/value constructors; copying disabled.
251 CPP0X( atomic_char() = default; )
252 CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
253 CPP0X( atomic_char( const atomic_char& ) = delete; )
254 atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
// Operator sugar: post-inc/dec return the OLD value (fetch result);
// pre-inc/dec and compound assignments return the UPDATED value by
// re-applying the operation to the fetched value.
256 char operator =( char __v__ ) volatile
257 { store( __v__ ); return __v__; }
259 char operator ++( int ) volatile
260 { return fetch_add( 1 ); }
262 char operator --( int ) volatile
263 { return fetch_sub( 1 ); }
265 char operator ++() volatile
266 { return fetch_add( 1 ) + 1; }
268 char operator --() volatile
269 { return fetch_sub( 1 ) - 1; }
271 char operator +=( char __v__ ) volatile
272 { return fetch_add( __v__ ) + __v__; }
274 char operator -=( char __v__ ) volatile
275 { return fetch_sub( __v__ ) - __v__; }
277 char operator &=( char __v__ ) volatile
278 { return fetch_and( __v__ ) & __v__; }
280 char operator |=( char __v__ ) volatile
281 { return fetch_or( __v__ ) | __v__; }
283 char operator ^=( char __v__ ) volatile
284 { return fetch_xor( __v__ ) ^ __v__; }
// Friends: the C-level implementations need access to __f__.
286 friend void atomic_store_explicit( volatile atomic_char*, char,
288 friend char atomic_load_explicit( volatile atomic_char*,
290 friend char atomic_exchange_explicit( volatile atomic_char*,
291 char, memory_order );
292 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_char*,
293 char*, char, memory_order, memory_order );
294 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_char*,
295 char*, char, memory_order, memory_order );
296 friend char atomic_fetch_add_explicit( volatile atomic_char*,
297 char, memory_order );
298 friend char atomic_fetch_sub_explicit( volatile atomic_char*,
299 char, memory_order );
300 friend char atomic_fetch_and_explicit( volatile atomic_char*,
301 char, memory_order );
302 friend char atomic_fetch_or_explicit( volatile atomic_char*,
303 char, memory_order );
304 friend char atomic_fetch_xor_explicit( volatile atomic_char*,
305 char, memory_order );
// struct atomic_schar: C11/C++11 atomic signed char; mirrors atomic_char
// with signed char as the value type.  NOTE(review): this extract is
// garbled — opening brace, data member, and struct closer are not visible.
313 typedef struct atomic_schar
316 bool is_lock_free() const volatile;
317 void store( signed char,
318 memory_order = memory_order_seq_cst ) volatile;
319 signed char load( memory_order = memory_order_seq_cst ) volatile;
320 signed char exchange( signed char,
321 memory_order = memory_order_seq_cst ) volatile;
322 bool compare_exchange_weak( signed char&, signed char,
323 memory_order, memory_order ) volatile;
324 bool compare_exchange_strong( signed char&, signed char,
325 memory_order, memory_order ) volatile;
326 bool compare_exchange_weak( signed char&, signed char,
327 memory_order = memory_order_seq_cst ) volatile;
328 bool compare_exchange_strong( signed char&, signed char,
329 memory_order = memory_order_seq_cst ) volatile;
330 signed char fetch_add( signed char,
331 memory_order = memory_order_seq_cst ) volatile;
332 signed char fetch_sub( signed char,
333 memory_order = memory_order_seq_cst ) volatile;
334 signed char fetch_and( signed char,
335 memory_order = memory_order_seq_cst ) volatile;
336 signed char fetch_or( signed char,
337 memory_order = memory_order_seq_cst ) volatile;
338 signed char fetch_xor( signed char,
339 memory_order = memory_order_seq_cst ) volatile;
// C++11 only: default/value constructors; copying disabled.
341 CPP0X( atomic_schar() = default; )
342 CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
343 CPP0X( atomic_schar( const atomic_schar& ) = delete; )
344 atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
// Operator sugar: post-inc/dec return the OLD value; pre-inc/dec and
// compound assignments return the UPDATED value.
346 signed char operator =( signed char __v__ ) volatile
347 { store( __v__ ); return __v__; }
349 signed char operator ++( int ) volatile
350 { return fetch_add( 1 ); }
352 signed char operator --( int ) volatile
353 { return fetch_sub( 1 ); }
355 signed char operator ++() volatile
356 { return fetch_add( 1 ) + 1; }
358 signed char operator --() volatile
359 { return fetch_sub( 1 ) - 1; }
361 signed char operator +=( signed char __v__ ) volatile
362 { return fetch_add( __v__ ) + __v__; }
364 signed char operator -=( signed char __v__ ) volatile
365 { return fetch_sub( __v__ ) - __v__; }
367 signed char operator &=( signed char __v__ ) volatile
368 { return fetch_and( __v__ ) & __v__; }
370 signed char operator |=( signed char __v__ ) volatile
371 { return fetch_or( __v__ ) | __v__; }
373 signed char operator ^=( signed char __v__ ) volatile
374 { return fetch_xor( __v__ ) ^ __v__; }
// Friends: the C-level implementations need access to __f__.
376 friend void atomic_store_explicit( volatile atomic_schar*, signed char,
378 friend signed char atomic_load_explicit( volatile atomic_schar*,
380 friend signed char atomic_exchange_explicit( volatile atomic_schar*,
381 signed char, memory_order );
382 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_schar*,
383 signed char*, signed char, memory_order, memory_order );
384 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_schar*,
385 signed char*, signed char, memory_order, memory_order );
386 friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
387 signed char, memory_order );
388 friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
389 signed char, memory_order );
390 friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
391 signed char, memory_order );
392 friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
393 signed char, memory_order );
394 friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
395 signed char, memory_order );
// struct atomic_uchar: C11/C++11 atomic unsigned char; mirrors atomic_char
// with unsigned char as the value type.  NOTE(review): this extract is
// garbled — opening brace, data member, and struct closer are not visible.
403 typedef struct atomic_uchar
406 bool is_lock_free() const volatile;
407 void store( unsigned char,
408 memory_order = memory_order_seq_cst ) volatile;
409 unsigned char load( memory_order = memory_order_seq_cst ) volatile;
410 unsigned char exchange( unsigned char,
411 memory_order = memory_order_seq_cst ) volatile;
412 bool compare_exchange_weak( unsigned char&, unsigned char,
413 memory_order, memory_order ) volatile;
414 bool compare_exchange_strong( unsigned char&, unsigned char,
415 memory_order, memory_order ) volatile;
416 bool compare_exchange_weak( unsigned char&, unsigned char,
417 memory_order = memory_order_seq_cst ) volatile;
418 bool compare_exchange_strong( unsigned char&, unsigned char,
419 memory_order = memory_order_seq_cst ) volatile;
420 unsigned char fetch_add( unsigned char,
421 memory_order = memory_order_seq_cst ) volatile;
422 unsigned char fetch_sub( unsigned char,
423 memory_order = memory_order_seq_cst ) volatile;
424 unsigned char fetch_and( unsigned char,
425 memory_order = memory_order_seq_cst ) volatile;
426 unsigned char fetch_or( unsigned char,
427 memory_order = memory_order_seq_cst ) volatile;
428 unsigned char fetch_xor( unsigned char,
429 memory_order = memory_order_seq_cst ) volatile;
// C++11 only: default/value constructors; copying disabled.
431 CPP0X( atomic_uchar() = default; )
432 CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
433 CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
434 atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
// Operator sugar: post-inc/dec return the OLD value; pre-inc/dec and
// compound assignments return the UPDATED value.
436 unsigned char operator =( unsigned char __v__ ) volatile
437 { store( __v__ ); return __v__; }
439 unsigned char operator ++( int ) volatile
440 { return fetch_add( 1 ); }
442 unsigned char operator --( int ) volatile
443 { return fetch_sub( 1 ); }
445 unsigned char operator ++() volatile
446 { return fetch_add( 1 ) + 1; }
448 unsigned char operator --() volatile
449 { return fetch_sub( 1 ) - 1; }
451 unsigned char operator +=( unsigned char __v__ ) volatile
452 { return fetch_add( __v__ ) + __v__; }
454 unsigned char operator -=( unsigned char __v__ ) volatile
455 { return fetch_sub( __v__ ) - __v__; }
457 unsigned char operator &=( unsigned char __v__ ) volatile
458 { return fetch_and( __v__ ) & __v__; }
460 unsigned char operator |=( unsigned char __v__ ) volatile
461 { return fetch_or( __v__ ) | __v__; }
463 unsigned char operator ^=( unsigned char __v__ ) volatile
464 { return fetch_xor( __v__ ) ^ __v__; }
// Friends: the C-level implementations need access to __f__.
466 friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
468 friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
470 friend unsigned char atomic_exchange_explicit( volatile atomic_uchar*,
471 unsigned char, memory_order );
472 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uchar*,
473 unsigned char*, unsigned char, memory_order, memory_order );
474 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uchar*,
475 unsigned char*, unsigned char, memory_order, memory_order );
476 friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
477 unsigned char, memory_order );
478 friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
479 unsigned char, memory_order );
480 friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
481 unsigned char, memory_order );
482 friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
483 unsigned char, memory_order );
484 friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
485 unsigned char, memory_order );
// struct atomic_short: C11/C++11 atomic short; mirrors atomic_char with
// short as the value type.  NOTE(review): this extract is garbled —
// opening brace, data member, some declaration lead lines, and the struct
// closer are not visible here.
493 typedef struct atomic_short
496 bool is_lock_free() const volatile;
498 memory_order = memory_order_seq_cst ) volatile;
499 short load( memory_order = memory_order_seq_cst ) volatile;
500 short exchange( short,
501 memory_order = memory_order_seq_cst ) volatile;
502 bool compare_exchange_weak( short&, short,
503 memory_order, memory_order ) volatile;
504 bool compare_exchange_strong( short&, short,
505 memory_order, memory_order ) volatile;
506 bool compare_exchange_weak( short&, short,
507 memory_order = memory_order_seq_cst ) volatile;
508 bool compare_exchange_strong( short&, short,
509 memory_order = memory_order_seq_cst ) volatile;
510 short fetch_add( short,
511 memory_order = memory_order_seq_cst ) volatile;
512 short fetch_sub( short,
513 memory_order = memory_order_seq_cst ) volatile;
514 short fetch_and( short,
515 memory_order = memory_order_seq_cst ) volatile;
516 short fetch_or( short,
517 memory_order = memory_order_seq_cst ) volatile;
518 short fetch_xor( short,
519 memory_order = memory_order_seq_cst ) volatile;
// C++11 only: default/value constructors; copying disabled.
521 CPP0X( atomic_short() = default; )
522 CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
523 CPP0X( atomic_short( const atomic_short& ) = delete; )
524 atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
// Operator sugar: post-inc/dec return the OLD value; pre-inc/dec and
// compound assignments return the UPDATED value.
526 short operator =( short __v__ ) volatile
527 { store( __v__ ); return __v__; }
529 short operator ++( int ) volatile
530 { return fetch_add( 1 ); }
532 short operator --( int ) volatile
533 { return fetch_sub( 1 ); }
535 short operator ++() volatile
536 { return fetch_add( 1 ) + 1; }
538 short operator --() volatile
539 { return fetch_sub( 1 ) - 1; }
541 short operator +=( short __v__ ) volatile
542 { return fetch_add( __v__ ) + __v__; }
544 short operator -=( short __v__ ) volatile
545 { return fetch_sub( __v__ ) - __v__; }
547 short operator &=( short __v__ ) volatile
548 { return fetch_and( __v__ ) & __v__; }
550 short operator |=( short __v__ ) volatile
551 { return fetch_or( __v__ ) | __v__; }
553 short operator ^=( short __v__ ) volatile
554 { return fetch_xor( __v__ ) ^ __v__; }
// Friends: the C-level implementations need access to __f__.
556 friend void atomic_store_explicit( volatile atomic_short*, short,
558 friend short atomic_load_explicit( volatile atomic_short*,
560 friend short atomic_exchange_explicit( volatile atomic_short*,
561 short, memory_order );
562 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_short*,
563 short*, short, memory_order, memory_order );
564 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_short*,
565 short*, short, memory_order, memory_order );
566 friend short atomic_fetch_add_explicit( volatile atomic_short*,
567 short, memory_order );
568 friend short atomic_fetch_sub_explicit( volatile atomic_short*,
569 short, memory_order );
570 friend short atomic_fetch_and_explicit( volatile atomic_short*,
571 short, memory_order );
572 friend short atomic_fetch_or_explicit( volatile atomic_short*,
573 short, memory_order );
574 friend short atomic_fetch_xor_explicit( volatile atomic_short*,
575 short, memory_order );
// struct atomic_ushort: C11/C++11 atomic unsigned short; mirrors
// atomic_char with unsigned short as the value type.  NOTE(review): this
// extract is garbled — the opening brace and the struct closer are not
// visible, though the data member __f__ is (last line below).
583 typedef struct atomic_ushort
586 bool is_lock_free() const volatile;
587 void store( unsigned short,
588 memory_order = memory_order_seq_cst ) volatile;
589 unsigned short load( memory_order = memory_order_seq_cst ) volatile;
590 unsigned short exchange( unsigned short,
591 memory_order = memory_order_seq_cst ) volatile;
592 bool compare_exchange_weak( unsigned short&, unsigned short,
593 memory_order, memory_order ) volatile;
594 bool compare_exchange_strong( unsigned short&, unsigned short,
595 memory_order, memory_order ) volatile;
596 bool compare_exchange_weak( unsigned short&, unsigned short,
597 memory_order = memory_order_seq_cst ) volatile;
598 bool compare_exchange_strong( unsigned short&, unsigned short,
599 memory_order = memory_order_seq_cst ) volatile;
600 unsigned short fetch_add( unsigned short,
601 memory_order = memory_order_seq_cst ) volatile;
602 unsigned short fetch_sub( unsigned short,
603 memory_order = memory_order_seq_cst ) volatile;
604 unsigned short fetch_and( unsigned short,
605 memory_order = memory_order_seq_cst ) volatile;
606 unsigned short fetch_or( unsigned short,
607 memory_order = memory_order_seq_cst ) volatile;
608 unsigned short fetch_xor( unsigned short,
609 memory_order = memory_order_seq_cst ) volatile;
// C++11 only: default/value constructors; copying disabled.
611 CPP0X( atomic_ushort() = default; )
612 CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
613 CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
614 atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
// Operator sugar: post-inc/dec return the OLD value; pre-inc/dec and
// compound assignments return the UPDATED value.
616 unsigned short operator =( unsigned short __v__ ) volatile
617 { store( __v__ ); return __v__; }
619 unsigned short operator ++( int ) volatile
620 { return fetch_add( 1 ); }
622 unsigned short operator --( int ) volatile
623 { return fetch_sub( 1 ); }
625 unsigned short operator ++() volatile
626 { return fetch_add( 1 ) + 1; }
628 unsigned short operator --() volatile
629 { return fetch_sub( 1 ) - 1; }
631 unsigned short operator +=( unsigned short __v__ ) volatile
632 { return fetch_add( __v__ ) + __v__; }
634 unsigned short operator -=( unsigned short __v__ ) volatile
635 { return fetch_sub( __v__ ) - __v__; }
637 unsigned short operator &=( unsigned short __v__ ) volatile
638 { return fetch_and( __v__ ) & __v__; }
640 unsigned short operator |=( unsigned short __v__ ) volatile
641 { return fetch_or( __v__ ) | __v__; }
643 unsigned short operator ^=( unsigned short __v__ ) volatile
644 { return fetch_xor( __v__ ) ^ __v__; }
// Friends: the C-level implementations need access to __f__.
646 friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
648 friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
650 friend unsigned short atomic_exchange_explicit( volatile atomic_ushort*,
651 unsigned short, memory_order );
652 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ushort*,
653 unsigned short*, unsigned short, memory_order, memory_order );
654 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ushort*,
655 unsigned short*, unsigned short, memory_order, memory_order );
656 friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
657 unsigned short, memory_order );
658 friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
659 unsigned short, memory_order );
660 friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
661 unsigned short, memory_order );
662 friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
663 unsigned short, memory_order );
664 friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
665 unsigned short, memory_order );
// The single data field all the _ATOMIC_* macros operate on.
669 unsigned short __f__;
// struct atomic_int: C11/C++11 atomic int; mirrors atomic_char with int as
// the value type.  NOTE(review): this extract is garbled — opening brace,
// data member, several declaration lead lines (store/exchange/fetch_*),
// and the struct closer are not visible here.
673 typedef struct atomic_int
676 bool is_lock_free() const volatile;
678 memory_order = memory_order_seq_cst ) volatile;
679 int load( memory_order = memory_order_seq_cst ) volatile;
681 memory_order = memory_order_seq_cst ) volatile;
682 bool compare_exchange_weak( int&, int,
683 memory_order, memory_order ) volatile;
684 bool compare_exchange_strong( int&, int,
685 memory_order, memory_order ) volatile;
686 bool compare_exchange_weak( int&, int,
687 memory_order = memory_order_seq_cst ) volatile;
688 bool compare_exchange_strong( int&, int,
689 memory_order = memory_order_seq_cst ) volatile;
691 memory_order = memory_order_seq_cst ) volatile;
693 memory_order = memory_order_seq_cst ) volatile;
695 memory_order = memory_order_seq_cst ) volatile;
697 memory_order = memory_order_seq_cst ) volatile;
699 memory_order = memory_order_seq_cst ) volatile;
// C++11 only: default/value constructors; copying disabled.
701 CPP0X( atomic_int() = default; )
702 CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
703 CPP0X( atomic_int( const atomic_int& ) = delete; )
704 atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
// Operator sugar: post-inc/dec return the OLD value; pre-inc/dec and
// compound assignments return the UPDATED value.
706 int operator =( int __v__ ) volatile
707 { store( __v__ ); return __v__; }
709 int operator ++( int ) volatile
710 { return fetch_add( 1 ); }
712 int operator --( int ) volatile
713 { return fetch_sub( 1 ); }
715 int operator ++() volatile
716 { return fetch_add( 1 ) + 1; }
718 int operator --() volatile
719 { return fetch_sub( 1 ) - 1; }
721 int operator +=( int __v__ ) volatile
722 { return fetch_add( __v__ ) + __v__; }
724 int operator -=( int __v__ ) volatile
725 { return fetch_sub( __v__ ) - __v__; }
727 int operator &=( int __v__ ) volatile
728 { return fetch_and( __v__ ) & __v__; }
730 int operator |=( int __v__ ) volatile
731 { return fetch_or( __v__ ) | __v__; }
733 int operator ^=( int __v__ ) volatile
734 { return fetch_xor( __v__ ) ^ __v__; }
// Friends: the C-level implementations need access to __f__.
736 friend void atomic_store_explicit( volatile atomic_int*, int,
738 friend int atomic_load_explicit( volatile atomic_int*,
740 friend int atomic_exchange_explicit( volatile atomic_int*,
742 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_int*,
743 int*, int, memory_order, memory_order );
744 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_int*,
745 int*, int, memory_order, memory_order );
746 friend int atomic_fetch_add_explicit( volatile atomic_int*,
748 friend int atomic_fetch_sub_explicit( volatile atomic_int*,
750 friend int atomic_fetch_and_explicit( volatile atomic_int*,
752 friend int atomic_fetch_or_explicit( volatile atomic_int*,
754 friend int atomic_fetch_xor_explicit( volatile atomic_int*,
// struct atomic_uint: C11/C++11 atomic unsigned int; mirrors atomic_char
// with unsigned int as the value type.  NOTE(review): this extract is
// garbled — opening brace, data member, and struct closer are not visible.
763 typedef struct atomic_uint
766 bool is_lock_free() const volatile;
767 void store( unsigned int,
768 memory_order = memory_order_seq_cst ) volatile;
769 unsigned int load( memory_order = memory_order_seq_cst ) volatile;
770 unsigned int exchange( unsigned int,
771 memory_order = memory_order_seq_cst ) volatile;
772 bool compare_exchange_weak( unsigned int&, unsigned int,
773 memory_order, memory_order ) volatile;
774 bool compare_exchange_strong( unsigned int&, unsigned int,
775 memory_order, memory_order ) volatile;
776 bool compare_exchange_weak( unsigned int&, unsigned int,
777 memory_order = memory_order_seq_cst ) volatile;
778 bool compare_exchange_strong( unsigned int&, unsigned int,
779 memory_order = memory_order_seq_cst ) volatile;
780 unsigned int fetch_add( unsigned int,
781 memory_order = memory_order_seq_cst ) volatile;
782 unsigned int fetch_sub( unsigned int,
783 memory_order = memory_order_seq_cst ) volatile;
784 unsigned int fetch_and( unsigned int,
785 memory_order = memory_order_seq_cst ) volatile;
786 unsigned int fetch_or( unsigned int,
787 memory_order = memory_order_seq_cst ) volatile;
788 unsigned int fetch_xor( unsigned int,
789 memory_order = memory_order_seq_cst ) volatile;
// C++11 only: default/value constructors; copying disabled.
791 CPP0X( atomic_uint() = default; )
792 CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
793 CPP0X( atomic_uint( const atomic_uint& ) = delete; )
794 atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
// Operator sugar: post-inc/dec return the OLD value; pre-inc/dec and
// compound assignments return the UPDATED value.
796 unsigned int operator =( unsigned int __v__ ) volatile
797 { store( __v__ ); return __v__; }
799 unsigned int operator ++( int ) volatile
800 { return fetch_add( 1 ); }
802 unsigned int operator --( int ) volatile
803 { return fetch_sub( 1 ); }
805 unsigned int operator ++() volatile
806 { return fetch_add( 1 ) + 1; }
808 unsigned int operator --() volatile
809 { return fetch_sub( 1 ) - 1; }
811 unsigned int operator +=( unsigned int __v__ ) volatile
812 { return fetch_add( __v__ ) + __v__; }
814 unsigned int operator -=( unsigned int __v__ ) volatile
815 { return fetch_sub( __v__ ) - __v__; }
817 unsigned int operator &=( unsigned int __v__ ) volatile
818 { return fetch_and( __v__ ) & __v__; }
820 unsigned int operator |=( unsigned int __v__ ) volatile
821 { return fetch_or( __v__ ) | __v__; }
823 unsigned int operator ^=( unsigned int __v__ ) volatile
824 { return fetch_xor( __v__ ) ^ __v__; }
// Friends: the C-level implementations need access to __f__.
826 friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
828 friend unsigned int atomic_load_explicit( volatile atomic_uint*,
830 friend unsigned int atomic_exchange_explicit( volatile atomic_uint*,
831 unsigned int, memory_order );
832 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uint*,
833 unsigned int*, unsigned int, memory_order, memory_order );
834 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uint*,
835 unsigned int*, unsigned int, memory_order, memory_order );
836 friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
837 unsigned int, memory_order );
838 friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
839 unsigned int, memory_order );
840 friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
841 unsigned int, memory_order );
842 friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
843 unsigned int, memory_order );
844 friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
845 unsigned int, memory_order );
// struct atomic_long: C11/C++11 atomic long; mirrors atomic_char with long
// as the value type.  NOTE(review): this extract is garbled — opening
// brace, data member, some declaration lead lines (store/exchange/
// fetch_or), and the struct closer are not visible here.
853 typedef struct atomic_long
856 bool is_lock_free() const volatile;
858 memory_order = memory_order_seq_cst ) volatile;
859 long load( memory_order = memory_order_seq_cst ) volatile;
861 memory_order = memory_order_seq_cst ) volatile;
862 bool compare_exchange_weak( long&, long,
863 memory_order, memory_order ) volatile;
864 bool compare_exchange_strong( long&, long,
865 memory_order, memory_order ) volatile;
866 bool compare_exchange_weak( long&, long,
867 memory_order = memory_order_seq_cst ) volatile;
868 bool compare_exchange_strong( long&, long,
869 memory_order = memory_order_seq_cst ) volatile;
870 long fetch_add( long,
871 memory_order = memory_order_seq_cst ) volatile;
872 long fetch_sub( long,
873 memory_order = memory_order_seq_cst ) volatile;
874 long fetch_and( long,
875 memory_order = memory_order_seq_cst ) volatile;
877 memory_order = memory_order_seq_cst ) volatile;
878 long fetch_xor( long,
879 memory_order = memory_order_seq_cst ) volatile;
// C++11 only: default/value constructors; copying disabled.
881 CPP0X( atomic_long() = default; )
882 CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
883 CPP0X( atomic_long( const atomic_long& ) = delete; )
884 atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
// Operator sugar: post-inc/dec return the OLD value; pre-inc/dec and
// compound assignments return the UPDATED value.
886 long operator =( long __v__ ) volatile
887 { store( __v__ ); return __v__; }
889 long operator ++( int ) volatile
890 { return fetch_add( 1 ); }
892 long operator --( int ) volatile
893 { return fetch_sub( 1 ); }
895 long operator ++() volatile
896 { return fetch_add( 1 ) + 1; }
898 long operator --() volatile
899 { return fetch_sub( 1 ) - 1; }
901 long operator +=( long __v__ ) volatile
902 { return fetch_add( __v__ ) + __v__; }
904 long operator -=( long __v__ ) volatile
905 { return fetch_sub( __v__ ) - __v__; }
907 long operator &=( long __v__ ) volatile
908 { return fetch_and( __v__ ) & __v__; }
910 long operator |=( long __v__ ) volatile
911 { return fetch_or( __v__ ) | __v__; }
913 long operator ^=( long __v__ ) volatile
914 { return fetch_xor( __v__ ) ^ __v__; }
// Friends: the C-level implementations need access to __f__.
916 friend void atomic_store_explicit( volatile atomic_long*, long,
918 friend long atomic_load_explicit( volatile atomic_long*,
920 friend long atomic_exchange_explicit( volatile atomic_long*,
921 long, memory_order );
922 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_long*,
923 long*, long, memory_order, memory_order );
924 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_long*,
925 long*, long, memory_order, memory_order );
926 friend long atomic_fetch_add_explicit( volatile atomic_long*,
927 long, memory_order );
928 friend long atomic_fetch_sub_explicit( volatile atomic_long*,
929 long, memory_order );
930 friend long atomic_fetch_and_explicit( volatile atomic_long*,
931 long, memory_order );
932 friend long atomic_fetch_or_explicit( volatile atomic_long*,
933 long, memory_order );
934 friend long atomic_fetch_xor_explicit( volatile atomic_long*,
935 long, memory_order );
943 typedef struct atomic_ulong
946 bool is_lock_free() const volatile;
947 void store( unsigned long,
948 memory_order = memory_order_seq_cst ) volatile;
949 unsigned long load( memory_order = memory_order_seq_cst ) volatile;
950 unsigned long exchange( unsigned long,
951 memory_order = memory_order_seq_cst ) volatile;
952 bool compare_exchange_weak( unsigned long&, unsigned long,
953 memory_order, memory_order ) volatile;
954 bool compare_exchange_strong( unsigned long&, unsigned long,
955 memory_order, memory_order ) volatile;
956 bool compare_exchange_weak( unsigned long&, unsigned long,
957 memory_order = memory_order_seq_cst ) volatile;
958 bool compare_exchange_strong( unsigned long&, unsigned long,
959 memory_order = memory_order_seq_cst ) volatile;
960 unsigned long fetch_add( unsigned long,
961 memory_order = memory_order_seq_cst ) volatile;
962 unsigned long fetch_sub( unsigned long,
963 memory_order = memory_order_seq_cst ) volatile;
964 unsigned long fetch_and( unsigned long,
965 memory_order = memory_order_seq_cst ) volatile;
966 unsigned long fetch_or( unsigned long,
967 memory_order = memory_order_seq_cst ) volatile;
968 unsigned long fetch_xor( unsigned long,
969 memory_order = memory_order_seq_cst ) volatile;
971 CPP0X( atomic_ulong() = default; )
972 CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
973 CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
974 atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
976 unsigned long operator =( unsigned long __v__ ) volatile
977 { store( __v__ ); return __v__; }
979 unsigned long operator ++( int ) volatile
980 { return fetch_add( 1 ); }
982 unsigned long operator --( int ) volatile
983 { return fetch_sub( 1 ); }
985 unsigned long operator ++() volatile
986 { return fetch_add( 1 ) + 1; }
988 unsigned long operator --() volatile
989 { return fetch_sub( 1 ) - 1; }
991 unsigned long operator +=( unsigned long __v__ ) volatile
992 { return fetch_add( __v__ ) + __v__; }
994 unsigned long operator -=( unsigned long __v__ ) volatile
995 { return fetch_sub( __v__ ) - __v__; }
997 unsigned long operator &=( unsigned long __v__ ) volatile
998 { return fetch_and( __v__ ) & __v__; }
1000 unsigned long operator |=( unsigned long __v__ ) volatile
1001 { return fetch_or( __v__ ) | __v__; }
1003 unsigned long operator ^=( unsigned long __v__ ) volatile
1004 { return fetch_xor( __v__ ) ^ __v__; }
1006 friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
1008 friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
1010 friend unsigned long atomic_exchange_explicit( volatile atomic_ulong*,
1011 unsigned long, memory_order );
1012 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ulong*,
1013 unsigned long*, unsigned long, memory_order, memory_order );
1014 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ulong*,
1015 unsigned long*, unsigned long, memory_order, memory_order );
1016 friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
1017 unsigned long, memory_order );
1018 friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
1019 unsigned long, memory_order );
1020 friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
1021 unsigned long, memory_order );
1022 friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
1023 unsigned long, memory_order );
1024 friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
1025 unsigned long, memory_order );
1029 unsigned long __f__;
1033 typedef struct atomic_llong
1036 bool is_lock_free() const volatile;
1037 void store( long long,
1038 memory_order = memory_order_seq_cst ) volatile;
1039 long long load( memory_order = memory_order_seq_cst ) volatile;
1040 long long exchange( long long,
1041 memory_order = memory_order_seq_cst ) volatile;
1042 bool compare_exchange_weak( long long&, long long,
1043 memory_order, memory_order ) volatile;
1044 bool compare_exchange_strong( long long&, long long,
1045 memory_order, memory_order ) volatile;
1046 bool compare_exchange_weak( long long&, long long,
1047 memory_order = memory_order_seq_cst ) volatile;
1048 bool compare_exchange_strong( long long&, long long,
1049 memory_order = memory_order_seq_cst ) volatile;
1050 long long fetch_add( long long,
1051 memory_order = memory_order_seq_cst ) volatile;
1052 long long fetch_sub( long long,
1053 memory_order = memory_order_seq_cst ) volatile;
1054 long long fetch_and( long long,
1055 memory_order = memory_order_seq_cst ) volatile;
1056 long long fetch_or( long long,
1057 memory_order = memory_order_seq_cst ) volatile;
1058 long long fetch_xor( long long,
1059 memory_order = memory_order_seq_cst ) volatile;
1061 CPP0X( atomic_llong() = default; )
1062 CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1063 CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1064 atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
1066 long long operator =( long long __v__ ) volatile
1067 { store( __v__ ); return __v__; }
1069 long long operator ++( int ) volatile
1070 { return fetch_add( 1 ); }
1072 long long operator --( int ) volatile
1073 { return fetch_sub( 1 ); }
1075 long long operator ++() volatile
1076 { return fetch_add( 1 ) + 1; }
1078 long long operator --() volatile
1079 { return fetch_sub( 1 ) - 1; }
1081 long long operator +=( long long __v__ ) volatile
1082 { return fetch_add( __v__ ) + __v__; }
1084 long long operator -=( long long __v__ ) volatile
1085 { return fetch_sub( __v__ ) - __v__; }
1087 long long operator &=( long long __v__ ) volatile
1088 { return fetch_and( __v__ ) & __v__; }
1090 long long operator |=( long long __v__ ) volatile
1091 { return fetch_or( __v__ ) | __v__; }
1093 long long operator ^=( long long __v__ ) volatile
1094 { return fetch_xor( __v__ ) ^ __v__; }
1096 friend void atomic_store_explicit( volatile atomic_llong*, long long,
1098 friend long long atomic_load_explicit( volatile atomic_llong*,
1100 friend long long atomic_exchange_explicit( volatile atomic_llong*,
1101 long long, memory_order );
1102 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_llong*,
1103 long long*, long long, memory_order, memory_order );
1104 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_llong*,
1105 long long*, long long, memory_order, memory_order );
1106 friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1107 long long, memory_order );
1108 friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1109 long long, memory_order );
1110 friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1111 long long, memory_order );
1112 friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1113 long long, memory_order );
1114 friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1115 long long, memory_order );
1123 typedef struct atomic_ullong
1126 bool is_lock_free() const volatile;
1127 void store( unsigned long long,
1128 memory_order = memory_order_seq_cst ) volatile;
1129 unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1130 unsigned long long exchange( unsigned long long,
1131 memory_order = memory_order_seq_cst ) volatile;
1132 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1133 memory_order, memory_order ) volatile;
1134 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1135 memory_order, memory_order ) volatile;
1136 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1137 memory_order = memory_order_seq_cst ) volatile;
1138 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1139 memory_order = memory_order_seq_cst ) volatile;
1140 unsigned long long fetch_add( unsigned long long,
1141 memory_order = memory_order_seq_cst ) volatile;
1142 unsigned long long fetch_sub( unsigned long long,
1143 memory_order = memory_order_seq_cst ) volatile;
1144 unsigned long long fetch_and( unsigned long long,
1145 memory_order = memory_order_seq_cst ) volatile;
1146 unsigned long long fetch_or( unsigned long long,
1147 memory_order = memory_order_seq_cst ) volatile;
1148 unsigned long long fetch_xor( unsigned long long,
1149 memory_order = memory_order_seq_cst ) volatile;
1151 CPP0X( atomic_ullong() = default; )
1152 CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1153 CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1154 atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
1156 unsigned long long operator =( unsigned long long __v__ ) volatile
1157 { store( __v__ ); return __v__; }
1159 unsigned long long operator ++( int ) volatile
1160 { return fetch_add( 1 ); }
1162 unsigned long long operator --( int ) volatile
1163 { return fetch_sub( 1 ); }
1165 unsigned long long operator ++() volatile
1166 { return fetch_add( 1 ) + 1; }
1168 unsigned long long operator --() volatile
1169 { return fetch_sub( 1 ) - 1; }
1171 unsigned long long operator +=( unsigned long long __v__ ) volatile
1172 { return fetch_add( __v__ ) + __v__; }
1174 unsigned long long operator -=( unsigned long long __v__ ) volatile
1175 { return fetch_sub( __v__ ) - __v__; }
1177 unsigned long long operator &=( unsigned long long __v__ ) volatile
1178 { return fetch_and( __v__ ) & __v__; }
1180 unsigned long long operator |=( unsigned long long __v__ ) volatile
1181 { return fetch_or( __v__ ) | __v__; }
1183 unsigned long long operator ^=( unsigned long long __v__ ) volatile
1184 { return fetch_xor( __v__ ) ^ __v__; }
1186 friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1188 friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1190 friend unsigned long long atomic_exchange_explicit( volatile atomic_ullong*,
1191 unsigned long long, memory_order );
1192 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ullong*,
1193 unsigned long long*, unsigned long long, memory_order, memory_order );
1194 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ullong*,
1195 unsigned long long*, unsigned long long, memory_order, memory_order );
1196 friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1197 unsigned long long, memory_order );
1198 friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1199 unsigned long long, memory_order );
1200 friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1201 unsigned long long, memory_order );
1202 friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1203 unsigned long long, memory_order );
1204 friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1205 unsigned long long, memory_order );
1209 unsigned long long __f__;
1213 typedef atomic_schar atomic_int_least8_t;
1214 typedef atomic_uchar atomic_uint_least8_t;
1215 typedef atomic_short atomic_int_least16_t;
1216 typedef atomic_ushort atomic_uint_least16_t;
1217 typedef atomic_int atomic_int_least32_t;
1218 typedef atomic_uint atomic_uint_least32_t;
1219 typedef atomic_llong atomic_int_least64_t;
1220 typedef atomic_ullong atomic_uint_least64_t;
1222 typedef atomic_schar atomic_int_fast8_t;
1223 typedef atomic_uchar atomic_uint_fast8_t;
1224 typedef atomic_short atomic_int_fast16_t;
1225 typedef atomic_ushort atomic_uint_fast16_t;
1226 typedef atomic_int atomic_int_fast32_t;
1227 typedef atomic_uint atomic_uint_fast32_t;
1228 typedef atomic_llong atomic_int_fast64_t;
1229 typedef atomic_ullong atomic_uint_fast64_t;
1231 typedef atomic_long atomic_intptr_t;
1232 typedef atomic_ulong atomic_uintptr_t;
1234 typedef atomic_long atomic_ssize_t;
1235 typedef atomic_ulong atomic_size_t;
1237 typedef atomic_long atomic_ptrdiff_t;
1239 typedef atomic_llong atomic_intmax_t;
1240 typedef atomic_ullong atomic_uintmax_t;
1246 typedef struct atomic_wchar_t
1249 bool is_lock_free() const volatile;
1250 void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1251 wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1252 wchar_t exchange( wchar_t,
1253 memory_order = memory_order_seq_cst ) volatile;
1254 bool compare_exchange_weak( wchar_t&, wchar_t,
1255 memory_order, memory_order ) volatile;
1256 bool compare_exchange_strong( wchar_t&, wchar_t,
1257 memory_order, memory_order ) volatile;
1258 bool compare_exchange_weak( wchar_t&, wchar_t,
1259 memory_order = memory_order_seq_cst ) volatile;
1260 bool compare_exchange_strong( wchar_t&, wchar_t,
1261 memory_order = memory_order_seq_cst ) volatile;
1262 wchar_t fetch_add( wchar_t,
1263 memory_order = memory_order_seq_cst ) volatile;
1264 wchar_t fetch_sub( wchar_t,
1265 memory_order = memory_order_seq_cst ) volatile;
1266 wchar_t fetch_and( wchar_t,
1267 memory_order = memory_order_seq_cst ) volatile;
1268 wchar_t fetch_or( wchar_t,
1269 memory_order = memory_order_seq_cst ) volatile;
1270 wchar_t fetch_xor( wchar_t,
1271 memory_order = memory_order_seq_cst ) volatile;
1273 CPP0X( atomic_wchar_t() = default; )
1274 CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1275 CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1276 atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
1278 wchar_t operator =( wchar_t __v__ ) volatile
1279 { store( __v__ ); return __v__; }
1281 wchar_t operator ++( int ) volatile
1282 { return fetch_add( 1 ); }
1284 wchar_t operator --( int ) volatile
1285 { return fetch_sub( 1 ); }
1287 wchar_t operator ++() volatile
1288 { return fetch_add( 1 ) + 1; }
1290 wchar_t operator --() volatile
1291 { return fetch_sub( 1 ) - 1; }
1293 wchar_t operator +=( wchar_t __v__ ) volatile
1294 { return fetch_add( __v__ ) + __v__; }
1296 wchar_t operator -=( wchar_t __v__ ) volatile
1297 { return fetch_sub( __v__ ) - __v__; }
1299 wchar_t operator &=( wchar_t __v__ ) volatile
1300 { return fetch_and( __v__ ) & __v__; }
1302 wchar_t operator |=( wchar_t __v__ ) volatile
1303 { return fetch_or( __v__ ) | __v__; }
1305 wchar_t operator ^=( wchar_t __v__ ) volatile
1306 { return fetch_xor( __v__ ) ^ __v__; }
1308 friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1310 friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1312 friend wchar_t atomic_exchange_explicit( volatile atomic_wchar_t*,
1313 wchar_t, memory_order );
1314 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_wchar_t*,
1315 wchar_t*, wchar_t, memory_order, memory_order );
1316 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_wchar_t*,
1317 wchar_t*, wchar_t, memory_order, memory_order );
1318 friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1319 wchar_t, memory_order );
1320 friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1321 wchar_t, memory_order );
1322 friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1323 wchar_t, memory_order );
1324 friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1325 wchar_t, memory_order );
1326 friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1327 wchar_t, memory_order );
1337 typedef atomic_int_least16_t atomic_char16_t;
1338 typedef atomic_int_least32_t atomic_char32_t;
1339 typedef atomic_int_least32_t atomic_wchar_t;
1346 template< typename T >
1351 bool is_lock_free() const volatile;
1352 void store( T, memory_order = memory_order_seq_cst ) volatile;
1353 T load( memory_order = memory_order_seq_cst ) volatile;
1354 T exchange( T __v__, memory_order = memory_order_seq_cst ) volatile;
1355 bool compare_exchange_weak( T&, T, memory_order, memory_order ) volatile;
1356 bool compare_exchange_strong( T&, T, memory_order, memory_order ) volatile;
1357 bool compare_exchange_weak( T&, T, memory_order = memory_order_seq_cst ) volatile;
1358 bool compare_exchange_strong( T&, T, memory_order = memory_order_seq_cst ) volatile;
1360 CPP0X( atomic() = default; )
1361 CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1362 CPP0X( atomic( const atomic& ) = delete; )
1363 atomic& operator =( const atomic& ) CPP0X(=delete);
1365 T operator =( T __v__ ) volatile
1366 { store( __v__ ); return __v__; }
1377 template<typename T> struct atomic< T* > : atomic_address
1379 T* load( memory_order = memory_order_seq_cst ) volatile;
1380 T* exchange( T*, memory_order = memory_order_seq_cst ) volatile;
1381 bool compare_exchange_weak( T*&, T*, memory_order, memory_order ) volatile;
1382 bool compare_exchange_strong( T*&, T*, memory_order, memory_order ) volatile;
1383 bool compare_exchange_weak( T*&, T*,
1384 memory_order = memory_order_seq_cst ) volatile;
1385 bool compare_exchange_strong( T*&, T*,
1386 memory_order = memory_order_seq_cst ) volatile;
1387 T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1388 T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1390 CPP0X( atomic() = default; )
1391 CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) { } )
1392 CPP0X( atomic( const atomic& ) = delete; )
1393 atomic& operator =( const atomic& ) CPP0X(=delete);
1395 T* operator =( T* __v__ ) volatile
1396 { store( __v__ ); return __v__; }
1398 T* operator ++( int ) volatile
1399 { return fetch_add( 1 ); }
1401 T* operator --( int ) volatile
1402 { return fetch_sub( 1 ); }
1404 T* operator ++() volatile
1405 { return fetch_add( 1 ) + 1; }
1407 T* operator --() volatile
1408 { return fetch_sub( 1 ) - 1; }
1410 T* operator +=( T* __v__ ) volatile
1411 { return fetch_add( __v__ ) + __v__; }
1413 T* operator -=( T* __v__ ) volatile
1414 { return fetch_sub( __v__ ) - __v__; }
1422 template<> struct atomic< bool > : atomic_bool
1424 CPP0X( atomic() = default; )
1425 CPP0X( constexpr explicit atomic( bool __v__ )
1426 : atomic_bool( __v__ ) { } )
1427 CPP0X( atomic( const atomic& ) = delete; )
1428 atomic& operator =( const atomic& ) CPP0X(=delete);
1430 bool operator =( bool __v__ ) volatile
1431 { store( __v__ ); return __v__; }
1435 template<> struct atomic< void* > : atomic_address
1437 CPP0X( atomic() = default; )
1438 CPP0X( constexpr explicit atomic( void* __v__ )
1439 : atomic_address( __v__ ) { } )
1440 CPP0X( atomic( const atomic& ) = delete; )
1441 atomic& operator =( const atomic& ) CPP0X(=delete);
1443 void* operator =( void* __v__ ) volatile
1444 { store( __v__ ); return __v__; }
1448 template<> struct atomic< char > : atomic_char
1450 CPP0X( atomic() = default; )
1451 CPP0X( constexpr explicit atomic( char __v__ )
1452 : atomic_char( __v__ ) { } )
1453 CPP0X( atomic( const atomic& ) = delete; )
1454 atomic& operator =( const atomic& ) CPP0X(=delete);
1456 char operator =( char __v__ ) volatile
1457 { store( __v__ ); return __v__; }
1461 template<> struct atomic< signed char > : atomic_schar
1463 CPP0X( atomic() = default; )
1464 CPP0X( constexpr explicit atomic( signed char __v__ )
1465 : atomic_schar( __v__ ) { } )
1466 CPP0X( atomic( const atomic& ) = delete; )
1467 atomic& operator =( const atomic& ) CPP0X(=delete);
1469 signed char operator =( signed char __v__ ) volatile
1470 { store( __v__ ); return __v__; }
1474 template<> struct atomic< unsigned char > : atomic_uchar
1476 CPP0X( atomic() = default; )
1477 CPP0X( constexpr explicit atomic( unsigned char __v__ )
1478 : atomic_uchar( __v__ ) { } )
1479 CPP0X( atomic( const atomic& ) = delete; )
1480 atomic& operator =( const atomic& ) CPP0X(=delete);
1482 unsigned char operator =( unsigned char __v__ ) volatile
1483 { store( __v__ ); return __v__; }
1487 template<> struct atomic< short > : atomic_short
1489 CPP0X( atomic() = default; )
1490 CPP0X( constexpr explicit atomic( short __v__ )
1491 : atomic_short( __v__ ) { } )
1492 CPP0X( atomic( const atomic& ) = delete; )
1493 atomic& operator =( const atomic& ) CPP0X(=delete);
1495 short operator =( short __v__ ) volatile
1496 { store( __v__ ); return __v__; }
1500 template<> struct atomic< unsigned short > : atomic_ushort
1502 CPP0X( atomic() = default; )
1503 CPP0X( constexpr explicit atomic( unsigned short __v__ )
1504 : atomic_ushort( __v__ ) { } )
1505 CPP0X( atomic( const atomic& ) = delete; )
1506 atomic& operator =( const atomic& ) CPP0X(=delete);
1508 unsigned short operator =( unsigned short __v__ ) volatile
1509 { store( __v__ ); return __v__; }
1513 template<> struct atomic< int > : atomic_int
1515 CPP0X( atomic() = default; )
1516 CPP0X( constexpr explicit atomic( int __v__ )
1517 : atomic_int( __v__ ) { } )
1518 CPP0X( atomic( const atomic& ) = delete; )
1519 atomic& operator =( const atomic& ) CPP0X(=delete);
1521 int operator =( int __v__ ) volatile
1522 { store( __v__ ); return __v__; }
1526 template<> struct atomic< unsigned int > : atomic_uint
1528 CPP0X( atomic() = default; )
1529 CPP0X( constexpr explicit atomic( unsigned int __v__ )
1530 : atomic_uint( __v__ ) { } )
1531 CPP0X( atomic( const atomic& ) = delete; )
1532 atomic& operator =( const atomic& ) CPP0X(=delete);
1534 unsigned int operator =( unsigned int __v__ ) volatile
1535 { store( __v__ ); return __v__; }
1539 template<> struct atomic< long > : atomic_long
1541 CPP0X( atomic() = default; )
1542 CPP0X( constexpr explicit atomic( long __v__ )
1543 : atomic_long( __v__ ) { } )
1544 CPP0X( atomic( const atomic& ) = delete; )
1545 atomic& operator =( const atomic& ) CPP0X(=delete);
1547 long operator =( long __v__ ) volatile
1548 { store( __v__ ); return __v__; }
1552 template<> struct atomic< unsigned long > : atomic_ulong
1554 CPP0X( atomic() = default; )
1555 CPP0X( constexpr explicit atomic( unsigned long __v__ )
1556 : atomic_ulong( __v__ ) { } )
1557 CPP0X( atomic( const atomic& ) = delete; )
1558 atomic& operator =( const atomic& ) CPP0X(=delete);
1560 unsigned long operator =( unsigned long __v__ ) volatile
1561 { store( __v__ ); return __v__; }
1565 template<> struct atomic< long long > : atomic_llong
1567 CPP0X( atomic() = default; )
1568 CPP0X( constexpr explicit atomic( long long __v__ )
1569 : atomic_llong( __v__ ) { } )
1570 CPP0X( atomic( const atomic& ) = delete; )
1571 atomic& operator =( const atomic& ) CPP0X(=delete);
1573 long long operator =( long long __v__ ) volatile
1574 { store( __v__ ); return __v__; }
1578 template<> struct atomic< unsigned long long > : atomic_ullong
1580 CPP0X( atomic() = default; )
1581 CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1582 : atomic_ullong( __v__ ) { } )
1583 CPP0X( atomic( const atomic& ) = delete; )
1584 atomic& operator =( const atomic& ) CPP0X(=delete);
1586 unsigned long long operator =( unsigned long long __v__ ) volatile
1587 { store( __v__ ); return __v__; }
1591 template<> struct atomic< wchar_t > : atomic_wchar_t
1593 CPP0X( atomic() = default; )
1594 CPP0X( constexpr explicit atomic( wchar_t __v__ )
1595 : atomic_wchar_t( __v__ ) { } )
1596 CPP0X( atomic( const atomic& ) = delete; )
1597 atomic& operator =( const atomic& ) CPP0X(=delete);
1599 wchar_t operator =( wchar_t __v__ ) volatile
1600 { store( __v__ ); return __v__; }
1610 inline bool atomic_is_lock_free
1611 ( const volatile atomic_bool* __a__ )
1614 inline bool atomic_load_explicit
1615 ( volatile atomic_bool* __a__, memory_order __x__ )
1616 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1618 inline bool atomic_load
1619 ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1621 inline void atomic_init
1622 ( volatile atomic_bool* __a__, bool __m__ )
1623 { _ATOMIC_INIT_( __a__, __m__ ); }
1625 inline void atomic_store_explicit
1626 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1627 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1629 inline void atomic_store
1630 ( volatile atomic_bool* __a__, bool __m__ )
1631 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1633 inline bool atomic_exchange_explicit
1634 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1635 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1637 inline bool atomic_exchange
1638 ( volatile atomic_bool* __a__, bool __m__ )
1639 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1641 inline bool atomic_compare_exchange_weak_explicit
1642 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1643 memory_order __x__, memory_order __y__ )
1644 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1646 inline bool atomic_compare_exchange_strong_explicit
1647 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1648 memory_order __x__, memory_order __y__ )
1649 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1651 inline bool atomic_compare_exchange_weak
1652 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1653 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1654 memory_order_seq_cst, memory_order_seq_cst ); }
1656 inline bool atomic_compare_exchange_strong
1657 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1658 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1659 memory_order_seq_cst, memory_order_seq_cst ); }
1662 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1665 inline void* atomic_load_explicit
1666 ( volatile atomic_address* __a__, memory_order __x__ )
1667 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1669 inline void* atomic_load( volatile atomic_address* __a__ )
1670 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1672 inline void atomic_init
1673 ( volatile atomic_address* __a__, void* __m__ )
1674 { _ATOMIC_INIT_( __a__, __m__ ); }
1676 inline void atomic_store_explicit
1677 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1678 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1680 inline void atomic_store
1681 ( volatile atomic_address* __a__, void* __m__ )
1682 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1684 inline void* atomic_exchange_explicit
1685 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1686 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1688 inline void* atomic_exchange
1689 ( volatile atomic_address* __a__, void* __m__ )
1690 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1692 inline bool atomic_compare_exchange_weak_explicit
1693 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1694 memory_order __x__, memory_order __y__ )
1695 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1697 inline bool atomic_compare_exchange_strong_explicit
1698 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1699 memory_order __x__, memory_order __y__ )
1700 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1702 inline bool atomic_compare_exchange_weak
1703 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1704 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1705 memory_order_seq_cst, memory_order_seq_cst ); }
1707 inline bool atomic_compare_exchange_strong
1708 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1709 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1710 memory_order_seq_cst, memory_order_seq_cst ); }
1713 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1716 inline char atomic_load_explicit
1717 ( volatile atomic_char* __a__, memory_order __x__ )
1718 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1720 inline char atomic_load( volatile atomic_char* __a__ )
1721 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1723 inline void atomic_init
1724 ( volatile atomic_char* __a__, char __m__ )
1725 { _ATOMIC_INIT_( __a__, __m__ ); }
1727 inline void atomic_store_explicit
1728 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1729 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1731 inline void atomic_store
1732 ( volatile atomic_char* __a__, char __m__ )
1733 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1735 inline char atomic_exchange_explicit
1736 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1737 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1739 inline char atomic_exchange
1740 ( volatile atomic_char* __a__, char __m__ )
1741 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1743 inline bool atomic_compare_exchange_weak_explicit
1744 ( volatile atomic_char* __a__, char* __e__, char __m__,
1745 memory_order __x__, memory_order __y__ )
1746 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1748 inline bool atomic_compare_exchange_strong_explicit
1749 ( volatile atomic_char* __a__, char* __e__, char __m__,
1750 memory_order __x__, memory_order __y__ )
1751 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1753 inline bool atomic_compare_exchange_weak
1754 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1755 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1756 memory_order_seq_cst, memory_order_seq_cst ); }
1758 inline bool atomic_compare_exchange_strong
1759 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1760 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1761 memory_order_seq_cst, memory_order_seq_cst ); }
1764 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
/* atomic_schar: C11/C++11-style free functions for atomic signed char.
 * The unsuffixed forms forward to the *_explicit forms with
 * memory_order_seq_cst; the _ATOMIC_* statement-expression macros
 * (defined earlier in this header) perform the model-checked memory
 * actions. */

inline signed char atomic_load_explicit
( volatile atomic_schar* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline signed char atomic_load( volatile atomic_schar* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_schar* __a__, signed char __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_schar* __a__, signed char __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline signed char atomic_exchange_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline signed char atomic_exchange
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure ordering __y__ is accepted for API
 * compatibility but not forwarded -- the CMPSWP macros take only
 * __x__.  Presumably a simplification of this model-checker header;
 * confirm against _ATOMIC_CMPSWP_ / _ATOMIC_CMPSWP_WEAK_. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
1815 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
/* atomic_uchar: C11/C++11-style free functions for atomic unsigned char.
 * Unsuffixed forms forward with memory_order_seq_cst; the _ATOMIC_*
 * macros perform the model-checked memory actions. */

inline unsigned char atomic_load_explicit
( volatile atomic_uchar* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline unsigned char atomic_exchange_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline unsigned char atomic_exchange
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): failure ordering __y__ is accepted but not forwarded
 * (macros take only __x__) -- presumably intentional here; confirm. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
1866 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
/* atomic_short: C11/C++11-style free functions for atomic short.
 * Unsuffixed forms forward with memory_order_seq_cst; the _ATOMIC_*
 * macros perform the model-checked memory actions. */

inline short atomic_load_explicit
( volatile atomic_short* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline short atomic_load( volatile atomic_short* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_short* __a__, short __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_short* __a__, short __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline short atomic_exchange_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline short atomic_exchange
( volatile atomic_short* __a__, short __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): failure ordering __y__ is accepted but not forwarded
 * (macros take only __x__) -- presumably intentional here; confirm. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_short* __a__, short* __e__, short __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_short* __a__, short* __e__, short __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_short* __a__, short* __e__, short __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_short* __a__, short* __e__, short __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
1917 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
/* atomic_ushort: C11/C++11-style free functions for atomic unsigned
 * short.  Unsuffixed forms forward with memory_order_seq_cst; the
 * _ATOMIC_* macros perform the model-checked memory actions. */

inline unsigned short atomic_load_explicit
( volatile atomic_ushort* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline unsigned short atomic_exchange_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline unsigned short atomic_exchange
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): failure ordering __y__ is accepted but not forwarded
 * (macros take only __x__) -- presumably intentional here; confirm. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
1968 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
/* atomic_int: C11/C++11-style free functions for atomic int.
 * Unsuffixed forms forward with memory_order_seq_cst; the _ATOMIC_*
 * macros perform the model-checked memory actions. */

inline int atomic_load_explicit
( volatile atomic_int* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline int atomic_load( volatile atomic_int* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_int* __a__, int __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_int* __a__, int __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline int atomic_exchange_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline int atomic_exchange
( volatile atomic_int* __a__, int __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): failure ordering __y__ is accepted but not forwarded
 * (macros take only __x__) -- presumably intentional here; confirm. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_int* __a__, int* __e__, int __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_int* __a__, int* __e__, int __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_int* __a__, int* __e__, int __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_int* __a__, int* __e__, int __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
2019 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
/* atomic_uint: C11/C++11-style free functions for atomic unsigned int.
 * Unsuffixed forms forward with memory_order_seq_cst; the _ATOMIC_*
 * macros perform the model-checked memory actions. */

inline unsigned int atomic_load_explicit
( volatile atomic_uint* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned int atomic_load( volatile atomic_uint* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_uint* __a__, unsigned int __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_uint* __a__, unsigned int __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline unsigned int atomic_exchange_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline unsigned int atomic_exchange
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): failure ordering __y__ is accepted but not forwarded
 * (macros take only __x__) -- presumably intentional here; confirm. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
2070 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
/* atomic_long: C11/C++11-style free functions for atomic long.
 * Unsuffixed forms forward with memory_order_seq_cst; the _ATOMIC_*
 * macros perform the model-checked memory actions. */

inline long atomic_load_explicit
( volatile atomic_long* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline long atomic_load( volatile atomic_long* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_long* __a__, long __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_long* __a__, long __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline long atomic_exchange_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline long atomic_exchange
( volatile atomic_long* __a__, long __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): failure ordering __y__ is accepted but not forwarded
 * (macros take only __x__) -- presumably intentional here; confirm. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_long* __a__, long* __e__, long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_long* __a__, long* __e__, long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_long* __a__, long* __e__, long __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_long* __a__, long* __e__, long __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
2121 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
/* atomic_ulong: C11/C++11-style free functions for atomic unsigned
 * long.  Unsuffixed forms forward with memory_order_seq_cst; the
 * _ATOMIC_* macros perform the model-checked memory actions. */

inline unsigned long atomic_load_explicit
( volatile atomic_ulong* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline unsigned long atomic_exchange_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline unsigned long atomic_exchange
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): failure ordering __y__ is accepted but not forwarded
 * (macros take only __x__) -- presumably intentional here; confirm. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
2172 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
/* atomic_llong: C11/C++11-style free functions for atomic long long.
 * Unsuffixed forms forward with memory_order_seq_cst; the _ATOMIC_*
 * macros perform the model-checked memory actions. */

inline long long atomic_load_explicit
( volatile atomic_llong* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline long long atomic_load( volatile atomic_llong* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_llong* __a__, long long __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_llong* __a__, long long __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline long long atomic_exchange_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline long long atomic_exchange
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): failure ordering __y__ is accepted but not forwarded
 * (macros take only __x__) -- presumably intentional here; confirm. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_llong* __a__, long long* __e__, long long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_llong* __a__, long long* __e__, long long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
2223 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
/* atomic_ullong: C11/C++11-style free functions for atomic unsigned
 * long long.  Unsuffixed forms forward with memory_order_seq_cst; the
 * _ATOMIC_* macros perform the model-checked memory actions. */

inline unsigned long long atomic_load_explicit
( volatile atomic_ullong* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline unsigned long long atomic_exchange_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline unsigned long long atomic_exchange
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): failure ordering __y__ is accepted but not forwarded
 * (macros take only __x__) -- presumably intentional here; confirm. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
2274 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
/* atomic_wchar_t: C11/C++11-style free functions for atomic wchar_t.
 * Unsuffixed forms forward with memory_order_seq_cst; the _ATOMIC_*
 * macros perform the model-checked memory actions. */

inline wchar_t atomic_load_explicit
( volatile atomic_wchar_t* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

/* Plain (non-atomic) initializing write. */
inline void atomic_init
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ _ATOMIC_INIT_( __a__, __m__ ); }

inline void atomic_store_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange = read-modify-write whose "op" is plain assignment. */
inline wchar_t atomic_exchange_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline wchar_t atomic_exchange
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): failure ordering __y__ is accepted but not forwarded
 * (macros take only __x__) -- presumably intentional here; confirm. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }
2325 inline void* atomic_fetch_add_explicit
2326 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2328 void* volatile* __p__ = &((__a__)->__f__);
2329 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2330 model_rmw_action((void *)__p__, __x__, (uint64_t) ((char*)(*__p__) + __m__));
2333 inline void* atomic_fetch_add
2334 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2335 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2338 inline void* atomic_fetch_sub_explicit
2339 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2341 void* volatile* __p__ = &((__a__)->__f__);
2342 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2343 model_rmw_action((void *)__p__, __x__, (uint64_t)((char*)(*__p__) - __m__));
2346 inline void* atomic_fetch_sub
2347 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2348 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_char: fetch-and-OP read-modify-writes (add, sub, and, or,
 * xor) built on _ATOMIC_MODIFY_.  Each returns the value held before
 * the modification; unsuffixed forms use memory_order_seq_cst. */

inline char atomic_fetch_add_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline char atomic_fetch_add
( volatile atomic_char* __a__, char __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline char atomic_fetch_sub_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline char atomic_fetch_sub
( volatile atomic_char* __a__, char __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline char atomic_fetch_and_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline char atomic_fetch_and
( volatile atomic_char* __a__, char __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline char atomic_fetch_or_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline char atomic_fetch_or
( volatile atomic_char* __a__, char __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline char atomic_fetch_xor_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline char atomic_fetch_xor
( volatile atomic_char* __a__, char __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_schar: fetch-and-OP read-modify-writes (add, sub, and, or,
 * xor) built on _ATOMIC_MODIFY_.  Each returns the value held before
 * the modification; unsuffixed forms use memory_order_seq_cst. */

inline signed char atomic_fetch_add_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline signed char atomic_fetch_add
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline signed char atomic_fetch_sub_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline signed char atomic_fetch_sub
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline signed char atomic_fetch_and_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline signed char atomic_fetch_and
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline signed char atomic_fetch_or_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline signed char atomic_fetch_or
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline signed char atomic_fetch_xor_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline signed char atomic_fetch_xor
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_uchar: fetch-and-OP read-modify-writes (add, sub, and, or,
 * xor) built on _ATOMIC_MODIFY_.  Each returns the value held before
 * the modification; unsuffixed forms use memory_order_seq_cst. */

inline unsigned char atomic_fetch_add_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline unsigned char atomic_fetch_add
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned char atomic_fetch_sub_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline unsigned char atomic_fetch_sub
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned char atomic_fetch_and_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline unsigned char atomic_fetch_and
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned char atomic_fetch_or_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline unsigned char atomic_fetch_or
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned char atomic_fetch_xor_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline unsigned char atomic_fetch_xor
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_short: fetch-and-OP read-modify-writes (add, sub, and, or,
 * xor) built on _ATOMIC_MODIFY_.  Each returns the value held before
 * the modification; unsuffixed forms use memory_order_seq_cst. */

inline short atomic_fetch_add_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline short atomic_fetch_add
( volatile atomic_short* __a__, short __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline short atomic_fetch_sub_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline short atomic_fetch_sub
( volatile atomic_short* __a__, short __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline short atomic_fetch_and_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline short atomic_fetch_and
( volatile atomic_short* __a__, short __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline short atomic_fetch_or_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline short atomic_fetch_or
( volatile atomic_short* __a__, short __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline short atomic_fetch_xor_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline short atomic_fetch_xor
( volatile atomic_short* __a__, short __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_ushort: fetch-and-OP read-modify-writes (add, sub, and, or,
 * xor) built on _ATOMIC_MODIFY_.  Each returns the value held before
 * the modification; unsuffixed forms use memory_order_seq_cst. */

inline unsigned short atomic_fetch_add_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline unsigned short atomic_fetch_add
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned short atomic_fetch_sub_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline unsigned short atomic_fetch_sub
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned short atomic_fetch_and_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline unsigned short atomic_fetch_and
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned short atomic_fetch_or_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline unsigned short atomic_fetch_or
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned short atomic_fetch_xor_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline unsigned short atomic_fetch_xor
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_int: fetch-and-OP read-modify-writes (add, sub, and, or,
 * xor) built on _ATOMIC_MODIFY_.  Each returns the value held before
 * the modification; unsuffixed forms use memory_order_seq_cst. */

inline int atomic_fetch_add_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline int atomic_fetch_add
( volatile atomic_int* __a__, int __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline int atomic_fetch_sub_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline int atomic_fetch_sub
( volatile atomic_int* __a__, int __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline int atomic_fetch_and_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline int atomic_fetch_and
( volatile atomic_int* __a__, int __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline int atomic_fetch_or_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline int atomic_fetch_or
( volatile atomic_int* __a__, int __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline int atomic_fetch_xor_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline int atomic_fetch_xor
( volatile atomic_int* __a__, int __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_uint fetch-and-modify operations (add/sub/and/or/xor); see the
   pattern comment on the atomic_int group above. */
inline unsigned int atomic_fetch_add_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
inline unsigned int atomic_fetch_add
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned int atomic_fetch_sub_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
inline unsigned int atomic_fetch_sub
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned int atomic_fetch_and_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
inline unsigned int atomic_fetch_and
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned int atomic_fetch_or_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
inline unsigned int atomic_fetch_or
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned int atomic_fetch_xor_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
inline unsigned int atomic_fetch_xor
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_long fetch-and-modify operations (add/sub/and/or/xor); see the
   pattern comment on the atomic_int group above. */
inline long atomic_fetch_add_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
inline long atomic_fetch_add
( volatile atomic_long* __a__, long __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
inline long atomic_fetch_sub_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
inline long atomic_fetch_sub
( volatile atomic_long* __a__, long __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
inline long atomic_fetch_and_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
inline long atomic_fetch_and
( volatile atomic_long* __a__, long __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
inline long atomic_fetch_or_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
inline long atomic_fetch_or
( volatile atomic_long* __a__, long __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
inline long atomic_fetch_xor_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
inline long atomic_fetch_xor
( volatile atomic_long* __a__, long __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_ulong fetch-and-modify operations (add/sub/and/or/xor); see the
   pattern comment on the atomic_int group above. */
inline unsigned long atomic_fetch_add_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
inline unsigned long atomic_fetch_add
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned long atomic_fetch_sub_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
inline unsigned long atomic_fetch_sub
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned long atomic_fetch_and_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
inline unsigned long atomic_fetch_and
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned long atomic_fetch_or_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
inline unsigned long atomic_fetch_or
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned long atomic_fetch_xor_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
inline unsigned long atomic_fetch_xor
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_llong fetch-and-modify operations (add/sub/and/or/xor); see the
   pattern comment on the atomic_int group above. */
inline long long atomic_fetch_add_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
inline long long atomic_fetch_add
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
inline long long atomic_fetch_sub_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
inline long long atomic_fetch_sub
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
inline long long atomic_fetch_and_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
inline long long atomic_fetch_and
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
inline long long atomic_fetch_or_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
inline long long atomic_fetch_or
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
inline long long atomic_fetch_xor_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
inline long long atomic_fetch_xor
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_ullong fetch-and-modify operations (add/sub/and/or/xor); see the
   pattern comment on the atomic_int group above. */
inline unsigned long long atomic_fetch_add_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
inline unsigned long long atomic_fetch_add
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned long long atomic_fetch_sub_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
inline unsigned long long atomic_fetch_sub
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned long long atomic_fetch_and_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
inline unsigned long long atomic_fetch_and
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned long long atomic_fetch_or_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
inline unsigned long long atomic_fetch_or
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
inline unsigned long long atomic_fetch_xor_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
inline unsigned long long atomic_fetch_xor
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_wchar_t fetch-and-modify operations (add/sub/and/or/xor); see the
   pattern comment on the atomic_int group above. */
inline wchar_t atomic_fetch_add_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
inline wchar_t atomic_fetch_add
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
inline wchar_t atomic_fetch_sub_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
inline wchar_t atomic_fetch_sub
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
inline wchar_t atomic_fetch_and_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
inline wchar_t atomic_fetch_and
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
inline wchar_t atomic_fetch_or_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
inline wchar_t atomic_fetch_or
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
inline wchar_t atomic_fetch_xor_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
inline wchar_t atomic_fetch_xor
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2893 #define atomic_is_lock_free( __a__ ) \
/* Generic (type-agnostic) operation macros. These are spelled as macros so
   the same names work for every atomic_* struct: each expands one of the
   _ATOMIC_LOAD_/_ATOMIC_INIT_/_ATOMIC_STORE_/_ATOMIC_MODIFY_/_ATOMIC_CMPSWP_*
   statement-expression macros on the argument's __f__ field. */

/* Load; plain form is seq_cst. */
#define atomic_load( __a__ ) \
_ATOMIC_LOAD_( __a__, memory_order_seq_cst )

#define atomic_load_explicit( __a__, __x__ ) \
_ATOMIC_LOAD_( __a__, __x__ )

/* Non-atomic initialization of the contained value. */
#define atomic_init( __a__, __m__ ) \
_ATOMIC_INIT_( __a__, __m__ )

/* Store; plain form is seq_cst. */
#define atomic_store( __a__, __m__ ) \
_ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )

#define atomic_store_explicit( __a__, __m__, __x__ ) \
_ATOMIC_STORE_( __a__, __m__, __x__ )

/* Exchange is modeled as a modify with plain assignment (=). */
#define atomic_exchange( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )

#define atomic_exchange_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, __x__ )

#define atomic_compare_exchange_weak( __a__, __e__, __m__ ) \
_ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, memory_order_seq_cst )

#define atomic_compare_exchange_strong( __a__, __e__, __m__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )

/* NOTE(review): the separate failure ordering __y__ is accepted for API
   compatibility but dropped — _ATOMIC_CMPSWP_*_ take a single order.  This
   is a known simplification of this model-checker header; confirm it is
   still intended before relying on distinct success/failure orderings. */
#define atomic_compare_exchange_weak_explicit( __a__, __e__, __m__, __x__, __y__ ) \
_ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ )

#define atomic_compare_exchange_strong_explicit( __a__, __e__, __m__, __x__, __y__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )
/* Macro spellings of the fetch-and-modify operations (used where the inline
   function overloads are not available, e.g. when compiled as C).  Each
   expands _ATOMIC_MODIFY_ with the matching compound-assignment operator and
   returns the value held before the modification; the non-explicit forms
   default to memory_order_seq_cst. */

#define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )

#define atomic_fetch_add( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )

#define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )

#define atomic_fetch_sub( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )

#define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )

#define atomic_fetch_and( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )

#define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )

#define atomic_fetch_or( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )

#define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )

#define atomic_fetch_xor( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
2971 inline bool atomic_bool::is_lock_free() const volatile
2974 inline void atomic_bool::store
2975 ( bool __m__, memory_order __x__ ) volatile
2976 { atomic_store_explicit( this, __m__, __x__ ); }
2978 inline bool atomic_bool::load
2979 ( memory_order __x__ ) volatile
2980 { return atomic_load_explicit( this, __x__ ); }
2982 inline bool atomic_bool::exchange
2983 ( bool __m__, memory_order __x__ ) volatile
2984 { return atomic_exchange_explicit( this, __m__, __x__ ); }
2986 inline bool atomic_bool::compare_exchange_weak
2987 ( bool& __e__, bool __m__,
2988 memory_order __x__, memory_order __y__ ) volatile
2989 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
2991 inline bool atomic_bool::compare_exchange_strong
2992 ( bool& __e__, bool __m__,
2993 memory_order __x__, memory_order __y__ ) volatile
2994 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
2996 inline bool atomic_bool::compare_exchange_weak
2997 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
2998 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
2999 __x__ == memory_order_acq_rel ? memory_order_acquire :
3000 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3002 inline bool atomic_bool::compare_exchange_strong
3003 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3004 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3005 __x__ == memory_order_acq_rel ? memory_order_acquire :
3006 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3009 inline bool atomic_address::is_lock_free() const volatile
/* atomic_address member functions: thin forwarders to the free-standing
   *_explicit operations, with `this` as the atomic object. */
inline void atomic_address::store
( void* __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }
inline void* atomic_address::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }
inline void* atomic_address::exchange
( void* __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }
inline bool atomic_address::compare_exchange_weak
( void*& __e__, void* __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
inline bool atomic_address::compare_exchange_strong
( void*& __e__, void* __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
/* Single-order CAS overloads: failure order derived from the success order
   (acq_rel -> acquire, release -> relaxed), per the C++11 rules. */
inline bool atomic_address::compare_exchange_weak
( void*& __e__, void* __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
inline bool atomic_address::compare_exchange_strong
( void*& __e__, void* __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3047 inline bool atomic_char::is_lock_free() const volatile
/* atomic_char member functions: forwarders to the free *_explicit
   operations; single-order CAS derives the failure order per C++11. */
inline void atomic_char::store
( char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }
inline char atomic_char::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }
inline char atomic_char::exchange
( char __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }
inline bool atomic_char::compare_exchange_weak
( char& __e__, char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
inline bool atomic_char::compare_exchange_strong
( char& __e__, char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
/* acq_rel -> acquire, release -> relaxed for the failure ordering. */
inline bool atomic_char::compare_exchange_weak
( char& __e__, char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
inline bool atomic_char::compare_exchange_strong
( char& __e__, char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3085 inline bool atomic_schar::is_lock_free() const volatile
/* atomic_schar member functions: forwarders to the free *_explicit
   operations; single-order CAS derives the failure order per C++11. */
inline void atomic_schar::store
( signed char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }
inline signed char atomic_schar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }
inline signed char atomic_schar::exchange
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }
inline bool atomic_schar::compare_exchange_weak
( signed char& __e__, signed char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
inline bool atomic_schar::compare_exchange_strong
( signed char& __e__, signed char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
/* acq_rel -> acquire, release -> relaxed for the failure ordering. */
inline bool atomic_schar::compare_exchange_weak
( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
inline bool atomic_schar::compare_exchange_strong
( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3123 inline bool atomic_uchar::is_lock_free() const volatile
/* atomic_uchar member functions: forwarders to the free *_explicit
   operations; single-order CAS derives the failure order per C++11. */
inline void atomic_uchar::store
( unsigned char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }
inline unsigned char atomic_uchar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }
inline unsigned char atomic_uchar::exchange
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }
inline bool atomic_uchar::compare_exchange_weak
( unsigned char& __e__, unsigned char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
inline bool atomic_uchar::compare_exchange_strong
( unsigned char& __e__, unsigned char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
/* acq_rel -> acquire, release -> relaxed for the failure ordering. */
inline bool atomic_uchar::compare_exchange_weak
( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
inline bool atomic_uchar::compare_exchange_strong
( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3161 inline bool atomic_short::is_lock_free() const volatile
/* atomic_short member functions: forwarders to the free *_explicit
   operations; single-order CAS derives the failure order per C++11. */
inline void atomic_short::store
( short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }
inline short atomic_short::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }
inline short atomic_short::exchange
( short __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }
inline bool atomic_short::compare_exchange_weak
( short& __e__, short __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
inline bool atomic_short::compare_exchange_strong
( short& __e__, short __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
/* acq_rel -> acquire, release -> relaxed for the failure ordering. */
inline bool atomic_short::compare_exchange_weak
( short& __e__, short __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
inline bool atomic_short::compare_exchange_strong
( short& __e__, short __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3199 inline bool atomic_ushort::is_lock_free() const volatile
/* atomic_ushort member functions: forwarders to the free *_explicit
   operations; single-order CAS derives the failure order per C++11. */
inline void atomic_ushort::store
( unsigned short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }
inline unsigned short atomic_ushort::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }
inline unsigned short atomic_ushort::exchange
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }
inline bool atomic_ushort::compare_exchange_weak
( unsigned short& __e__, unsigned short __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
inline bool atomic_ushort::compare_exchange_strong
( unsigned short& __e__, unsigned short __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
/* acq_rel -> acquire, release -> relaxed for the failure ordering. */
inline bool atomic_ushort::compare_exchange_weak
( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
inline bool atomic_ushort::compare_exchange_strong
( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3237 inline bool atomic_int::is_lock_free() const volatile
3240 inline void atomic_int::store
3241 ( int __m__, memory_order __x__ ) volatile
3242 { atomic_store_explicit( this, __m__, __x__ ); }
3244 inline int atomic_int::load
3245 ( memory_order __x__ ) volatile
3246 { return atomic_load_explicit( this, __x__ ); }
3248 inline int atomic_int::exchange
3249 ( int __m__, memory_order __x__ ) volatile
3250 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3252 inline bool atomic_int::compare_exchange_weak
3253 ( int& __e__, int __m__,
3254 memory_order __x__, memory_order __y__ ) volatile
3255 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3257 inline bool atomic_int::compare_exchange_strong
3258 ( int& __e__, int __m__,
3259 memory_order __x__, memory_order __y__ ) volatile
3260 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3262 inline bool atomic_int::compare_exchange_weak
3263 ( int& __e__, int __m__, memory_order __x__ ) volatile
3264 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3265 __x__ == memory_order_acq_rel ? memory_order_acquire :
3266 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3268 inline bool atomic_int::compare_exchange_strong
3269 ( int& __e__, int __m__, memory_order __x__ ) volatile
3270 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3271 __x__ == memory_order_acq_rel ? memory_order_acquire :
3272 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3275 inline bool atomic_uint::is_lock_free() const volatile
/* atomic_uint member functions: forwarders to the free *_explicit
   operations; single-order CAS derives the failure order per C++11. */
inline void atomic_uint::store
( unsigned int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }
inline unsigned int atomic_uint::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }
inline unsigned int atomic_uint::exchange
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }
inline bool atomic_uint::compare_exchange_weak
( unsigned int& __e__, unsigned int __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
inline bool atomic_uint::compare_exchange_strong
( unsigned int& __e__, unsigned int __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
/* acq_rel -> acquire, release -> relaxed for the failure ordering. */
inline bool atomic_uint::compare_exchange_weak
( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
inline bool atomic_uint::compare_exchange_strong
( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3313 inline bool atomic_long::is_lock_free() const volatile
/* atomic_long member functions: forwarders to the free *_explicit
   operations; single-order CAS derives the failure order per C++11. */
inline void atomic_long::store
( long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }
inline long atomic_long::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }
inline long atomic_long::exchange
( long __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }
inline bool atomic_long::compare_exchange_weak
( long& __e__, long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
inline bool atomic_long::compare_exchange_strong
( long& __e__, long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
/* acq_rel -> acquire, release -> relaxed for the failure ordering. */
inline bool atomic_long::compare_exchange_weak
( long& __e__, long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
inline bool atomic_long::compare_exchange_strong
( long& __e__, long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3351 inline bool atomic_ulong::is_lock_free() const volatile
/* atomic_ulong member functions: forwarders to the free *_explicit
   operations; single-order CAS derives the failure order per C++11. */
inline void atomic_ulong::store
( unsigned long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }
inline unsigned long atomic_ulong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }
inline unsigned long atomic_ulong::exchange
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }
inline bool atomic_ulong::compare_exchange_weak
( unsigned long& __e__, unsigned long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
inline bool atomic_ulong::compare_exchange_strong
( unsigned long& __e__, unsigned long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
/* acq_rel -> acquire, release -> relaxed for the failure ordering. */
inline bool atomic_ulong::compare_exchange_weak
( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
inline bool atomic_ulong::compare_exchange_strong
( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3389 inline bool atomic_llong::is_lock_free() const volatile
3392 inline void atomic_llong::store
3393 ( long long __m__, memory_order __x__ ) volatile
3394 { atomic_store_explicit( this, __m__, __x__ ); }
3396 inline long long atomic_llong::load
3397 ( memory_order __x__ ) volatile
3398 { return atomic_load_explicit( this, __x__ ); }
3400 inline long long atomic_llong::exchange
3401 ( long long __m__, memory_order __x__ ) volatile
3402 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3404 inline bool atomic_llong::compare_exchange_weak
3405 ( long long& __e__, long long __m__,
3406 memory_order __x__, memory_order __y__ ) volatile
3407 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3409 inline bool atomic_llong::compare_exchange_strong
3410 ( long long& __e__, long long __m__,
3411 memory_order __x__, memory_order __y__ ) volatile
3412 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3414 inline bool atomic_llong::compare_exchange_weak
3415 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3416 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3417 __x__ == memory_order_acq_rel ? memory_order_acquire :
3418 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3420 inline bool atomic_llong::compare_exchange_strong
3421 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3422 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3423 __x__ == memory_order_acq_rel ? memory_order_acquire :
3424 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3427 inline bool atomic_ullong::is_lock_free() const volatile
3430 inline void atomic_ullong::store
3431 ( unsigned long long __m__, memory_order __x__ ) volatile
3432 { atomic_store_explicit( this, __m__, __x__ ); }
3434 inline unsigned long long atomic_ullong::load
3435 ( memory_order __x__ ) volatile
3436 { return atomic_load_explicit( this, __x__ ); }
3438 inline unsigned long long atomic_ullong::exchange
3439 ( unsigned long long __m__, memory_order __x__ ) volatile
3440 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3442 inline bool atomic_ullong::compare_exchange_weak
3443 ( unsigned long long& __e__, unsigned long long __m__,
3444 memory_order __x__, memory_order __y__ ) volatile
3445 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3447 inline bool atomic_ullong::compare_exchange_strong
3448 ( unsigned long long& __e__, unsigned long long __m__,
3449 memory_order __x__, memory_order __y__ ) volatile
3450 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3452 inline bool atomic_ullong::compare_exchange_weak
3453 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3454 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3455 __x__ == memory_order_acq_rel ? memory_order_acquire :
3456 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3458 inline bool atomic_ullong::compare_exchange_strong
3459 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3460 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3461 __x__ == memory_order_acq_rel ? memory_order_acquire :
3462 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3465 inline bool atomic_wchar_t::is_lock_free() const volatile
3468 inline void atomic_wchar_t::store
3469 ( wchar_t __m__, memory_order __x__ ) volatile
3470 { atomic_store_explicit( this, __m__, __x__ ); }
3472 inline wchar_t atomic_wchar_t::load
3473 ( memory_order __x__ ) volatile
3474 { return atomic_load_explicit( this, __x__ ); }
3476 inline wchar_t atomic_wchar_t::exchange
3477 ( wchar_t __m__, memory_order __x__ ) volatile
3478 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3480 inline bool atomic_wchar_t::compare_exchange_weak
3481 ( wchar_t& __e__, wchar_t __m__,
3482 memory_order __x__, memory_order __y__ ) volatile
3483 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3485 inline bool atomic_wchar_t::compare_exchange_strong
3486 ( wchar_t& __e__, wchar_t __m__,
3487 memory_order __x__, memory_order __y__ ) volatile
3488 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3490 inline bool atomic_wchar_t::compare_exchange_weak
3491 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3492 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3493 __x__ == memory_order_acq_rel ? memory_order_acquire :
3494 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3496 inline bool atomic_wchar_t::compare_exchange_strong
3497 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3498 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3499 __x__ == memory_order_acq_rel ? memory_order_acquire :
3500 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3503 template< typename T >
3504 inline bool atomic<T>::is_lock_free() const volatile
3507 template< typename T >
3508 inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
3509 { _ATOMIC_STORE_( this, __v__, __x__ ); }
3511 template< typename T >
3512 inline T atomic<T>::load( memory_order __x__ ) volatile
3513 { return _ATOMIC_LOAD_( this, __x__ ); }
3515 template< typename T >
3516 inline T atomic<T>::exchange( T __v__, memory_order __x__ ) volatile
3517 { return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }
3519 template< typename T >
3520 inline bool atomic<T>::compare_exchange_weak
3521 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3522 { return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3524 template< typename T >
3525 inline bool atomic<T>::compare_exchange_strong
3526 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3527 { return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
3529 template< typename T >
3530 inline bool atomic<T>::compare_exchange_weak
3531 ( T& __r__, T __v__, memory_order __x__ ) volatile
3532 { return compare_exchange_weak( __r__, __v__, __x__,
3533 __x__ == memory_order_acq_rel ? memory_order_acquire :
3534 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3536 template< typename T >
3537 inline bool atomic<T>::compare_exchange_strong
3538 ( T& __r__, T __v__, memory_order __x__ ) volatile
3539 { return compare_exchange_strong( __r__, __v__, __x__,
3540 __x__ == memory_order_acq_rel ? memory_order_acquire :
3541 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3544 inline void* atomic_address::fetch_add
3545 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3546 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3548 inline void* atomic_address::fetch_sub
3549 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3550 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3553 inline char atomic_char::fetch_add
3554 ( char __m__, memory_order __x__ ) volatile
3555 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3558 inline char atomic_char::fetch_sub
3559 ( char __m__, memory_order __x__ ) volatile
3560 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3563 inline char atomic_char::fetch_and
3564 ( char __m__, memory_order __x__ ) volatile
3565 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3568 inline char atomic_char::fetch_or
3569 ( char __m__, memory_order __x__ ) volatile
3570 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3573 inline char atomic_char::fetch_xor
3574 ( char __m__, memory_order __x__ ) volatile
3575 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3578 inline signed char atomic_schar::fetch_add
3579 ( signed char __m__, memory_order __x__ ) volatile
3580 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3583 inline signed char atomic_schar::fetch_sub
3584 ( signed char __m__, memory_order __x__ ) volatile
3585 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3588 inline signed char atomic_schar::fetch_and
3589 ( signed char __m__, memory_order __x__ ) volatile
3590 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3593 inline signed char atomic_schar::fetch_or
3594 ( signed char __m__, memory_order __x__ ) volatile
3595 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3598 inline signed char atomic_schar::fetch_xor
3599 ( signed char __m__, memory_order __x__ ) volatile
3600 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3603 inline unsigned char atomic_uchar::fetch_add
3604 ( unsigned char __m__, memory_order __x__ ) volatile
3605 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3608 inline unsigned char atomic_uchar::fetch_sub
3609 ( unsigned char __m__, memory_order __x__ ) volatile
3610 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3613 inline unsigned char atomic_uchar::fetch_and
3614 ( unsigned char __m__, memory_order __x__ ) volatile
3615 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3618 inline unsigned char atomic_uchar::fetch_or
3619 ( unsigned char __m__, memory_order __x__ ) volatile
3620 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3623 inline unsigned char atomic_uchar::fetch_xor
3624 ( unsigned char __m__, memory_order __x__ ) volatile
3625 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3628 inline short atomic_short::fetch_add
3629 ( short __m__, memory_order __x__ ) volatile
3630 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3633 inline short atomic_short::fetch_sub
3634 ( short __m__, memory_order __x__ ) volatile
3635 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3638 inline short atomic_short::fetch_and
3639 ( short __m__, memory_order __x__ ) volatile
3640 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3643 inline short atomic_short::fetch_or
3644 ( short __m__, memory_order __x__ ) volatile
3645 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3648 inline short atomic_short::fetch_xor
3649 ( short __m__, memory_order __x__ ) volatile
3650 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3653 inline unsigned short atomic_ushort::fetch_add
3654 ( unsigned short __m__, memory_order __x__ ) volatile
3655 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3658 inline unsigned short atomic_ushort::fetch_sub
3659 ( unsigned short __m__, memory_order __x__ ) volatile
3660 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3663 inline unsigned short atomic_ushort::fetch_and
3664 ( unsigned short __m__, memory_order __x__ ) volatile
3665 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3668 inline unsigned short atomic_ushort::fetch_or
3669 ( unsigned short __m__, memory_order __x__ ) volatile
3670 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3673 inline unsigned short atomic_ushort::fetch_xor
3674 ( unsigned short __m__, memory_order __x__ ) volatile
3675 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3678 inline int atomic_int::fetch_add
3679 ( int __m__, memory_order __x__ ) volatile
3680 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3683 inline int atomic_int::fetch_sub
3684 ( int __m__, memory_order __x__ ) volatile
3685 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3688 inline int atomic_int::fetch_and
3689 ( int __m__, memory_order __x__ ) volatile
3690 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3693 inline int atomic_int::fetch_or
3694 ( int __m__, memory_order __x__ ) volatile
3695 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3698 inline int atomic_int::fetch_xor
3699 ( int __m__, memory_order __x__ ) volatile
3700 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3703 inline unsigned int atomic_uint::fetch_add
3704 ( unsigned int __m__, memory_order __x__ ) volatile
3705 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3708 inline unsigned int atomic_uint::fetch_sub
3709 ( unsigned int __m__, memory_order __x__ ) volatile
3710 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3713 inline unsigned int atomic_uint::fetch_and
3714 ( unsigned int __m__, memory_order __x__ ) volatile
3715 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3718 inline unsigned int atomic_uint::fetch_or
3719 ( unsigned int __m__, memory_order __x__ ) volatile
3720 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3723 inline unsigned int atomic_uint::fetch_xor
3724 ( unsigned int __m__, memory_order __x__ ) volatile
3725 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3728 inline long atomic_long::fetch_add
3729 ( long __m__, memory_order __x__ ) volatile
3730 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3733 inline long atomic_long::fetch_sub
3734 ( long __m__, memory_order __x__ ) volatile
3735 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3738 inline long atomic_long::fetch_and
3739 ( long __m__, memory_order __x__ ) volatile
3740 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3743 inline long atomic_long::fetch_or
3744 ( long __m__, memory_order __x__ ) volatile
3745 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3748 inline long atomic_long::fetch_xor
3749 ( long __m__, memory_order __x__ ) volatile
3750 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3753 inline unsigned long atomic_ulong::fetch_add
3754 ( unsigned long __m__, memory_order __x__ ) volatile
3755 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3758 inline unsigned long atomic_ulong::fetch_sub
3759 ( unsigned long __m__, memory_order __x__ ) volatile
3760 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3763 inline unsigned long atomic_ulong::fetch_and
3764 ( unsigned long __m__, memory_order __x__ ) volatile
3765 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3768 inline unsigned long atomic_ulong::fetch_or
3769 ( unsigned long __m__, memory_order __x__ ) volatile
3770 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3773 inline unsigned long atomic_ulong::fetch_xor
3774 ( unsigned long __m__, memory_order __x__ ) volatile
3775 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3778 inline long long atomic_llong::fetch_add
3779 ( long long __m__, memory_order __x__ ) volatile
3780 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3783 inline long long atomic_llong::fetch_sub
3784 ( long long __m__, memory_order __x__ ) volatile
3785 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3788 inline long long atomic_llong::fetch_and
3789 ( long long __m__, memory_order __x__ ) volatile
3790 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3793 inline long long atomic_llong::fetch_or
3794 ( long long __m__, memory_order __x__ ) volatile
3795 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3798 inline long long atomic_llong::fetch_xor
3799 ( long long __m__, memory_order __x__ ) volatile
3800 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3803 inline unsigned long long atomic_ullong::fetch_add
3804 ( unsigned long long __m__, memory_order __x__ ) volatile
3805 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3808 inline unsigned long long atomic_ullong::fetch_sub
3809 ( unsigned long long __m__, memory_order __x__ ) volatile
3810 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3813 inline unsigned long long atomic_ullong::fetch_and
3814 ( unsigned long long __m__, memory_order __x__ ) volatile
3815 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3818 inline unsigned long long atomic_ullong::fetch_or
3819 ( unsigned long long __m__, memory_order __x__ ) volatile
3820 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3823 inline unsigned long long atomic_ullong::fetch_xor
3824 ( unsigned long long __m__, memory_order __x__ ) volatile
3825 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3828 inline wchar_t atomic_wchar_t::fetch_add
3829 ( wchar_t __m__, memory_order __x__ ) volatile
3830 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3833 inline wchar_t atomic_wchar_t::fetch_sub
3834 ( wchar_t __m__, memory_order __x__ ) volatile
3835 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3838 inline wchar_t atomic_wchar_t::fetch_and
3839 ( wchar_t __m__, memory_order __x__ ) volatile
3840 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3843 inline wchar_t atomic_wchar_t::fetch_or
3844 ( wchar_t __m__, memory_order __x__ ) volatile
3845 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3848 inline wchar_t atomic_wchar_t::fetch_xor
3849 ( wchar_t __m__, memory_order __x__ ) volatile
3850 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3853 template< typename T >
3854 T* atomic<T*>::load( memory_order __x__ ) volatile
3855 { return static_cast<T*>( atomic_address::load( __x__ ) ); }
3857 template< typename T >
3858 T* atomic<T*>::exchange( T* __v__, memory_order __x__ ) volatile
3859 { return static_cast<T*>( atomic_address::exchange( __v__, __x__ ) ); }
3861 template< typename T >
3862 bool atomic<T*>::compare_exchange_weak
3863 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3864 { return atomic_address::compare_exchange_weak( *reinterpret_cast<void**>( &__r__ ),
3865 static_cast<void*>( __v__ ), __x__, __y__ ); }
3866 //{ return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3868 template< typename T >
3869 bool atomic<T*>::compare_exchange_strong
3870 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3871 { return atomic_address::compare_exchange_strong( *reinterpret_cast<void**>( &__r__ ),
3872 static_cast<void*>( __v__ ), __x__, __y__ ); }
3873 //{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
3875 template< typename T >
3876 bool atomic<T*>::compare_exchange_weak
3877 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3878 { return compare_exchange_weak( __r__, __v__, __x__,
3879 __x__ == memory_order_acq_rel ? memory_order_acquire :
3880 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3882 template< typename T >
3883 bool atomic<T*>::compare_exchange_strong
3884 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3885 { return compare_exchange_strong( __r__, __v__, __x__,
3886 __x__ == memory_order_acq_rel ? memory_order_acquire :
3887 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3889 template< typename T >
3890 T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
3891 { return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }
3893 template< typename T >
3894 T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
3895 { return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }
3903 inline void atomic_thread_fence(memory_order order)
3904 { _ATOMIC_FENCE_(order); }
3906 /** @todo Do we want to try to support a user's signal-handler? */
3907 inline void atomic_signal_fence(memory_order order)
3918 #endif /* __IMPATOMIC_H__ */