3 * @brief Common header for C11/C++11 atomics
5 * Note that some features are unavailable, as they require support from a true
 * C11/C++11 compiler.
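 *
 * A minimal usage sketch (illustrative only; every name used here is declared
 * further down in this header, and the calls ultimately map onto the model_*
 * actions from cmodelint.h):
 *
 *     atomic_int counter;
 *     atomic_init( &counter, 0 );
 *     atomic_fetch_add_explicit( &counter, 1, memory_order_relaxed );
 *     int snapshot = atomic_load_explicit( &counter, memory_order_acquire );
 */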
9 #ifndef __IMPATOMIC_H__
10 #define __IMPATOMIC_H__
12 #include "memoryorder.h"
13 #include "cmodelint.h"
21 #define CPP0X( feature )
23 typedef struct atomic_flag
26 bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
27 void clear( memory_order = memory_order_seq_cst ) volatile;
29 CPP0X( atomic_flag() = default; )
30 CPP0X( atomic_flag( const atomic_flag& ) = delete; )
31 atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
    bool __f__;
} atomic_flag;
38 #define ATOMIC_FLAG_INIT { false }
44 extern bool atomic_flag_test_and_set( volatile atomic_flag* );
45 extern bool atomic_flag_test_and_set_explicit
46 ( volatile atomic_flag*, memory_order );
47 extern void atomic_flag_clear( volatile atomic_flag* );
48 extern void atomic_flag_clear_explicit
49 ( volatile atomic_flag*, memory_order );
50 extern void __atomic_flag_wait__
51 ( volatile atomic_flag* );
52 extern void __atomic_flag_wait_explicit__
53 ( volatile atomic_flag*, memory_order );
61 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
62 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
64 inline void atomic_flag::clear( memory_order __x__ ) volatile
65 { atomic_flag_clear_explicit( this, __x__ ); }
71 The remainder of the example implementation uses the following
72 macros. These macros exploit GNU extensions for value-returning
73 blocks (AKA statement expressions) and __typeof__.
75 The macros rely on data fields of atomic structs being named __f__.
76 Other symbols used are __a__=atomic, __e__=expected, __f__=field,
77 __g__=flag, __m__=modified, __o__=operation, __r__=result,
78 __p__=pointer to field, __v__=value (for single evaluation),
79 __x__=memory-ordering (success order), and __y__=memory-ordering (failure
order, accepted by the _explicit compare-exchange functions but not used by
the macros themselves).
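
   For illustration only (a rough sketch of the expansion pattern, not literal
   preprocessor output): with an atomic_int* __a__, _ATOMIC_LOAD_( __a__, __x__ )
   behaves like

       ({ volatile int* __p__ = & ((__a__)->__f__);
          int __r__ = (int) model_read_action( (void *) __p__, __x__ );
          __r__; })

   i.e. the GNU statement expression yields __r__ as its value, and __typeof__
   lets the same macro body adapt to whatever type the __f__ field has.
*/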
82 #define _ATOMIC_LOAD_( __a__, __x__ ) \
83 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
84 __typeof__((__a__)->__f__) __r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__); \
__r__; })
87 #define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
88 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
89 __typeof__(__m__) __v__ = (__m__); \
90 model_write_action((void *) __p__, __x__, (uint64_t) __v__); \
__v__; })
94 #define _ATOMIC_INIT_( __a__, __m__ ) \
95 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
96 __typeof__(__m__) __v__ = (__m__); \
97 model_init_action((void *) __p__, (uint64_t) __v__); \
__v__; })
100 #define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
101 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
102 __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
103 __typeof__(__m__) __v__ = (__m__); \
104 __typeof__((__a__)->__f__) __copy__= __old__; \
105 __copy__ __o__ __v__; \
106 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__); \
__old__; })
109 /* No spurious failure for now */
110 #define _ATOMIC_CMPSWP_WEAK_ _ATOMIC_CMPSWP_
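/* Descriptive note: because the weak form is simply an alias for
   _ATOMIC_CMPSWP_, a compare_exchange_weak in this model can only fail when
   the expected value actually differs; it is never failed spuriously, as a
   real weak compare-exchange is permitted to be. Retry loops written against
   the weak form therefore still behave correctly here. */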
112 #define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
113 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
114 __typeof__(__e__) __q__ = (__e__); \
115 __typeof__(__m__) __v__ = (__m__); \
bool __r__; \
117 __typeof__((__a__)->__f__) __t__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
118 if (__t__ == * __q__ ) { \
119 model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
120 else { model_rmwc_action((void *)__p__, __x__); *__q__ = __t__; __r__ = false;} \
__r__; })
123 #define _ATOMIC_FENCE_( __x__ ) \
124 ({ model_fence_action(__x__);})
127 #define ATOMIC_CHAR_LOCK_FREE 1
128 #define ATOMIC_CHAR16_T_LOCK_FREE 1
129 #define ATOMIC_CHAR32_T_LOCK_FREE 1
130 #define ATOMIC_WCHAR_T_LOCK_FREE 1
131 #define ATOMIC_SHORT_LOCK_FREE 1
132 #define ATOMIC_INT_LOCK_FREE 1
133 #define ATOMIC_LONG_LOCK_FREE 1
134 #define ATOMIC_LLONG_LOCK_FREE 1
135 #define ATOMIC_ADDRESS_LOCK_FREE 1
137 typedef struct atomic_bool
140 bool is_lock_free() const volatile;
141 void store( bool, memory_order = memory_order_seq_cst ) volatile;
142 bool load( memory_order = memory_order_seq_cst ) volatile;
143 bool exchange( bool, memory_order = memory_order_seq_cst ) volatile;
144 bool compare_exchange_weak ( bool&, bool, memory_order, memory_order ) volatile;
145 bool compare_exchange_strong ( bool&, bool, memory_order, memory_order ) volatile;
146 bool compare_exchange_weak ( bool&, bool,
147 memory_order = memory_order_seq_cst) volatile;
148 bool compare_exchange_strong ( bool&, bool,
149 memory_order = memory_order_seq_cst) volatile;
151 CPP0X( atomic_bool() = delete; )
152 CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
153 CPP0X( atomic_bool( const atomic_bool& ) = delete; )
154 atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
156 bool operator =( bool __v__ ) volatile
157 { store( __v__ ); return __v__; }
159 friend void atomic_store_explicit( volatile atomic_bool*, bool,
memory_order );
161 friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
162 friend bool atomic_exchange_explicit( volatile atomic_bool*, bool,
memory_order );
164 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_bool*, bool*, bool,
165 memory_order, memory_order );
166 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_bool*, bool*, bool,
167 memory_order, memory_order );
175 typedef struct atomic_address
178 bool is_lock_free() const volatile;
179 void store( void*, memory_order = memory_order_seq_cst ) volatile;
180 void* load( memory_order = memory_order_seq_cst ) volatile;
181 void* exchange( void*, memory_order = memory_order_seq_cst ) volatile;
182 bool compare_exchange_weak( void*&, void*, memory_order, memory_order ) volatile;
183 bool compare_exchange_strong( void*&, void*, memory_order, memory_order ) volatile;
184 bool compare_exchange_weak( void*&, void*,
185 memory_order = memory_order_seq_cst ) volatile;
186 bool compare_exchange_strong( void*&, void*,
187 memory_order = memory_order_seq_cst ) volatile;
188 void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
189 void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
191 CPP0X( atomic_address() = default; )
192 CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
193 CPP0X( atomic_address( const atomic_address& ) = delete; )
194 atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
196 void* operator =( void* __v__ ) volatile
197 { store( __v__ ); return __v__; }
199 void* operator +=( ptrdiff_t __v__ ) volatile
200 { return fetch_add( __v__ ); }
202 void* operator -=( ptrdiff_t __v__ ) volatile
203 { return fetch_sub( __v__ ); }
205 friend void atomic_store_explicit( volatile atomic_address*, void*,
memory_order );
207 friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
208 friend void* atomic_exchange_explicit( volatile atomic_address*, void*,
memory_order );
210 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_address*,
211 void**, void*, memory_order, memory_order );
212 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_address*,
213 void**, void*, memory_order, memory_order );
214 friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
memory_order );
216 friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
memory_order );
225 typedef struct atomic_char
228 bool is_lock_free() const volatile;
void store( char,
230 memory_order = memory_order_seq_cst ) volatile;
231 char load( memory_order = memory_order_seq_cst ) volatile;
char exchange( char,
233 memory_order = memory_order_seq_cst ) volatile;
234 bool compare_exchange_weak( char&, char,
235 memory_order, memory_order ) volatile;
236 bool compare_exchange_strong( char&, char,
237 memory_order, memory_order ) volatile;
238 bool compare_exchange_weak( char&, char,
239 memory_order = memory_order_seq_cst ) volatile;
240 bool compare_exchange_strong( char&, char,
241 memory_order = memory_order_seq_cst ) volatile;
242 char fetch_add( char,
243 memory_order = memory_order_seq_cst ) volatile;
244 char fetch_sub( char,
245 memory_order = memory_order_seq_cst ) volatile;
246 char fetch_and( char,
247 memory_order = memory_order_seq_cst ) volatile;
char fetch_or( char,
249 memory_order = memory_order_seq_cst ) volatile;
250 char fetch_xor( char,
251 memory_order = memory_order_seq_cst ) volatile;
253 CPP0X( atomic_char() = default; )
254 CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
255 CPP0X( atomic_char( const atomic_char& ) = delete; )
256 atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
258 char operator =( char __v__ ) volatile
259 { store( __v__ ); return __v__; }
261 char operator ++( int ) volatile
262 { return fetch_add( 1 ); }
264 char operator --( int ) volatile
265 { return fetch_sub( 1 ); }
267 char operator ++() volatile
268 { return fetch_add( 1 ) + 1; }
270 char operator --() volatile
271 { return fetch_sub( 1 ) - 1; }
273 char operator +=( char __v__ ) volatile
274 { return fetch_add( __v__ ) + __v__; }
276 char operator -=( char __v__ ) volatile
277 { return fetch_sub( __v__ ) - __v__; }
279 char operator &=( char __v__ ) volatile
280 { return fetch_and( __v__ ) & __v__; }
282 char operator |=( char __v__ ) volatile
283 { return fetch_or( __v__ ) | __v__; }
285 char operator ^=( char __v__ ) volatile
286 { return fetch_xor( __v__ ) ^ __v__; }
288 friend void atomic_store_explicit( volatile atomic_char*, char,
memory_order );
290 friend char atomic_load_explicit( volatile atomic_char*,
memory_order );
292 friend char atomic_exchange_explicit( volatile atomic_char*,
293 char, memory_order );
294 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_char*,
295 char*, char, memory_order, memory_order );
296 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_char*,
297 char*, char, memory_order, memory_order );
298 friend char atomic_fetch_add_explicit( volatile atomic_char*,
299 char, memory_order );
300 friend char atomic_fetch_sub_explicit( volatile atomic_char*,
301 char, memory_order );
302 friend char atomic_fetch_and_explicit( volatile atomic_char*,
303 char, memory_order );
304 friend char atomic_fetch_or_explicit( volatile atomic_char*,
305 char, memory_order );
306 friend char atomic_fetch_xor_explicit( volatile atomic_char*,
307 char, memory_order );
315 typedef struct atomic_schar
318 bool is_lock_free() const volatile;
319 void store( signed char,
320 memory_order = memory_order_seq_cst ) volatile;
321 signed char load( memory_order = memory_order_seq_cst ) volatile;
322 signed char exchange( signed char,
323 memory_order = memory_order_seq_cst ) volatile;
324 bool compare_exchange_weak( signed char&, signed char,
325 memory_order, memory_order ) volatile;
326 bool compare_exchange_strong( signed char&, signed char,
327 memory_order, memory_order ) volatile;
328 bool compare_exchange_weak( signed char&, signed char,
329 memory_order = memory_order_seq_cst ) volatile;
330 bool compare_exchange_strong( signed char&, signed char,
331 memory_order = memory_order_seq_cst ) volatile;
332 signed char fetch_add( signed char,
333 memory_order = memory_order_seq_cst ) volatile;
334 signed char fetch_sub( signed char,
335 memory_order = memory_order_seq_cst ) volatile;
336 signed char fetch_and( signed char,
337 memory_order = memory_order_seq_cst ) volatile;
338 signed char fetch_or( signed char,
339 memory_order = memory_order_seq_cst ) volatile;
340 signed char fetch_xor( signed char,
341 memory_order = memory_order_seq_cst ) volatile;
343 CPP0X( atomic_schar() = default; )
344 CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
345 CPP0X( atomic_schar( const atomic_schar& ) = delete; )
346 atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
348 signed char operator =( signed char __v__ ) volatile
349 { store( __v__ ); return __v__; }
351 signed char operator ++( int ) volatile
352 { return fetch_add( 1 ); }
354 signed char operator --( int ) volatile
355 { return fetch_sub( 1 ); }
357 signed char operator ++() volatile
358 { return fetch_add( 1 ) + 1; }
360 signed char operator --() volatile
361 { return fetch_sub( 1 ) - 1; }
363 signed char operator +=( signed char __v__ ) volatile
364 { return fetch_add( __v__ ) + __v__; }
366 signed char operator -=( signed char __v__ ) volatile
367 { return fetch_sub( __v__ ) - __v__; }
369 signed char operator &=( signed char __v__ ) volatile
370 { return fetch_and( __v__ ) & __v__; }
372 signed char operator |=( signed char __v__ ) volatile
373 { return fetch_or( __v__ ) | __v__; }
375 signed char operator ^=( signed char __v__ ) volatile
376 { return fetch_xor( __v__ ) ^ __v__; }
378 friend void atomic_store_explicit( volatile atomic_schar*, signed char,
memory_order );
380 friend signed char atomic_load_explicit( volatile atomic_schar*,
memory_order );
382 friend signed char atomic_exchange_explicit( volatile atomic_schar*,
383 signed char, memory_order );
384 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_schar*,
385 signed char*, signed char, memory_order, memory_order );
386 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_schar*,
387 signed char*, signed char, memory_order, memory_order );
388 friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
389 signed char, memory_order );
390 friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
391 signed char, memory_order );
392 friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
393 signed char, memory_order );
394 friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
395 signed char, memory_order );
396 friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
397 signed char, memory_order );
405 typedef struct atomic_uchar
408 bool is_lock_free() const volatile;
409 void store( unsigned char,
410 memory_order = memory_order_seq_cst ) volatile;
411 unsigned char load( memory_order = memory_order_seq_cst ) volatile;
412 unsigned char exchange( unsigned char,
413 memory_order = memory_order_seq_cst ) volatile;
414 bool compare_exchange_weak( unsigned char&, unsigned char,
415 memory_order, memory_order ) volatile;
416 bool compare_exchange_strong( unsigned char&, unsigned char,
417 memory_order, memory_order ) volatile;
418 bool compare_exchange_weak( unsigned char&, unsigned char,
419 memory_order = memory_order_seq_cst ) volatile;
420 bool compare_exchange_strong( unsigned char&, unsigned char,
421 memory_order = memory_order_seq_cst ) volatile;
422 unsigned char fetch_add( unsigned char,
423 memory_order = memory_order_seq_cst ) volatile;
424 unsigned char fetch_sub( unsigned char,
425 memory_order = memory_order_seq_cst ) volatile;
426 unsigned char fetch_and( unsigned char,
427 memory_order = memory_order_seq_cst ) volatile;
428 unsigned char fetch_or( unsigned char,
429 memory_order = memory_order_seq_cst ) volatile;
430 unsigned char fetch_xor( unsigned char,
431 memory_order = memory_order_seq_cst ) volatile;
433 CPP0X( atomic_uchar() = default; )
434 CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
435 CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
436 atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
438 unsigned char operator =( unsigned char __v__ ) volatile
439 { store( __v__ ); return __v__; }
441 unsigned char operator ++( int ) volatile
442 { return fetch_add( 1 ); }
444 unsigned char operator --( int ) volatile
445 { return fetch_sub( 1 ); }
447 unsigned char operator ++() volatile
448 { return fetch_add( 1 ) + 1; }
450 unsigned char operator --() volatile
451 { return fetch_sub( 1 ) - 1; }
453 unsigned char operator +=( unsigned char __v__ ) volatile
454 { return fetch_add( __v__ ) + __v__; }
456 unsigned char operator -=( unsigned char __v__ ) volatile
457 { return fetch_sub( __v__ ) - __v__; }
459 unsigned char operator &=( unsigned char __v__ ) volatile
460 { return fetch_and( __v__ ) & __v__; }
462 unsigned char operator |=( unsigned char __v__ ) volatile
463 { return fetch_or( __v__ ) | __v__; }
465 unsigned char operator ^=( unsigned char __v__ ) volatile
466 { return fetch_xor( __v__ ) ^ __v__; }
468 friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
memory_order );
470 friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
memory_order );
472 friend unsigned char atomic_exchange_explicit( volatile atomic_uchar*,
473 unsigned char, memory_order );
474 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uchar*,
475 unsigned char*, unsigned char, memory_order, memory_order );
476 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uchar*,
477 unsigned char*, unsigned char, memory_order, memory_order );
478 friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
479 unsigned char, memory_order );
480 friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
481 unsigned char, memory_order );
482 friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
483 unsigned char, memory_order );
484 friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
485 unsigned char, memory_order );
486 friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
487 unsigned char, memory_order );
495 typedef struct atomic_short
498 bool is_lock_free() const volatile;
void store( short,
500 memory_order = memory_order_seq_cst ) volatile;
501 short load( memory_order = memory_order_seq_cst ) volatile;
502 short exchange( short,
503 memory_order = memory_order_seq_cst ) volatile;
504 bool compare_exchange_weak( short&, short,
505 memory_order, memory_order ) volatile;
506 bool compare_exchange_strong( short&, short,
507 memory_order, memory_order ) volatile;
508 bool compare_exchange_weak( short&, short,
509 memory_order = memory_order_seq_cst ) volatile;
510 bool compare_exchange_strong( short&, short,
511 memory_order = memory_order_seq_cst ) volatile;
512 short fetch_add( short,
513 memory_order = memory_order_seq_cst ) volatile;
514 short fetch_sub( short,
515 memory_order = memory_order_seq_cst ) volatile;
516 short fetch_and( short,
517 memory_order = memory_order_seq_cst ) volatile;
518 short fetch_or( short,
519 memory_order = memory_order_seq_cst ) volatile;
520 short fetch_xor( short,
521 memory_order = memory_order_seq_cst ) volatile;
523 CPP0X( atomic_short() = default; )
524 CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
525 CPP0X( atomic_short( const atomic_short& ) = delete; )
526 atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
528 short operator =( short __v__ ) volatile
529 { store( __v__ ); return __v__; }
531 short operator ++( int ) volatile
532 { return fetch_add( 1 ); }
534 short operator --( int ) volatile
535 { return fetch_sub( 1 ); }
537 short operator ++() volatile
538 { return fetch_add( 1 ) + 1; }
540 short operator --() volatile
541 { return fetch_sub( 1 ) - 1; }
543 short operator +=( short __v__ ) volatile
544 { return fetch_add( __v__ ) + __v__; }
546 short operator -=( short __v__ ) volatile
547 { return fetch_sub( __v__ ) - __v__; }
549 short operator &=( short __v__ ) volatile
550 { return fetch_and( __v__ ) & __v__; }
552 short operator |=( short __v__ ) volatile
553 { return fetch_or( __v__ ) | __v__; }
555 short operator ^=( short __v__ ) volatile
556 { return fetch_xor( __v__ ) ^ __v__; }
558 friend void atomic_store_explicit( volatile atomic_short*, short,
memory_order );
560 friend short atomic_load_explicit( volatile atomic_short*,
memory_order );
562 friend short atomic_exchange_explicit( volatile atomic_short*,
563 short, memory_order );
564 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_short*,
565 short*, short, memory_order, memory_order );
566 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_short*,
567 short*, short, memory_order, memory_order );
568 friend short atomic_fetch_add_explicit( volatile atomic_short*,
569 short, memory_order );
570 friend short atomic_fetch_sub_explicit( volatile atomic_short*,
571 short, memory_order );
572 friend short atomic_fetch_and_explicit( volatile atomic_short*,
573 short, memory_order );
574 friend short atomic_fetch_or_explicit( volatile atomic_short*,
575 short, memory_order );
576 friend short atomic_fetch_xor_explicit( volatile atomic_short*,
577 short, memory_order );
585 typedef struct atomic_ushort
588 bool is_lock_free() const volatile;
589 void store( unsigned short,
590 memory_order = memory_order_seq_cst ) volatile;
591 unsigned short load( memory_order = memory_order_seq_cst ) volatile;
592 unsigned short exchange( unsigned short,
593 memory_order = memory_order_seq_cst ) volatile;
594 bool compare_exchange_weak( unsigned short&, unsigned short,
595 memory_order, memory_order ) volatile;
596 bool compare_exchange_strong( unsigned short&, unsigned short,
597 memory_order, memory_order ) volatile;
598 bool compare_exchange_weak( unsigned short&, unsigned short,
599 memory_order = memory_order_seq_cst ) volatile;
600 bool compare_exchange_strong( unsigned short&, unsigned short,
601 memory_order = memory_order_seq_cst ) volatile;
602 unsigned short fetch_add( unsigned short,
603 memory_order = memory_order_seq_cst ) volatile;
604 unsigned short fetch_sub( unsigned short,
605 memory_order = memory_order_seq_cst ) volatile;
606 unsigned short fetch_and( unsigned short,
607 memory_order = memory_order_seq_cst ) volatile;
608 unsigned short fetch_or( unsigned short,
609 memory_order = memory_order_seq_cst ) volatile;
610 unsigned short fetch_xor( unsigned short,
611 memory_order = memory_order_seq_cst ) volatile;
613 CPP0X( atomic_ushort() = default; )
614 CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
615 CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
616 atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
618 unsigned short operator =( unsigned short __v__ ) volatile
619 { store( __v__ ); return __v__; }
621 unsigned short operator ++( int ) volatile
622 { return fetch_add( 1 ); }
624 unsigned short operator --( int ) volatile
625 { return fetch_sub( 1 ); }
627 unsigned short operator ++() volatile
628 { return fetch_add( 1 ) + 1; }
630 unsigned short operator --() volatile
631 { return fetch_sub( 1 ) - 1; }
633 unsigned short operator +=( unsigned short __v__ ) volatile
634 { return fetch_add( __v__ ) + __v__; }
636 unsigned short operator -=( unsigned short __v__ ) volatile
637 { return fetch_sub( __v__ ) - __v__; }
639 unsigned short operator &=( unsigned short __v__ ) volatile
640 { return fetch_and( __v__ ) & __v__; }
642 unsigned short operator |=( unsigned short __v__ ) volatile
643 { return fetch_or( __v__ ) | __v__; }
645 unsigned short operator ^=( unsigned short __v__ ) volatile
646 { return fetch_xor( __v__ ) ^ __v__; }
648 friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
memory_order );
650 friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
memory_order );
652 friend unsigned short atomic_exchange_explicit( volatile atomic_ushort*,
653 unsigned short, memory_order );
654 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ushort*,
655 unsigned short*, unsigned short, memory_order, memory_order );
656 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ushort*,
657 unsigned short*, unsigned short, memory_order, memory_order );
658 friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
659 unsigned short, memory_order );
660 friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
661 unsigned short, memory_order );
662 friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
663 unsigned short, memory_order );
664 friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
665 unsigned short, memory_order );
666 friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
667 unsigned short, memory_order );
671 unsigned short __f__;
675 typedef struct atomic_int
678 bool is_lock_free() const volatile;
void store( int,
680 memory_order = memory_order_seq_cst ) volatile;
681 int load( memory_order = memory_order_seq_cst ) volatile;
int exchange( int,
683 memory_order = memory_order_seq_cst ) volatile;
684 bool compare_exchange_weak( int&, int,
685 memory_order, memory_order ) volatile;
686 bool compare_exchange_strong( int&, int,
687 memory_order, memory_order ) volatile;
688 bool compare_exchange_weak( int&, int,
689 memory_order = memory_order_seq_cst ) volatile;
690 bool compare_exchange_strong( int&, int,
691 memory_order = memory_order_seq_cst ) volatile;
int fetch_add( int,
693 memory_order = memory_order_seq_cst ) volatile;
int fetch_sub( int,
695 memory_order = memory_order_seq_cst ) volatile;
int fetch_and( int,
697 memory_order = memory_order_seq_cst ) volatile;
int fetch_or( int,
699 memory_order = memory_order_seq_cst ) volatile;
int fetch_xor( int,
701 memory_order = memory_order_seq_cst ) volatile;
703 CPP0X( atomic_int() = default; )
704 CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
705 CPP0X( atomic_int( const atomic_int& ) = delete; )
706 atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
708 int operator =( int __v__ ) volatile
709 { store( __v__ ); return __v__; }
711 int operator ++( int ) volatile
712 { return fetch_add( 1 ); }
714 int operator --( int ) volatile
715 { return fetch_sub( 1 ); }
717 int operator ++() volatile
718 { return fetch_add( 1 ) + 1; }
720 int operator --() volatile
721 { return fetch_sub( 1 ) - 1; }
723 int operator +=( int __v__ ) volatile
724 { return fetch_add( __v__ ) + __v__; }
726 int operator -=( int __v__ ) volatile
727 { return fetch_sub( __v__ ) - __v__; }
729 int operator &=( int __v__ ) volatile
730 { return fetch_and( __v__ ) & __v__; }
732 int operator |=( int __v__ ) volatile
733 { return fetch_or( __v__ ) | __v__; }
735 int operator ^=( int __v__ ) volatile
736 { return fetch_xor( __v__ ) ^ __v__; }
738 friend void atomic_store_explicit( volatile atomic_int*, int,
memory_order );
740 friend int atomic_load_explicit( volatile atomic_int*,
memory_order );
742 friend int atomic_exchange_explicit( volatile atomic_int*,
int, memory_order );
744 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_int*,
745 int*, int, memory_order, memory_order );
746 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_int*,
747 int*, int, memory_order, memory_order );
748 friend int atomic_fetch_add_explicit( volatile atomic_int*,
int, memory_order );
750 friend int atomic_fetch_sub_explicit( volatile atomic_int*,
int, memory_order );
752 friend int atomic_fetch_and_explicit( volatile atomic_int*,
int, memory_order );
754 friend int atomic_fetch_or_explicit( volatile atomic_int*,
int, memory_order );
756 friend int atomic_fetch_xor_explicit( volatile atomic_int*,
int, memory_order );
765 typedef struct atomic_uint
768 bool is_lock_free() const volatile;
769 void store( unsigned int,
770 memory_order = memory_order_seq_cst ) volatile;
771 unsigned int load( memory_order = memory_order_seq_cst ) volatile;
772 unsigned int exchange( unsigned int,
773 memory_order = memory_order_seq_cst ) volatile;
774 bool compare_exchange_weak( unsigned int&, unsigned int,
775 memory_order, memory_order ) volatile;
776 bool compare_exchange_strong( unsigned int&, unsigned int,
777 memory_order, memory_order ) volatile;
778 bool compare_exchange_weak( unsigned int&, unsigned int,
779 memory_order = memory_order_seq_cst ) volatile;
780 bool compare_exchange_strong( unsigned int&, unsigned int,
781 memory_order = memory_order_seq_cst ) volatile;
782 unsigned int fetch_add( unsigned int,
783 memory_order = memory_order_seq_cst ) volatile;
784 unsigned int fetch_sub( unsigned int,
785 memory_order = memory_order_seq_cst ) volatile;
786 unsigned int fetch_and( unsigned int,
787 memory_order = memory_order_seq_cst ) volatile;
788 unsigned int fetch_or( unsigned int,
789 memory_order = memory_order_seq_cst ) volatile;
790 unsigned int fetch_xor( unsigned int,
791 memory_order = memory_order_seq_cst ) volatile;
793 CPP0X( atomic_uint() = default; )
794 CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
795 CPP0X( atomic_uint( const atomic_uint& ) = delete; )
796 atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
798 unsigned int operator =( unsigned int __v__ ) volatile
799 { store( __v__ ); return __v__; }
801 unsigned int operator ++( int ) volatile
802 { return fetch_add( 1 ); }
804 unsigned int operator --( int ) volatile
805 { return fetch_sub( 1 ); }
807 unsigned int operator ++() volatile
808 { return fetch_add( 1 ) + 1; }
810 unsigned int operator --() volatile
811 { return fetch_sub( 1 ) - 1; }
813 unsigned int operator +=( unsigned int __v__ ) volatile
814 { return fetch_add( __v__ ) + __v__; }
816 unsigned int operator -=( unsigned int __v__ ) volatile
817 { return fetch_sub( __v__ ) - __v__; }
819 unsigned int operator &=( unsigned int __v__ ) volatile
820 { return fetch_and( __v__ ) & __v__; }
822 unsigned int operator |=( unsigned int __v__ ) volatile
823 { return fetch_or( __v__ ) | __v__; }
825 unsigned int operator ^=( unsigned int __v__ ) volatile
826 { return fetch_xor( __v__ ) ^ __v__; }
828 friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
memory_order );
830 friend unsigned int atomic_load_explicit( volatile atomic_uint*,
memory_order );
832 friend unsigned int atomic_exchange_explicit( volatile atomic_uint*,
833 unsigned int, memory_order );
834 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uint*,
835 unsigned int*, unsigned int, memory_order, memory_order );
836 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uint*,
837 unsigned int*, unsigned int, memory_order, memory_order );
838 friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
839 unsigned int, memory_order );
840 friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
841 unsigned int, memory_order );
842 friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
843 unsigned int, memory_order );
844 friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
845 unsigned int, memory_order );
846 friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
847 unsigned int, memory_order );
855 typedef struct atomic_long
858 bool is_lock_free() const volatile;
void store( long,
860 memory_order = memory_order_seq_cst ) volatile;
861 long load( memory_order = memory_order_seq_cst ) volatile;
long exchange( long,
863 memory_order = memory_order_seq_cst ) volatile;
864 bool compare_exchange_weak( long&, long,
865 memory_order, memory_order ) volatile;
866 bool compare_exchange_strong( long&, long,
867 memory_order, memory_order ) volatile;
868 bool compare_exchange_weak( long&, long,
869 memory_order = memory_order_seq_cst ) volatile;
870 bool compare_exchange_strong( long&, long,
871 memory_order = memory_order_seq_cst ) volatile;
872 long fetch_add( long,
873 memory_order = memory_order_seq_cst ) volatile;
874 long fetch_sub( long,
875 memory_order = memory_order_seq_cst ) volatile;
876 long fetch_and( long,
877 memory_order = memory_order_seq_cst ) volatile;
long fetch_or( long,
879 memory_order = memory_order_seq_cst ) volatile;
880 long fetch_xor( long,
881 memory_order = memory_order_seq_cst ) volatile;
883 CPP0X( atomic_long() = default; )
884 CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
885 CPP0X( atomic_long( const atomic_long& ) = delete; )
886 atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
888 long operator =( long __v__ ) volatile
889 { store( __v__ ); return __v__; }
891 long operator ++( int ) volatile
892 { return fetch_add( 1 ); }
894 long operator --( int ) volatile
895 { return fetch_sub( 1 ); }
897 long operator ++() volatile
898 { return fetch_add( 1 ) + 1; }
900 long operator --() volatile
901 { return fetch_sub( 1 ) - 1; }
903 long operator +=( long __v__ ) volatile
904 { return fetch_add( __v__ ) + __v__; }
906 long operator -=( long __v__ ) volatile
907 { return fetch_sub( __v__ ) - __v__; }
909 long operator &=( long __v__ ) volatile
910 { return fetch_and( __v__ ) & __v__; }
912 long operator |=( long __v__ ) volatile
913 { return fetch_or( __v__ ) | __v__; }
915 long operator ^=( long __v__ ) volatile
916 { return fetch_xor( __v__ ) ^ __v__; }
918 friend void atomic_store_explicit( volatile atomic_long*, long,
memory_order );
920 friend long atomic_load_explicit( volatile atomic_long*,
memory_order );
922 friend long atomic_exchange_explicit( volatile atomic_long*,
923 long, memory_order );
924 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_long*,
925 long*, long, memory_order, memory_order );
926 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_long*,
927 long*, long, memory_order, memory_order );
928 friend long atomic_fetch_add_explicit( volatile atomic_long*,
929 long, memory_order );
930 friend long atomic_fetch_sub_explicit( volatile atomic_long*,
931 long, memory_order );
932 friend long atomic_fetch_and_explicit( volatile atomic_long*,
933 long, memory_order );
934 friend long atomic_fetch_or_explicit( volatile atomic_long*,
935 long, memory_order );
936 friend long atomic_fetch_xor_explicit( volatile atomic_long*,
937 long, memory_order );
945 typedef struct atomic_ulong
948 bool is_lock_free() const volatile;
949 void store( unsigned long,
950 memory_order = memory_order_seq_cst ) volatile;
951 unsigned long load( memory_order = memory_order_seq_cst ) volatile;
952 unsigned long exchange( unsigned long,
953 memory_order = memory_order_seq_cst ) volatile;
954 bool compare_exchange_weak( unsigned long&, unsigned long,
955 memory_order, memory_order ) volatile;
956 bool compare_exchange_strong( unsigned long&, unsigned long,
957 memory_order, memory_order ) volatile;
958 bool compare_exchange_weak( unsigned long&, unsigned long,
959 memory_order = memory_order_seq_cst ) volatile;
960 bool compare_exchange_strong( unsigned long&, unsigned long,
961 memory_order = memory_order_seq_cst ) volatile;
962 unsigned long fetch_add( unsigned long,
963 memory_order = memory_order_seq_cst ) volatile;
964 unsigned long fetch_sub( unsigned long,
965 memory_order = memory_order_seq_cst ) volatile;
966 unsigned long fetch_and( unsigned long,
967 memory_order = memory_order_seq_cst ) volatile;
968 unsigned long fetch_or( unsigned long,
969 memory_order = memory_order_seq_cst ) volatile;
970 unsigned long fetch_xor( unsigned long,
971 memory_order = memory_order_seq_cst ) volatile;
973 CPP0X( atomic_ulong() = default; )
974 CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
975 CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
976 atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
978 unsigned long operator =( unsigned long __v__ ) volatile
979 { store( __v__ ); return __v__; }
981 unsigned long operator ++( int ) volatile
982 { return fetch_add( 1 ); }
984 unsigned long operator --( int ) volatile
985 { return fetch_sub( 1 ); }
987 unsigned long operator ++() volatile
988 { return fetch_add( 1 ) + 1; }
990 unsigned long operator --() volatile
991 { return fetch_sub( 1 ) - 1; }
993 unsigned long operator +=( unsigned long __v__ ) volatile
994 { return fetch_add( __v__ ) + __v__; }
996 unsigned long operator -=( unsigned long __v__ ) volatile
997 { return fetch_sub( __v__ ) - __v__; }
999 unsigned long operator &=( unsigned long __v__ ) volatile
1000 { return fetch_and( __v__ ) & __v__; }
1002 unsigned long operator |=( unsigned long __v__ ) volatile
1003 { return fetch_or( __v__ ) | __v__; }
1005 unsigned long operator ^=( unsigned long __v__ ) volatile
1006 { return fetch_xor( __v__ ) ^ __v__; }
1008 friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
memory_order );
1010 friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
memory_order );
1012 friend unsigned long atomic_exchange_explicit( volatile atomic_ulong*,
1013 unsigned long, memory_order );
1014 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ulong*,
1015 unsigned long*, unsigned long, memory_order, memory_order );
1016 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ulong*,
1017 unsigned long*, unsigned long, memory_order, memory_order );
1018 friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
1019 unsigned long, memory_order );
1020 friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
1021 unsigned long, memory_order );
1022 friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
1023 unsigned long, memory_order );
1024 friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
1025 unsigned long, memory_order );
1026 friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
1027 unsigned long, memory_order );
1031 unsigned long __f__;
1035 typedef struct atomic_llong
1038 bool is_lock_free() const volatile;
1039 void store( long long,
1040 memory_order = memory_order_seq_cst ) volatile;
1041 long long load( memory_order = memory_order_seq_cst ) volatile;
1042 long long exchange( long long,
1043 memory_order = memory_order_seq_cst ) volatile;
1044 bool compare_exchange_weak( long long&, long long,
1045 memory_order, memory_order ) volatile;
1046 bool compare_exchange_strong( long long&, long long,
1047 memory_order, memory_order ) volatile;
1048 bool compare_exchange_weak( long long&, long long,
1049 memory_order = memory_order_seq_cst ) volatile;
1050 bool compare_exchange_strong( long long&, long long,
1051 memory_order = memory_order_seq_cst ) volatile;
1052 long long fetch_add( long long,
1053 memory_order = memory_order_seq_cst ) volatile;
1054 long long fetch_sub( long long,
1055 memory_order = memory_order_seq_cst ) volatile;
1056 long long fetch_and( long long,
1057 memory_order = memory_order_seq_cst ) volatile;
1058 long long fetch_or( long long,
1059 memory_order = memory_order_seq_cst ) volatile;
1060 long long fetch_xor( long long,
1061 memory_order = memory_order_seq_cst ) volatile;
1063 CPP0X( atomic_llong() = default; )
1064 CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1065 CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1066 atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
1068 long long operator =( long long __v__ ) volatile
1069 { store( __v__ ); return __v__; }
1071 long long operator ++( int ) volatile
1072 { return fetch_add( 1 ); }
1074 long long operator --( int ) volatile
1075 { return fetch_sub( 1 ); }
1077 long long operator ++() volatile
1078 { return fetch_add( 1 ) + 1; }
1080 long long operator --() volatile
1081 { return fetch_sub( 1 ) - 1; }
1083 long long operator +=( long long __v__ ) volatile
1084 { return fetch_add( __v__ ) + __v__; }
1086 long long operator -=( long long __v__ ) volatile
1087 { return fetch_sub( __v__ ) - __v__; }
1089 long long operator &=( long long __v__ ) volatile
1090 { return fetch_and( __v__ ) & __v__; }
1092 long long operator |=( long long __v__ ) volatile
1093 { return fetch_or( __v__ ) | __v__; }
1095 long long operator ^=( long long __v__ ) volatile
1096 { return fetch_xor( __v__ ) ^ __v__; }
1098 friend void atomic_store_explicit( volatile atomic_llong*, long long,
memory_order );
1100 friend long long atomic_load_explicit( volatile atomic_llong*,
memory_order );
1102 friend long long atomic_exchange_explicit( volatile atomic_llong*,
1103 long long, memory_order );
1104 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_llong*,
1105 long long*, long long, memory_order, memory_order );
1106 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_llong*,
1107 long long*, long long, memory_order, memory_order );
1108 friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1109 long long, memory_order );
1110 friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1111 long long, memory_order );
1112 friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1113 long long, memory_order );
1114 friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1115 long long, memory_order );
1116 friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1117 long long, memory_order );
1125 typedef struct atomic_ullong
1128 bool is_lock_free() const volatile;
1129 void store( unsigned long long,
1130 memory_order = memory_order_seq_cst ) volatile;
1131 unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1132 unsigned long long exchange( unsigned long long,
1133 memory_order = memory_order_seq_cst ) volatile;
1134 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1135 memory_order, memory_order ) volatile;
1136 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1137 memory_order, memory_order ) volatile;
1138 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1139 memory_order = memory_order_seq_cst ) volatile;
1140 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1141 memory_order = memory_order_seq_cst ) volatile;
1142 unsigned long long fetch_add( unsigned long long,
1143 memory_order = memory_order_seq_cst ) volatile;
1144 unsigned long long fetch_sub( unsigned long long,
1145 memory_order = memory_order_seq_cst ) volatile;
1146 unsigned long long fetch_and( unsigned long long,
1147 memory_order = memory_order_seq_cst ) volatile;
1148 unsigned long long fetch_or( unsigned long long,
1149 memory_order = memory_order_seq_cst ) volatile;
1150 unsigned long long fetch_xor( unsigned long long,
1151 memory_order = memory_order_seq_cst ) volatile;
1153 CPP0X( atomic_ullong() = default; )
1154 CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1155 CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1156 atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
1158 unsigned long long operator =( unsigned long long __v__ ) volatile
1159 { store( __v__ ); return __v__; }
1161 unsigned long long operator ++( int ) volatile
1162 { return fetch_add( 1 ); }
1164 unsigned long long operator --( int ) volatile
1165 { return fetch_sub( 1 ); }
1167 unsigned long long operator ++() volatile
1168 { return fetch_add( 1 ) + 1; }
1170 unsigned long long operator --() volatile
1171 { return fetch_sub( 1 ) - 1; }
1173 unsigned long long operator +=( unsigned long long __v__ ) volatile
1174 { return fetch_add( __v__ ) + __v__; }
1176 unsigned long long operator -=( unsigned long long __v__ ) volatile
1177 { return fetch_sub( __v__ ) - __v__; }
1179 unsigned long long operator &=( unsigned long long __v__ ) volatile
1180 { return fetch_and( __v__ ) & __v__; }
1182 unsigned long long operator |=( unsigned long long __v__ ) volatile
1183 { return fetch_or( __v__ ) | __v__; }
1185 unsigned long long operator ^=( unsigned long long __v__ ) volatile
1186 { return fetch_xor( __v__ ) ^ __v__; }
1188 friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
memory_order );
1190 friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
memory_order );
1192 friend unsigned long long atomic_exchange_explicit( volatile atomic_ullong*,
1193 unsigned long long, memory_order );
1194 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ullong*,
1195 unsigned long long*, unsigned long long, memory_order, memory_order );
1196 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ullong*,
1197 unsigned long long*, unsigned long long, memory_order, memory_order );
1198 friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1199 unsigned long long, memory_order );
1200 friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1201 unsigned long long, memory_order );
1202 friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1203 unsigned long long, memory_order );
1204 friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1205 unsigned long long, memory_order );
1206 friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1207 unsigned long long, memory_order );
1211 unsigned long long __f__;
1215 typedef atomic_schar atomic_int_least8_t;
1216 typedef atomic_uchar atomic_uint_least8_t;
1217 typedef atomic_short atomic_int_least16_t;
1218 typedef atomic_ushort atomic_uint_least16_t;
1219 typedef atomic_int atomic_int_least32_t;
1220 typedef atomic_uint atomic_uint_least32_t;
1221 typedef atomic_llong atomic_int_least64_t;
1222 typedef atomic_ullong atomic_uint_least64_t;
1224 typedef atomic_schar atomic_int_fast8_t;
1225 typedef atomic_uchar atomic_uint_fast8_t;
1226 typedef atomic_short atomic_int_fast16_t;
1227 typedef atomic_ushort atomic_uint_fast16_t;
1228 typedef atomic_int atomic_int_fast32_t;
1229 typedef atomic_uint atomic_uint_fast32_t;
1230 typedef atomic_llong atomic_int_fast64_t;
1231 typedef atomic_ullong atomic_uint_fast64_t;
1233 typedef atomic_long atomic_intptr_t;
1234 typedef atomic_ulong atomic_uintptr_t;
1236 typedef atomic_long atomic_ssize_t;
1237 typedef atomic_ulong atomic_size_t;
1239 typedef atomic_long atomic_ptrdiff_t;
1241 typedef atomic_llong atomic_intmax_t;
1242 typedef atomic_ullong atomic_uintmax_t;
1248 typedef struct atomic_wchar_t
1251 bool is_lock_free() const volatile;
1252 void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1253 wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1254 wchar_t exchange( wchar_t,
1255 memory_order = memory_order_seq_cst ) volatile;
1256 bool compare_exchange_weak( wchar_t&, wchar_t,
1257 memory_order, memory_order ) volatile;
1258 bool compare_exchange_strong( wchar_t&, wchar_t,
1259 memory_order, memory_order ) volatile;
1260 bool compare_exchange_weak( wchar_t&, wchar_t,
1261 memory_order = memory_order_seq_cst ) volatile;
1262 bool compare_exchange_strong( wchar_t&, wchar_t,
1263 memory_order = memory_order_seq_cst ) volatile;
1264 wchar_t fetch_add( wchar_t,
1265 memory_order = memory_order_seq_cst ) volatile;
1266 wchar_t fetch_sub( wchar_t,
1267 memory_order = memory_order_seq_cst ) volatile;
1268 wchar_t fetch_and( wchar_t,
1269 memory_order = memory_order_seq_cst ) volatile;
1270 wchar_t fetch_or( wchar_t,
1271 memory_order = memory_order_seq_cst ) volatile;
1272 wchar_t fetch_xor( wchar_t,
1273 memory_order = memory_order_seq_cst ) volatile;
1275 CPP0X( atomic_wchar_t() = default; )
1276 CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1277 CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1278 atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
1280 wchar_t operator =( wchar_t __v__ ) volatile
1281 { store( __v__ ); return __v__; }
1283 wchar_t operator ++( int ) volatile
1284 { return fetch_add( 1 ); }
1286 wchar_t operator --( int ) volatile
1287 { return fetch_sub( 1 ); }
1289 wchar_t operator ++() volatile
1290 { return fetch_add( 1 ) + 1; }
1292 wchar_t operator --() volatile
1293 { return fetch_sub( 1 ) - 1; }
1295 wchar_t operator +=( wchar_t __v__ ) volatile
1296 { return fetch_add( __v__ ) + __v__; }
1298 wchar_t operator -=( wchar_t __v__ ) volatile
1299 { return fetch_sub( __v__ ) - __v__; }
1301 wchar_t operator &=( wchar_t __v__ ) volatile
1302 { return fetch_and( __v__ ) & __v__; }
1304 wchar_t operator |=( wchar_t __v__ ) volatile
1305 { return fetch_or( __v__ ) | __v__; }
1307 wchar_t operator ^=( wchar_t __v__ ) volatile
1308 { return fetch_xor( __v__ ) ^ __v__; }
1310 friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
memory_order );
1312 friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
memory_order );
1314 friend wchar_t atomic_exchange_explicit( volatile atomic_wchar_t*,
1315 wchar_t, memory_order );
1316 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_wchar_t*,
1317 wchar_t*, wchar_t, memory_order, memory_order );
1318 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_wchar_t*,
1319 wchar_t*, wchar_t, memory_order, memory_order );
1320 friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1321 wchar_t, memory_order );
1322 friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1323 wchar_t, memory_order );
1324 friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1325 wchar_t, memory_order );
1326 friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1327 wchar_t, memory_order );
1328 friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1329 wchar_t, memory_order );
1339 typedef atomic_int_least16_t atomic_char16_t;
1340 typedef atomic_int_least32_t atomic_char32_t;
1341 typedef atomic_int_least32_t atomic_wchar_t;
1348 template< typename T >
struct atomic
{
1353 bool is_lock_free() const volatile;
1354 void store( T, memory_order = memory_order_seq_cst ) volatile;
1355 T load( memory_order = memory_order_seq_cst ) volatile;
1356 T exchange( T __v__, memory_order = memory_order_seq_cst ) volatile;
1357 bool compare_exchange_weak( T&, T, memory_order, memory_order ) volatile;
1358 bool compare_exchange_strong( T&, T, memory_order, memory_order ) volatile;
1359 bool compare_exchange_weak( T&, T, memory_order = memory_order_seq_cst ) volatile;
1360 bool compare_exchange_strong( T&, T, memory_order = memory_order_seq_cst ) volatile;
1362 CPP0X( atomic() = default; )
1363 CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1364 CPP0X( atomic( const atomic& ) = delete; )
1365 atomic& operator =( const atomic& ) CPP0X(=delete);
1367 T operator =( T __v__ ) volatile
1368 { store( __v__ ); return __v__; }
1379 template<typename T> struct atomic< T* > : atomic_address
1381 T* load( memory_order = memory_order_seq_cst ) volatile;
1382 T* exchange( T*, memory_order = memory_order_seq_cst ) volatile;
1383 bool compare_exchange_weak( T*&, T*, memory_order, memory_order ) volatile;
1384 bool compare_exchange_strong( T*&, T*, memory_order, memory_order ) volatile;
1385 bool compare_exchange_weak( T*&, T*,
1386 memory_order = memory_order_seq_cst ) volatile;
1387 bool compare_exchange_strong( T*&, T*,
1388 memory_order = memory_order_seq_cst ) volatile;
1389 T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1390 T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1392 CPP0X( atomic() = default; )
1393 CPP0X( constexpr explicit atomic( T* __v__ ) : atomic_address( __v__ ) { } )
1394 CPP0X( atomic( const atomic& ) = delete; )
1395 atomic& operator =( const atomic& ) CPP0X(=delete);
1397 T* operator =( T* __v__ ) volatile
1398 { store( __v__ ); return __v__; }
1400 T* operator ++( int ) volatile
1401 { return fetch_add( 1 ); }
1403 T* operator --( int ) volatile
1404 { return fetch_sub( 1 ); }
1406 T* operator ++() volatile
1407 { return fetch_add( 1 ) + 1; }
1409 T* operator --() volatile
1410 { return fetch_sub( 1 ) - 1; }
1412 T* operator +=( ptrdiff_t __v__ ) volatile
1413 { return fetch_add( __v__ ) + __v__; }
1415 T* operator -=( ptrdiff_t __v__ ) volatile
1416 { return fetch_sub( __v__ ) - __v__; }
1424 template<> struct atomic< bool > : atomic_bool
1426 CPP0X( atomic() = default; )
1427 CPP0X( constexpr explicit atomic( bool __v__ )
1428 : atomic_bool( __v__ ) { } )
1429 CPP0X( atomic( const atomic& ) = delete; )
1430 atomic& operator =( const atomic& ) CPP0X(=delete);
1432 bool operator =( bool __v__ ) volatile
1433 { store( __v__ ); return __v__; }
1437 template<> struct atomic< void* > : atomic_address
1439 CPP0X( atomic() = default; )
1440 CPP0X( constexpr explicit atomic( void* __v__ )
1441 : atomic_address( __v__ ) { } )
1442 CPP0X( atomic( const atomic& ) = delete; )
1443 atomic& operator =( const atomic& ) CPP0X(=delete);
1445 void* operator =( void* __v__ ) volatile
1446 { store( __v__ ); return __v__; }
1450 template<> struct atomic< char > : atomic_char
1452 CPP0X( atomic() = default; )
1453 CPP0X( constexpr explicit atomic( char __v__ )
1454 : atomic_char( __v__ ) { } )
1455 CPP0X( atomic( const atomic& ) = delete; )
1456 atomic& operator =( const atomic& ) CPP0X(=delete);
1458 char operator =( char __v__ ) volatile
1459 { store( __v__ ); return __v__; }
1463 template<> struct atomic< signed char > : atomic_schar
1465 CPP0X( atomic() = default; )
1466 CPP0X( constexpr explicit atomic( signed char __v__ )
1467 : atomic_schar( __v__ ) { } )
1468 CPP0X( atomic( const atomic& ) = delete; )
1469 atomic& operator =( const atomic& ) CPP0X(=delete);
1471 signed char operator =( signed char __v__ ) volatile
1472 { store( __v__ ); return __v__; }
1476 template<> struct atomic< unsigned char > : atomic_uchar
1478 CPP0X( atomic() = default; )
1479 CPP0X( constexpr explicit atomic( unsigned char __v__ )
1480 : atomic_uchar( __v__ ) { } )
1481 CPP0X( atomic( const atomic& ) = delete; )
1482 atomic& operator =( const atomic& ) CPP0X(=delete);
1484 unsigned char operator =( unsigned char __v__ ) volatile
1485 { store( __v__ ); return __v__; }
1489 template<> struct atomic< short > : atomic_short
1491 CPP0X( atomic() = default; )
1492 CPP0X( constexpr explicit atomic( short __v__ )
1493 : atomic_short( __v__ ) { } )
1494 CPP0X( atomic( const atomic& ) = delete; )
1495 atomic& operator =( const atomic& ) CPP0X(=delete);
1497 short operator =( short __v__ ) volatile
1498 { store( __v__ ); return __v__; }
1502 template<> struct atomic< unsigned short > : atomic_ushort
1504 CPP0X( atomic() = default; )
1505 CPP0X( constexpr explicit atomic( unsigned short __v__ )
1506 : atomic_ushort( __v__ ) { } )
1507 CPP0X( atomic( const atomic& ) = delete; )
1508 atomic& operator =( const atomic& ) CPP0X(=delete);
1510 unsigned short operator =( unsigned short __v__ ) volatile
1511 { store( __v__ ); return __v__; }
1515 template<> struct atomic< int > : atomic_int
1517 CPP0X( atomic() = default; )
1518 CPP0X( constexpr explicit atomic( int __v__ )
1519 : atomic_int( __v__ ) { } )
1520 CPP0X( atomic( const atomic& ) = delete; )
1521 atomic& operator =( const atomic& ) CPP0X(=delete);
1523 int operator =( int __v__ ) volatile
1524 { store( __v__ ); return __v__; }
1528 template<> struct atomic< unsigned int > : atomic_uint
1530 CPP0X( atomic() = default; )
1531 CPP0X( constexpr explicit atomic( unsigned int __v__ )
1532 : atomic_uint( __v__ ) { } )
1533 CPP0X( atomic( const atomic& ) = delete; )
1534 atomic& operator =( const atomic& ) CPP0X(=delete);
1536 unsigned int operator =( unsigned int __v__ ) volatile
1537 { store( __v__ ); return __v__; }
1541 template<> struct atomic< long > : atomic_long
1543 CPP0X( atomic() = default; )
1544 CPP0X( constexpr explicit atomic( long __v__ )
1545 : atomic_long( __v__ ) { } )
1546 CPP0X( atomic( const atomic& ) = delete; )
1547 atomic& operator =( const atomic& ) CPP0X(=delete);
1549 long operator =( long __v__ ) volatile
1550 { store( __v__ ); return __v__; }
1554 template<> struct atomic< unsigned long > : atomic_ulong
1556 CPP0X( atomic() = default; )
1557 CPP0X( constexpr explicit atomic( unsigned long __v__ )
1558 : atomic_ulong( __v__ ) { } )
1559 CPP0X( atomic( const atomic& ) = delete; )
1560 atomic& operator =( const atomic& ) CPP0X(=delete);
1562 unsigned long operator =( unsigned long __v__ ) volatile
1563 { store( __v__ ); return __v__; }
1567 template<> struct atomic< long long > : atomic_llong
1569 CPP0X( atomic() = default; )
1570 CPP0X( constexpr explicit atomic( long long __v__ )
1571 : atomic_llong( __v__ ) { } )
1572 CPP0X( atomic( const atomic& ) = delete; )
1573 atomic& operator =( const atomic& ) CPP0X(=delete);
1575 long long operator =( long long __v__ ) volatile
1576 { store( __v__ ); return __v__; }
1580 template<> struct atomic< unsigned long long > : atomic_ullong
1582 CPP0X( atomic() = default; )
1583 CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1584 : atomic_ullong( __v__ ) { } )
1585 CPP0X( atomic( const atomic& ) = delete; )
1586 atomic& operator =( const atomic& ) CPP0X(=delete);
1588 unsigned long long operator =( unsigned long long __v__ ) volatile
1589 { store( __v__ ); return __v__; }
1593 template<> struct atomic< wchar_t > : atomic_wchar_t
1595 CPP0X( atomic() = default; )
1596 CPP0X( constexpr explicit atomic( wchar_t __v__ )
1597 : atomic_wchar_t( __v__ ) { } )
1598 CPP0X( atomic( const atomic& ) = delete; )
1599 atomic& operator =( const atomic& ) CPP0X(=delete);
1601 wchar_t operator =( wchar_t __v__ ) volatile
1602 { store( __v__ ); return __v__; }
1612 inline bool atomic_is_lock_free
1613 ( const volatile atomic_bool* __a__ )
1616 inline bool atomic_load_explicit
1617 ( volatile atomic_bool* __a__, memory_order __x__ )
1618 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1620 inline bool atomic_load
1621 ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1623 inline void atomic_init
1624 ( volatile atomic_bool* __a__, bool __m__ )
1625 { _ATOMIC_INIT_( __a__, __m__ ); }
1627 inline void atomic_store_explicit
1628 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1629 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1631 inline void atomic_store
1632 ( volatile atomic_bool* __a__, bool __m__ )
1633 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1635 inline bool atomic_exchange_explicit
1636 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1637 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1639 inline bool atomic_exchange
1640 ( volatile atomic_bool* __a__, bool __m__ )
1641 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1643 inline bool atomic_compare_exchange_weak_explicit
1644 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1645 memory_order __x__, memory_order __y__ )
1646 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1648 inline bool atomic_compare_exchange_strong_explicit
1649 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1650 memory_order __x__, memory_order __y__ )
1651 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1653 inline bool atomic_compare_exchange_weak
1654 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1655 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1656 memory_order_seq_cst, memory_order_seq_cst ); }
1658 inline bool atomic_compare_exchange_strong
1659 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1660 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1661 memory_order_seq_cst, memory_order_seq_cst ); }
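/* The compare-exchange functions take the expected value by pointer; when
   the exchange fails, the value actually observed is written back through
   that pointer so the call can be retried.  A sketch of the usual retry
   idiom (hypothetical caller code, not part of this header):

       atomic_bool flag;
       atomic_init( &flag, false );
       bool expected = false;
       while ( !atomic_compare_exchange_weak( &flag, &expected, true ) )
           expected = false;
*/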
1664 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1667 inline void* atomic_load_explicit
1668 ( volatile atomic_address* __a__, memory_order __x__ )
1669 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1671 inline void* atomic_load( volatile atomic_address* __a__ )
1672 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1674 inline void atomic_init
1675 ( volatile atomic_address* __a__, void* __m__ )
1676 { _ATOMIC_INIT_( __a__, __m__ ); }
1678 inline void atomic_store_explicit
1679 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1680 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1682 inline void atomic_store
1683 ( volatile atomic_address* __a__, void* __m__ )
1684 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1686 inline void* atomic_exchange_explicit
1687 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1688 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1690 inline void* atomic_exchange
1691 ( volatile atomic_address* __a__, void* __m__ )
1692 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1694 inline bool atomic_compare_exchange_weak_explicit
1695 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1696 memory_order __x__, memory_order __y__ )
1697 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1699 inline bool atomic_compare_exchange_strong_explicit
1700 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1701 memory_order __x__, memory_order __y__ )
1702 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1704 inline bool atomic_compare_exchange_weak
1705 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1706 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1707 memory_order_seq_cst, memory_order_seq_cst ); }
1709 inline bool atomic_compare_exchange_strong
1710 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1711 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1712 memory_order_seq_cst, memory_order_seq_cst ); }
1715 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1718 inline char atomic_load_explicit
1719 ( volatile atomic_char* __a__, memory_order __x__ )
1720 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1722 inline char atomic_load( volatile atomic_char* __a__ )
1723 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1725 inline void atomic_init
1726 ( volatile atomic_char* __a__, char __m__ )
1727 { _ATOMIC_INIT_( __a__, __m__ ); }
1729 inline void atomic_store_explicit
1730 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1731 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1733 inline void atomic_store
1734 ( volatile atomic_char* __a__, char __m__ )
1735 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1737 inline char atomic_exchange_explicit
1738 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1739 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1741 inline char atomic_exchange
1742 ( volatile atomic_char* __a__, char __m__ )
1743 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1745 inline bool atomic_compare_exchange_weak_explicit
1746 ( volatile atomic_char* __a__, char* __e__, char __m__,
1747 memory_order __x__, memory_order __y__ )
1748 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1750 inline bool atomic_compare_exchange_strong_explicit
1751 ( volatile atomic_char* __a__, char* __e__, char __m__,
1752 memory_order __x__, memory_order __y__ )
1753 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1755 inline bool atomic_compare_exchange_weak
1756 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1757 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1758 memory_order_seq_cst, memory_order_seq_cst ); }
1760 inline bool atomic_compare_exchange_strong
1761 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1762 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1763 memory_order_seq_cst, memory_order_seq_cst ); }
1766 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1769 inline signed char atomic_load_explicit
1770 ( volatile atomic_schar* __a__, memory_order __x__ )
1771 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1773 inline signed char atomic_load( volatile atomic_schar* __a__ )
1774 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1776 inline void atomic_init
1777 ( volatile atomic_schar* __a__, signed char __m__ )
1778 { _ATOMIC_INIT_( __a__, __m__ ); }
1780 inline void atomic_store_explicit
1781 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1782 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1784 inline void atomic_store
1785 ( volatile atomic_schar* __a__, signed char __m__ )
1786 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1788 inline signed char atomic_exchange_explicit
1789 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1790 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1792 inline signed char atomic_exchange
1793 ( volatile atomic_schar* __a__, signed char __m__ )
1794 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1796 inline bool atomic_compare_exchange_weak_explicit
1797 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1798 memory_order __x__, memory_order __y__ )
1799 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1801 inline bool atomic_compare_exchange_strong_explicit
1802 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1803 memory_order __x__, memory_order __y__ )
1804 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1806 inline bool atomic_compare_exchange_weak
1807 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1808 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1809 memory_order_seq_cst, memory_order_seq_cst ); }
1811 inline bool atomic_compare_exchange_strong
1812 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1813 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1814 memory_order_seq_cst, memory_order_seq_cst ); }
1817 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1820 inline unsigned char atomic_load_explicit
1821 ( volatile atomic_uchar* __a__, memory_order __x__ )
1822 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1824 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1825 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1827 inline void atomic_init
1828 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1829 { _ATOMIC_INIT_( __a__, __m__ ); }
1831 inline void atomic_store_explicit
1832 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1833 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1835 inline void atomic_store
1836 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1837 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1839 inline unsigned char atomic_exchange_explicit
1840 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1841 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1843 inline unsigned char atomic_exchange
1844 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1845 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1847 inline bool atomic_compare_exchange_weak_explicit
1848 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1849 memory_order __x__, memory_order __y__ )
1850 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1852 inline bool atomic_compare_exchange_strong_explicit
1853 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1854 memory_order __x__, memory_order __y__ )
1855 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1857 inline bool atomic_compare_exchange_weak
1858 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1859 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1860 memory_order_seq_cst, memory_order_seq_cst ); }
1862 inline bool atomic_compare_exchange_strong
1863 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1864 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1865 memory_order_seq_cst, memory_order_seq_cst ); }
1868 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1871 inline short atomic_load_explicit
1872 ( volatile atomic_short* __a__, memory_order __x__ )
1873 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1875 inline short atomic_load( volatile atomic_short* __a__ )
1876 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1878 inline void atomic_init
1879 ( volatile atomic_short* __a__, short __m__ )
1880 { _ATOMIC_INIT_( __a__, __m__ ); }
1882 inline void atomic_store_explicit
1883 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1884 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1886 inline void atomic_store
1887 ( volatile atomic_short* __a__, short __m__ )
1888 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1890 inline short atomic_exchange_explicit
1891 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1892 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1894 inline short atomic_exchange
1895 ( volatile atomic_short* __a__, short __m__ )
1896 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1898 inline bool atomic_compare_exchange_weak_explicit
1899 ( volatile atomic_short* __a__, short* __e__, short __m__,
1900 memory_order __x__, memory_order __y__ )
1901 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1903 inline bool atomic_compare_exchange_strong_explicit
1904 ( volatile atomic_short* __a__, short* __e__, short __m__,
1905 memory_order __x__, memory_order __y__ )
1906 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1908 inline bool atomic_compare_exchange_weak
1909 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1910 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1911 memory_order_seq_cst, memory_order_seq_cst ); }
1913 inline bool atomic_compare_exchange_strong
1914 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1915 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1916 memory_order_seq_cst, memory_order_seq_cst ); }
1919 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1922 inline unsigned short atomic_load_explicit
1923 ( volatile atomic_ushort* __a__, memory_order __x__ )
1924 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1926 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1927 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1929 inline void atomic_init
1930 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1931 { _ATOMIC_INIT_( __a__, __m__ ); }
1933 inline void atomic_store_explicit
1934 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1935 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1937 inline void atomic_store
1938 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1939 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1941 inline unsigned short atomic_exchange_explicit
1942 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1943 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1945 inline unsigned short atomic_exchange
1946 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1947 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1949 inline bool atomic_compare_exchange_weak_explicit
1950 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1951 memory_order __x__, memory_order __y__ )
1952 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1954 inline bool atomic_compare_exchange_strong_explicit
1955 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1956 memory_order __x__, memory_order __y__ )
1957 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1959 inline bool atomic_compare_exchange_weak
1960 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1961 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1962 memory_order_seq_cst, memory_order_seq_cst ); }
1964 inline bool atomic_compare_exchange_strong
1965 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1966 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1967 memory_order_seq_cst, memory_order_seq_cst ); }
1970 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
1973 inline int atomic_load_explicit
1974 ( volatile atomic_int* __a__, memory_order __x__ )
1975 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1977 inline int atomic_load( volatile atomic_int* __a__ )
1978 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1980 inline void atomic_init
1981 ( volatile atomic_int* __a__, int __m__ )
1982 { _ATOMIC_INIT_( __a__, __m__ ); }
1984 inline void atomic_store_explicit
1985 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1986 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1988 inline void atomic_store
1989 ( volatile atomic_int* __a__, int __m__ )
1990 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1992 inline int atomic_exchange_explicit
1993 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1994 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1996 inline int atomic_exchange
1997 ( volatile atomic_int* __a__, int __m__ )
1998 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2000 inline bool atomic_compare_exchange_weak_explicit
2001 ( volatile atomic_int* __a__, int* __e__, int __m__,
2002 memory_order __x__, memory_order __y__ )
2003 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2005 inline bool atomic_compare_exchange_strong_explicit
2006 ( volatile atomic_int* __a__, int* __e__, int __m__,
2007 memory_order __x__, memory_order __y__ )
2008 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2010 inline bool atomic_compare_exchange_weak
2011 ( volatile atomic_int* __a__, int* __e__, int __m__ )
2012 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2013 memory_order_seq_cst, memory_order_seq_cst ); }
2015 inline bool atomic_compare_exchange_strong
2016 ( volatile atomic_int* __a__, int* __e__, int __m__ )
2017 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2018 memory_order_seq_cst, memory_order_seq_cst ); }
2021 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
2024 inline unsigned int atomic_load_explicit
2025 ( volatile atomic_uint* __a__, memory_order __x__ )
2026 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2028 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
2029 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2031 inline void atomic_init
2032 ( volatile atomic_uint* __a__, unsigned int __m__ )
2033 { _ATOMIC_INIT_( __a__, __m__ ); }
2035 inline void atomic_store_explicit
2036 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2037 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2039 inline void atomic_store
2040 ( volatile atomic_uint* __a__, unsigned int __m__ )
2041 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2043 inline unsigned int atomic_exchange_explicit
2044 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2045 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2047 inline unsigned int atomic_exchange
2048 ( volatile atomic_uint* __a__, unsigned int __m__ )
2049 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2051 inline bool atomic_compare_exchange_weak_explicit
2052 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2053 memory_order __x__, memory_order __y__ )
2054 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2056 inline bool atomic_compare_exchange_strong_explicit
2057 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2058 memory_order __x__, memory_order __y__ )
2059 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2061 inline bool atomic_compare_exchange_weak
2062 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2063 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2064 memory_order_seq_cst, memory_order_seq_cst ); }
2066 inline bool atomic_compare_exchange_strong
2067 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2068 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2069 memory_order_seq_cst, memory_order_seq_cst ); }
2072 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
2075 inline long atomic_load_explicit
2076 ( volatile atomic_long* __a__, memory_order __x__ )
2077 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2079 inline long atomic_load( volatile atomic_long* __a__ )
2080 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2082 inline void atomic_init
2083 ( volatile atomic_long* __a__, long __m__ )
2084 { _ATOMIC_INIT_( __a__, __m__ ); }
2086 inline void atomic_store_explicit
2087 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2088 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2090 inline void atomic_store
2091 ( volatile atomic_long* __a__, long __m__ )
2092 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2094 inline long atomic_exchange_explicit
2095 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2096 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2098 inline long atomic_exchange
2099 ( volatile atomic_long* __a__, long __m__ )
2100 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2102 inline bool atomic_compare_exchange_weak_explicit
2103 ( volatile atomic_long* __a__, long* __e__, long __m__,
2104 memory_order __x__, memory_order __y__ )
2105 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2107 inline bool atomic_compare_exchange_strong_explicit
2108 ( volatile atomic_long* __a__, long* __e__, long __m__,
2109 memory_order __x__, memory_order __y__ )
2110 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2112 inline bool atomic_compare_exchange_weak
2113 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2114 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2115 memory_order_seq_cst, memory_order_seq_cst ); }
2117 inline bool atomic_compare_exchange_strong
2118 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2119 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2120 memory_order_seq_cst, memory_order_seq_cst ); }
2123 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
2126 inline unsigned long atomic_load_explicit
2127 ( volatile atomic_ulong* __a__, memory_order __x__ )
2128 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2130 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
2131 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2133 inline void atomic_init
2134 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2135 { _ATOMIC_INIT_( __a__, __m__ ); }
2137 inline void atomic_store_explicit
2138 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2139 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2141 inline void atomic_store
2142 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2143 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2145 inline unsigned long atomic_exchange_explicit
2146 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2147 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2149 inline unsigned long atomic_exchange
2150 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2151 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2153 inline bool atomic_compare_exchange_weak_explicit
2154 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2155 memory_order __x__, memory_order __y__ )
2156 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2158 inline bool atomic_compare_exchange_strong_explicit
2159 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2160 memory_order __x__, memory_order __y__ )
2161 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2163 inline bool atomic_compare_exchange_weak
2164 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2165 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2166 memory_order_seq_cst, memory_order_seq_cst ); }
2168 inline bool atomic_compare_exchange_strong
2169 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2170 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2171 memory_order_seq_cst, memory_order_seq_cst ); }
2174 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
2177 inline long long atomic_load_explicit
2178 ( volatile atomic_llong* __a__, memory_order __x__ )
2179 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2181 inline long long atomic_load( volatile atomic_llong* __a__ )
2182 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2184 inline void atomic_init
2185 ( volatile atomic_llong* __a__, long long __m__ )
2186 { _ATOMIC_INIT_( __a__, __m__ ); }
2188 inline void atomic_store_explicit
2189 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2190 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2192 inline void atomic_store
2193 ( volatile atomic_llong* __a__, long long __m__ )
2194 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2196 inline long long atomic_exchange_explicit
2197 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2198 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2200 inline long long atomic_exchange
2201 ( volatile atomic_llong* __a__, long long __m__ )
2202 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2204 inline bool atomic_compare_exchange_weak_explicit
2205 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2206 memory_order __x__, memory_order __y__ )
2207 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2209 inline bool atomic_compare_exchange_strong_explicit
2210 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2211 memory_order __x__, memory_order __y__ )
2212 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2214 inline bool atomic_compare_exchange_weak
2215 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2216 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2217 memory_order_seq_cst, memory_order_seq_cst ); }
2219 inline bool atomic_compare_exchange_strong
2220 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2221 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2222 memory_order_seq_cst, memory_order_seq_cst ); }
2225 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
2228 inline unsigned long long atomic_load_explicit
2229 ( volatile atomic_ullong* __a__, memory_order __x__ )
2230 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2232 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2233 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2235 inline void atomic_init
2236 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2237 { _ATOMIC_INIT_( __a__, __m__ ); }
2239 inline void atomic_store_explicit
2240 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2241 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2243 inline void atomic_store
2244 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2245 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2247 inline unsigned long long atomic_exchange_explicit
2248 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2249 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2251 inline unsigned long long atomic_exchange
2252 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2253 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2255 inline bool atomic_compare_exchange_weak_explicit
2256 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2257 memory_order __x__, memory_order __y__ )
2258 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2260 inline bool atomic_compare_exchange_strong_explicit
2261 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2262 memory_order __x__, memory_order __y__ )
2263 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2265 inline bool atomic_compare_exchange_weak
2266 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2267 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2268 memory_order_seq_cst, memory_order_seq_cst ); }
2270 inline bool atomic_compare_exchange_strong
2271 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2272 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2273 memory_order_seq_cst, memory_order_seq_cst ); }
2276 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
2279 inline wchar_t atomic_load_explicit
2280 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2281 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2283 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2284 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2286 inline void atomic_init
2287 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2288 { _ATOMIC_INIT_( __a__, __m__ ); }
2290 inline void atomic_store_explicit
2291 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2292 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2294 inline void atomic_store
2295 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2296 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2298 inline wchar_t atomic_exchange_explicit
2299 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2300 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2302 inline wchar_t atomic_exchange
2303 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2304 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2306 inline bool atomic_compare_exchange_weak_explicit
2307 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2308 memory_order __x__, memory_order __y__ )
2309 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2311 inline bool atomic_compare_exchange_strong_explicit
2312 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2313 memory_order __x__, memory_order __y__ )
2314 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2316 inline bool atomic_compare_exchange_weak
2317 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2318 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2319 memory_order_seq_cst, memory_order_seq_cst ); }
2321 inline bool atomic_compare_exchange_strong
2322 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2323 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2324 memory_order_seq_cst, memory_order_seq_cst ); }
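/* The two pointer functions below are written out by hand instead of going
   through _ATOMIC_MODIFY_: the stored field is a void*, so the new value is
   computed by casting the current pointer to char* and applying the
   ptrdiff_t displacement in bytes. */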
2327 inline void* atomic_fetch_add_explicit
2328 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2329 {
2330 void* volatile* __p__ = &((__a__)->__f__);
2331 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2332 model_rmw_action((void *)__p__, __x__, (uint64_t) ((char*)(*__p__) + __m__));
2333 return __r__; }
2335 inline void* atomic_fetch_add
2336 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2337 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2340 inline void* atomic_fetch_sub_explicit
2341 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2342 {
2343 void* volatile* __p__ = &((__a__)->__f__);
2344 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2345 model_rmw_action((void *)__p__, __x__, (uint64_t)((char*)(*__p__) - __m__));
2346 return __r__; }
2348 inline void* atomic_fetch_sub
2349 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2350 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
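/* A small usage sketch for the pointer forms above (hypothetical caller
   code, not part of this header); the displacement is a raw byte count:

       char buffer[64];
       atomic_address cursor;
       atomic_init( &cursor, buffer );
       void* prev = atomic_fetch_add( &cursor, (ptrdiff_t) 4 );
*/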
2352 inline char atomic_fetch_add_explicit
2353 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2354 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2356 inline char atomic_fetch_add
2357 ( volatile atomic_char* __a__, char __m__ )
2358 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2361 inline char atomic_fetch_sub_explicit
2362 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2363 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2365 inline char atomic_fetch_sub
2366 ( volatile atomic_char* __a__, char __m__ )
2367 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2370 inline char atomic_fetch_and_explicit
2371 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2372 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2374 inline char atomic_fetch_and
2375 ( volatile atomic_char* __a__, char __m__ )
2376 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2379 inline char atomic_fetch_or_explicit
2380 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2381 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2383 inline char atomic_fetch_or
2384 ( volatile atomic_char* __a__, char __m__ )
2385 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2388 inline char atomic_fetch_xor_explicit
2389 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2390 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2392 inline char atomic_fetch_xor
2393 ( volatile atomic_char* __a__, char __m__ )
2394 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2397 inline signed char atomic_fetch_add_explicit
2398 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2399 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2401 inline signed char atomic_fetch_add
2402 ( volatile atomic_schar* __a__, signed char __m__ )
2403 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2406 inline signed char atomic_fetch_sub_explicit
2407 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2408 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2410 inline signed char atomic_fetch_sub
2411 ( volatile atomic_schar* __a__, signed char __m__ )
2412 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2415 inline signed char atomic_fetch_and_explicit
2416 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2417 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2419 inline signed char atomic_fetch_and
2420 ( volatile atomic_schar* __a__, signed char __m__ )
2421 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2424 inline signed char atomic_fetch_or_explicit
2425 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2426 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2428 inline signed char atomic_fetch_or
2429 ( volatile atomic_schar* __a__, signed char __m__ )
2430 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2433 inline signed char atomic_fetch_xor_explicit
2434 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2435 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2437 inline signed char atomic_fetch_xor
2438 ( volatile atomic_schar* __a__, signed char __m__ )
2439 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2442 inline unsigned char atomic_fetch_add_explicit
2443 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2444 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2446 inline unsigned char atomic_fetch_add
2447 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2448 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2451 inline unsigned char atomic_fetch_sub_explicit
2452 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2453 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2455 inline unsigned char atomic_fetch_sub
2456 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2457 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2460 inline unsigned char atomic_fetch_and_explicit
2461 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2462 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2464 inline unsigned char atomic_fetch_and
2465 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2466 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2469 inline unsigned char atomic_fetch_or_explicit
2470 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2471 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2473 inline unsigned char atomic_fetch_or
2474 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2475 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2478 inline unsigned char atomic_fetch_xor_explicit
2479 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2480 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2482 inline unsigned char atomic_fetch_xor
2483 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2484 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2487 inline short atomic_fetch_add_explicit
2488 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2489 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2491 inline short atomic_fetch_add
2492 ( volatile atomic_short* __a__, short __m__ )
2493 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2496 inline short atomic_fetch_sub_explicit
2497 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2498 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2500 inline short atomic_fetch_sub
2501 ( volatile atomic_short* __a__, short __m__ )
2502 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2505 inline short atomic_fetch_and_explicit
2506 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2507 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2509 inline short atomic_fetch_and
2510 ( volatile atomic_short* __a__, short __m__ )
2511 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2514 inline short atomic_fetch_or_explicit
2515 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2516 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2518 inline short atomic_fetch_or
2519 ( volatile atomic_short* __a__, short __m__ )
2520 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2523 inline short atomic_fetch_xor_explicit
2524 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2525 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2527 inline short atomic_fetch_xor
2528 ( volatile atomic_short* __a__, short __m__ )
2529 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2532 inline unsigned short atomic_fetch_add_explicit
2533 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2534 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2536 inline unsigned short atomic_fetch_add
2537 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2538 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2541 inline unsigned short atomic_fetch_sub_explicit
2542 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2543 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2545 inline unsigned short atomic_fetch_sub
2546 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2547 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2550 inline unsigned short atomic_fetch_and_explicit
2551 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2552 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2554 inline unsigned short atomic_fetch_and
2555 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2556 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2559 inline unsigned short atomic_fetch_or_explicit
2560 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2561 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2563 inline unsigned short atomic_fetch_or
2564 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2565 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2568 inline unsigned short atomic_fetch_xor_explicit
2569 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2570 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2572 inline unsigned short atomic_fetch_xor
2573 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2574 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2577 inline int atomic_fetch_add_explicit
2578 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2579 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2581 inline int atomic_fetch_add
2582 ( volatile atomic_int* __a__, int __m__ )
2583 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2586 inline int atomic_fetch_sub_explicit
2587 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2588 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2590 inline int atomic_fetch_sub
2591 ( volatile atomic_int* __a__, int __m__ )
2592 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2595 inline int atomic_fetch_and_explicit
2596 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2597 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2599 inline int atomic_fetch_and
2600 ( volatile atomic_int* __a__, int __m__ )
2601 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2604 inline int atomic_fetch_or_explicit
2605 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2606 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2608 inline int atomic_fetch_or
2609 ( volatile atomic_int* __a__, int __m__ )
2610 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2613 inline int atomic_fetch_xor_explicit
2614 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2615 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2617 inline int atomic_fetch_xor
2618 ( volatile atomic_int* __a__, int __m__ )
2619 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2622 inline unsigned int atomic_fetch_add_explicit
2623 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2624 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2626 inline unsigned int atomic_fetch_add
2627 ( volatile atomic_uint* __a__, unsigned int __m__ )
2628 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2631 inline unsigned int atomic_fetch_sub_explicit
2632 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2633 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2635 inline unsigned int atomic_fetch_sub
2636 ( volatile atomic_uint* __a__, unsigned int __m__ )
2637 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2640 inline unsigned int atomic_fetch_and_explicit
2641 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2642 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2644 inline unsigned int atomic_fetch_and
2645 ( volatile atomic_uint* __a__, unsigned int __m__ )
2646 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2649 inline unsigned int atomic_fetch_or_explicit
2650 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2651 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2653 inline unsigned int atomic_fetch_or
2654 ( volatile atomic_uint* __a__, unsigned int __m__ )
2655 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2658 inline unsigned int atomic_fetch_xor_explicit
2659 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2660 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2662 inline unsigned int atomic_fetch_xor
2663 ( volatile atomic_uint* __a__, unsigned int __m__ )
2664 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2667 inline long atomic_fetch_add_explicit
2668 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2669 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2671 inline long atomic_fetch_add
2672 ( volatile atomic_long* __a__, long __m__ )
2673 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2676 inline long atomic_fetch_sub_explicit
2677 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2678 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2680 inline long atomic_fetch_sub
2681 ( volatile atomic_long* __a__, long __m__ )
2682 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2685 inline long atomic_fetch_and_explicit
2686 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2687 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2689 inline long atomic_fetch_and
2690 ( volatile atomic_long* __a__, long __m__ )
2691 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2694 inline long atomic_fetch_or_explicit
2695 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2696 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2698 inline long atomic_fetch_or
2699 ( volatile atomic_long* __a__, long __m__ )
2700 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2703 inline long atomic_fetch_xor_explicit
2704 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2705 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2707 inline long atomic_fetch_xor
2708 ( volatile atomic_long* __a__, long __m__ )
2709 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2712 inline unsigned long atomic_fetch_add_explicit
2713 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2714 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2716 inline unsigned long atomic_fetch_add
2717 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2718 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2721 inline unsigned long atomic_fetch_sub_explicit
2722 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2723 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2725 inline unsigned long atomic_fetch_sub
2726 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2727 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2730 inline unsigned long atomic_fetch_and_explicit
2731 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2732 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2734 inline unsigned long atomic_fetch_and
2735 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2736 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2739 inline unsigned long atomic_fetch_or_explicit
2740 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2741 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2743 inline unsigned long atomic_fetch_or
2744 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2745 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2748 inline unsigned long atomic_fetch_xor_explicit
2749 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2750 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2752 inline unsigned long atomic_fetch_xor
2753 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2754 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2757 inline long long atomic_fetch_add_explicit
2758 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2759 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2761 inline long long atomic_fetch_add
2762 ( volatile atomic_llong* __a__, long long __m__ )
2763 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2766 inline long long atomic_fetch_sub_explicit
2767 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2768 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2770 inline long long atomic_fetch_sub
2771 ( volatile atomic_llong* __a__, long long __m__ )
2772 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2775 inline long long atomic_fetch_and_explicit
2776 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2777 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2779 inline long long atomic_fetch_and
2780 ( volatile atomic_llong* __a__, long long __m__ )
2781 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2784 inline long long atomic_fetch_or_explicit
2785 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2786 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2788 inline long long atomic_fetch_or
2789 ( volatile atomic_llong* __a__, long long __m__ )
2790 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2793 inline long long atomic_fetch_xor_explicit
2794 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2795 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2797 inline long long atomic_fetch_xor
2798 ( volatile atomic_llong* __a__, long long __m__ )
2799 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2802 inline unsigned long long atomic_fetch_add_explicit
2803 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2804 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2806 inline unsigned long long atomic_fetch_add
2807 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2808 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2811 inline unsigned long long atomic_fetch_sub_explicit
2812 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2813 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2815 inline unsigned long long atomic_fetch_sub
2816 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2817 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2820 inline unsigned long long atomic_fetch_and_explicit
2821 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2822 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2824 inline unsigned long long atomic_fetch_and
2825 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2826 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2829 inline unsigned long long atomic_fetch_or_explicit
2830 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2831 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2833 inline unsigned long long atomic_fetch_or
2834 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2835 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2838 inline unsigned long long atomic_fetch_xor_explicit
2839 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2840 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2842 inline unsigned long long atomic_fetch_xor
2843 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2844 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2847 inline wchar_t atomic_fetch_add_explicit
2848 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2849 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2851 inline wchar_t atomic_fetch_add
2852 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2853 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2856 inline wchar_t atomic_fetch_sub_explicit
2857 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2858 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2860 inline wchar_t atomic_fetch_sub
2861 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2862 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2865 inline wchar_t atomic_fetch_and_explicit
2866 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2867 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2869 inline wchar_t atomic_fetch_and
2870 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2871 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2874 inline wchar_t atomic_fetch_or_explicit
2875 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2876 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2878 inline wchar_t atomic_fetch_or
2879 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2880 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2883 inline wchar_t atomic_fetch_xor_explicit
2884 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2885 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2887 inline wchar_t atomic_fetch_xor
2888 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2889 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
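/* The macros below give the same operations a type-generic spelling: each
   one expands the matching _ATOMIC_ macro directly on whatever atomic
   object it is handed, so a single definition covers every atomic type, and
   the non-_explicit forms again default to memory_order_seq_cst. */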
2895 #define atomic_is_lock_free( __a__ ) \
2898 #define atomic_load( __a__ ) \
2899 _ATOMIC_LOAD_( __a__, memory_order_seq_cst )
2901 #define atomic_load_explicit( __a__, __x__ ) \
2902 _ATOMIC_LOAD_( __a__, __x__ )
2904 #define atomic_init( __a__, __m__ ) \
2905 _ATOMIC_INIT_( __a__, __m__ )
2907 #define atomic_store( __a__, __m__ ) \
2908 _ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )
2910 #define atomic_store_explicit( __a__, __m__, __x__ ) \
2911 _ATOMIC_STORE_( __a__, __m__, __x__ )
2913 #define atomic_exchange( __a__, __m__ ) \
2914 _ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )
2916 #define atomic_exchange_explicit( __a__, __m__, __x__ ) \
2917 _ATOMIC_MODIFY_( __a__, =, __m__, __x__ )
2919 #define atomic_compare_exchange_weak( __a__, __e__, __m__ ) \
2920 _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, memory_order_seq_cst )
2922 #define atomic_compare_exchange_strong( __a__, __e__, __m__ ) \
2923 _ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )
2925 #define atomic_compare_exchange_weak_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2926 _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ )
2928 #define atomic_compare_exchange_strong_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2929 _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )
2932 #define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
2933 _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )
2935 #define atomic_fetch_add( __a__, __m__ ) \
2936 _ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )
2939 #define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
2940 _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )
2942 #define atomic_fetch_sub( __a__, __m__ ) \
2943 _ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )
2946 #define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
2947 _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )
2949 #define atomic_fetch_and( __a__, __m__ ) \
2950 _ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )
2953 #define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
2954 _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )
2956 #define atomic_fetch_or( __a__, __m__ ) \
2957 _ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )
2960 #define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
2961 _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )
2963 #define atomic_fetch_xor( __a__, __m__ ) \
2964 _ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
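/* The definitions that follow supply the out-of-class bodies of the C++
   member functions; each one simply forwards to the corresponding free
   function, passing this as the atomic object. */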
2973 inline bool atomic_bool::is_lock_free() const volatile
2976 inline void atomic_bool::store
2977 ( bool __m__, memory_order __x__ ) volatile
2978 { atomic_store_explicit( this, __m__, __x__ ); }
2980 inline bool atomic_bool::load
2981 ( memory_order __x__ ) volatile
2982 { return atomic_load_explicit( this, __x__ ); }
2984 inline bool atomic_bool::exchange
2985 ( bool __m__, memory_order __x__ ) volatile
2986 { return atomic_exchange_explicit( this, __m__, __x__ ); }
2988 inline bool atomic_bool::compare_exchange_weak
2989 ( bool& __e__, bool __m__,
2990 memory_order __x__, memory_order __y__ ) volatile
2991 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
2993 inline bool atomic_bool::compare_exchange_strong
2994 ( bool& __e__, bool __m__,
2995 memory_order __x__, memory_order __y__ ) volatile
2996 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
2998 inline bool atomic_bool::compare_exchange_weak
2999 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3000 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3001 __x__ == memory_order_acq_rel ? memory_order_acquire :
3002 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3004 inline bool atomic_bool::compare_exchange_strong
3005 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3006 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3007 __x__ == memory_order_acq_rel ? memory_order_acquire :
3008 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
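/* Note on the single-memory_order compare_exchange overloads above (the
   same mapping is repeated for every remaining type): the failure ordering
   is derived from the success ordering by demoting memory_order_acq_rel to
   memory_order_acquire and memory_order_release to memory_order_relaxed,
   since a failed compare_exchange performs no store and so cannot have
   release semantics. */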
3011 inline bool atomic_address::is_lock_free() const volatile
3014 inline void atomic_address::store
3015 ( void* __m__, memory_order __x__ ) volatile
3016 { atomic_store_explicit( this, __m__, __x__ ); }
3018 inline void* atomic_address::load
3019 ( memory_order __x__ ) volatile
3020 { return atomic_load_explicit( this, __x__ ); }
3022 inline void* atomic_address::exchange
3023 ( void* __m__, memory_order __x__ ) volatile
3024 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3026 inline bool atomic_address::compare_exchange_weak
3027 ( void*& __e__, void* __m__,
3028 memory_order __x__, memory_order __y__ ) volatile
3029 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3031 inline bool atomic_address::compare_exchange_strong
3032 ( void*& __e__, void* __m__,
3033 memory_order __x__, memory_order __y__ ) volatile
3034 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3036 inline bool atomic_address::compare_exchange_weak
3037 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3038 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3039 __x__ == memory_order_acq_rel ? memory_order_acquire :
3040 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3042 inline bool atomic_address::compare_exchange_strong
3043 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3044 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3045 __x__ == memory_order_acq_rel ? memory_order_acquire :
3046 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3049 inline bool atomic_char::is_lock_free() const volatile
3052 inline void atomic_char::store
3053 ( char __m__, memory_order __x__ ) volatile
3054 { atomic_store_explicit( this, __m__, __x__ ); }
3056 inline char atomic_char::load
3057 ( memory_order __x__ ) volatile
3058 { return atomic_load_explicit( this, __x__ ); }
3060 inline char atomic_char::exchange
3061 ( char __m__, memory_order __x__ ) volatile
3062 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3064 inline bool atomic_char::compare_exchange_weak
3065 ( char& __e__, char __m__,
3066 memory_order __x__, memory_order __y__ ) volatile
3067 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3069 inline bool atomic_char::compare_exchange_strong
3070 ( char& __e__, char __m__,
3071 memory_order __x__, memory_order __y__ ) volatile
3072 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3074 inline bool atomic_char::compare_exchange_weak
3075 ( char& __e__, char __m__, memory_order __x__ ) volatile
3076 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3077 __x__ == memory_order_acq_rel ? memory_order_acquire :
3078 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3080 inline bool atomic_char::compare_exchange_strong
3081 ( char& __e__, char __m__, memory_order __x__ ) volatile
3082 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3083 __x__ == memory_order_acq_rel ? memory_order_acquire :
3084 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3087 inline bool atomic_schar::is_lock_free() const volatile
3090 inline void atomic_schar::store
3091 ( signed char __m__, memory_order __x__ ) volatile
3092 { atomic_store_explicit( this, __m__, __x__ ); }
3094 inline signed char atomic_schar::load
3095 ( memory_order __x__ ) volatile
3096 { return atomic_load_explicit( this, __x__ ); }
3098 inline signed char atomic_schar::exchange
3099 ( signed char __m__, memory_order __x__ ) volatile
3100 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3102 inline bool atomic_schar::compare_exchange_weak
3103 ( signed char& __e__, signed char __m__,
3104 memory_order __x__, memory_order __y__ ) volatile
3105 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3107 inline bool atomic_schar::compare_exchange_strong
3108 ( signed char& __e__, signed char __m__,
3109 memory_order __x__, memory_order __y__ ) volatile
3110 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3112 inline bool atomic_schar::compare_exchange_weak
3113 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3114 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3115 __x__ == memory_order_acq_rel ? memory_order_acquire :
3116 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3118 inline bool atomic_schar::compare_exchange_strong
3119 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3120 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3121 __x__ == memory_order_acq_rel ? memory_order_acquire :
3122 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3125 inline bool atomic_uchar::is_lock_free() const volatile
3128 inline void atomic_uchar::store
3129 ( unsigned char __m__, memory_order __x__ ) volatile
3130 { atomic_store_explicit( this, __m__, __x__ ); }
3132 inline unsigned char atomic_uchar::load
3133 ( memory_order __x__ ) volatile
3134 { return atomic_load_explicit( this, __x__ ); }
3136 inline unsigned char atomic_uchar::exchange
3137 ( unsigned char __m__, memory_order __x__ ) volatile
3138 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3140 inline bool atomic_uchar::compare_exchange_weak
3141 ( unsigned char& __e__, unsigned char __m__,
3142 memory_order __x__, memory_order __y__ ) volatile
3143 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3145 inline bool atomic_uchar::compare_exchange_strong
3146 ( unsigned char& __e__, unsigned char __m__,
3147 memory_order __x__, memory_order __y__ ) volatile
3148 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3150 inline bool atomic_uchar::compare_exchange_weak
3151 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3152 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3153 __x__ == memory_order_acq_rel ? memory_order_acquire :
3154 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3156 inline bool atomic_uchar::compare_exchange_strong
3157 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3158 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3159 __x__ == memory_order_acq_rel ? memory_order_acquire :
3160 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3163 inline bool atomic_short::is_lock_free() const volatile
3166 inline void atomic_short::store
3167 ( short __m__, memory_order __x__ ) volatile
3168 { atomic_store_explicit( this, __m__, __x__ ); }
3170 inline short atomic_short::load
3171 ( memory_order __x__ ) volatile
3172 { return atomic_load_explicit( this, __x__ ); }
3174 inline short atomic_short::exchange
3175 ( short __m__, memory_order __x__ ) volatile
3176 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3178 inline bool atomic_short::compare_exchange_weak
3179 ( short& __e__, short __m__,
3180 memory_order __x__, memory_order __y__ ) volatile
3181 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3183 inline bool atomic_short::compare_exchange_strong
3184 ( short& __e__, short __m__,
3185 memory_order __x__, memory_order __y__ ) volatile
3186 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3188 inline bool atomic_short::compare_exchange_weak
3189 ( short& __e__, short __m__, memory_order __x__ ) volatile
3190 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3191 __x__ == memory_order_acq_rel ? memory_order_acquire :
3192 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3194 inline bool atomic_short::compare_exchange_strong
3195 ( short& __e__, short __m__, memory_order __x__ ) volatile
3196 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3197 __x__ == memory_order_acq_rel ? memory_order_acquire :
3198 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3201 inline bool atomic_ushort::is_lock_free() const volatile
3204 inline void atomic_ushort::store
3205 ( unsigned short __m__, memory_order __x__ ) volatile
3206 { atomic_store_explicit( this, __m__, __x__ ); }
3208 inline unsigned short atomic_ushort::load
3209 ( memory_order __x__ ) volatile
3210 { return atomic_load_explicit( this, __x__ ); }
3212 inline unsigned short atomic_ushort::exchange
3213 ( unsigned short __m__, memory_order __x__ ) volatile
3214 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3216 inline bool atomic_ushort::compare_exchange_weak
3217 ( unsigned short& __e__, unsigned short __m__,
3218 memory_order __x__, memory_order __y__ ) volatile
3219 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3221 inline bool atomic_ushort::compare_exchange_strong
3222 ( unsigned short& __e__, unsigned short __m__,
3223 memory_order __x__, memory_order __y__ ) volatile
3224 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3226 inline bool atomic_ushort::compare_exchange_weak
3227 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3228 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3229 __x__ == memory_order_acq_rel ? memory_order_acquire :
3230 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3232 inline bool atomic_ushort::compare_exchange_strong
3233 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3234 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3235 __x__ == memory_order_acq_rel ? memory_order_acquire :
3236 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3239 inline bool atomic_int::is_lock_free() const volatile
3242 inline void atomic_int::store
3243 ( int __m__, memory_order __x__ ) volatile
3244 { atomic_store_explicit( this, __m__, __x__ ); }
3246 inline int atomic_int::load
3247 ( memory_order __x__ ) volatile
3248 { return atomic_load_explicit( this, __x__ ); }
3250 inline int atomic_int::exchange
3251 ( int __m__, memory_order __x__ ) volatile
3252 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3254 inline bool atomic_int::compare_exchange_weak
3255 ( int& __e__, int __m__,
3256 memory_order __x__, memory_order __y__ ) volatile
3257 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3259 inline bool atomic_int::compare_exchange_strong
3260 ( int& __e__, int __m__,
3261 memory_order __x__, memory_order __y__ ) volatile
3262 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3264 inline bool atomic_int::compare_exchange_weak
3265 ( int& __e__, int __m__, memory_order __x__ ) volatile
3266 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3267 __x__ == memory_order_acq_rel ? memory_order_acquire :
3268 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3270 inline bool atomic_int::compare_exchange_strong
3271 ( int& __e__, int __m__, memory_order __x__ ) volatile
3272 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3273 __x__ == memory_order_acq_rel ? memory_order_acquire :
3274 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
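/* Illustrative sketch (not part of the header's interface): a typical
   compare-exchange retry loop written against the atomic_int members defined
   above.  On failure, compare_exchange_weak writes the value actually
   observed back into the expected argument, so the loop simply retries.

     atomic_int counter;
     counter.store( 0, memory_order_relaxed );

     int expected = counter.load( memory_order_relaxed );
     while ( ! counter.compare_exchange_weak( expected, expected + 1,
                                              memory_order_acq_rel ) )
       ;   // 'expected' now holds the current value; retry with it
*/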
3277 inline bool atomic_uint::is_lock_free() const volatile
3280 inline void atomic_uint::store
3281 ( unsigned int __m__, memory_order __x__ ) volatile
3282 { atomic_store_explicit( this, __m__, __x__ ); }
3284 inline unsigned int atomic_uint::load
3285 ( memory_order __x__ ) volatile
3286 { return atomic_load_explicit( this, __x__ ); }
3288 inline unsigned int atomic_uint::exchange
3289 ( unsigned int __m__, memory_order __x__ ) volatile
3290 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3292 inline bool atomic_uint::compare_exchange_weak
3293 ( unsigned int& __e__, unsigned int __m__,
3294 memory_order __x__, memory_order __y__ ) volatile
3295 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3297 inline bool atomic_uint::compare_exchange_strong
3298 ( unsigned int& __e__, unsigned int __m__,
3299 memory_order __x__, memory_order __y__ ) volatile
3300 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3302 inline bool atomic_uint::compare_exchange_weak
3303 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3304 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3305 __x__ == memory_order_acq_rel ? memory_order_acquire :
3306 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3308 inline bool atomic_uint::compare_exchange_strong
3309 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3310 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3311 __x__ == memory_order_acq_rel ? memory_order_acquire :
3312 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3315 inline bool atomic_long::is_lock_free() const volatile
3318 inline void atomic_long::store
3319 ( long __m__, memory_order __x__ ) volatile
3320 { atomic_store_explicit( this, __m__, __x__ ); }
3322 inline long atomic_long::load
3323 ( memory_order __x__ ) volatile
3324 { return atomic_load_explicit( this, __x__ ); }
3326 inline long atomic_long::exchange
3327 ( long __m__, memory_order __x__ ) volatile
3328 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3330 inline bool atomic_long::compare_exchange_weak
3331 ( long& __e__, long __m__,
3332 memory_order __x__, memory_order __y__ ) volatile
3333 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3335 inline bool atomic_long::compare_exchange_strong
3336 ( long& __e__, long __m__,
3337 memory_order __x__, memory_order __y__ ) volatile
3338 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3340 inline bool atomic_long::compare_exchange_weak
3341 ( long& __e__, long __m__, memory_order __x__ ) volatile
3342 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3343 __x__ == memory_order_acq_rel ? memory_order_acquire :
3344 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3346 inline bool atomic_long::compare_exchange_strong
3347 ( long& __e__, long __m__, memory_order __x__ ) volatile
3348 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3349 __x__ == memory_order_acq_rel ? memory_order_acquire :
3350 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3353 inline bool atomic_ulong::is_lock_free() const volatile
3356 inline void atomic_ulong::store
3357 ( unsigned long __m__, memory_order __x__ ) volatile
3358 { atomic_store_explicit( this, __m__, __x__ ); }
3360 inline unsigned long atomic_ulong::load
3361 ( memory_order __x__ ) volatile
3362 { return atomic_load_explicit( this, __x__ ); }
3364 inline unsigned long atomic_ulong::exchange
3365 ( unsigned long __m__, memory_order __x__ ) volatile
3366 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3368 inline bool atomic_ulong::compare_exchange_weak
3369 ( unsigned long& __e__, unsigned long __m__,
3370 memory_order __x__, memory_order __y__ ) volatile
3371 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3373 inline bool atomic_ulong::compare_exchange_strong
3374 ( unsigned long& __e__, unsigned long __m__,
3375 memory_order __x__, memory_order __y__ ) volatile
3376 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3378 inline bool atomic_ulong::compare_exchange_weak
3379 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3380 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3381 __x__ == memory_order_acq_rel ? memory_order_acquire :
3382 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3384 inline bool atomic_ulong::compare_exchange_strong
3385 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3386 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3387 __x__ == memory_order_acq_rel ? memory_order_acquire :
3388 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3391 inline bool atomic_llong::is_lock_free() const volatile
3394 inline void atomic_llong::store
3395 ( long long __m__, memory_order __x__ ) volatile
3396 { atomic_store_explicit( this, __m__, __x__ ); }
3398 inline long long atomic_llong::load
3399 ( memory_order __x__ ) volatile
3400 { return atomic_load_explicit( this, __x__ ); }
3402 inline long long atomic_llong::exchange
3403 ( long long __m__, memory_order __x__ ) volatile
3404 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3406 inline bool atomic_llong::compare_exchange_weak
3407 ( long long& __e__, long long __m__,
3408 memory_order __x__, memory_order __y__ ) volatile
3409 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3411 inline bool atomic_llong::compare_exchange_strong
3412 ( long long& __e__, long long __m__,
3413 memory_order __x__, memory_order __y__ ) volatile
3414 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3416 inline bool atomic_llong::compare_exchange_weak
3417 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3418 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3419 __x__ == memory_order_acq_rel ? memory_order_acquire :
3420 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3422 inline bool atomic_llong::compare_exchange_strong
3423 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3424 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3425 __x__ == memory_order_acq_rel ? memory_order_acquire :
3426 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3429 inline bool atomic_ullong::is_lock_free() const volatile
3432 inline void atomic_ullong::store
3433 ( unsigned long long __m__, memory_order __x__ ) volatile
3434 { atomic_store_explicit( this, __m__, __x__ ); }
3436 inline unsigned long long atomic_ullong::load
3437 ( memory_order __x__ ) volatile
3438 { return atomic_load_explicit( this, __x__ ); }
3440 inline unsigned long long atomic_ullong::exchange
3441 ( unsigned long long __m__, memory_order __x__ ) volatile
3442 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3444 inline bool atomic_ullong::compare_exchange_weak
3445 ( unsigned long long& __e__, unsigned long long __m__,
3446 memory_order __x__, memory_order __y__ ) volatile
3447 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3449 inline bool atomic_ullong::compare_exchange_strong
3450 ( unsigned long long& __e__, unsigned long long __m__,
3451 memory_order __x__, memory_order __y__ ) volatile
3452 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3454 inline bool atomic_ullong::compare_exchange_weak
3455 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3456 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3457 __x__ == memory_order_acq_rel ? memory_order_acquire :
3458 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3460 inline bool atomic_ullong::compare_exchange_strong
3461 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3462 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3463 __x__ == memory_order_acq_rel ? memory_order_acquire :
3464 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3467 inline bool atomic_wchar_t::is_lock_free() const volatile
3470 inline void atomic_wchar_t::store
3471 ( wchar_t __m__, memory_order __x__ ) volatile
3472 { atomic_store_explicit( this, __m__, __x__ ); }
3474 inline wchar_t atomic_wchar_t::load
3475 ( memory_order __x__ ) volatile
3476 { return atomic_load_explicit( this, __x__ ); }
3478 inline wchar_t atomic_wchar_t::exchange
3479 ( wchar_t __m__, memory_order __x__ ) volatile
3480 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3482 inline bool atomic_wchar_t::compare_exchange_weak
3483 ( wchar_t& __e__, wchar_t __m__,
3484 memory_order __x__, memory_order __y__ ) volatile
3485 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3487 inline bool atomic_wchar_t::compare_exchange_strong
3488 ( wchar_t& __e__, wchar_t __m__,
3489 memory_order __x__, memory_order __y__ ) volatile
3490 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3492 inline bool atomic_wchar_t::compare_exchange_weak
3493 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3494 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3495 __x__ == memory_order_acq_rel ? memory_order_acquire :
3496 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3498 inline bool atomic_wchar_t::compare_exchange_strong
3499 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3500 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3501 __x__ == memory_order_acq_rel ? memory_order_acquire :
3502 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3505 template< typename T >
3506 inline bool atomic<T>::is_lock_free() const volatile
3509 template< typename T >
3510 inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
3511 { _ATOMIC_STORE_( this, __v__, __x__ ); }
3513 template< typename T >
3514 inline T atomic<T>::load( memory_order __x__ ) volatile
3515 { return _ATOMIC_LOAD_( this, __x__ ); }
3517 template< typename T >
3518 inline T atomic<T>::exchange( T __v__, memory_order __x__ ) volatile
3519 { return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }
3521 template< typename T >
3522 inline bool atomic<T>::compare_exchange_weak
3523 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3524 { return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3526 template< typename T >
3527 inline bool atomic<T>::compare_exchange_strong
3528 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3529 { return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
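/* Note: the generic atomic<T> versions above hand only the success ordering
   __x__ to _ATOMIC_CMPSWP_WEAK_ / _ATOMIC_CMPSWP_; the separate failure
   ordering __y__ is accepted for interface compatibility but is not otherwise
   used by this model-checking implementation. */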
3531 template< typename T >
3532 inline bool atomic<T>::compare_exchange_weak
3533 ( T& __r__, T __v__, memory_order __x__ ) volatile
3534 { return compare_exchange_weak( __r__, __v__, __x__,
3535 __x__ == memory_order_acq_rel ? memory_order_acquire :
3536 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3538 template< typename T >
3539 inline bool atomic<T>::compare_exchange_strong
3540 ( T& __r__, T __v__, memory_order __x__ ) volatile
3541 { return compare_exchange_strong( __r__, __v__, __x__,
3542 __x__ == memory_order_acq_rel ? memory_order_acquire :
3543 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3546 inline void* atomic_address::fetch_add
3547 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3548 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3550 inline void* atomic_address::fetch_sub
3551 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3552 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3555 inline char atomic_char::fetch_add
3556 ( char __m__, memory_order __x__ ) volatile
3557 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3560 inline char atomic_char::fetch_sub
3561 ( char __m__, memory_order __x__ ) volatile
3562 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3565 inline char atomic_char::fetch_and
3566 ( char __m__, memory_order __x__ ) volatile
3567 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3570 inline char atomic_char::fetch_or
3571 ( char __m__, memory_order __x__ ) volatile
3572 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3575 inline char atomic_char::fetch_xor
3576 ( char __m__, memory_order __x__ ) volatile
3577 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3580 inline signed char atomic_schar::fetch_add
3581 ( signed char __m__, memory_order __x__ ) volatile
3582 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3585 inline signed char atomic_schar::fetch_sub
3586 ( signed char __m__, memory_order __x__ ) volatile
3587 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3590 inline signed char atomic_schar::fetch_and
3591 ( signed char __m__, memory_order __x__ ) volatile
3592 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3595 inline signed char atomic_schar::fetch_or
3596 ( signed char __m__, memory_order __x__ ) volatile
3597 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3600 inline signed char atomic_schar::fetch_xor
3601 ( signed char __m__, memory_order __x__ ) volatile
3602 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3605 inline unsigned char atomic_uchar::fetch_add
3606 ( unsigned char __m__, memory_order __x__ ) volatile
3607 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3610 inline unsigned char atomic_uchar::fetch_sub
3611 ( unsigned char __m__, memory_order __x__ ) volatile
3612 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3615 inline unsigned char atomic_uchar::fetch_and
3616 ( unsigned char __m__, memory_order __x__ ) volatile
3617 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3620 inline unsigned char atomic_uchar::fetch_or
3621 ( unsigned char __m__, memory_order __x__ ) volatile
3622 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3625 inline unsigned char atomic_uchar::fetch_xor
3626 ( unsigned char __m__, memory_order __x__ ) volatile
3627 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3630 inline short atomic_short::fetch_add
3631 ( short __m__, memory_order __x__ ) volatile
3632 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3635 inline short atomic_short::fetch_sub
3636 ( short __m__, memory_order __x__ ) volatile
3637 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3640 inline short atomic_short::fetch_and
3641 ( short __m__, memory_order __x__ ) volatile
3642 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3645 inline short atomic_short::fetch_or
3646 ( short __m__, memory_order __x__ ) volatile
3647 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3650 inline short atomic_short::fetch_xor
3651 ( short __m__, memory_order __x__ ) volatile
3652 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3655 inline unsigned short atomic_ushort::fetch_add
3656 ( unsigned short __m__, memory_order __x__ ) volatile
3657 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3660 inline unsigned short atomic_ushort::fetch_sub
3661 ( unsigned short __m__, memory_order __x__ ) volatile
3662 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3665 inline unsigned short atomic_ushort::fetch_and
3666 ( unsigned short __m__, memory_order __x__ ) volatile
3667 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3670 inline unsigned short atomic_ushort::fetch_or
3671 ( unsigned short __m__, memory_order __x__ ) volatile
3672 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3675 inline unsigned short atomic_ushort::fetch_xor
3676 ( unsigned short __m__, memory_order __x__ ) volatile
3677 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3680 inline int atomic_int::fetch_add
3681 ( int __m__, memory_order __x__ ) volatile
3682 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3685 inline int atomic_int::fetch_sub
3686 ( int __m__, memory_order __x__ ) volatile
3687 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3690 inline int atomic_int::fetch_and
3691 ( int __m__, memory_order __x__ ) volatile
3692 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3695 inline int atomic_int::fetch_or
3696 ( int __m__, memory_order __x__ ) volatile
3697 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3700 inline int atomic_int::fetch_xor
3701 ( int __m__, memory_order __x__ ) volatile
3702 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3705 inline unsigned int atomic_uint::fetch_add
3706 ( unsigned int __m__, memory_order __x__ ) volatile
3707 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3710 inline unsigned int atomic_uint::fetch_sub
3711 ( unsigned int __m__, memory_order __x__ ) volatile
3712 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3715 inline unsigned int atomic_uint::fetch_and
3716 ( unsigned int __m__, memory_order __x__ ) volatile
3717 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3720 inline unsigned int atomic_uint::fetch_or
3721 ( unsigned int __m__, memory_order __x__ ) volatile
3722 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3725 inline unsigned int atomic_uint::fetch_xor
3726 ( unsigned int __m__, memory_order __x__ ) volatile
3727 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3730 inline long atomic_long::fetch_add
3731 ( long __m__, memory_order __x__ ) volatile
3732 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3735 inline long atomic_long::fetch_sub
3736 ( long __m__, memory_order __x__ ) volatile
3737 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3740 inline long atomic_long::fetch_and
3741 ( long __m__, memory_order __x__ ) volatile
3742 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3745 inline long atomic_long::fetch_or
3746 ( long __m__, memory_order __x__ ) volatile
3747 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3750 inline long atomic_long::fetch_xor
3751 ( long __m__, memory_order __x__ ) volatile
3752 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3755 inline unsigned long atomic_ulong::fetch_add
3756 ( unsigned long __m__, memory_order __x__ ) volatile
3757 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3760 inline unsigned long atomic_ulong::fetch_sub
3761 ( unsigned long __m__, memory_order __x__ ) volatile
3762 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3765 inline unsigned long atomic_ulong::fetch_and
3766 ( unsigned long __m__, memory_order __x__ ) volatile
3767 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3770 inline unsigned long atomic_ulong::fetch_or
3771 ( unsigned long __m__, memory_order __x__ ) volatile
3772 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3775 inline unsigned long atomic_ulong::fetch_xor
3776 ( unsigned long __m__, memory_order __x__ ) volatile
3777 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3780 inline long long atomic_llong::fetch_add
3781 ( long long __m__, memory_order __x__ ) volatile
3782 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3785 inline long long atomic_llong::fetch_sub
3786 ( long long __m__, memory_order __x__ ) volatile
3787 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3790 inline long long atomic_llong::fetch_and
3791 ( long long __m__, memory_order __x__ ) volatile
3792 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3795 inline long long atomic_llong::fetch_or
3796 ( long long __m__, memory_order __x__ ) volatile
3797 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3800 inline long long atomic_llong::fetch_xor
3801 ( long long __m__, memory_order __x__ ) volatile
3802 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3805 inline unsigned long long atomic_ullong::fetch_add
3806 ( unsigned long long __m__, memory_order __x__ ) volatile
3807 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3810 inline unsigned long long atomic_ullong::fetch_sub
3811 ( unsigned long long __m__, memory_order __x__ ) volatile
3812 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3815 inline unsigned long long atomic_ullong::fetch_and
3816 ( unsigned long long __m__, memory_order __x__ ) volatile
3817 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3820 inline unsigned long long atomic_ullong::fetch_or
3821 ( unsigned long long __m__, memory_order __x__ ) volatile
3822 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3825 inline unsigned long long atomic_ullong::fetch_xor
3826 ( unsigned long long __m__, memory_order __x__ ) volatile
3827 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3830 inline wchar_t atomic_wchar_t::fetch_add
3831 ( wchar_t __m__, memory_order __x__ ) volatile
3832 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3835 inline wchar_t atomic_wchar_t::fetch_sub
3836 ( wchar_t __m__, memory_order __x__ ) volatile
3837 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3840 inline wchar_t atomic_wchar_t::fetch_and
3841 ( wchar_t __m__, memory_order __x__ ) volatile
3842 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3845 inline wchar_t atomic_wchar_t::fetch_or
3846 ( wchar_t __m__, memory_order __x__ ) volatile
3847 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3850 inline wchar_t atomic_wchar_t::fetch_xor
3851 ( wchar_t __m__, memory_order __x__ ) volatile
3852 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3855 template< typename T >
3856 T* atomic<T*>::load( memory_order __x__ ) volatile
3857 { return static_cast<T*>( atomic_address::load( __x__ ) ); }
3859 template< typename T >
3860 T* atomic<T*>::exchange( T* __v__, memory_order __x__ ) volatile
3861 { return static_cast<T*>( atomic_address::exchange( __v__, __x__ ) ); }
3863 template< typename T >
3864 bool atomic<T*>::compare_exchange_weak
3865 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3866 { return atomic_address::compare_exchange_weak( *reinterpret_cast<void**>( &__r__ ),
3867 static_cast<void*>( __v__ ), __x__, __y__ ); }
3868 //{ return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3870 template< typename T >
3871 bool atomic<T*>::compare_exchange_strong
3872 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3873 { return atomic_address::compare_exchange_strong( *reinterpret_cast<void**>( &__r__ ),
3874 static_cast<void*>( __v__ ), __x__, __y__ ); }
3875 //{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
3877 template< typename T >
3878 bool atomic<T*>::compare_exchange_weak
3879 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3880 { return compare_exchange_weak( __r__, __v__, __x__,
3881 __x__ == memory_order_acq_rel ? memory_order_acquire :
3882 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3884 template< typename T >
3885 bool atomic<T*>::compare_exchange_strong
3886 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3887 { return compare_exchange_strong( __r__, __v__, __x__,
3888 __x__ == memory_order_acq_rel ? memory_order_acquire :
3889 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3891 template< typename T >
3892 T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
3893 { return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }
3895 template< typename T >
3896 T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
3897 { return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }
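/* atomic<T*>::fetch_add and fetch_sub scale the ptrdiff_t offset by
   sizeof(T), so the arithmetic follows ordinary pointer arithmetic on T*.
   By contrast, atomic_address::fetch_add above takes its offset directly,
   i.e. it appears to adjust the stored address in raw bytes. */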
3905 static inline void atomic_thread_fence(memory_order order)
3906 { _ATOMIC_FENCE_(order); }
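/* _ATOMIC_FENCE_ is defined earlier in this header; presumably it records a
   fence action of the given ordering with the model checker rather than
   emitting a hardware fence. */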
3908 /** @todo Do we want to try to support a user's signal-handler? */
3909 static inline void atomic_signal_fence(memory_order order)
3920 #endif /* __IMPATOMIC_H__ */